models: removed utf8 markers
super-admin -
r5055:a976f41d default
@@ -1,135 +1,134 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21
20
22 import logging
21 import logging
23
22
24 import rhodecode
23 import rhodecode
25 from rhodecode.model import meta, db
24 from rhodecode.model import meta, db
26 from rhodecode.lib.utils2 import obfuscate_url_pw, get_encryption_key
25 from rhodecode.lib.utils2 import obfuscate_url_pw, get_encryption_key
27
26
28 log = logging.getLogger(__name__)
27 log = logging.getLogger(__name__)
29
28
30
29
31 def init_model(engine, encryption_key=None):
30 def init_model(engine, encryption_key=None):
32 """
31 """
33 Initializes the db session and binds the engine to the metadata.
32 Initializes the db session and binds the engine to the metadata.
34 Call this before using any of the tables or classes in the model,
33 Call this before using any of the tables or classes in the model,
35 preferably once at application start.
34 preferably once at application start.
36
35
37 :param engine: engine to bind to
36 :param engine: engine to bind to
38 """
37 """
39 engine_str = obfuscate_url_pw(str(engine.url))
38 engine_str = obfuscate_url_pw(str(engine.url))
40 log.info("RhodeCode %s initializing db for %s", rhodecode.__version__, engine_str)
39 log.info("RhodeCode %s initializing db for %s", rhodecode.__version__, engine_str)
41 meta.Base.metadata.bind = engine
40 meta.Base.metadata.bind = engine
42 db.ENCRYPTION_KEY = encryption_key
41 db.ENCRYPTION_KEY = encryption_key
43
42
44
43
45 def init_model_encryption(migration_models, config=None):
44 def init_model_encryption(migration_models, config=None):
46 from pyramid.threadlocal import get_current_registry
45 from pyramid.threadlocal import get_current_registry
47 config = config or get_current_registry().settings
46 config = config or get_current_registry().settings
48 migration_models.ENCRYPTION_KEY = get_encryption_key(config)
47 migration_models.ENCRYPTION_KEY = get_encryption_key(config)
49 db.ENCRYPTION_KEY = get_encryption_key(config)
48 db.ENCRYPTION_KEY = get_encryption_key(config)
50
49
51
50
52 class BaseModel(object):
51 class BaseModel(object):
53 """
52 """
54 Base model for all RhodeCode models; it adds a SQLAlchemy session
53 Base model for all RhodeCode models; it adds a SQLAlchemy session
55 to the model instance
54 to the model instance
56
55
57 :param sa: If passed it reuses this session instead of creating a new one
56 :param sa: If passed it reuses this session instead of creating a new one
58 """
57 """
59
58
60 cls = None # override in child class
59 cls = None # override in child class
61
60
62 def __init__(self, sa=None):
61 def __init__(self, sa=None):
63 if sa is not None:
62 if sa is not None:
64 self.sa = sa
63 self.sa = sa
65 else:
64 else:
66 self.sa = meta.Session()
65 self.sa = meta.Session()
67
66
68 def _get_instance(self, cls, instance, callback=None):
67 def _get_instance(self, cls, instance, callback=None):
69 """
68 """
70 Gets instance of given cls using some simple lookup mechanism.
69 Gets instance of given cls using some simple lookup mechanism.
71
70
72 :param cls: class (or tuple of classes) to fetch
71 :param cls: class (or tuple of classes) to fetch
73 :param instance: int or Instance
72 :param instance: int or Instance
74 :param callback: callback to call if all lookups failed
73 :param callback: callback to call if all lookups failed
75 """
74 """
76
75
77 if isinstance(instance, cls):
76 if isinstance(instance, cls):
78 return instance
77 return instance
79 elif isinstance(instance, int):
78 elif isinstance(instance, int):
80 if isinstance(cls, tuple):
79 if isinstance(cls, tuple):
81 # if we pass multi instances we pick first to .get()
80 # if we pass multi instances we pick first to .get()
82 cls = cls[0]
81 cls = cls[0]
83 return cls.get(instance)
82 return cls.get(instance)
84 else:
83 else:
85 if instance:
84 if instance:
86 if callback is None:
85 if callback is None:
87 raise Exception(
86 raise Exception(
88 'given object must be int or Instance of %s '
87 'given object must be int or Instance of %s '
89 'got %s, no callback provided' % (cls, type(instance))
88 'got %s, no callback provided' % (cls, type(instance))
90 )
89 )
91 else:
90 else:
92 return callback(instance)
91 return callback(instance)
93
92
94 def _get_user(self, user):
93 def _get_user(self, user):
95 """
94 """
96 Helper method to get user by ID, or username fallback
95 Helper method to get user by ID, or username fallback
97
96
98 :param user: UserID, username, or User instance
97 :param user: UserID, username, or User instance
99 """
98 """
100 return self._get_instance(
99 return self._get_instance(
101 db.User, user, callback=db.User.get_by_username)
100 db.User, user, callback=db.User.get_by_username)
102
101
103 def _get_user_group(self, user_group):
102 def _get_user_group(self, user_group):
104 """
103 """
105 Helper method to get user group by ID, or group name fallback
104 Helper method to get user group by ID, or group name fallback
106
105
107 :param user_group: UserGroupID, user_group_name, or UserGroup instance
106 :param user_group: UserGroupID, user_group_name, or UserGroup instance
108 """
107 """
109 return self._get_instance(
108 return self._get_instance(
110 db.UserGroup, user_group, callback=db.UserGroup.get_by_group_name)
109 db.UserGroup, user_group, callback=db.UserGroup.get_by_group_name)
111
110
112 def _get_repo(self, repository):
111 def _get_repo(self, repository):
113 """
112 """
114 Helper method to get repository by ID, or repository name
113 Helper method to get repository by ID, or repository name
115
114
116 :param repository: RepoID, repository name or Repository Instance
115 :param repository: RepoID, repository name or Repository Instance
117 """
116 """
118 return self._get_instance(
117 return self._get_instance(
119 db.Repository, repository, callback=db.Repository.get_by_repo_name)
118 db.Repository, repository, callback=db.Repository.get_by_repo_name)
120
119
121 def _get_perm(self, permission):
120 def _get_perm(self, permission):
122 """
121 """
123 Helper method to get permission by ID, or permission name
122 Helper method to get permission by ID, or permission name
124
123
125 :param permission: PermissionID, permission_name or Permission instance
124 :param permission: PermissionID, permission_name or Permission instance
126 """
125 """
127 return self._get_instance(
126 return self._get_instance(
128 db.Permission, permission, callback=db.Permission.get_by_key)
127 db.Permission, permission, callback=db.Permission.get_by_key)
129
128
130 @classmethod
129 @classmethod
131 def get_all(cls):
130 def get_all(cls):
132 """
131 """
133 Returns all instances of what is defined in `cls` class variable
132 Returns all instances of what is defined in `cls` class variable
134 """
133 """
135 return cls.cls.getAll()
134 return cls.cls.getAll()
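
To make the lookup contract above concrete, here is a minimal, illustrative Python sketch (not part of this change; the username 'admin' and the id 2 are hypothetical example values) of how `_get_instance` resolves its argument: an instance is passed through unchanged, an int goes through `cls.get()`, and anything else truthy falls back to the callback.

# Illustrative sketch only, assuming an initialized database session.
from rhodecode.model import BaseModel, db

class ExampleModel(BaseModel):
    cls = db.User   # what get_all() and the lookup helpers operate on

model = ExampleModel()
# instance passthrough: already a db.User, returned unchanged
user_a = model._get_user(db.User.get_by_username('admin'))
# int: resolved via cls.get(primary_key)
user_b = model._get_user(2)
# any other truthy value: resolved via the fallback callback (get_by_username)
user_c = model._get_user('admin')
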
@@ -1,124 +1,124 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2013-2020 RhodeCode GmbH
3 # Copyright (C) 2013-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 authentication tokens model for RhodeCode
22 authentication tokens model for RhodeCode
23 """
23 """
24
24
25 import time
25 import time
26 import logging
26 import logging
27 import traceback
27 import traceback
28 from sqlalchemy import or_
28 from sqlalchemy import or_
29
29
30 from rhodecode.model import BaseModel
30 from rhodecode.model import BaseModel
31 from rhodecode.model.db import UserApiKeys
31 from rhodecode.model.db import UserApiKeys
32 from rhodecode.model.meta import Session
32 from rhodecode.model.meta import Session
33
33
34 log = logging.getLogger(__name__)
34 log = logging.getLogger(__name__)
35
35
36
36
37 class AuthTokenModel(BaseModel):
37 class AuthTokenModel(BaseModel):
38 cls = UserApiKeys
38 cls = UserApiKeys
39
39
40 @classmethod
40 @classmethod
41 def get_lifetime_values(cls, translator):
41 def get_lifetime_values(cls, translator):
42 from rhodecode.lib import helpers as h
42 from rhodecode.lib import helpers as h
43 _ = translator
43 _ = translator
44
44
45 def date_after_min(mins):
45 def date_after_min(mins):
46 after = time.time() + (60 * mins)
46 after = time.time() + (60 * mins)
47 return h.format_date(h.time_to_datetime(after))
47 return h.format_date(h.time_to_datetime(after))
48
48
49 return [
49 return [
50 (str(-1),
50 (str(-1),
51 _('forever')),
51 _('forever')),
52 (str(5),
52 (str(5),
53 _('5 minutes {end_date}').format(end_date=date_after_min(5))),
53 _('5 minutes {end_date}').format(end_date=date_after_min(5))),
54 (str(60),
54 (str(60),
55 _('1 hour {end_date}').format(end_date=date_after_min(60))),
55 _('1 hour {end_date}').format(end_date=date_after_min(60))),
56 (str(60 * 24),
56 (str(60 * 24),
57 _('1 day {end_date}').format(end_date=date_after_min(60 * 24))),
57 _('1 day {end_date}').format(end_date=date_after_min(60 * 24))),
58 (str(60 * 24 * 30),
58 (str(60 * 24 * 30),
59 _('1 month {end_date}').format(end_date=date_after_min(60 * 24 * 30))),
59 _('1 month {end_date}').format(end_date=date_after_min(60 * 24 * 30))),
60 ]
60 ]
61
61
62 def create(self, user, description, lifetime=-1, role=UserApiKeys.ROLE_ALL):
62 def create(self, user, description, lifetime=-1, role=UserApiKeys.ROLE_ALL):
63 """
63 """
64 :param user: user or user_id
64 :param user: user or user_id
65 :param description: description of ApiKey
65 :param description: description of ApiKey
66 :param lifetime: expiration time in minutes
66 :param lifetime: expiration time in minutes
67 :param role: role for the apikey
67 :param role: role for the apikey
68 """
68 """
69 from rhodecode.lib.auth import generate_auth_token
69 from rhodecode.lib.auth import generate_auth_token
70
70
71 user = self._get_user(user)
71 user = self._get_user(user)
72
72
73 new_auth_token = UserApiKeys()
73 new_auth_token = UserApiKeys()
74 new_auth_token.api_key = generate_auth_token(user.username)
74 new_auth_token.api_key = generate_auth_token(user.username)
75 new_auth_token.user_id = user.user_id
75 new_auth_token.user_id = user.user_id
76 new_auth_token.description = description
76 new_auth_token.description = description
77 new_auth_token.role = role
77 new_auth_token.role = role
78 new_auth_token.expires = time.time() + (lifetime * 60) \
78 new_auth_token.expires = time.time() + (lifetime * 60) \
79 if lifetime != -1 else -1
79 if lifetime != -1 else -1
80 Session().add(new_auth_token)
80 Session().add(new_auth_token)
81
81
82 return new_auth_token
82 return new_auth_token
83
83
84 def delete(self, auth_token_id, user=None):
84 def delete(self, auth_token_id, user=None):
85 """
85 """
86 Deletes the given api_key; if user is set, the deletion is restricted
86 Deletes the given api_key; if user is set, the deletion is restricted
87 to tokens owned by that user.
87 to tokens owned by that user.
88 """
88 """
89 auth_token = UserApiKeys.query().filter(
89 auth_token = UserApiKeys.query().filter(
90 UserApiKeys.user_api_key_id == auth_token_id)
90 UserApiKeys.user_api_key_id == auth_token_id)
91
91
92 if user:
92 if user:
93 user = self._get_user(user)
93 user = self._get_user(user)
94 auth_token = auth_token.filter(UserApiKeys.user_id == user.user_id)
94 auth_token = auth_token.filter(UserApiKeys.user_id == user.user_id)
95 auth_token = auth_token.scalar()
95 auth_token = auth_token.scalar()
96
96
97 if auth_token:
97 if auth_token:
98 try:
98 try:
99 Session().delete(auth_token)
99 Session().delete(auth_token)
100 except Exception:
100 except Exception:
101 log.error(traceback.format_exc())
101 log.error(traceback.format_exc())
102 raise
102 raise
103
103
104 def get_auth_tokens(self, user, show_expired=True):
104 def get_auth_tokens(self, user, show_expired=True):
105 user = self._get_user(user)
105 user = self._get_user(user)
106 user_auth_tokens = UserApiKeys.query()\
106 user_auth_tokens = UserApiKeys.query()\
107 .filter(UserApiKeys.user_id == user.user_id)
107 .filter(UserApiKeys.user_id == user.user_id)
108 if not show_expired:
108 if not show_expired:
109 user_auth_tokens = user_auth_tokens\
109 user_auth_tokens = user_auth_tokens\
110 .filter(or_(UserApiKeys.expires == -1,
110 .filter(or_(UserApiKeys.expires == -1,
111 UserApiKeys.expires >= time.time()))
111 UserApiKeys.expires >= time.time()))
112 user_auth_tokens = user_auth_tokens.order_by(
112 user_auth_tokens = user_auth_tokens.order_by(
113 UserApiKeys.user_api_key_id)
113 UserApiKeys.user_api_key_id)
114 return user_auth_tokens
114 return user_auth_tokens
115
115
116 def get_auth_token(self, auth_token):
116 def get_auth_token(self, auth_token):
117 auth_token = UserApiKeys.query().filter(
117 auth_token = UserApiKeys.query().filter(
118 UserApiKeys.api_key == auth_token)
118 UserApiKeys.api_key == auth_token)
119 auth_token = auth_token \
119 auth_token = auth_token \
120 .filter(or_(UserApiKeys.expires == -1,
120 .filter(or_(UserApiKeys.expires == -1,
121 UserApiKeys.expires >= time.time()))\
121 UserApiKeys.expires >= time.time()))\
122 .first()
122 .first()
123
123
124 return auth_token
124 return auth_token
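
For reference, the `expires` handling above stores either -1 (never expires) or an absolute epoch timestamp computed from the lifetime given in minutes. A hedged usage sketch follows; it assumes an initialized database session, an existing user named 'admin', and that the class lives at rhodecode.model.auth_token as in this file (all of which are assumptions for illustration, not part of this change).

import time
from rhodecode.model.auth_token import AuthTokenModel
from rhodecode.model.db import UserApiKeys
from rhodecode.model.meta import Session

# create a token valid for one hour; lifetime is expressed in minutes
token = AuthTokenModel().create(
    user='admin', description='ci token', lifetime=60,
    role=UserApiKeys.ROLE_ALL)
Session().commit()

# expires is -1 for "forever", otherwise an absolute epoch timestamp
assert token.expires == -1 or token.expires > time.time()

# get_auth_tokens(show_expired=False) filters on exactly that condition
active = AuthTokenModel().get_auth_tokens('admin', show_expired=False)
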
@@ -1,403 +1,402 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21
20
22 import itertools
21 import itertools
23 import logging
22 import logging
24 import collections
23 import collections
25
24
26 from rhodecode.model import BaseModel
25 from rhodecode.model import BaseModel
27 from rhodecode.model.db import (
26 from rhodecode.model.db import (
28 ChangesetStatus, ChangesetComment, PullRequest, PullRequestReviewers, Session)
27 ChangesetStatus, ChangesetComment, PullRequest, PullRequestReviewers, Session)
29 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
28 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
30 from rhodecode.lib.markup_renderer import (
29 from rhodecode.lib.markup_renderer import (
31 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
30 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
32
31
33 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
34
33
35
34
36 class ChangesetStatusModel(BaseModel):
35 class ChangesetStatusModel(BaseModel):
37
36
38 cls = ChangesetStatus
37 cls = ChangesetStatus
39
38
40 def __get_changeset_status(self, changeset_status):
39 def __get_changeset_status(self, changeset_status):
41 return self._get_instance(ChangesetStatus, changeset_status)
40 return self._get_instance(ChangesetStatus, changeset_status)
42
41
43 def __get_pull_request(self, pull_request):
42 def __get_pull_request(self, pull_request):
44 return self._get_instance(PullRequest, pull_request)
43 return self._get_instance(PullRequest, pull_request)
45
44
46 def _get_status_query(self, repo, revision, pull_request,
45 def _get_status_query(self, repo, revision, pull_request,
47 with_revisions=False):
46 with_revisions=False):
48 repo = self._get_repo(repo)
47 repo = self._get_repo(repo)
49
48
50 q = ChangesetStatus.query()\
49 q = ChangesetStatus.query()\
51 .filter(ChangesetStatus.repo == repo)
50 .filter(ChangesetStatus.repo == repo)
52 if not with_revisions:
51 if not with_revisions:
53 q = q.filter(ChangesetStatus.version == 0)
52 q = q.filter(ChangesetStatus.version == 0)
54
53
55 if revision:
54 if revision:
56 q = q.filter(ChangesetStatus.revision == revision)
55 q = q.filter(ChangesetStatus.revision == revision)
57 elif pull_request:
56 elif pull_request:
58 pull_request = self.__get_pull_request(pull_request)
57 pull_request = self.__get_pull_request(pull_request)
59 # TODO: johbo: Think about the impact of this join, there must
58 # TODO: johbo: Think about the impact of this join, there must
60 # be a reason why ChangesetStatus and ChangesetComment are linked
59 # be a reason why ChangesetStatus and ChangesetComment are linked
61 # to the pull request. Might be that we want to do the same for
60 # to the pull request. Might be that we want to do the same for
62 # the pull_request_version_id.
61 # the pull_request_version_id.
63 q = q.join(ChangesetComment).filter(
62 q = q.join(ChangesetComment).filter(
64 ChangesetStatus.pull_request == pull_request,
63 ChangesetStatus.pull_request == pull_request,
65 ChangesetComment.pull_request_version_id == None)
64 ChangesetComment.pull_request_version_id == None)
66 else:
65 else:
67 raise Exception('Please specify revision or pull_request')
66 raise Exception('Please specify revision or pull_request')
68 q = q.order_by(ChangesetStatus.version.asc())
67 q = q.order_by(ChangesetStatus.version.asc())
69 return q
68 return q
70
69
71 def calculate_group_vote(self, group_id, group_statuses_by_reviewers,
70 def calculate_group_vote(self, group_id, group_statuses_by_reviewers,
72 trim_votes=True):
71 trim_votes=True):
73 """
72 """
74 Calculate status based on given group members, and voting rule
73 Calculate status based on given group members, and voting rule
75
74
76
75
77 group1 - 4 members, 3 required for approval
76 group1 - 4 members, 3 required for approval
78 user1 - approved
77 user1 - approved
79 user2 - reject
78 user2 - reject
80 user3 - approved
79 user3 - approved
81 user4 - rejected
80 user4 - rejected
82
81
83 final_state: rejected, reason: did not get at least 3 approvals
82 final_state: rejected, reason: did not get at least 3 approvals
84
83
85
84
86 group1 - 4 members, 2 required for approval
85 group1 - 4 members, 2 required for approval
87 user1 - approved
86 user1 - approved
88 user2 - reject
87 user2 - reject
89 user3 - approved
88 user3 - approved
90 user4 - rejected
89 user4 - rejected
91
90
92 final_state: approved, reason: got at least 2 approvals
91 final_state: approved, reason: got at least 2 approvals
93
92
94 group1 - 4 members, ALL required for approval
93 group1 - 4 members, ALL required for approval
95 user1 - approved
94 user1 - approved
96 user2 - reject
95 user2 - reject
97 user3 - approved
96 user3 - approved
98 user4 - rejected
97 user4 - rejected
99
98
100 final_state: rejected, reason: not all members approved
99 final_state: rejected, reason: not all members approved
101
100
102
101
103 group1 - 4 members, ALL required for approval
102 group1 - 4 members, ALL required for approval
104 user1 - approved
103 user1 - approved
105 user2 - approved
104 user2 - approved
106 user3 - approved
105 user3 - approved
107 user4 - approved
106 user4 - approved
108
107
109 final_state: approved, reason: all approvals received
108 final_state: approved, reason: all approvals received
110
109
111 group1 - 4 members, 5 required for approval
110 group1 - 4 members, 5 required for approval
112 (required approvals are trimmed to the number of actual members)
111 (required approvals are trimmed to the number of actual members)
113
112
114 user1 - approved
113 user1 - approved
115 user2 - approved
114 user2 - approved
116 user3 - approved
115 user3 - approved
117 user4 - approved
116 user4 - approved
118
117
119 final_state: approved, reason: all approvals received
118 final_state: approved, reason: all approvals received
120
119
121 """
120 """
122 group_vote_data = {}
121 group_vote_data = {}
123 got_rule = False
122 got_rule = False
124 members = collections.OrderedDict()
123 members = collections.OrderedDict()
125 for review_obj, user, reasons, mandatory, statuses \
124 for review_obj, user, reasons, mandatory, statuses \
126 in group_statuses_by_reviewers:
125 in group_statuses_by_reviewers:
127
126
128 if not got_rule:
127 if not got_rule:
129 group_vote_data = review_obj.rule_user_group_data()
128 group_vote_data = review_obj.rule_user_group_data()
130 got_rule = bool(group_vote_data)
129 got_rule = bool(group_vote_data)
131
130
132 members[user.user_id] = statuses
131 members[user.user_id] = statuses
133
132
134 if not group_vote_data:
133 if not group_vote_data:
135 return []
134 return []
136
135
137 required_votes = group_vote_data['vote_rule']
136 required_votes = group_vote_data['vote_rule']
138 if required_votes == -1:
137 if required_votes == -1:
139 # -1 means all required, so we replace it with how many people
138 # -1 means all required, so we replace it with how many people
140 # are in the members
139 # are in the members
141 required_votes = len(members)
140 required_votes = len(members)
142
141
143 if trim_votes and required_votes > len(members):
142 if trim_votes and required_votes > len(members):
144 # we require more votes than we have members in the group
143 # we require more votes than we have members in the group
145 # in this case we trim the required votes to the number of members
144 # in this case we trim the required votes to the number of members
146 required_votes = len(members)
145 required_votes = len(members)
147
146
148 approvals = sum([
147 approvals = sum([
149 1 for statuses in members.values()
148 1 for statuses in members.values()
150 if statuses and
149 if statuses and
151 statuses[0][1].status == ChangesetStatus.STATUS_APPROVED])
150 statuses[0][1].status == ChangesetStatus.STATUS_APPROVED])
152
151
153 calculated_votes = []
152 calculated_votes = []
154 # we have all votes from users, now check if we have enough votes
153 # we have all votes from users, now check if we have enough votes
155 # to fill other
154 # to fill other
156 fill_in = ChangesetStatus.STATUS_UNDER_REVIEW
155 fill_in = ChangesetStatus.STATUS_UNDER_REVIEW
157 if approvals >= required_votes:
156 if approvals >= required_votes:
158 fill_in = ChangesetStatus.STATUS_APPROVED
157 fill_in = ChangesetStatus.STATUS_APPROVED
159
158
160 for member, statuses in members.items():
159 for member, statuses in members.items():
161 if statuses:
160 if statuses:
162 ver, latest = statuses[0]
161 ver, latest = statuses[0]
163 if fill_in == ChangesetStatus.STATUS_APPROVED:
162 if fill_in == ChangesetStatus.STATUS_APPROVED:
164 calculated_votes.append(fill_in)
163 calculated_votes.append(fill_in)
165 else:
164 else:
166 calculated_votes.append(latest.status)
165 calculated_votes.append(latest.status)
167 else:
166 else:
168 calculated_votes.append(fill_in)
167 calculated_votes.append(fill_in)
169
168
170 return calculated_votes
169 return calculated_votes
171
170
172 def calculate_status(self, statuses_by_reviewers):
171 def calculate_status(self, statuses_by_reviewers):
173 """
172 """
174 Given the approval statuses from reviewers, calculates final approval
173 Given the approval statuses from reviewers, calculates final approval
175 status. There can only be 3 results, all approved, all rejected. If
175 status. There can only be 3 results: all approved, all rejected, or,
174 status. There can only be 3 results: all approved, all rejected, or,
176 if there is no consensus, under review.
175 if there is no consensus, under review.
176
178 :param statuses_by_reviewers:
177 :param statuses_by_reviewers:
179 """
178 """
180
179
181 def group_rule(element):
180 def group_rule(element):
182 review_obj = element[0]
181 review_obj = element[0]
183 rule_data = review_obj.rule_user_group_data()
182 rule_data = review_obj.rule_user_group_data()
184 if rule_data and rule_data['id']:
183 if rule_data and rule_data['id']:
185 return rule_data['id']
184 return rule_data['id']
186
185
187 voting_groups = itertools.groupby(
186 voting_groups = itertools.groupby(
188 sorted(statuses_by_reviewers, key=group_rule), group_rule)
187 sorted(statuses_by_reviewers, key=group_rule), group_rule)
189
188
190 voting_by_groups = [(x, list(y)) for x, y in voting_groups]
189 voting_by_groups = [(x, list(y)) for x, y in voting_groups]
191
190
192 reviewers_number = len(statuses_by_reviewers)
191 reviewers_number = len(statuses_by_reviewers)
193 votes = collections.defaultdict(int)
192 votes = collections.defaultdict(int)
194 for group, group_statuses_by_reviewers in voting_by_groups:
193 for group, group_statuses_by_reviewers in voting_by_groups:
195 if group:
194 if group:
196 # calculate how the "group" voted
195 # calculate how the "group" voted
197 for vote_status in self.calculate_group_vote(
196 for vote_status in self.calculate_group_vote(
198 group, group_statuses_by_reviewers):
197 group, group_statuses_by_reviewers):
199 votes[vote_status] += 1
198 votes[vote_status] += 1
200 else:
199 else:
201
200
202 for review_obj, user, reasons, mandatory, statuses \
201 for review_obj, user, reasons, mandatory, statuses \
203 in group_statuses_by_reviewers:
202 in group_statuses_by_reviewers:
204 # individual vote
203 # individual vote
205 if statuses:
204 if statuses:
206 ver, latest = statuses[0]
205 ver, latest = statuses[0]
207 votes[latest.status] += 1
206 votes[latest.status] += 1
208
207
209 approved_votes_count = votes[ChangesetStatus.STATUS_APPROVED]
208 approved_votes_count = votes[ChangesetStatus.STATUS_APPROVED]
210 rejected_votes_count = votes[ChangesetStatus.STATUS_REJECTED]
209 rejected_votes_count = votes[ChangesetStatus.STATUS_REJECTED]
211
210
212 # TODO(marcink): with group voting, how does rejected work,
211 # TODO(marcink): with group voting, how does rejected work,
213 # do we ever get rejected state ?
212 # do we ever get rejected state ?
214
213
215 if approved_votes_count and (approved_votes_count == reviewers_number):
214 if approved_votes_count and (approved_votes_count == reviewers_number):
216 return ChangesetStatus.STATUS_APPROVED
215 return ChangesetStatus.STATUS_APPROVED
217
216
218 if rejected_votes_count and (rejected_votes_count == reviewers_number):
217 if rejected_votes_count and (rejected_votes_count == reviewers_number):
219 return ChangesetStatus.STATUS_REJECTED
218 return ChangesetStatus.STATUS_REJECTED
220
219
221 return ChangesetStatus.STATUS_UNDER_REVIEW
220 return ChangesetStatus.STATUS_UNDER_REVIEW
222
221
223 def get_statuses(self, repo, revision=None, pull_request=None,
222 def get_statuses(self, repo, revision=None, pull_request=None,
224 with_revisions=False):
223 with_revisions=False):
225 q = self._get_status_query(repo, revision, pull_request,
224 q = self._get_status_query(repo, revision, pull_request,
226 with_revisions)
225 with_revisions)
227 return q.all()
226 return q.all()
228
227
229 def get_status(self, repo, revision=None, pull_request=None, as_str=True):
228 def get_status(self, repo, revision=None, pull_request=None, as_str=True):
230 """
229 """
231 Returns latest status of changeset for given revision or for given
230 Returns latest status of changeset for given revision or for given
232 pull request. Statuses are versioned inside a table itself and
232 pull request. Statuses are versioned inside the table itself and
231 pull request. Statuses are versioned inside the table itself and
233 version == 0 is always the current one.
232 version == 0 is always the current one.
233
235 :param repo:
234 :param repo:
236 :param revision: 40char hash or None
235 :param revision: 40char hash or None
237 :param pull_request: pull_request reference
236 :param pull_request: pull_request reference
238 :param as_str: return status as string not object
237 :param as_str: return status as string not object
239 """
238 """
240 q = self._get_status_query(repo, revision, pull_request)
239 q = self._get_status_query(repo, revision, pull_request)
241
240
242 # need to use first here since there can be multiple statuses
241 # need to use first here since there can be multiple statuses
243 # returned from pull_request
242 # returned from pull_request
244 status = q.first()
243 status = q.first()
245 if as_str:
244 if as_str:
246 status = status.status if status else status
245 status = status.status if status else status
247 st = status or ChangesetStatus.DEFAULT
246 st = status or ChangesetStatus.DEFAULT
248 return str(st)
247 return str(st)
249 return status
248 return status
250
249
251 def _render_auto_status_message(
250 def _render_auto_status_message(
252 self, status, commit_id=None, pull_request=None):
251 self, status, commit_id=None, pull_request=None):
253 """
252 """
254 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
253 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
255 so it always looks the same regardless of which default
254 so it always looks the same regardless of which default
256 renderer the system is using.
255 renderer the system is using.
257
256
258 :param status: status text to change into
257 :param status: status text to change into
259 :param commit_id: the commit_id we change the status for
258 :param commit_id: the commit_id we change the status for
260 :param pull_request: the pull request we change the status for
259 :param pull_request: the pull request we change the status for
261 """
260 """
262
261
263 new_status = ChangesetStatus.get_status_lbl(status)
262 new_status = ChangesetStatus.get_status_lbl(status)
264
263
265 params = {
264 params = {
266 'new_status_label': new_status,
265 'new_status_label': new_status,
267 'pull_request': pull_request,
266 'pull_request': pull_request,
268 'commit_id': commit_id,
267 'commit_id': commit_id,
269 }
268 }
270 renderer = RstTemplateRenderer()
269 renderer = RstTemplateRenderer()
271 return renderer.render('auto_status_change.mako', **params)
270 return renderer.render('auto_status_change.mako', **params)
272
271
273 def set_status(self, repo, status, user, comment=None, revision=None,
272 def set_status(self, repo, status, user, comment=None, revision=None,
274 pull_request=None, dont_allow_on_closed_pull_request=False):
273 pull_request=None, dont_allow_on_closed_pull_request=False):
275 """
274 """
276 Creates a new status for a changeset or updates the old ones, bumping
275 Creates a new status for a changeset or updates the old ones, bumping
277 their version and leaving the current status at version 0.
276 their version and leaving the current status at version 0.
278
277
279 :param repo:
278 :param repo:
280 :param revision:
279 :param revision:
281 :param status:
280 :param status:
282 :param user:
281 :param user:
283 :param comment:
282 :param comment:
284 :param dont_allow_on_closed_pull_request: don't allow a status change
283 :param dont_allow_on_closed_pull_request: don't allow a status change
285 if the last status was set on a pull request that is now closed;
284 if the last status was set on a pull request that is now closed;
286 that state should not be changed manually
285 that state should not be changed manually
287 """
286 """
288 repo = self._get_repo(repo)
287 repo = self._get_repo(repo)
289
288
290 q = ChangesetStatus.query()
289 q = ChangesetStatus.query()
291
290
292 if revision:
291 if revision:
293 q = q.filter(ChangesetStatus.repo == repo)
292 q = q.filter(ChangesetStatus.repo == repo)
294 q = q.filter(ChangesetStatus.revision == revision)
293 q = q.filter(ChangesetStatus.revision == revision)
295 elif pull_request:
294 elif pull_request:
296 pull_request = self.__get_pull_request(pull_request)
295 pull_request = self.__get_pull_request(pull_request)
297 q = q.filter(ChangesetStatus.repo == pull_request.source_repo)
296 q = q.filter(ChangesetStatus.repo == pull_request.source_repo)
298 q = q.filter(ChangesetStatus.revision.in_(pull_request.revisions))
297 q = q.filter(ChangesetStatus.revision.in_(pull_request.revisions))
299 cur_statuses = q.all()
298 cur_statuses = q.all()
300
299
301 # if statuses exists and last is associated with a closed pull request
300 # if statuses exists and last is associated with a closed pull request
302 # we need to check if we can allow this status change
301 # we need to check if we can allow this status change
303 if (dont_allow_on_closed_pull_request and cur_statuses
302 if (dont_allow_on_closed_pull_request and cur_statuses
304 and getattr(cur_statuses[0].pull_request, 'status', '')
303 and getattr(cur_statuses[0].pull_request, 'status', '')
305 == PullRequest.STATUS_CLOSED):
304 == PullRequest.STATUS_CLOSED):
306 raise StatusChangeOnClosedPullRequestError(
305 raise StatusChangeOnClosedPullRequestError(
307 'Changing status on closed pull request is not allowed'
306 'Changing status on closed pull request is not allowed'
308 )
307 )
309
308
310 # update all current statuses with older version
309 # update all current statuses with older version
311 if cur_statuses:
310 if cur_statuses:
312 for st in cur_statuses:
311 for st in cur_statuses:
313 st.version += 1
312 st.version += 1
314 Session().add(st)
313 Session().add(st)
315 Session().flush()
314 Session().flush()
316
315
317 def _create_status(user, repo, status, comment, revision, pull_request):
316 def _create_status(user, repo, status, comment, revision, pull_request):
318 new_status = ChangesetStatus()
317 new_status = ChangesetStatus()
319 new_status.author = self._get_user(user)
318 new_status.author = self._get_user(user)
320 new_status.repo = self._get_repo(repo)
319 new_status.repo = self._get_repo(repo)
321 new_status.status = status
320 new_status.status = status
322 new_status.comment = comment
321 new_status.comment = comment
323 new_status.revision = revision
322 new_status.revision = revision
324 new_status.pull_request = pull_request
323 new_status.pull_request = pull_request
325 return new_status
324 return new_status
326
325
327 if not comment:
326 if not comment:
328 from rhodecode.model.comment import CommentsModel
327 from rhodecode.model.comment import CommentsModel
329 comment = CommentsModel().create(
328 comment = CommentsModel().create(
330 text=self._render_auto_status_message(
329 text=self._render_auto_status_message(
331 status, commit_id=revision, pull_request=pull_request),
330 status, commit_id=revision, pull_request=pull_request),
332 repo=repo,
331 repo=repo,
333 user=user,
332 user=user,
334 pull_request=pull_request,
333 pull_request=pull_request,
335 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER
334 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER
336 )
335 )
337
336
338 if revision:
337 if revision:
339 new_status = _create_status(
338 new_status = _create_status(
340 user=user, repo=repo, status=status, comment=comment,
339 user=user, repo=repo, status=status, comment=comment,
341 revision=revision, pull_request=pull_request)
340 revision=revision, pull_request=pull_request)
342 Session().add(new_status)
341 Session().add(new_status)
343 return new_status
342 return new_status
344 elif pull_request:
343 elif pull_request:
345 # pull request can have more than one revision associated to it
344 # pull request can have more than one revision associated to it
346 # we need to create new version for each one
345 # we need to create new version for each one
347 new_statuses = []
346 new_statuses = []
348 repo = pull_request.source_repo
347 repo = pull_request.source_repo
349 for rev in pull_request.revisions:
348 for rev in pull_request.revisions:
350 new_status = _create_status(
349 new_status = _create_status(
351 user=user, repo=repo, status=status, comment=comment,
350 user=user, repo=repo, status=status, comment=comment,
352 revision=rev, pull_request=pull_request)
351 revision=rev, pull_request=pull_request)
353 new_statuses.append(new_status)
352 new_statuses.append(new_status)
354 Session().add(new_status)
353 Session().add(new_status)
355 return new_statuses
354 return new_statuses
356
355
357 def aggregate_votes_by_user(self, commit_statuses, reviewers_data, user=None):
356 def aggregate_votes_by_user(self, commit_statuses, reviewers_data, user=None):
358
357
359 commit_statuses_map = collections.defaultdict(list)
358 commit_statuses_map = collections.defaultdict(list)
360 for st in commit_statuses:
359 for st in commit_statuses:
361 commit_statuses_map[st.author.username] += [st]
360 commit_statuses_map[st.author.username] += [st]
362
361
363 reviewers = []
362 reviewers = []
364
363
365 def version(commit_status):
364 def version(commit_status):
366 return commit_status.version
365 return commit_status.version
367
366
368 for obj in reviewers_data:
367 for obj in reviewers_data:
369 if not obj.user:
368 if not obj.user:
370 continue
369 continue
371 if user and obj.user.username != user.username:
370 if user and obj.user.username != user.username:
372 # single user filter
371 # single user filter
373 continue
372 continue
374
373
375 statuses = commit_statuses_map.get(obj.user.username, None)
374 statuses = commit_statuses_map.get(obj.user.username, None)
376 if statuses:
375 if statuses:
377 status_groups = itertools.groupby(
376 status_groups = itertools.groupby(
378 sorted(statuses, key=version), version)
377 sorted(statuses, key=version), version)
379 statuses = [(x, list(y)[0]) for x, y in status_groups]
378 statuses = [(x, list(y)[0]) for x, y in status_groups]
380
379
381 reviewers.append((obj, obj.user, obj.reasons, obj.mandatory, statuses))
380 reviewers.append((obj, obj.user, obj.reasons, obj.mandatory, statuses))
382
381
383 if user:
382 if user:
384 return reviewers[0] if reviewers else reviewers
383 return reviewers[0] if reviewers else reviewers
385 else:
384 else:
386 return reviewers
385 return reviewers
387
386
388 def reviewers_statuses(self, pull_request, user=None):
387 def reviewers_statuses(self, pull_request, user=None):
389 _commit_statuses = self.get_statuses(
388 _commit_statuses = self.get_statuses(
390 pull_request.source_repo,
389 pull_request.source_repo,
391 pull_request=pull_request,
390 pull_request=pull_request,
392 with_revisions=True)
391 with_revisions=True)
393 reviewers = pull_request.get_pull_request_reviewers(
392 reviewers = pull_request.get_pull_request_reviewers(
394 role=PullRequestReviewers.ROLE_REVIEWER)
393 role=PullRequestReviewers.ROLE_REVIEWER)
395 return self.aggregate_votes_by_user(_commit_statuses, reviewers, user=user)
394 return self.aggregate_votes_by_user(_commit_statuses, reviewers, user=user)
396
395
397 def calculated_review_status(self, pull_request):
396 def calculated_review_status(self, pull_request):
398 """
397 """
399 calculate pull request status based on reviewers, it should be a list
398 calculate pull request status based on reviewers, it should be a list
400 of two element lists.
399 of two element lists.
401 """
400 """
402 reviewers = self.reviewers_statuses(pull_request)
401 reviewers = self.reviewers_statuses(pull_request)
403 return self.calculate_status(reviewers)
402 return self.calculate_status(reviewers)
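
The group-voting rule above boils down to counting approvals against a threshold: a `vote_rule` of -1 means every member must approve, and a rule larger than the group is trimmed down to the group size. A standalone sketch of that threshold arithmetic (a simplification for illustration, not the model code itself):

def required_approvals(vote_rule, member_count, trim_votes=True):
    # -1 means "all members must approve"
    required = member_count if vote_rule == -1 else vote_rule
    # never require more votes than there are members in the group
    if trim_votes and required > member_count:
        required = member_count
    return required

# group of 4, rule of 5 -> trimmed to 4, so 4 approvals satisfy the rule
assert required_approvals(5, 4) == 4
# group of 4, "ALL required" -> 4 approvals needed
assert required_approvals(-1, 4) == 4
# group of 4, 2 required -> 2 approvals are enough
assert required_approvals(2, 4) == 2

Once the threshold is met, every group member's vote is reported as approved (the `fill_in` value); otherwise each member's own latest status is used.
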
@@ -1,857 +1,857 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 comments model for RhodeCode
22 comments model for RhodeCode
23 """
23 """
24 import datetime
24 import datetime
25
25
26 import logging
26 import logging
27 import traceback
27 import traceback
28 import collections
28 import collections
29
29
30 from pyramid.threadlocal import get_current_registry, get_current_request
30 from pyramid.threadlocal import get_current_registry, get_current_request
31 from sqlalchemy.sql.expression import null
31 from sqlalchemy.sql.expression import null
32 from sqlalchemy.sql.functions import coalesce
32 from sqlalchemy.sql.functions import coalesce
33
33
34 from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils
34 from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils
35 from rhodecode.lib import audit_logger
35 from rhodecode.lib import audit_logger
36 from rhodecode.lib.exceptions import CommentVersionMismatch
36 from rhodecode.lib.exceptions import CommentVersionMismatch
37 from rhodecode.lib.utils2 import extract_mentioned_users, safe_str, safe_int
37 from rhodecode.lib.utils2 import extract_mentioned_users, safe_str, safe_int
38 from rhodecode.model import BaseModel
38 from rhodecode.model import BaseModel
39 from rhodecode.model.db import (
39 from rhodecode.model.db import (
40 false, true,
40 false, true,
41 ChangesetComment,
41 ChangesetComment,
42 User,
42 User,
43 Notification,
43 Notification,
44 PullRequest,
44 PullRequest,
45 AttributeDict,
45 AttributeDict,
46 ChangesetCommentHistory,
46 ChangesetCommentHistory,
47 )
47 )
48 from rhodecode.model.notification import NotificationModel
48 from rhodecode.model.notification import NotificationModel
49 from rhodecode.model.meta import Session
49 from rhodecode.model.meta import Session
50 from rhodecode.model.settings import VcsSettingsModel
50 from rhodecode.model.settings import VcsSettingsModel
51 from rhodecode.model.notification import EmailNotificationModel
51 from rhodecode.model.notification import EmailNotificationModel
52 from rhodecode.model.validation_schema.schemas import comment_schema
52 from rhodecode.model.validation_schema.schemas import comment_schema
53
53
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 class CommentsModel(BaseModel):
58 class CommentsModel(BaseModel):
59
59
60 cls = ChangesetComment
60 cls = ChangesetComment
61
61
62 DIFF_CONTEXT_BEFORE = 3
62 DIFF_CONTEXT_BEFORE = 3
63 DIFF_CONTEXT_AFTER = 3
63 DIFF_CONTEXT_AFTER = 3
64
64
65 def __get_commit_comment(self, changeset_comment):
65 def __get_commit_comment(self, changeset_comment):
66 return self._get_instance(ChangesetComment, changeset_comment)
66 return self._get_instance(ChangesetComment, changeset_comment)
67
67
68 def __get_pull_request(self, pull_request):
68 def __get_pull_request(self, pull_request):
69 return self._get_instance(PullRequest, pull_request)
69 return self._get_instance(PullRequest, pull_request)
70
70
71 def _extract_mentions(self, s):
71 def _extract_mentions(self, s):
72 user_objects = []
72 user_objects = []
73 for username in extract_mentioned_users(s):
73 for username in extract_mentioned_users(s):
74 user_obj = User.get_by_username(username, case_insensitive=True)
74 user_obj = User.get_by_username(username, case_insensitive=True)
75 if user_obj:
75 if user_obj:
76 user_objects.append(user_obj)
76 user_objects.append(user_obj)
77 return user_objects
77 return user_objects
78
78
79 def _get_renderer(self, global_renderer='rst', request=None):
79 def _get_renderer(self, global_renderer='rst', request=None):
80 request = request or get_current_request()
80 request = request or get_current_request()
81
81
82 try:
82 try:
83 global_renderer = request.call_context.visual.default_renderer
83 global_renderer = request.call_context.visual.default_renderer
84 except AttributeError:
84 except AttributeError:
85 log.debug("Renderer not set, falling back "
85 log.debug("Renderer not set, falling back "
86 "to default renderer '%s'", global_renderer)
86 "to default renderer '%s'", global_renderer)
87 except Exception:
87 except Exception:
88 log.error(traceback.format_exc())
88 log.error(traceback.format_exc())
89 return global_renderer
89 return global_renderer
90
90
91 def aggregate_comments(self, comments, versions, show_version, inline=False):
91 def aggregate_comments(self, comments, versions, show_version, inline=False):
92 # group by versions, and count until, and display objects
92 # group by versions, and count until, and display objects
93
93
94 comment_groups = collections.defaultdict(list)
94 comment_groups = collections.defaultdict(list)
95 [comment_groups[_co.pull_request_version_id].append(_co) for _co in comments]
95 [comment_groups[_co.pull_request_version_id].append(_co) for _co in comments]
96
96
97 def yield_comments(pos):
97 def yield_comments(pos):
98 for co in comment_groups[pos]:
98 for co in comment_groups[pos]:
99 yield co
99 yield co
100
100
101 comment_versions = collections.defaultdict(
101 comment_versions = collections.defaultdict(
102 lambda: collections.defaultdict(list))
102 lambda: collections.defaultdict(list))
103 prev_prvid = -1
103 prev_prvid = -1
104 # fake last entry with None, to aggregate on "latest" version which
104 # fake last entry with None, to aggregate on "latest" version which
105 # doesn't have a pull_request_version_id
105 # doesn't have a pull_request_version_id
106 for ver in versions + [AttributeDict({'pull_request_version_id': None})]:
106 for ver in versions + [AttributeDict({'pull_request_version_id': None})]:
107 prvid = ver.pull_request_version_id
107 prvid = ver.pull_request_version_id
108 if prev_prvid == -1:
108 if prev_prvid == -1:
109 prev_prvid = prvid
109 prev_prvid = prvid
110
110
111 for co in yield_comments(prvid):
111 for co in yield_comments(prvid):
112 comment_versions[prvid]['at'].append(co)
112 comment_versions[prvid]['at'].append(co)
113
113
114 # save until
114 # save until
115 current = comment_versions[prvid]['at']
115 current = comment_versions[prvid]['at']
116 prev_until = comment_versions[prev_prvid]['until']
116 prev_until = comment_versions[prev_prvid]['until']
117 cur_until = prev_until + current
117 cur_until = prev_until + current
118 comment_versions[prvid]['until'].extend(cur_until)
118 comment_versions[prvid]['until'].extend(cur_until)
119
119
120 # save outdated
120 # save outdated
121 if inline:
121 if inline:
122 outdated = [x for x in cur_until
122 outdated = [x for x in cur_until
123 if x.outdated_at_version(show_version)]
123 if x.outdated_at_version(show_version)]
124 else:
124 else:
125 outdated = [x for x in cur_until
125 outdated = [x for x in cur_until
126 if x.older_than_version(show_version)]
126 if x.older_than_version(show_version)]
127 display = [x for x in cur_until if x not in outdated]
127 display = [x for x in cur_until if x not in outdated]
128
128
129 comment_versions[prvid]['outdated'] = outdated
129 comment_versions[prvid]['outdated'] = outdated
130 comment_versions[prvid]['display'] = display
130 comment_versions[prvid]['display'] = display
131
131
132 prev_prvid = prvid
132 prev_prvid = prvid
133
133
134 return comment_versions
134 return comment_versions
135
135
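
The loop above accumulates, per pull request version, the comments added "at" that version plus everything collected "until" it, with None standing in for the latest, unversioned state. A simplified, illustrative sketch of that accumulation (not the model code itself; the version ids and comment names are made up):

import collections

def aggregate(comments_by_version, version_ids):
    data = collections.defaultdict(lambda: {'at': [], 'until': []})
    prev = -1
    # None is appended as a fake last entry standing for the "latest" version
    for vid in version_ids + [None]:
        if prev == -1:
            prev = vid
        data[vid]['at'] = list(comments_by_version.get(vid, []))
        # everything accumulated so far, plus this version's own comments
        data[vid]['until'] = data[prev]['until'] + data[vid]['at']
        prev = vid
    return data

result = aggregate({1: ['c1'], 2: ['c2'], None: ['c3']}, [1, 2])
assert result[None]['until'] == ['c1', 'c2', 'c3']
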
136 def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None):
136 def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None):
137 qry = Session().query(ChangesetComment) \
137 qry = Session().query(ChangesetComment) \
138 .filter(ChangesetComment.repo == repo)
138 .filter(ChangesetComment.repo == repo)
139
139
140 if comment_type and comment_type in ChangesetComment.COMMENT_TYPES:
140 if comment_type and comment_type in ChangesetComment.COMMENT_TYPES:
141 qry = qry.filter(ChangesetComment.comment_type == comment_type)
141 qry = qry.filter(ChangesetComment.comment_type == comment_type)
142
142
143 if user:
143 if user:
144 user = self._get_user(user)
144 user = self._get_user(user)
145 if user:
145 if user:
146 qry = qry.filter(ChangesetComment.user_id == user.user_id)
146 qry = qry.filter(ChangesetComment.user_id == user.user_id)
147
147
148 if commit_id:
148 if commit_id:
149 qry = qry.filter(ChangesetComment.revision == commit_id)
149 qry = qry.filter(ChangesetComment.revision == commit_id)
150
150
151 qry = qry.order_by(ChangesetComment.created_on)
151 qry = qry.order_by(ChangesetComment.created_on)
152 return qry.all()
152 return qry.all()
153
153
154 def get_repository_unresolved_todos(self, repo):
154 def get_repository_unresolved_todos(self, repo):
155 todos = Session().query(ChangesetComment) \
155 todos = Session().query(ChangesetComment) \
156 .filter(ChangesetComment.repo == repo) \
156 .filter(ChangesetComment.repo == repo) \
157 .filter(ChangesetComment.resolved_by == None) \
157 .filter(ChangesetComment.resolved_by == None) \
158 .filter(ChangesetComment.comment_type
158 .filter(ChangesetComment.comment_type
159 == ChangesetComment.COMMENT_TYPE_TODO)
159 == ChangesetComment.COMMENT_TYPE_TODO)
160 todos = todos.all()
160 todos = todos.all()
161
161
162 return todos
162 return todos
163
163
164 def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
164 def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
165
165
166 todos = Session().query(ChangesetComment) \
166 todos = Session().query(ChangesetComment) \
167 .filter(ChangesetComment.pull_request == pull_request) \
167 .filter(ChangesetComment.pull_request == pull_request) \
168 .filter(ChangesetComment.resolved_by == None) \
168 .filter(ChangesetComment.resolved_by == None) \
169 .filter(ChangesetComment.comment_type
169 .filter(ChangesetComment.comment_type
170 == ChangesetComment.COMMENT_TYPE_TODO)
170 == ChangesetComment.COMMENT_TYPE_TODO)
171
171
172 if not include_drafts:
172 if not include_drafts:
173 todos = todos.filter(ChangesetComment.draft == false())
173 todos = todos.filter(ChangesetComment.draft == false())
174
174
175 if not show_outdated:
175 if not show_outdated:
176 todos = todos.filter(
176 todos = todos.filter(
177 coalesce(ChangesetComment.display_state, '') !=
177 coalesce(ChangesetComment.display_state, '') !=
178 ChangesetComment.COMMENT_OUTDATED)
178 ChangesetComment.COMMENT_OUTDATED)
179
179
180 todos = todos.all()
180 todos = todos.all()
181
181
182 return todos
182 return todos
183
183
184 def get_pull_request_resolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
184 def get_pull_request_resolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
185
185
186 todos = Session().query(ChangesetComment) \
186 todos = Session().query(ChangesetComment) \
187 .filter(ChangesetComment.pull_request == pull_request) \
187 .filter(ChangesetComment.pull_request == pull_request) \
188 .filter(ChangesetComment.resolved_by != None) \
188 .filter(ChangesetComment.resolved_by != None) \
189 .filter(ChangesetComment.comment_type
189 .filter(ChangesetComment.comment_type
190 == ChangesetComment.COMMENT_TYPE_TODO)
190 == ChangesetComment.COMMENT_TYPE_TODO)
191
191
192 if not include_drafts:
192 if not include_drafts:
193 todos = todos.filter(ChangesetComment.draft == false())
193 todos = todos.filter(ChangesetComment.draft == false())
194
194
195 if not show_outdated:
195 if not show_outdated:
196 todos = todos.filter(
196 todos = todos.filter(
197 coalesce(ChangesetComment.display_state, '') !=
197 coalesce(ChangesetComment.display_state, '') !=
198 ChangesetComment.COMMENT_OUTDATED)
198 ChangesetComment.COMMENT_OUTDATED)
199
199
200 todos = todos.all()
200 todos = todos.all()
201
201
202 return todos
202 return todos
203
203
204 def get_pull_request_drafts(self, user_id, pull_request):
204 def get_pull_request_drafts(self, user_id, pull_request):
205 drafts = Session().query(ChangesetComment) \
205 drafts = Session().query(ChangesetComment) \
206 .filter(ChangesetComment.pull_request == pull_request) \
206 .filter(ChangesetComment.pull_request == pull_request) \
207 .filter(ChangesetComment.user_id == user_id) \
207 .filter(ChangesetComment.user_id == user_id) \
208 .filter(ChangesetComment.draft == true())
208 .filter(ChangesetComment.draft == true())
209 return drafts.all()
209 return drafts.all()
210
210
211 def get_commit_unresolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
211 def get_commit_unresolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
212
212
213 todos = Session().query(ChangesetComment) \
213 todos = Session().query(ChangesetComment) \
214 .filter(ChangesetComment.revision == commit_id) \
214 .filter(ChangesetComment.revision == commit_id) \
215 .filter(ChangesetComment.resolved_by == None) \
215 .filter(ChangesetComment.resolved_by == None) \
216 .filter(ChangesetComment.comment_type
216 .filter(ChangesetComment.comment_type
217 == ChangesetComment.COMMENT_TYPE_TODO)
217 == ChangesetComment.COMMENT_TYPE_TODO)
218
218
219 if not include_drafts:
219 if not include_drafts:
220 todos = todos.filter(ChangesetComment.draft == false())
220 todos = todos.filter(ChangesetComment.draft == false())
221
221
222 if not show_outdated:
222 if not show_outdated:
223 todos = todos.filter(
223 todos = todos.filter(
224 coalesce(ChangesetComment.display_state, '') !=
224 coalesce(ChangesetComment.display_state, '') !=
225 ChangesetComment.COMMENT_OUTDATED)
225 ChangesetComment.COMMENT_OUTDATED)
226
226
227 todos = todos.all()
227 todos = todos.all()
228
228
229 return todos
229 return todos
230
230
231 def get_commit_resolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
231 def get_commit_resolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
232
232
233 todos = Session().query(ChangesetComment) \
233 todos = Session().query(ChangesetComment) \
234 .filter(ChangesetComment.revision == commit_id) \
234 .filter(ChangesetComment.revision == commit_id) \
235 .filter(ChangesetComment.resolved_by != None) \
235 .filter(ChangesetComment.resolved_by != None) \
236 .filter(ChangesetComment.comment_type
236 .filter(ChangesetComment.comment_type
237 == ChangesetComment.COMMENT_TYPE_TODO)
237 == ChangesetComment.COMMENT_TYPE_TODO)
238
238
239 if not include_drafts:
239 if not include_drafts:
240 todos = todos.filter(ChangesetComment.draft == false())
240 todos = todos.filter(ChangesetComment.draft == false())
241
241
242 if not show_outdated:
242 if not show_outdated:
243 todos = todos.filter(
243 todos = todos.filter(
244 coalesce(ChangesetComment.display_state, '') !=
244 coalesce(ChangesetComment.display_state, '') !=
245 ChangesetComment.COMMENT_OUTDATED)
245 ChangesetComment.COMMENT_OUTDATED)
246
246
247 todos = todos.all()
247 todos = todos.all()
248
248
249 return todos
249 return todos
250
250
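The TODO queries above all follow the same shape: start from every comment of the target, then narrow by resolution state, comment type, drafts and outdated state depending on the flags passed in. Below is a standalone sketch of that incremental filter composition; it is not part of the changeset, and the plain dicts and the 'comment_outdated' marker are stand-ins for ChangesetComment rows and its COMMENT_OUTDATED constant.

# Sketch of the incremental filtering done by the *_todos helpers above.
def unresolved_todos(comments, show_outdated=True, include_drafts=True):
    result = [c for c in comments
              if c['type'] == 'todo' and c['resolved_by'] is None]
    if not include_drafts:
        result = [c for c in result if not c['draft']]
    if not show_outdated:
        result = [c for c in result
                  if c.get('display_state') != 'comment_outdated']
    return result


comments = [
    {'type': 'todo', 'resolved_by': None, 'draft': True},
    {'type': 'todo', 'resolved_by': None, 'draft': False,
     'display_state': 'comment_outdated'},
    {'type': 'note', 'resolved_by': None, 'draft': False},
]
assert len(unresolved_todos(comments)) == 2
assert len(unresolved_todos(comments, show_outdated=False,
                            include_drafts=False)) == 0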
251 def get_commit_inline_comments(self, commit_id, include_drafts=True):
251 def get_commit_inline_comments(self, commit_id, include_drafts=True):
252 inline_comments = Session().query(ChangesetComment) \
252 inline_comments = Session().query(ChangesetComment) \
253 .filter(ChangesetComment.line_no != None) \
253 .filter(ChangesetComment.line_no != None) \
254 .filter(ChangesetComment.f_path != None) \
254 .filter(ChangesetComment.f_path != None) \
255 .filter(ChangesetComment.revision == commit_id)
255 .filter(ChangesetComment.revision == commit_id)
256
256
257 if not include_drafts:
257 if not include_drafts:
258 inline_comments = inline_comments.filter(ChangesetComment.draft == false())
258 inline_comments = inline_comments.filter(ChangesetComment.draft == false())
259
259
260 inline_comments = inline_comments.all()
260 inline_comments = inline_comments.all()
261 return inline_comments
261 return inline_comments
262
262
263 def _log_audit_action(self, action, action_data, auth_user, comment):
263 def _log_audit_action(self, action, action_data, auth_user, comment):
264 audit_logger.store(
264 audit_logger.store(
265 action=action,
265 action=action,
266 action_data=action_data,
266 action_data=action_data,
267 user=auth_user,
267 user=auth_user,
268 repo=comment.repo)
268 repo=comment.repo)
269
269
270 def create(self, text, repo, user, commit_id=None, pull_request=None,
270 def create(self, text, repo, user, commit_id=None, pull_request=None,
271 f_path=None, line_no=None, status_change=None,
271 f_path=None, line_no=None, status_change=None,
272 status_change_type=None, comment_type=None, is_draft=False,
272 status_change_type=None, comment_type=None, is_draft=False,
273 resolves_comment_id=None, closing_pr=False, send_email=True,
273 resolves_comment_id=None, closing_pr=False, send_email=True,
274 renderer=None, auth_user=None, extra_recipients=None):
274 renderer=None, auth_user=None, extra_recipients=None):
275 """
275 """
276 Creates new comment for commit or pull request.
276 Creates new comment for commit or pull request.
277 If status_change is not None, this comment is associated with a
277 If status_change is not None, this comment is associated with a
278 status change of the commit, or of the commit associated with the pull request
278 status change of the commit, or of the commit associated with the pull request
279
279
280 :param text:
280 :param text:
281 :param repo:
281 :param repo:
282 :param user:
282 :param user:
283 :param commit_id:
283 :param commit_id:
284 :param pull_request:
284 :param pull_request:
285 :param f_path:
285 :param f_path:
286 :param line_no:
286 :param line_no:
287 :param status_change: Label for status change
287 :param status_change: Label for status change
288 :param comment_type: Type of comment
288 :param comment_type: Type of comment
289 :param is_draft: is comment a draft only
289 :param is_draft: is comment a draft only
290 :param resolves_comment_id: id of comment which this one will resolve
290 :param resolves_comment_id: id of comment which this one will resolve
291 :param status_change_type: type of status change
291 :param status_change_type: type of status change
292 :param closing_pr:
292 :param closing_pr:
293 :param send_email:
293 :param send_email:
294 :param renderer: pick renderer for this comment
294 :param renderer: pick renderer for this comment
295 :param auth_user: current authenticated user calling this method
295 :param auth_user: current authenticated user calling this method
296 :param extra_recipients: list of extra users to be added to recipients
296 :param extra_recipients: list of extra users to be added to recipients
297 """
297 """
298
298
299 if not text:
299 if not text:
300 log.warning('Missing text for comment, skipping...')
300 log.warning('Missing text for comment, skipping...')
301 return
301 return
302 request = get_current_request()
302 request = get_current_request()
303 _ = request.translate
303 _ = request.translate
304
304
305 if not renderer:
305 if not renderer:
306 renderer = self._get_renderer(request=request)
306 renderer = self._get_renderer(request=request)
307
307
308 repo = self._get_repo(repo)
308 repo = self._get_repo(repo)
309 user = self._get_user(user)
309 user = self._get_user(user)
310 auth_user = auth_user or user
310 auth_user = auth_user or user
311
311
312 schema = comment_schema.CommentSchema()
312 schema = comment_schema.CommentSchema()
313 validated_kwargs = schema.deserialize(dict(
313 validated_kwargs = schema.deserialize(dict(
314 comment_body=text,
314 comment_body=text,
315 comment_type=comment_type,
315 comment_type=comment_type,
316 is_draft=is_draft,
316 is_draft=is_draft,
317 comment_file=f_path,
317 comment_file=f_path,
318 comment_line=line_no,
318 comment_line=line_no,
319 renderer_type=renderer,
319 renderer_type=renderer,
320 status_change=status_change_type,
320 status_change=status_change_type,
321 resolves_comment_id=resolves_comment_id,
321 resolves_comment_id=resolves_comment_id,
322 repo=repo.repo_id,
322 repo=repo.repo_id,
323 user=user.user_id,
323 user=user.user_id,
324 ))
324 ))
325 is_draft = validated_kwargs['is_draft']
325 is_draft = validated_kwargs['is_draft']
326
326
327 comment = ChangesetComment()
327 comment = ChangesetComment()
328 comment.renderer = validated_kwargs['renderer_type']
328 comment.renderer = validated_kwargs['renderer_type']
329 comment.text = validated_kwargs['comment_body']
329 comment.text = validated_kwargs['comment_body']
330 comment.f_path = validated_kwargs['comment_file']
330 comment.f_path = validated_kwargs['comment_file']
331 comment.line_no = validated_kwargs['comment_line']
331 comment.line_no = validated_kwargs['comment_line']
332 comment.comment_type = validated_kwargs['comment_type']
332 comment.comment_type = validated_kwargs['comment_type']
333 comment.draft = is_draft
333 comment.draft = is_draft
334
334
335 comment.repo = repo
335 comment.repo = repo
336 comment.author = user
336 comment.author = user
337 resolved_comment = self.__get_commit_comment(
337 resolved_comment = self.__get_commit_comment(
338 validated_kwargs['resolves_comment_id'])
338 validated_kwargs['resolves_comment_id'])
339
339
340 # check if the comment actually belongs to this PR
340 # check if the comment actually belongs to this PR
341 if resolved_comment and resolved_comment.pull_request and \
341 if resolved_comment and resolved_comment.pull_request and \
342 resolved_comment.pull_request != pull_request:
342 resolved_comment.pull_request != pull_request:
343 log.warning('Comment tried to resolve an unrelated todo comment: %s',
343 log.warning('Comment tried to resolve an unrelated todo comment: %s',
344 resolved_comment)
344 resolved_comment)
345 # comment not bound to this pull request, forbid
345 # comment not bound to this pull request, forbid
346 resolved_comment = None
346 resolved_comment = None
347
347
348 elif resolved_comment and resolved_comment.repo and \
348 elif resolved_comment and resolved_comment.repo and \
349 resolved_comment.repo != repo:
349 resolved_comment.repo != repo:
350 log.warning('Comment tried to resolve an unrelated todo comment: %s',
350 log.warning('Comment tried to resolve an unrelated todo comment: %s',
351 resolved_comment)
351 resolved_comment)
352 # comment not bound to this repo, forbid
352 # comment not bound to this repo, forbid
353 resolved_comment = None
353 resolved_comment = None
354
354
355 if resolved_comment and resolved_comment.resolved_by:
355 if resolved_comment and resolved_comment.resolved_by:
356 # if this comment is already resolved, don't mark it again!
356 # if this comment is already resolved, don't mark it again!
357 resolved_comment = None
357 resolved_comment = None
358
358
359 comment.resolved_comment = resolved_comment
359 comment.resolved_comment = resolved_comment
360
360
361 pull_request_id = pull_request
361 pull_request_id = pull_request
362
362
363 commit_obj = None
363 commit_obj = None
364 pull_request_obj = None
364 pull_request_obj = None
365
365
366 if commit_id:
366 if commit_id:
367 notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT
367 notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT
368 # do a lookup, so we don't pass something bad here
368 # do a lookup, so we don't pass something bad here
369 commit_obj = repo.scm_instance().get_commit(commit_id=commit_id)
369 commit_obj = repo.scm_instance().get_commit(commit_id=commit_id)
370 comment.revision = commit_obj.raw_id
370 comment.revision = commit_obj.raw_id
371
371
372 elif pull_request_id:
372 elif pull_request_id:
373 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT
373 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT
374 pull_request_obj = self.__get_pull_request(pull_request_id)
374 pull_request_obj = self.__get_pull_request(pull_request_id)
375 comment.pull_request = pull_request_obj
375 comment.pull_request = pull_request_obj
376 else:
376 else:
377 raise Exception('Please specify commit or pull_request_id')
377 raise Exception('Please specify commit or pull_request_id')
378
378
379 Session().add(comment)
379 Session().add(comment)
380 Session().flush()
380 Session().flush()
381 kwargs = {
381 kwargs = {
382 'user': user,
382 'user': user,
383 'renderer_type': renderer,
383 'renderer_type': renderer,
384 'repo_name': repo.repo_name,
384 'repo_name': repo.repo_name,
385 'status_change': status_change,
385 'status_change': status_change,
386 'status_change_type': status_change_type,
386 'status_change_type': status_change_type,
387 'comment_body': text,
387 'comment_body': text,
388 'comment_file': f_path,
388 'comment_file': f_path,
389 'comment_line': line_no,
389 'comment_line': line_no,
390 'comment_type': comment_type or 'note',
390 'comment_type': comment_type or 'note',
391 'comment_id': comment.comment_id
391 'comment_id': comment.comment_id
392 }
392 }
393
393
394 if commit_obj:
394 if commit_obj:
395 recipients = ChangesetComment.get_users(
395 recipients = ChangesetComment.get_users(
396 revision=commit_obj.raw_id)
396 revision=commit_obj.raw_id)
397 # add commit author if it's in RhodeCode system
397 # add commit author if it's in RhodeCode system
398 cs_author = User.get_from_cs_author(commit_obj.author)
398 cs_author = User.get_from_cs_author(commit_obj.author)
399 if not cs_author:
399 if not cs_author:
400 # use repo owner if we cannot extract the author correctly
400 # use repo owner if we cannot extract the author correctly
401 cs_author = repo.user
401 cs_author = repo.user
402 recipients += [cs_author]
402 recipients += [cs_author]
403
403
404 commit_comment_url = self.get_url(comment, request=request)
404 commit_comment_url = self.get_url(comment, request=request)
405 commit_comment_reply_url = self.get_url(
405 commit_comment_reply_url = self.get_url(
406 comment, request=request,
406 comment, request=request,
407 anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id))
407 anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id))
408
408
409 target_repo_url = h.link_to(
409 target_repo_url = h.link_to(
410 repo.repo_name,
410 repo.repo_name,
411 h.route_url('repo_summary', repo_name=repo.repo_name))
411 h.route_url('repo_summary', repo_name=repo.repo_name))
412
412
413 commit_url = h.route_url('repo_commit', repo_name=repo.repo_name,
413 commit_url = h.route_url('repo_commit', repo_name=repo.repo_name,
414 commit_id=commit_id)
414 commit_id=commit_id)
415
415
416 # commit specifics
416 # commit specifics
417 kwargs.update({
417 kwargs.update({
418 'commit': commit_obj,
418 'commit': commit_obj,
419 'commit_message': commit_obj.message,
419 'commit_message': commit_obj.message,
420 'commit_target_repo_url': target_repo_url,
420 'commit_target_repo_url': target_repo_url,
421 'commit_comment_url': commit_comment_url,
421 'commit_comment_url': commit_comment_url,
422 'commit_comment_reply_url': commit_comment_reply_url,
422 'commit_comment_reply_url': commit_comment_reply_url,
423 'commit_url': commit_url,
423 'commit_url': commit_url,
424 'thread_ids': [commit_url, commit_comment_url],
424 'thread_ids': [commit_url, commit_comment_url],
425 })
425 })
426
426
427 elif pull_request_obj:
427 elif pull_request_obj:
428 # get the current participants of this pull request
428 # get the current participants of this pull request
429 recipients = ChangesetComment.get_users(
429 recipients = ChangesetComment.get_users(
430 pull_request_id=pull_request_obj.pull_request_id)
430 pull_request_id=pull_request_obj.pull_request_id)
431 # add pull request author
431 # add pull request author
432 recipients += [pull_request_obj.author]
432 recipients += [pull_request_obj.author]
433
433
434 # add the reviewers to notification
434 # add the reviewers to notification
435 recipients += [x.user for x in pull_request_obj.get_pull_request_reviewers()]
435 recipients += [x.user for x in pull_request_obj.get_pull_request_reviewers()]
436
436
437 pr_target_repo = pull_request_obj.target_repo
437 pr_target_repo = pull_request_obj.target_repo
438 pr_source_repo = pull_request_obj.source_repo
438 pr_source_repo = pull_request_obj.source_repo
439
439
440 pr_comment_url = self.get_url(comment, request=request)
440 pr_comment_url = self.get_url(comment, request=request)
441 pr_comment_reply_url = self.get_url(
441 pr_comment_reply_url = self.get_url(
442 comment, request=request,
442 comment, request=request,
443 anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id))
443 anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id))
444
444
445 pr_url = h.route_url(
445 pr_url = h.route_url(
446 'pullrequest_show',
446 'pullrequest_show',
447 repo_name=pr_target_repo.repo_name,
447 repo_name=pr_target_repo.repo_name,
448 pull_request_id=pull_request_obj.pull_request_id, )
448 pull_request_id=pull_request_obj.pull_request_id, )
449
449
450 # set some variables for email notification
450 # set some variables for email notification
451 pr_target_repo_url = h.route_url(
451 pr_target_repo_url = h.route_url(
452 'repo_summary', repo_name=pr_target_repo.repo_name)
452 'repo_summary', repo_name=pr_target_repo.repo_name)
453
453
454 pr_source_repo_url = h.route_url(
454 pr_source_repo_url = h.route_url(
455 'repo_summary', repo_name=pr_source_repo.repo_name)
455 'repo_summary', repo_name=pr_source_repo.repo_name)
456
456
457 # pull request specifics
457 # pull request specifics
458 kwargs.update({
458 kwargs.update({
459 'pull_request': pull_request_obj,
459 'pull_request': pull_request_obj,
460 'pr_id': pull_request_obj.pull_request_id,
460 'pr_id': pull_request_obj.pull_request_id,
461 'pull_request_url': pr_url,
461 'pull_request_url': pr_url,
462 'pull_request_target_repo': pr_target_repo,
462 'pull_request_target_repo': pr_target_repo,
463 'pull_request_target_repo_url': pr_target_repo_url,
463 'pull_request_target_repo_url': pr_target_repo_url,
464 'pull_request_source_repo': pr_source_repo,
464 'pull_request_source_repo': pr_source_repo,
465 'pull_request_source_repo_url': pr_source_repo_url,
465 'pull_request_source_repo_url': pr_source_repo_url,
466 'pr_comment_url': pr_comment_url,
466 'pr_comment_url': pr_comment_url,
467 'pr_comment_reply_url': pr_comment_reply_url,
467 'pr_comment_reply_url': pr_comment_reply_url,
468 'pr_closing': closing_pr,
468 'pr_closing': closing_pr,
469 'thread_ids': [pr_url, pr_comment_url],
469 'thread_ids': [pr_url, pr_comment_url],
470 })
470 })
471
471
472 if send_email:
472 if send_email:
473 recipients += [self._get_user(u) for u in (extra_recipients or [])]
473 recipients += [self._get_user(u) for u in (extra_recipients or [])]
474
474
475 mention_recipients = set(
475 mention_recipients = set(
476 self._extract_mentions(text)).difference(recipients)
476 self._extract_mentions(text)).difference(recipients)
477
477
478 # create notification objects, and emails
478 # create notification objects, and emails
479 NotificationModel().create(
479 NotificationModel().create(
480 created_by=user,
480 created_by=user,
481 notification_subject='', # Filled in based on the notification_type
481 notification_subject='', # Filled in based on the notification_type
482 notification_body='', # Filled in based on the notification_type
482 notification_body='', # Filled in based on the notification_type
483 notification_type=notification_type,
483 notification_type=notification_type,
484 recipients=recipients,
484 recipients=recipients,
485 mention_recipients=mention_recipients,
485 mention_recipients=mention_recipients,
486 email_kwargs=kwargs,
486 email_kwargs=kwargs,
487 )
487 )
488
488
489 Session().flush()
489 Session().flush()
490 if comment.pull_request:
490 if comment.pull_request:
491 action = 'repo.pull_request.comment.create'
491 action = 'repo.pull_request.comment.create'
492 else:
492 else:
493 action = 'repo.commit.comment.create'
493 action = 'repo.commit.comment.create'
494
494
495 if not is_draft:
495 if not is_draft:
496 comment_data = comment.get_api_data()
496 comment_data = comment.get_api_data()
497
497
498 self._log_audit_action(
498 self._log_audit_action(
499 action, {'data': comment_data}, auth_user, comment)
499 action, {'data': comment_data}, auth_user, comment)
500
500
501 return comment
501 return comment
502
502
503 def edit(self, comment_id, text, auth_user, version):
503 def edit(self, comment_id, text, auth_user, version):
504 """
504 """
505 Change existing comment for commit or pull request.
505 Change existing comment for commit or pull request.
506
506
507 :param comment_id:
507 :param comment_id:
508 :param text:
508 :param text:
509 :param auth_user: current authenticated user calling this method
509 :param auth_user: current authenticated user calling this method
510 :param version: last comment version
510 :param version: last comment version
511 """
511 """
512 if not text:
512 if not text:
513 log.warning('Missing text for comment, skipping...')
513 log.warning('Missing text for comment, skipping...')
514 return
514 return
515
515
516 comment = ChangesetComment.get(comment_id)
516 comment = ChangesetComment.get(comment_id)
517 old_comment_text = comment.text
517 old_comment_text = comment.text
518 comment.text = text
518 comment.text = text
519 comment.modified_at = datetime.datetime.now()
519 comment.modified_at = datetime.datetime.now()
520 version = safe_int(version)
520 version = safe_int(version)
521
521
522 # NOTE(marcink): this returns initial comment + edits, so v2 from ui
522 # NOTE(marcink): this returns initial comment + edits, so v2 from ui
523 # would return 3 here
523 # would return 3 here
524 comment_version = ChangesetCommentHistory.get_version(comment_id)
524 comment_version = ChangesetCommentHistory.get_version(comment_id)
525
525
526 if isinstance(version, int) and (comment_version - version) != 1:
526 if isinstance(version, int) and (comment_version - version) != 1:
527 log.warning(
527 log.warning(
528 'Version mismatch comment_version {} submitted {}, skipping'.format(
528 'Version mismatch comment_version {} submitted {}, skipping'.format(
529 comment_version-1, # -1 since note above
529 comment_version-1, # -1 since note above
530 version
530 version
531 )
531 )
532 )
532 )
533 raise CommentVersionMismatch()
533 raise CommentVersionMismatch()
534
534
535 comment_history = ChangesetCommentHistory()
535 comment_history = ChangesetCommentHistory()
536 comment_history.comment_id = comment_id
536 comment_history.comment_id = comment_id
537 comment_history.version = comment_version
537 comment_history.version = comment_version
538 comment_history.created_by_user_id = auth_user.user_id
538 comment_history.created_by_user_id = auth_user.user_id
539 comment_history.text = old_comment_text
539 comment_history.text = old_comment_text
540 # TODO add email notification
540 # TODO add email notification
541 Session().add(comment_history)
541 Session().add(comment_history)
542 Session().add(comment)
542 Session().add(comment)
543 Session().flush()
543 Session().flush()
544
544
545 if comment.pull_request:
545 if comment.pull_request:
546 action = 'repo.pull_request.comment.edit'
546 action = 'repo.pull_request.comment.edit'
547 else:
547 else:
548 action = 'repo.commit.comment.edit'
548 action = 'repo.commit.comment.edit'
549
549
550 comment_data = comment.get_api_data()
550 comment_data = comment.get_api_data()
551 comment_data['old_comment_text'] = old_comment_text
551 comment_data['old_comment_text'] = old_comment_text
552 self._log_audit_action(
552 self._log_audit_action(
553 action, {'data': comment_data}, auth_user, comment)
553 action, {'data': comment_data}, auth_user, comment)
554
554
555 return comment_history
555 return comment_history
556
556
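The version guard in edit() above is easy to get wrong because the stored history count includes the initial comment. Below is a standalone sketch of just that check; it is not part of the changeset, and check_comment_version and its argument names are hypothetical, used only for illustration.

# Sketch: the history table counts the initial comment plus every edit,
# so editing version N from the UI is valid only when the stored count is N + 1.
class CommentVersionMismatch(Exception):
    pass


def check_comment_version(stored_version_count, submitted_version):
    if (stored_version_count - submitted_version) != 1:
        raise CommentVersionMismatch()


check_comment_version(stored_version_count=3, submitted_version=2)  # editing v2: ok
try:
    check_comment_version(stored_version_count=3, submitted_version=1)  # stale edit
except CommentVersionMismatch:
    pass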
557 def delete(self, comment, auth_user):
557 def delete(self, comment, auth_user):
558 """
558 """
559 Deletes given comment
559 Deletes given comment
560 """
560 """
561 comment = self.__get_commit_comment(comment)
561 comment = self.__get_commit_comment(comment)
562 old_data = comment.get_api_data()
562 old_data = comment.get_api_data()
563 Session().delete(comment)
563 Session().delete(comment)
564
564
565 if comment.pull_request:
565 if comment.pull_request:
566 action = 'repo.pull_request.comment.delete'
566 action = 'repo.pull_request.comment.delete'
567 else:
567 else:
568 action = 'repo.commit.comment.delete'
568 action = 'repo.commit.comment.delete'
569
569
570 self._log_audit_action(
570 self._log_audit_action(
571 action, {'old_data': old_data}, auth_user, comment)
571 action, {'old_data': old_data}, auth_user, comment)
572
572
573 return comment
573 return comment
574
574
575 def get_all_comments(self, repo_id, revision=None, pull_request=None,
575 def get_all_comments(self, repo_id, revision=None, pull_request=None,
576 include_drafts=True, count_only=False):
576 include_drafts=True, count_only=False):
577 q = ChangesetComment.query()\
577 q = ChangesetComment.query()\
578 .filter(ChangesetComment.repo_id == repo_id)
578 .filter(ChangesetComment.repo_id == repo_id)
579 if revision:
579 if revision:
580 q = q.filter(ChangesetComment.revision == revision)
580 q = q.filter(ChangesetComment.revision == revision)
581 elif pull_request:
581 elif pull_request:
582 pull_request = self.__get_pull_request(pull_request)
582 pull_request = self.__get_pull_request(pull_request)
583 q = q.filter(ChangesetComment.pull_request_id == pull_request.pull_request_id)
583 q = q.filter(ChangesetComment.pull_request_id == pull_request.pull_request_id)
584 else:
584 else:
585 raise Exception('Please specify commit or pull_request')
585 raise Exception('Please specify commit or pull_request')
586 if not include_drafts:
586 if not include_drafts:
587 q = q.filter(ChangesetComment.draft == false())
587 q = q.filter(ChangesetComment.draft == false())
588 q = q.order_by(ChangesetComment.created_on)
588 q = q.order_by(ChangesetComment.created_on)
589 if count_only:
589 if count_only:
590 return q.count()
590 return q.count()
591
591
592 return q.all()
592 return q.all()
593
593
594 def get_url(self, comment, request=None, permalink=False, anchor=None):
594 def get_url(self, comment, request=None, permalink=False, anchor=None):
595 if not request:
595 if not request:
596 request = get_current_request()
596 request = get_current_request()
597
597
598 comment = self.__get_commit_comment(comment)
598 comment = self.__get_commit_comment(comment)
599 if anchor is None:
599 if anchor is None:
600 anchor = 'comment-{}'.format(comment.comment_id)
600 anchor = 'comment-{}'.format(comment.comment_id)
601
601
602 if comment.pull_request:
602 if comment.pull_request:
603 pull_request = comment.pull_request
603 pull_request = comment.pull_request
604 if permalink:
604 if permalink:
605 return request.route_url(
605 return request.route_url(
606 'pull_requests_global',
606 'pull_requests_global',
607 pull_request_id=pull_request.pull_request_id,
607 pull_request_id=pull_request.pull_request_id,
608 _anchor=anchor)
608 _anchor=anchor)
609 else:
609 else:
610 return request.route_url(
610 return request.route_url(
611 'pullrequest_show',
611 'pullrequest_show',
612 repo_name=safe_str(pull_request.target_repo.repo_name),
612 repo_name=safe_str(pull_request.target_repo.repo_name),
613 pull_request_id=pull_request.pull_request_id,
613 pull_request_id=pull_request.pull_request_id,
614 _anchor=anchor)
614 _anchor=anchor)
615
615
616 else:
616 else:
617 repo = comment.repo
617 repo = comment.repo
618 commit_id = comment.revision
618 commit_id = comment.revision
619
619
620 if permalink:
620 if permalink:
621 return request.route_url(
621 return request.route_url(
622 'repo_commit', repo_name=safe_str(repo.repo_id),
622 'repo_commit', repo_name=safe_str(repo.repo_id),
623 commit_id=commit_id,
623 commit_id=commit_id,
624 _anchor=anchor)
624 _anchor=anchor)
625
625
626 else:
626 else:
627 return request.route_url(
627 return request.route_url(
628 'repo_commit', repo_name=safe_str(repo.repo_name),
628 'repo_commit', repo_name=safe_str(repo.repo_name),
629 commit_id=commit_id,
629 commit_id=commit_id,
630 _anchor=anchor)
630 _anchor=anchor)
631
631
632 def get_comments(self, repo_id, revision=None, pull_request=None):
632 def get_comments(self, repo_id, revision=None, pull_request=None):
633 """
633 """
634 Gets main comments based on revision or pull_request_id
634 Gets main comments based on revision or pull_request_id
635
635
636 :param repo_id:
636 :param repo_id:
637 :param revision:
637 :param revision:
638 :param pull_request:
638 :param pull_request:
639 """
639 """
640
640
641 q = ChangesetComment.query()\
641 q = ChangesetComment.query()\
642 .filter(ChangesetComment.repo_id == repo_id)\
642 .filter(ChangesetComment.repo_id == repo_id)\
643 .filter(ChangesetComment.line_no == None)\
643 .filter(ChangesetComment.line_no == None)\
644 .filter(ChangesetComment.f_path == None)
644 .filter(ChangesetComment.f_path == None)
645 if revision:
645 if revision:
646 q = q.filter(ChangesetComment.revision == revision)
646 q = q.filter(ChangesetComment.revision == revision)
647 elif pull_request:
647 elif pull_request:
648 pull_request = self.__get_pull_request(pull_request)
648 pull_request = self.__get_pull_request(pull_request)
649 q = q.filter(ChangesetComment.pull_request == pull_request)
649 q = q.filter(ChangesetComment.pull_request == pull_request)
650 else:
650 else:
651 raise Exception('Please specify commit or pull_request')
651 raise Exception('Please specify commit or pull_request')
652 q = q.order_by(ChangesetComment.created_on)
652 q = q.order_by(ChangesetComment.created_on)
653 return q.all()
653 return q.all()
654
654
655 def get_inline_comments(self, repo_id, revision=None, pull_request=None):
655 def get_inline_comments(self, repo_id, revision=None, pull_request=None):
656 q = self._get_inline_comments_query(repo_id, revision, pull_request)
656 q = self._get_inline_comments_query(repo_id, revision, pull_request)
657 return self._group_comments_by_path_and_line_number(q)
657 return self._group_comments_by_path_and_line_number(q)
658
658
659 def get_inline_comments_as_list(self, inline_comments, skip_outdated=True,
659 def get_inline_comments_as_list(self, inline_comments, skip_outdated=True,
660 version=None):
660 version=None):
661 inline_comms = []
661 inline_comms = []
662 for fname, per_line_comments in inline_comments.items():
662 for fname, per_line_comments in inline_comments.items():
663 for lno, comments in per_line_comments.items():
663 for lno, comments in per_line_comments.items():
664 for comm in comments:
664 for comm in comments:
665 if not comm.outdated_at_version(version) and skip_outdated:
665 if not comm.outdated_at_version(version) and skip_outdated:
666 inline_comms.append(comm)
666 inline_comms.append(comm)
667
667
668 return inline_comms
668 return inline_comms
669
669
670 def get_outdated_comments(self, repo_id, pull_request):
670 def get_outdated_comments(self, repo_id, pull_request):
671 # TODO: johbo: Remove `repo_id`, it is not needed to find the comments
671 # TODO: johbo: Remove `repo_id`, it is not needed to find the comments
672 # of a pull request.
672 # of a pull request.
673 q = self._all_inline_comments_of_pull_request(pull_request)
673 q = self._all_inline_comments_of_pull_request(pull_request)
674 q = q.filter(
674 q = q.filter(
675 ChangesetComment.display_state ==
675 ChangesetComment.display_state ==
676 ChangesetComment.COMMENT_OUTDATED
676 ChangesetComment.COMMENT_OUTDATED
677 ).order_by(ChangesetComment.comment_id.asc())
677 ).order_by(ChangesetComment.comment_id.asc())
678
678
679 return self._group_comments_by_path_and_line_number(q)
679 return self._group_comments_by_path_and_line_number(q)
680
680
681 def _get_inline_comments_query(self, repo_id, revision, pull_request):
681 def _get_inline_comments_query(self, repo_id, revision, pull_request):
682 # TODO: johbo: Split this into two methods: One for PR and one for
682 # TODO: johbo: Split this into two methods: One for PR and one for
683 # commit.
683 # commit.
684 if revision:
684 if revision:
685 q = Session().query(ChangesetComment).filter(
685 q = Session().query(ChangesetComment).filter(
686 ChangesetComment.repo_id == repo_id,
686 ChangesetComment.repo_id == repo_id,
687 ChangesetComment.line_no != null(),
687 ChangesetComment.line_no != null(),
688 ChangesetComment.f_path != null(),
688 ChangesetComment.f_path != null(),
689 ChangesetComment.revision == revision)
689 ChangesetComment.revision == revision)
690
690
691 elif pull_request:
691 elif pull_request:
692 pull_request = self.__get_pull_request(pull_request)
692 pull_request = self.__get_pull_request(pull_request)
693 if not CommentsModel.use_outdated_comments(pull_request):
693 if not CommentsModel.use_outdated_comments(pull_request):
694 q = self._visible_inline_comments_of_pull_request(pull_request)
694 q = self._visible_inline_comments_of_pull_request(pull_request)
695 else:
695 else:
696 q = self._all_inline_comments_of_pull_request(pull_request)
696 q = self._all_inline_comments_of_pull_request(pull_request)
697
697
698 else:
698 else:
699 raise Exception('Please specify commit or pull_request_id')
699 raise Exception('Please specify commit or pull_request_id')
700 q = q.order_by(ChangesetComment.comment_id.asc())
700 q = q.order_by(ChangesetComment.comment_id.asc())
701 return q
701 return q
702
702
703 def _group_comments_by_path_and_line_number(self, q):
703 def _group_comments_by_path_and_line_number(self, q):
704 comments = q.all()
704 comments = q.all()
705 paths = collections.defaultdict(lambda: collections.defaultdict(list))
705 paths = collections.defaultdict(lambda: collections.defaultdict(list))
706 for co in comments:
706 for co in comments:
707 paths[co.f_path][co.line_no].append(co)
707 paths[co.f_path][co.line_no].append(co)
708 return paths
708 return paths
709
709
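A standalone sketch of the nested-defaultdict grouping used by _group_comments_by_path_and_line_number above; it is not part of the changeset, and plain tuples stand in for ChangesetComment objects.

# Sketch: group comments into paths[f_path][line_no] -> list of comments.
import collections


def group_by_path_and_line(comments):
    paths = collections.defaultdict(lambda: collections.defaultdict(list))
    for f_path, line_no, text in comments:
        paths[f_path][line_no].append(text)
    return paths


grouped = group_by_path_and_line([
    ('setup.py', 'n10', 'todo: bump version'),
    ('setup.py', 'n10', 'done'),
    ('README.rst', 'o3', 'typo'),
])
assert grouped['setup.py']['n10'] == ['todo: bump version', 'done']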
710 @classmethod
710 @classmethod
711 def needed_extra_diff_context(cls):
711 def needed_extra_diff_context(cls):
712 return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER)
712 return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER)
713
713
714 def outdate_comments(self, pull_request, old_diff_data, new_diff_data):
714 def outdate_comments(self, pull_request, old_diff_data, new_diff_data):
715 if not CommentsModel.use_outdated_comments(pull_request):
715 if not CommentsModel.use_outdated_comments(pull_request):
716 return
716 return
717
717
718 comments = self._visible_inline_comments_of_pull_request(pull_request)
718 comments = self._visible_inline_comments_of_pull_request(pull_request)
719 comments_to_outdate = comments.all()
719 comments_to_outdate = comments.all()
720
720
721 for comment in comments_to_outdate:
721 for comment in comments_to_outdate:
722 self._outdate_one_comment(comment, old_diff_data, new_diff_data)
722 self._outdate_one_comment(comment, old_diff_data, new_diff_data)
723
723
724 def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc):
724 def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc):
725 diff_line = _parse_comment_line_number(comment.line_no)
725 diff_line = _parse_comment_line_number(comment.line_no)
726
726
727 try:
727 try:
728 old_context = old_diff_proc.get_context_of_line(
728 old_context = old_diff_proc.get_context_of_line(
729 path=comment.f_path, diff_line=diff_line)
729 path=comment.f_path, diff_line=diff_line)
730 new_context = new_diff_proc.get_context_of_line(
730 new_context = new_diff_proc.get_context_of_line(
731 path=comment.f_path, diff_line=diff_line)
731 path=comment.f_path, diff_line=diff_line)
732 except (diffs.LineNotInDiffException,
732 except (diffs.LineNotInDiffException,
733 diffs.FileNotInDiffException):
733 diffs.FileNotInDiffException):
734 if not comment.draft:
734 if not comment.draft:
735 comment.display_state = ChangesetComment.COMMENT_OUTDATED
735 comment.display_state = ChangesetComment.COMMENT_OUTDATED
736 return
736 return
737
737
738 if old_context == new_context:
738 if old_context == new_context:
739 return
739 return
740
740
741 if self._should_relocate_diff_line(diff_line):
741 if self._should_relocate_diff_line(diff_line):
742 new_diff_lines = new_diff_proc.find_context(
742 new_diff_lines = new_diff_proc.find_context(
743 path=comment.f_path, context=old_context,
743 path=comment.f_path, context=old_context,
744 offset=self.DIFF_CONTEXT_BEFORE)
744 offset=self.DIFF_CONTEXT_BEFORE)
745 if not new_diff_lines and not comment.draft:
745 if not new_diff_lines and not comment.draft:
746 comment.display_state = ChangesetComment.COMMENT_OUTDATED
746 comment.display_state = ChangesetComment.COMMENT_OUTDATED
747 else:
747 else:
748 new_diff_line = self._choose_closest_diff_line(
748 new_diff_line = self._choose_closest_diff_line(
749 diff_line, new_diff_lines)
749 diff_line, new_diff_lines)
750 comment.line_no = _diff_to_comment_line_number(new_diff_line)
750 comment.line_no = _diff_to_comment_line_number(new_diff_line)
751 else:
751 else:
752 if not comment.draft:
752 if not comment.draft:
753 comment.display_state = ChangesetComment.COMMENT_OUTDATED
753 comment.display_state = ChangesetComment.COMMENT_OUTDATED
754
754
755 def _should_relocate_diff_line(self, diff_line):
755 def _should_relocate_diff_line(self, diff_line):
756 """
756 """
757 Checks if relocation shall be tried for the given `diff_line`.
757 Checks if relocation shall be tried for the given `diff_line`.
758
758
759 If a comment points into the first lines, then we can have a situation
759 If a comment points into the first lines, then we can have a situation
760 where, after an update, another line has been added on top. In this case
760 where, after an update, another line has been added on top. In this case
761 we would still find the context and move the comment around, which
761 we would still find the context and move the comment around, which
762 would be wrong.
762 would be wrong.
763 """
763 """
764 should_relocate = (
764 should_relocate = (
765 (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or
765 (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or
766 (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE))
766 (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE))
767 return should_relocate
767 return should_relocate
768
768
769 def _choose_closest_diff_line(self, diff_line, new_diff_lines):
769 def _choose_closest_diff_line(self, diff_line, new_diff_lines):
770 candidate = new_diff_lines[0]
770 candidate = new_diff_lines[0]
771 best_delta = _diff_line_delta(diff_line, candidate)
771 best_delta = _diff_line_delta(diff_line, candidate)
772 for new_diff_line in new_diff_lines[1:]:
772 for new_diff_line in new_diff_lines[1:]:
773 delta = _diff_line_delta(diff_line, new_diff_line)
773 delta = _diff_line_delta(diff_line, new_diff_line)
774 if delta < best_delta:
774 if delta < best_delta:
775 candidate = new_diff_line
775 candidate = new_diff_line
776 best_delta = delta
776 best_delta = delta
777 return candidate
777 return candidate
778
778
779 def _visible_inline_comments_of_pull_request(self, pull_request):
779 def _visible_inline_comments_of_pull_request(self, pull_request):
780 comments = self._all_inline_comments_of_pull_request(pull_request)
780 comments = self._all_inline_comments_of_pull_request(pull_request)
781 comments = comments.filter(
781 comments = comments.filter(
782 coalesce(ChangesetComment.display_state, '') !=
782 coalesce(ChangesetComment.display_state, '') !=
783 ChangesetComment.COMMENT_OUTDATED)
783 ChangesetComment.COMMENT_OUTDATED)
784 return comments
784 return comments
785
785
786 def _all_inline_comments_of_pull_request(self, pull_request):
786 def _all_inline_comments_of_pull_request(self, pull_request):
787 comments = Session().query(ChangesetComment)\
787 comments = Session().query(ChangesetComment)\
788 .filter(ChangesetComment.line_no != None)\
788 .filter(ChangesetComment.line_no != None)\
789 .filter(ChangesetComment.f_path != None)\
789 .filter(ChangesetComment.f_path != None)\
790 .filter(ChangesetComment.pull_request == pull_request)
790 .filter(ChangesetComment.pull_request == pull_request)
791 return comments
791 return comments
792
792
793 def _all_general_comments_of_pull_request(self, pull_request):
793 def _all_general_comments_of_pull_request(self, pull_request):
794 comments = Session().query(ChangesetComment)\
794 comments = Session().query(ChangesetComment)\
795 .filter(ChangesetComment.line_no == None)\
795 .filter(ChangesetComment.line_no == None)\
796 .filter(ChangesetComment.f_path == None)\
796 .filter(ChangesetComment.f_path == None)\
797 .filter(ChangesetComment.pull_request == pull_request)
797 .filter(ChangesetComment.pull_request == pull_request)
798
798
799 return comments
799 return comments
800
800
801 @staticmethod
801 @staticmethod
802 def use_outdated_comments(pull_request):
802 def use_outdated_comments(pull_request):
803 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
803 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
804 settings = settings_model.get_general_settings()
804 settings = settings_model.get_general_settings()
805 return settings.get('rhodecode_use_outdated_comments', False)
805 return settings.get('rhodecode_use_outdated_comments', False)
806
806
807 def trigger_commit_comment_hook(self, repo, user, action, data=None):
807 def trigger_commit_comment_hook(self, repo, user, action, data=None):
808 repo = self._get_repo(repo)
808 repo = self._get_repo(repo)
809 target_scm = repo.scm_instance()
809 target_scm = repo.scm_instance()
810 if action == 'create':
810 if action == 'create':
811 trigger_hook = hooks_utils.trigger_comment_commit_hooks
811 trigger_hook = hooks_utils.trigger_comment_commit_hooks
812 elif action == 'edit':
812 elif action == 'edit':
813 trigger_hook = hooks_utils.trigger_comment_commit_edit_hooks
813 trigger_hook = hooks_utils.trigger_comment_commit_edit_hooks
814 else:
814 else:
815 return
815 return
816
816
817 log.debug('Handling repo %s trigger_commit_comment_hook with action %s: %s',
817 log.debug('Handling repo %s trigger_commit_comment_hook with action %s: %s',
818 repo, action, trigger_hook)
818 repo, action, trigger_hook)
819 trigger_hook(
819 trigger_hook(
820 username=user.username,
820 username=user.username,
821 repo_name=repo.repo_name,
821 repo_name=repo.repo_name,
822 repo_type=target_scm.alias,
822 repo_type=target_scm.alias,
823 repo=repo,
823 repo=repo,
824 data=data)
824 data=data)
825
825
826
826
827 def _parse_comment_line_number(line_no):
827 def _parse_comment_line_number(line_no):
828 """
828 """
829 Parses line numbers of the form "(o|n)\d+" and returns them in a tuple.
829 Parses line numbers of the form "(o|n)\d+" and returns them in a tuple.
830 """
830 """
831 old_line = None
831 old_line = None
832 new_line = None
832 new_line = None
833 if line_no.startswith('o'):
833 if line_no.startswith('o'):
834 old_line = int(line_no[1:])
834 old_line = int(line_no[1:])
835 elif line_no.startswith('n'):
835 elif line_no.startswith('n'):
836 new_line = int(line_no[1:])
836 new_line = int(line_no[1:])
837 else:
837 else:
838 raise ValueError("Comment lines have to start with either 'o' or 'n'.")
838 raise ValueError("Comment lines have to start with either 'o' or 'n'.")
839 return diffs.DiffLineNumber(old_line, new_line)
839 return diffs.DiffLineNumber(old_line, new_line)
840
840
841
841
842 def _diff_to_comment_line_number(diff_line):
842 def _diff_to_comment_line_number(diff_line):
843 if diff_line.new is not None:
843 if diff_line.new is not None:
844 return u'n{}'.format(diff_line.new)
844 return u'n{}'.format(diff_line.new)
845 elif diff_line.old is not None:
845 elif diff_line.old is not None:
846 return u'o{}'.format(diff_line.old)
846 return u'o{}'.format(diff_line.old)
847 return u''
847 return u''
848
848
849
849
850 def _diff_line_delta(a, b):
850 def _diff_line_delta(a, b):
851 if None not in (a.new, b.new):
851 if None not in (a.new, b.new):
852 return abs(a.new - b.new)
852 return abs(a.new - b.new)
853 elif None not in (a.old, b.old):
853 elif None not in (a.old, b.old):
854 return abs(a.old - b.old)
854 return abs(a.old - b.old)
855 else:
855 else:
856 raise ValueError(
856 raise ValueError(
857 "Cannot compute delta between {} and {}".format(a, b))
857 "Cannot compute delta between {} and {}".format(a, b))
@@ -1,5823 +1,5822 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 """
20 """
22 Database Models for RhodeCode Enterprise
21 Database Models for RhodeCode Enterprise
23 """
22 """
24
23
25 import re
24 import re
26 import os
25 import os
27 import time
26 import time
28 import string
27 import string
29 import hashlib
28 import hashlib
30 import logging
29 import logging
31 import datetime
30 import datetime
32 import uuid
31 import uuid
33 import warnings
32 import warnings
34 import ipaddress
33 import ipaddress
35 import functools
34 import functools
36 import traceback
35 import traceback
37 import collections
36 import collections
38
37
39 from sqlalchemy import (
38 from sqlalchemy import (
40 or_, and_, not_, func, cast, TypeDecorator, event,
39 or_, and_, not_, func, cast, TypeDecorator, event,
41 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
42 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
43 Text, Float, PickleType, BigInteger)
42 Text, Float, PickleType, BigInteger)
44 from sqlalchemy.sql.expression import true, false, case, null
43 from sqlalchemy.sql.expression import true, false, case, null
45 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
44 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
46 from sqlalchemy.orm import (
45 from sqlalchemy.orm import (
47 relationship, lazyload, joinedload, class_mapper, validates, aliased)
46 relationship, lazyload, joinedload, class_mapper, validates, aliased)
48 from sqlalchemy.ext.declarative import declared_attr
47 from sqlalchemy.ext.declarative import declared_attr
49 from sqlalchemy.ext.hybrid import hybrid_property
48 from sqlalchemy.ext.hybrid import hybrid_property
50 from sqlalchemy.exc import IntegrityError # pragma: no cover
49 from sqlalchemy.exc import IntegrityError # pragma: no cover
51 from sqlalchemy.dialects.mysql import LONGTEXT
50 from sqlalchemy.dialects.mysql import LONGTEXT
52 from zope.cachedescriptors.property import Lazy as LazyProperty
51 from zope.cachedescriptors.property import Lazy as LazyProperty
53 from pyramid.threadlocal import get_current_request
52 from pyramid.threadlocal import get_current_request
54 from webhelpers2.text import remove_formatting
53 from webhelpers2.text import remove_formatting
55
54
56 from rhodecode.lib.str_utils import safe_bytes
55 from rhodecode.lib.str_utils import safe_bytes
57 from rhodecode.translation import _
56 from rhodecode.translation import _
58 from rhodecode.lib.vcs import get_vcs_instance, VCSError
57 from rhodecode.lib.vcs import get_vcs_instance, VCSError
59 from rhodecode.lib.vcs.backends.base import (
58 from rhodecode.lib.vcs.backends.base import (
60 EmptyCommit, Reference, unicode_to_reference, reference_to_unicode)
59 EmptyCommit, Reference, unicode_to_reference, reference_to_unicode)
61 from rhodecode.lib.utils2 import (
60 from rhodecode.lib.utils2 import (
62 str2bool, safe_str, get_commit_safe, sha1_safe,
61 str2bool, safe_str, get_commit_safe, sha1_safe,
63 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
62 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
64 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time)
63 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time)
65 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
64 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
66 JsonRaw
65 JsonRaw
67 from rhodecode.lib import ext_json
66 from rhodecode.lib import ext_json
68 from rhodecode.lib import enc_utils
67 from rhodecode.lib import enc_utils
69 from rhodecode.lib.ext_json import json
68 from rhodecode.lib.ext_json import json
70 from rhodecode.lib.caching_query import FromCache
69 from rhodecode.lib.caching_query import FromCache
71 from rhodecode.lib.exceptions import (
70 from rhodecode.lib.exceptions import (
72 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
71 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
73 from rhodecode.model.meta import Base, Session
72 from rhodecode.model.meta import Base, Session
74
73
75 URL_SEP = '/'
74 URL_SEP = '/'
76 log = logging.getLogger(__name__)
75 log = logging.getLogger(__name__)
77
76
78 # =============================================================================
77 # =============================================================================
79 # BASE CLASSES
78 # BASE CLASSES
80 # =============================================================================
79 # =============================================================================
81
80
82 # this is propagated from the .ini file setting rhodecode.encrypted_values.secret or
81 # this is propagated from the .ini file setting rhodecode.encrypted_values.secret or
83 # beaker.session.secret if the first is not set,
82 # beaker.session.secret if the first is not set,
84 # and is initialized in environment.py
83 # and is initialized in environment.py
85 ENCRYPTION_KEY = ''
84 ENCRYPTION_KEY = ''
86
85
87 # used to sort permissions by type; '#' is not allowed in usernames and
86 # used to sort permissions by type; '#' is not allowed in usernames and
88 # sorts very early in the string.printable table.
87 # sorts very early in the string.printable table.
89 PERMISSION_TYPE_SORT = {
88 PERMISSION_TYPE_SORT = {
90 'admin': '####',
89 'admin': '####',
91 'write': '###',
90 'write': '###',
92 'read': '##',
91 'read': '##',
93 'none': '#',
92 'none': '#',
94 }
93 }
95
94
96
95
97 def display_user_sort(obj):
96 def display_user_sort(obj):
98 """
97 """
99 Sort function used to sort permissions in .permissions() function of
98 Sort function used to sort permissions in .permissions() function of
100 Repository, RepoGroup, UserGroup. It also puts the default user in front
99 Repository, RepoGroup, UserGroup. It also puts the default user in front
101 of all other resources
100 of all other resources
102 """
101 """
103
102
104 if obj.username == User.DEFAULT_USER:
103 if obj.username == User.DEFAULT_USER:
105 return '#####'
104 return '#####'
106 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
105 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
107 extra_sort_num = '1' # default
106 extra_sort_num = '1' # default
108
107
109 # NOTE(dan): inactive duplicates goes last
108 # NOTE(dan): inactive duplicates goes last
110 if getattr(obj, 'duplicate_perm', None):
109 if getattr(obj, 'duplicate_perm', None):
111 extra_sort_num = '9'
110 extra_sort_num = '9'
112 return prefix + extra_sort_num + obj.username
111 return prefix + extra_sort_num + obj.username
113
112
114
113
115 def display_user_group_sort(obj):
114 def display_user_group_sort(obj):
116 """
115 """
117 Sort function used to sort permissions in .permissions() function of
116 Sort function used to sort permissions in .permissions() function of
118 Repository, RepoGroup, UserGroup. It also puts the default user in front
117 Repository, RepoGroup, UserGroup. It also puts the default user in front
119 of all other resources
118 of all other resources
120 """
119 """
121
120
122 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
121 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
123 return prefix + obj.users_group_name
122 return prefix + obj.users_group_name
124
123
125
124
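A standalone sketch of the '#'-prefix ordering the two sort helpers above rely on: more '#' characters sort first lexicographically, so the default user ('#####') precedes admin, write, read and none. It is not part of the changeset; sort_key and the sample data are hypothetical.

# Sketch: build lexicographic sort keys from '#' prefixes per permission level.
PERMISSION_TYPE_SORT = {'admin': '####', 'write': '###', 'read': '##', 'none': '#'}


def sort_key(username, permission, is_default=False, duplicate=False):
    if is_default:
        return '#####'
    prefix = PERMISSION_TYPE_SORT.get(permission, '')
    extra_sort_num = '9' if duplicate else '1'  # inactive duplicates go last
    return prefix + extra_sort_num + username


entries = [('carol', 'read'), ('default', 'none'), ('alice', 'admin'), ('bob', 'write')]
ordered = sorted(entries,
                 key=lambda e: sort_key(e[0], e[1], is_default=(e[0] == 'default')))
assert [u for u, _ in ordered] == ['default', 'alice', 'bob', 'carol']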
126 def _hash_key(k):
125 def _hash_key(k):
127 return sha1_safe(k)
126 return sha1_safe(k)
128
127
129
128
130 def in_filter_generator(qry, items, limit=500):
129 def in_filter_generator(qry, items, limit=500):
131 """
130 """
132 Splits a large IN() clause into multiple IN() chunks combined with OR,
131 Splits a large IN() clause into multiple IN() chunks combined with OR,
133 e.g.::
132 e.g.::
134 cnt = Repository.query().filter(
133 cnt = Repository.query().filter(
135 or_(
134 or_(
136 *in_filter_generator(Repository.repo_id, range(100000))
135 *in_filter_generator(Repository.repo_id, range(100000))
137 )).count()
136 )).count()
138 """
137 """
139 if not items:
138 if not items:
140 # empty list will cause empty query which might cause security issues
139 # empty list will cause empty query which might cause security issues
141 # this can lead to hidden unpleasant results
140 # this can lead to hidden unpleasant results
142 items = [-1]
141 items = [-1]
143
142
144 parts = []
143 parts = []
145 for chunk in range(0, len(items), limit):
144 for chunk in range(0, len(items), limit):
146 parts.append(
145 parts.append(
147 qry.in_(items[chunk: chunk + limit])
146 qry.in_(items[chunk: chunk + limit])
148 )
147 )
149
148
150 return parts
149 return parts
151
150
152
151
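A standalone sketch of the chunking behaviour of in_filter_generator above; it is not part of the changeset, and plain list slices stand in for the qry.in_() parts that the caller combines with or_().

# Sketch: split a long list of ids into slices of at most `limit` items; each
# slice becomes one IN() part. An empty list is replaced by an impossible id,
# mirroring the guard above against empty IN() queries.
def chunked(items, limit=500):
    if not items:
        items = [-1]
    return [items[i:i + limit] for i in range(0, len(items), limit)]


parts = chunked(list(range(1200)), limit=500)
assert [len(p) for p in parts] == [500, 500, 200]
assert chunked([]) == [[-1]]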
153 base_table_args = {
152 base_table_args = {
154 'extend_existing': True,
153 'extend_existing': True,
155 'mysql_engine': 'InnoDB',
154 'mysql_engine': 'InnoDB',
156 'mysql_charset': 'utf8',
155 'mysql_charset': 'utf8',
157 'sqlite_autoincrement': True
156 'sqlite_autoincrement': True
158 }
157 }
159
158
160
159
161 class EncryptedTextValue(TypeDecorator):
160 class EncryptedTextValue(TypeDecorator):
162 """
161 """
163 Special column for encrypted long text data, use like::
162 Special column for encrypted long text data, use like::
164
163
165 value = Column("encrypted_value", EncryptedValue(), nullable=False)
164 value = Column("encrypted_value", EncryptedValue(), nullable=False)
166
165
167 This column is intelligent, so if the value is in unencrypted form it returns
166 This column is intelligent, so if the value is in unencrypted form it returns
168 the unencrypted form, but on save it always encrypts.
167 the unencrypted form, but on save it always encrypts.
169 """
168 """
170 impl = Text
169 impl = Text
171
170
172 def process_bind_param(self, value, dialect):
171 def process_bind_param(self, value, dialect):
173 """
172 """
174 Setter for storing value
173 Setter for storing value
175 """
174 """
176 import rhodecode
175 import rhodecode
177 if not value:
176 if not value:
178 return value
177 return value
179
178
180 # protect against double encrypting if values is already encrypted
179 # protect against double encrypting if values is already encrypted
181 if value.startswith('enc$aes$') \
180 if value.startswith('enc$aes$') \
182 or value.startswith('enc$aes_hmac$') \
181 or value.startswith('enc$aes_hmac$') \
183 or value.startswith('enc2$'):
182 or value.startswith('enc2$'):
184 raise ValueError('value needs to be in unencrypted format, '
183 raise ValueError('value needs to be in unencrypted format, '
185 'ie. not starting with enc$ or enc2$')
184 'ie. not starting with enc$ or enc2$')
186
185
187 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
186 algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
188 return enc_utils.encrypt_value(value, enc_key=ENCRYPTION_KEY, algo=algo)
187 return enc_utils.encrypt_value(value, enc_key=ENCRYPTION_KEY, algo=algo)
189
188
190 def process_result_value(self, value, dialect):
189 def process_result_value(self, value, dialect):
191 """
190 """
192 Getter for retrieving value
191 Getter for retrieving value
193 """
192 """
194
193
195 import rhodecode
194 import rhodecode
196 if not value:
195 if not value:
197 return value
196 return value
198
197
199 enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
198 enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
200
199
201 return enc_utils.decrypt_value(value, enc_key=ENCRYPTION_KEY, strict_mode=enc_strict_mode)
200 return enc_utils.decrypt_value(value, enc_key=ENCRYPTION_KEY, strict_mode=enc_strict_mode)
202
201
203
202
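# Illustrative sketch (not part of the original module): EncryptedTextValue is a
# drop-in column type; declaring a column with it means values go through
# process_bind_param() (encrypt) on save and process_result_value() (decrypt) on
# load, using the module-level ENCRYPTION_KEY. The model below is hypothetical and
# kept commented out so it does not register an extra table on Base.metadata.
#
# class ExampleSecret(Base, BaseModel):
#     __tablename__ = 'example_secrets'
#     __table_args__ = (base_table_args,)
#     secret_id = Column("secret_id", Integer(), primary_key=True)
#     secret_value = Column("secret_value", EncryptedTextValue(), nullable=False)

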
class BaseModel(object):
    """
    Base Model for all classes
    """

    @classmethod
    def _get_keys(cls):
        """return column names for this model"""
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """
        return a dict with keys and values corresponding
        to this model's data"""

        d = {}
        for k in self._get_keys():
            d[k] = getattr(self, k)

        # also use __json__() if present to get additional fields
        _json_attr = getattr(self, '__json__', None)
        if _json_attr:
            # update with attributes from __json__
            if callable(_json_attr):
                _json_attr = _json_attr()
            for k, val in _json_attr.items():
                d[k] = val
        return d

    def get_appstruct(self):
        """return a list of (key, value) tuples corresponding
        to this model's data"""

        lst = []
        for k in self._get_keys():
            lst.append((k, getattr(self, k),))
        return lst

    def populate_obj(self, populate_dict):
        """populate model with data from given populate_dict"""

        for k in self._get_keys():
            if k in populate_dict:
                setattr(self, k, populate_dict[k])

    @classmethod
    def query(cls):
        return Session().query(cls)

    @classmethod
    def get(cls, id_):
        if id_:
            return cls.query().get(id_)

    @classmethod
    def get_or_404(cls, id_):
        from pyramid.httpexceptions import HTTPNotFound

        try:
            id_ = int(id_)
        except (TypeError, ValueError):
            raise HTTPNotFound()

        res = cls.query().get(id_)
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def getAll(cls):
        # deprecated and left for backward compatibility
        return cls.get_all()

    @classmethod
    def get_all(cls):
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        obj = cls.query().get(id_)
        Session().delete(obj)

    @classmethod
    def identity_cache(cls, session, attr_name, value):
        exist_in_session = []
        for (item_cls, pkey), instance in session.identity_map.items():
            if cls == item_cls and getattr(instance, attr_name) == value:
                exist_in_session.append(instance)
        if exist_in_session:
            if len(exist_in_session) == 1:
                return exist_in_session[0]
            log.exception(
                'multiple objects with attr %s and '
                'value %s found with same name: %r',
                attr_name, value, exist_in_session)

    def __repr__(self):
        if hasattr(self, '__str__'):
            # python repr needs to return str
            try:
                return self.__str__()
            except UnicodeDecodeError:
                pass
        return f'<DB:{self.__class__.__name__}>'


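# Illustrative sketch (not part of the original module): the lookup and
# serialization helpers every model inherits from BaseModel. The payload dict and
# the use of RhodeCodeSetting (defined below) are only for illustration;
# get_or_404() raises HTTPNotFound for missing or non-integer ids, so views can
# call it directly.
def _example_basemodel_usage(setting_id, payload):
    setting = RhodeCodeSetting.get_or_404(setting_id)   # 404 on bad/missing id
    as_dict = setting.get_dict()                        # column names -> values
    setting.populate_obj(payload)                       # only known columns are set
    Session().add(setting)
    return as_dict

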
class RhodeCodeSetting(Base, BaseModel):
    __tablename__ = 'rhodecode_settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        base_table_args
    )

    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_str,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }
    DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
    GLOBAL_CONF_KEY = 'app_settings'

    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
    _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)

    def __init__(self, key='', val='', type='unicode'):
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        assert type(val) == str
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        _type = self.app_settings_type
        if _type:
            _type = self.app_settings_type.split('.')[0]
        # decode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            v = safe_str(cipher.process_result_value(v, None))

        converter = self.SETTINGS_TYPES.get(_type) or \
            self.SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        val = safe_str(val)
        # encode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            val = safe_str(cipher.process_bind_param(val, None))
        self._app_settings_value = val

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        if val.split('.')[0] not in self.SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (self.SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    @classmethod
    def get_by_prefix(cls, prefix):
        return RhodeCodeSetting.query()\
            .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
            .all()

    def __str__(self):
        return "<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )


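# Illustrative sketch (not part of the original module): SETTINGS_TYPES drives how
# the raw string stored in app_settings_value is converted on read. A 'bool'
# setting is stored as text but comes back through str2bool; the setting name used
# here is hypothetical.
def _example_typed_setting():
    setting = RhodeCodeSetting('example_feature_enabled', 'True', 'bool')
    Session().add(setting)
    return setting.app_settings_value   # -> True (bool), not the string 'True'

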
class RhodeCodeUi(Base, BaseModel):
    __tablename__ = 'rhodecode_ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        base_table_args
    )

    HOOK_REPO_SIZE = 'changegroup.repo_size'
    # HG
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
    HOOK_PULL = 'outgoing.pull_logger'
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
    HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
    HOOK_PUSH = 'changegroup.push_logger'
    HOOK_PUSH_KEY = 'pushkey.key_push'

    HOOKS_BUILTIN = [
        HOOK_PRE_PULL,
        HOOK_PULL,
        HOOK_PRE_PUSH,
        HOOK_PRETX_PUSH,
        HOOK_PUSH,
        HOOK_PUSH_KEY,
    ]

    # TODO: johbo: Unify the way hooks are configured for git and hg;
    # the git part is currently hardcoded.

    # SVN PATTERNS
    SVN_BRANCH_ID = 'vcs_svn_branch'
    SVN_TAG_ID = 'vcs_svn_tag'

    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    def __str__(self):
        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
                                    self.ui_key, self.ui_value)


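# Illustrative sketch (not part of the original module): the HOOKS_BUILTIN names
# correspond to rows in the rhodecode_ui table, so checking which built-in hooks
# are enabled is a plain query on this model. The 'hooks' section name is assumed
# here from the Mercurial hook naming convention these keys follow.
def _example_enabled_builtin_hooks():
    return [
        ui.ui_key for ui in RhodeCodeUi.query()
        .filter(RhodeCodeUi.ui_section == 'hooks')
        .filter(RhodeCodeUi.ui_active == true())
        .filter(RhodeCodeUi.ui_key.in_(RhodeCodeUi.HOOKS_BUILTIN))
        .all()
    ]

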
class RepoRhodeCodeSetting(Base, BaseModel):
    __tablename__ = 'repo_rhodecode_settings'
    __table_args__ = (
        UniqueConstraint(
            'app_settings_name', 'repository_id',
            name='uq_repo_rhodecode_setting_name_repo_id'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    app_settings_id = Column(
        "app_settings_id", Integer(), nullable=False, unique=True,
        default=None, primary_key=True)
    app_settings_name = Column(
        "app_settings_name", String(255), nullable=True, unique=None,
        default=None)
    _app_settings_value = Column(
        "app_settings_value", String(4096), nullable=True, unique=None,
        default=None)
    _app_settings_type = Column(
        "app_settings_type", String(255), nullable=True, unique=None,
        default=None)

    repository = relationship('Repository')

    def __init__(self, repository_id, key='', val='', type='unicode'):
        self.repository_id = repository_id
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        assert type(val) == str
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        type_ = self.app_settings_type
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_str(val)

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        if val not in SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __str__(self):
        return "<%s('%s:%s:%s[%s]')>" % (
            self.__class__.__name__, self.repository.repo_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )


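# Illustrative sketch (not part of the original module): per-repository settings
# mirror the global RhodeCodeSetting API but are additionally keyed by
# repository_id, so a repo-level value can be looked up before falling back to the
# global default. The setting name used here is hypothetical.
def _example_repo_setting(repo_id, name='example_feature_enabled'):
    repo_setting = RepoRhodeCodeSetting.query()\
        .filter(RepoRhodeCodeSetting.repository_id == repo_id)\
        .filter(RepoRhodeCodeSetting.app_settings_name == name)\
        .scalar()
    if repo_setting is not None:
        return repo_setting.app_settings_value
    # fall back to the global setting of the same name, if any
    global_setting = RhodeCodeSetting.query()\
        .filter(RhodeCodeSetting.app_settings_name == name)\
        .scalar()
    return global_setting.app_settings_value if global_setting else None

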
class RepoRhodeCodeUi(Base, BaseModel):
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository')

    def __str__(self):
        return '<%s[%s:%s]%s=>%s]>' % (
            self.__class__.__name__, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)


class User(Base, BaseModel):
    __tablename__ = 'users'
    __table_args__ = (
        UniqueConstraint('username'), UniqueConstraint('email'),
        Index('u_username_idx', 'username'),
        Index('u_email_idx', 'email'),
        base_table_args
    )

    DEFAULT_USER = 'default'
    DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
    DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'

    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    password = Column("password", String(255), nullable=True, unique=None, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
    name = Column("firstname", String(255), nullable=True, unique=None, default=None)
    lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=None, default=None)
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
    last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
    extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
    _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _user_data = Column("user_data", LargeBinary(), nullable=True)  # JSON data

    user_log = relationship('UserLog')
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')

    repositories = relationship('Repository')
    repository_groups = relationship('RepoGroup')
    user_groups = relationship('UserGroup')

    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')

    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')
    user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')

    group_member = relationship('UserGroupMember', cascade='all')

    notifications = relationship('UserNotification', cascade='all')
    # notifications assigned to this user
    user_created_notifications = relationship('Notification', cascade='all')
    # comments created by this user
    user_comments = relationship('ChangesetComment', cascade='all')
    # user profile extra info
    user_emails = relationship('UserEmailMap', cascade='all')
    user_ip_map = relationship('UserIpMap', cascade='all')
    user_auth_tokens = relationship('UserApiKeys', cascade='all')
    user_ssh_keys = relationship('UserSshKeys', cascade='all')

    # gists
    user_gists = relationship('Gist', cascade='all')
    # user pull requests
    user_pull_requests = relationship('PullRequest', cascade='all')

    # external identities
    external_identities = relationship(
        'ExternalIdentity',
        primaryjoin="User.user_id==ExternalIdentity.local_user_id",
        cascade='all')
    # review rules
    user_review_rules = relationship('RepoReviewRuleUser', cascade='all')

    # artifacts owned
    artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id')

    # no cascade, set NULL
    scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id')


    def __str__(self):
        return f"<{self.__class__.__name__}('id:{self.user_id}:{self.username}')>"

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        self._email = val.lower() if val else None

    @hybrid_property
    def first_name(self):
        from rhodecode.lib import helpers as h
        if self.name:
            return h.escape(self.name)
        return self.name

    @hybrid_property
    def last_name(self):
        from rhodecode.lib import helpers as h
        if self.lastname:
            return h.escape(self.lastname)
        return self.lastname

    @hybrid_property
    def api_key(self):
        """
        Fetch the auth-token with role ALL connected to this user, if one exists
        """
        user_auth_token = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
        if user_auth_token:
            user_auth_token = user_auth_token.api_key

        return user_auth_token

    @api_key.setter
    def api_key(self, val):
        # don't allow setting the API key; this is deprecated for now
        self._api_key = None

    @property
    def reviewer_pull_requests(self):
        return PullRequestReviewers.query() \
            .options(joinedload(PullRequestReviewers.pull_request)) \
            .filter(PullRequestReviewers.user_id == self.user_id) \
            .all()

    @property
    def firstname(self):
        # alias for future
        return self.name

    @property
    def emails(self):
        other = UserEmailMap.query()\
            .filter(UserEmailMap.user == self) \
            .order_by(UserEmailMap.email_id.asc()) \
            .all()
        return [self.email] + [x.email for x in other]

    def emails_cached(self):
        emails = []
        if self.user_id != self.get_default_user_id():
            emails = UserEmailMap.query()\
                .filter(UserEmailMap.user == self) \
                .order_by(UserEmailMap.email_id.asc())

            emails = emails.options(
                FromCache("sql_cache_short", f"get_user_{self.user_id}_emails")
            )

        return [self.email] + [x.email for x in emails]

    @property
    def auth_tokens(self):
        auth_tokens = self.get_auth_tokens()
        return [x.api_key for x in auth_tokens]

    def get_auth_tokens(self):
        return UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .order_by(UserApiKeys.user_api_key_id.asc())\
            .all()

    @LazyProperty
    def feed_token(self):
        return self.get_feed_token()

    def get_feed_token(self, cache=True):
        feed_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
        if cache:
            feed_tokens = feed_tokens.options(
                FromCache("sql_cache_short", f"get_user_feed_token_{self.user_id}"))

        feed_tokens = feed_tokens.all()
        if feed_tokens:
            return feed_tokens[0].api_key
        return 'NO_FEED_TOKEN_AVAILABLE'

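    # Illustrative note (not part of the original model): the FromCache option used
    # by emails_cached() and get_feed_token() above caches the query result in the
    # 'sql_cache_short' dogpile region under the given key, e.g.:
    #
    #     q = UserApiKeys.query().filter(UserApiKeys.user == self)
    #     q = q.options(FromCache("sql_cache_short", f"get_user_feed_token_{self.user_id}"))
    #
    # so stale values simply age out of that region rather than being invalidated here.
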
    @LazyProperty
    def artifact_token(self):
        return self.get_artifact_token()

    def get_artifact_token(self, cache=True):
        artifacts_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self) \
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time())) \
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)

        if cache:
            artifacts_tokens = artifacts_tokens.options(
                FromCache("sql_cache_short", f"get_user_artifact_token_{self.user_id}"))

        artifacts_tokens = artifacts_tokens.all()
        if artifacts_tokens:
            return artifacts_tokens[0].api_key
        return 'NO_ARTIFACT_TOKEN_AVAILABLE'

    def get_or_create_artifact_token(self):
        artifacts_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self) \
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time())) \
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)

        artifacts_tokens = artifacts_tokens.all()
        if artifacts_tokens:
            return artifacts_tokens[0].api_key
        else:
            from rhodecode.model.auth_token import AuthTokenModel
            artifact_token = AuthTokenModel().create(
                self, 'auto-generated-artifact-token',
                lifetime=-1, role=UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
            Session.commit()
            return artifact_token.api_key

    @classmethod
    def get(cls, user_id, cache=False):
        if not user_id:
            return

        user = cls.query()
        if cache:
            user = user.options(
                FromCache("sql_cache_short", f"get_users_{user_id}"))
        return user.get(user_id)

    @classmethod
    def extra_valid_auth_tokens(cls, user, role=None):
        tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if role:
            tokens = tokens.filter(or_(UserApiKeys.role == role,
                                       UserApiKeys.role == UserApiKeys.ROLE_ALL))
        return tokens.all()

    def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
        from rhodecode.lib import auth

        log.debug('Trying to authenticate user: %s via auth-token, '
                  'and roles: %s', self, roles)

        if not auth_token:
            return False

        roles = (roles or []) + [UserApiKeys.ROLE_ALL]
        tokens_q = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))

        tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))

        crypto_backend = auth.crypto_backend()
        enc_token_map = {}
        plain_token_map = {}
        for token in tokens_q:
            if token.api_key.startswith(crypto_backend.ENC_PREF):
                enc_token_map[token.api_key] = token
            else:
                plain_token_map[token.api_key] = token
        log.debug(
            'Found %s plain and %s encrypted tokens to check for authentication for this user',
            len(plain_token_map), len(enc_token_map))

        # plain token match comes first
        match = plain_token_map.get(auth_token)

        # check encrypted tokens now
        if not match:
            for token_hash, token in enc_token_map.items():
                # NOTE(marcink): this is expensive to calculate, but most secure
                if crypto_backend.hash_check(auth_token, token_hash):
                    match = token
                    break

        if match:
            log.debug('Found matching token %s', match)
            if match.repo_id:
                log.debug('Found scope, checking for scope match of token %s', match)
                if match.repo_id == scope_repo_id:
                    return True
                else:
                    log.debug(
                        'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
                        'and calling scope is:%s, skipping further checks',
                        match.repo, scope_repo_id)
                    return False
            else:
                return True

        return False

    @property
    def ip_addresses(self):
        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
        return [x.ip_addr for x in ret]

    @property
    def username_and_name(self):
        return '%s (%s %s)' % (self.username, self.first_name, self.last_name)

    @property
    def username_or_name_or_email(self):
        full_name = self.full_name if self.full_name != ' ' else None
        return self.username or full_name or self.email

    @property
    def full_name(self):
        return '%s %s' % (self.first_name, self.last_name)

    @property
    def full_name_or_username(self):
        return ('%s %s' % (self.first_name, self.last_name)
                if (self.first_name and self.last_name) else self.username)

    @property
    def full_contact(self):
        return '%s %s <%s>' % (self.first_name, self.last_name, self.email)

    @property
    def short_contact(self):
        return '%s %s' % (self.first_name, self.last_name)

    @property
    def is_admin(self):
        return self.admin

    @property
    def language(self):
        return self.user_data.get('language')

    def AuthUser(self, **kwargs):
        """
        Returns instance of AuthUser for this user
        """
        from rhodecode.lib.auth import AuthUser
        return AuthUser(user_id=self.user_id, username=self.username, **kwargs)

    @hybrid_property
    def user_data(self):
        if not self._user_data:
            return {}

        try:
            return json.loads(self._user_data) or {}
        except TypeError:
            return {}

    @user_data.setter
    def user_data(self, val):
        if not isinstance(val, dict):
            raise Exception('user_data must be dict, got %s' % type(val))
        try:
            self._user_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @classmethod
    def get_by_username(cls, username, case_insensitive=False,
                        cache=False, identity_cache=False):
        session = Session()

        if case_insensitive:
            q = cls.query().filter(
                func.lower(cls.username) == func.lower(username))
        else:
            q = cls.query().filter(cls.username == username)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'username', username)
                if val:
                    return val
            else:
                cache_key = "get_user_by_name_%s" % _hash_key(username)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()

    @classmethod
    def get_by_auth_token(cls, auth_token, cache=False):
        q = UserApiKeys.query()\
            .filter(UserApiKeys.api_key == auth_token)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if cache:
            q = q.options(
                FromCache("sql_cache_short", f"get_auth_token_{auth_token}"))

        match = q.first()
        if match:
            return match.user

    @classmethod
    def get_by_email(cls, email, case_insensitive=False, cache=False):

        if case_insensitive:
            q = cls.query().filter(func.lower(cls.email) == func.lower(email))

        else:
            q = cls.query().filter(cls.email == email)

        email_key = _hash_key(email)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", f"get_email_key_{email_key}"))

        ret = q.scalar()
        if ret is None:
            q = UserEmailMap.query()
            # try fetching in alternate email map
            if case_insensitive:
                q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
            else:
                q = q.filter(UserEmailMap.email == email)
            q = q.options(joinedload(UserEmailMap.user))
            if cache:
                q = q.options(
                    FromCache("sql_cache_short", f"get_email_map_key_{email_key}"))
            ret = getattr(q.scalar(), 'user', None)

        return ret

    @classmethod
    def get_from_cs_author(cls, author):
        """
        Tries to get a User object out of a commit author string

        :param author:
        """
        from rhodecode.lib.helpers import email, author_name
        # if a valid email is in the author string, see if it belongs to a user
        _email = email(author)
        if _email:
            user = cls.get_by_email(_email, case_insensitive=True)
            if user:
                return user
        # Maybe we can match by username?
        _author = author_name(author)
        user = cls.get_by_username(_author, case_insensitive=True)
        if user:
            return user

    def update_userdata(self, **kwargs):
        usr = self
        old = usr.user_data
        old.update(**kwargs)
        usr.user_data = old
        Session().add(usr)
        log.debug('updated userdata with %s', kwargs)

    def update_lastlogin(self):
        """Update user lastlogin"""
        self.last_login = datetime.datetime.now()
        Session().add(self)
        log.debug('updated user %s lastlogin', self.username)

    def update_password(self, new_password):
        from rhodecode.lib.auth import get_crypt_password

        self.password = get_crypt_password(new_password)
        Session().add(self)

    @classmethod
    def get_first_super_admin(cls):
        user = User.query()\
            .filter(User.admin == true()) \
            .order_by(User.user_id.asc()) \
            .first()

        if user is None:
            raise Exception('FATAL: Missing administrative account!')
        return user

    @classmethod
    def get_all_super_admins(cls, only_active=False):
        """
        Returns all admin accounts sorted by username
        """
        qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
        if only_active:
            qry = qry.filter(User.active == true())
        return qry.all()

1046 @classmethod
1045 @classmethod
1047 def get_all_user_ids(cls, only_active=True):
1046 def get_all_user_ids(cls, only_active=True):
1048 """
1047 """
1049 Returns all users IDs
1048 Returns all users IDs
1050 """
1049 """
1051 qry = Session().query(User.user_id)
1050 qry = Session().query(User.user_id)
1052
1051
1053 if only_active:
1052 if only_active:
1054 qry = qry.filter(User.active == true())
1053 qry = qry.filter(User.active == true())
1055 return [x.user_id for x in qry]
1054 return [x.user_id for x in qry]
1056
1055
1057 @classmethod
1056 @classmethod
1058 def get_default_user(cls, cache=False, refresh=False):
1057 def get_default_user(cls, cache=False, refresh=False):
1059 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
1058 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
1060 if user is None:
1059 if user is None:
1061 raise Exception('FATAL: Missing default account!')
1060 raise Exception('FATAL: Missing default account!')
1062 if refresh:
1061 if refresh:
1063 # The default user might be based on outdated state which
1062 # The default user might be based on outdated state which
1064 # has been loaded from the cache.
1063 # has been loaded from the cache.
1065 # A call to refresh() ensures that the
1064 # A call to refresh() ensures that the
1066 # latest state from the database is used.
1065 # latest state from the database is used.
1067 Session().refresh(user)
1066 Session().refresh(user)
1068 return user
1067 return user
1069
1068
1070 @classmethod
1069 @classmethod
1071 def get_default_user_id(cls):
1070 def get_default_user_id(cls):
1072 import rhodecode
1071 import rhodecode
1073 return rhodecode.CONFIG['default_user_id']
1072 return rhodecode.CONFIG['default_user_id']
1074
1073
1075 def _get_default_perms(self, user, suffix=''):
1074 def _get_default_perms(self, user, suffix=''):
1076 from rhodecode.model.permission import PermissionModel
1075 from rhodecode.model.permission import PermissionModel
1077 return PermissionModel().get_default_perms(user.user_perms, suffix)
1076 return PermissionModel().get_default_perms(user.user_perms, suffix)
1078
1077
1079 def get_default_perms(self, suffix=''):
1078 def get_default_perms(self, suffix=''):
1080 return self._get_default_perms(self, suffix)
1079 return self._get_default_perms(self, suffix)
1081
1080
1082 def get_api_data(self, include_secrets=False, details='full'):
1081 def get_api_data(self, include_secrets=False, details='full'):
1083 """
1082 """
1084 Common function for generating user related data for API
1083 Common function for generating user related data for API
1085
1084
1086 :param include_secrets: By default secrets in the API data will be replaced
1085 :param include_secrets: By default secrets in the API data will be replaced
1087 by a placeholder value to prevent exposing this data by accident. In case
1086 by a placeholder value to prevent exposing this data by accident. In case
1088 this data needs to be exposed, set this flag to ``True``.
1087 this data needs to be exposed, set this flag to ``True``.
1089
1088
1090 :param details: can be 'basic' or 'full'. 'basic' gives only a subset of
1089 :param details: can be 'basic' or 'full'. 'basic' gives only a subset of
1091 the available user information: user_id, name and emails.
1090 the available user information: user_id, name and emails.
1092 """
1091 """
1093 user = self
1092 user = self
1094 user_data = self.user_data
1093 user_data = self.user_data
1095 data = {
1094 data = {
1096 'user_id': user.user_id,
1095 'user_id': user.user_id,
1097 'username': user.username,
1096 'username': user.username,
1098 'firstname': user.name,
1097 'firstname': user.name,
1099 'lastname': user.lastname,
1098 'lastname': user.lastname,
1100 'description': user.description,
1099 'description': user.description,
1101 'email': user.email,
1100 'email': user.email,
1102 'emails': user.emails,
1101 'emails': user.emails,
1103 }
1102 }
1104 if details == 'basic':
1103 if details == 'basic':
1105 return data
1104 return data
1106
1105
1107 auth_token_length = 40
1106 auth_token_length = 40
1108 auth_token_replacement = '*' * auth_token_length
1107 auth_token_replacement = '*' * auth_token_length
1109
1108
1110 extras = {
1109 extras = {
1111 'auth_tokens': [auth_token_replacement],
1110 'auth_tokens': [auth_token_replacement],
1112 'active': user.active,
1111 'active': user.active,
1113 'admin': user.admin,
1112 'admin': user.admin,
1114 'extern_type': user.extern_type,
1113 'extern_type': user.extern_type,
1115 'extern_name': user.extern_name,
1114 'extern_name': user.extern_name,
1116 'last_login': user.last_login,
1115 'last_login': user.last_login,
1117 'last_activity': user.last_activity,
1116 'last_activity': user.last_activity,
1118 'ip_addresses': user.ip_addresses,
1117 'ip_addresses': user.ip_addresses,
1119 'language': user_data.get('language')
1118 'language': user_data.get('language')
1120 }
1119 }
1121 data.update(extras)
1120 data.update(extras)
1122
1121
1123 if include_secrets:
1122 if include_secrets:
1124 data['auth_tokens'] = user.auth_tokens
1123 data['auth_tokens'] = user.auth_tokens
1125 return data
1124 return data
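# --- Illustrative usage sketch (not part of the original model) -------------
# Assumes an initialized RhodeCode session; `example_api_payloads` is a
# hypothetical helper showing how the flags above shape the payload.
def example_api_payloads(user):
    basic = user.get_api_data(details='basic')        # ids, names, emails only
    full = user.get_api_data()                         # plus activity, flags, obfuscated tokens
    secret = user.get_api_data(include_secrets=True)   # real auth tokens included
    return basic, full, secret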
1126
1125
1127 def __json__(self):
1126 def __json__(self):
1128 data = {
1127 data = {
1129 'full_name': self.full_name,
1128 'full_name': self.full_name,
1130 'full_name_or_username': self.full_name_or_username,
1129 'full_name_or_username': self.full_name_or_username,
1131 'short_contact': self.short_contact,
1130 'short_contact': self.short_contact,
1132 'full_contact': self.full_contact,
1131 'full_contact': self.full_contact,
1133 }
1132 }
1134 data.update(self.get_api_data())
1133 data.update(self.get_api_data())
1135 return data
1134 return data
1136
1135
1137
1136
1138 class UserApiKeys(Base, BaseModel):
1137 class UserApiKeys(Base, BaseModel):
1139 __tablename__ = 'user_api_keys'
1138 __tablename__ = 'user_api_keys'
1140 __table_args__ = (
1139 __table_args__ = (
1141 Index('uak_api_key_idx', 'api_key'),
1140 Index('uak_api_key_idx', 'api_key'),
1142 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1141 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1143 base_table_args
1142 base_table_args
1144 )
1143 )
1145 __mapper_args__ = {}
1144 __mapper_args__ = {}
1146
1145
1147 # ApiKey role
1146 # ApiKey role
1148 ROLE_ALL = 'token_role_all'
1147 ROLE_ALL = 'token_role_all'
1149 ROLE_VCS = 'token_role_vcs'
1148 ROLE_VCS = 'token_role_vcs'
1150 ROLE_API = 'token_role_api'
1149 ROLE_API = 'token_role_api'
1151 ROLE_HTTP = 'token_role_http'
1150 ROLE_HTTP = 'token_role_http'
1152 ROLE_FEED = 'token_role_feed'
1151 ROLE_FEED = 'token_role_feed'
1153 ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
1152 ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
1154 # The last role is excluded from the ROLES list below, as it is only
1153 # The last role is excluded from the ROLES list below, as it is only
1155 # used for a single action and cannot be created by users
1154 # used for a single action and cannot be created by users
1156 ROLE_PASSWORD_RESET = 'token_password_reset'
1155 ROLE_PASSWORD_RESET = 'token_password_reset'
1157
1156
1158 ROLES = [ROLE_ALL, ROLE_VCS, ROLE_API, ROLE_HTTP, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]
1157 ROLES = [ROLE_ALL, ROLE_VCS, ROLE_API, ROLE_HTTP, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]
1159
1158
1160 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1159 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1161 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1160 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1162 api_key = Column("api_key", String(255), nullable=False, unique=True)
1161 api_key = Column("api_key", String(255), nullable=False, unique=True)
1163 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1162 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1164 expires = Column('expires', Float(53), nullable=False)
1163 expires = Column('expires', Float(53), nullable=False)
1165 role = Column('role', String(255), nullable=True)
1164 role = Column('role', String(255), nullable=True)
1166 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1165 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1167
1166
1168 # scope columns
1167 # scope columns
1169 repo_id = Column(
1168 repo_id = Column(
1170 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1169 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1171 nullable=True, unique=None, default=None)
1170 nullable=True, unique=None, default=None)
1172 repo = relationship('Repository', lazy='joined')
1171 repo = relationship('Repository', lazy='joined')
1173
1172
1174 repo_group_id = Column(
1173 repo_group_id = Column(
1175 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1174 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1176 nullable=True, unique=None, default=None)
1175 nullable=True, unique=None, default=None)
1177 repo_group = relationship('RepoGroup', lazy='joined')
1176 repo_group = relationship('RepoGroup', lazy='joined')
1178
1177
1179 user = relationship('User', lazy='joined')
1178 user = relationship('User', lazy='joined')
1180
1179
1181 def __str__(self):
1180 def __str__(self):
1182 return f"<{self.__class__.__name__}('{self.role}')>"
1181 return f"<{self.__class__.__name__}('{self.role}')>"
1183
1182
1184 def __json__(self):
1183 def __json__(self):
1185 data = {
1184 data = {
1186 'auth_token': self.api_key,
1185 'auth_token': self.api_key,
1187 'role': self.role,
1186 'role': self.role,
1188 'scope': self.scope_humanized,
1187 'scope': self.scope_humanized,
1189 'expired': self.expired
1188 'expired': self.expired
1190 }
1189 }
1191 return data
1190 return data
1192
1191
1193 def get_api_data(self, include_secrets=False):
1192 def get_api_data(self, include_secrets=False):
1194 data = self.__json__()
1193 data = self.__json__()
1195 if include_secrets:
1194 if include_secrets:
1196 return data
1195 return data
1197 else:
1196 else:
1198 data['auth_token'] = self.token_obfuscated
1197 data['auth_token'] = self.token_obfuscated
1199 return data
1198 return data
1200
1199
1201 @hybrid_property
1200 @hybrid_property
1202 def description_safe(self):
1201 def description_safe(self):
1203 from rhodecode.lib import helpers as h
1202 from rhodecode.lib import helpers as h
1204 return h.escape(self.description)
1203 return h.escape(self.description)
1205
1204
1206 @property
1205 @property
1207 def expired(self):
1206 def expired(self):
1208 if self.expires == -1:
1207 if self.expires == -1:
1209 return False
1208 return False
1210 return time.time() > self.expires
1209 return time.time() > self.expires
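# Illustrative sketch (not part of the original model): `expires` is a unix
# timestamp stored as a float, with -1 marking a token that never expires.
# The standalone check below mirrors the property above.
import time

def token_is_expired(expires):
    if expires == -1:                 # -1 means "never expires"
        return False
    return time.time() > expires      # past timestamps are expired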
1211
1210
1212 @classmethod
1211 @classmethod
1213 def _get_role_name(cls, role):
1212 def _get_role_name(cls, role):
1214 return {
1213 return {
1215 cls.ROLE_ALL: _('all'),
1214 cls.ROLE_ALL: _('all'),
1216 cls.ROLE_HTTP: _('http/web interface'),
1215 cls.ROLE_HTTP: _('http/web interface'),
1217 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1216 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1218 cls.ROLE_API: _('api calls'),
1217 cls.ROLE_API: _('api calls'),
1219 cls.ROLE_FEED: _('feed access'),
1218 cls.ROLE_FEED: _('feed access'),
1220 cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
1219 cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
1221 }.get(role, role)
1220 }.get(role, role)
1222
1221
1223 @classmethod
1222 @classmethod
1224 def _get_role_description(cls, role):
1223 def _get_role_description(cls, role):
1225 return {
1224 return {
1226 cls.ROLE_ALL: _('Token for all actions.'),
1225 cls.ROLE_ALL: _('Token for all actions.'),
1227 cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without '
1226 cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without '
1228 'login using `api_access_controllers_whitelist` functionality.'),
1227 'login using `api_access_controllers_whitelist` functionality.'),
1229 cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. '
1228 cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. '
1230 'Requires auth_token authentication plugin to be active. <br/>'
1229 'Requires auth_token authentication plugin to be active. <br/>'
1231 'Such a token should then be used instead of a password to '
1230 'Such a token should then be used instead of a password to '
1232 'interact with a repository, and can additionally be '
1231 'interact with a repository, and can additionally be '
1233 'limited to a single repository using the repo scope.'),
1232 'limited to a single repository using the repo scope.'),
1234 cls.ROLE_API: _('Token limited to api calls.'),
1233 cls.ROLE_API: _('Token limited to api calls.'),
1235 cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'),
1234 cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'),
1236 cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'),
1235 cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'),
1237 }.get(role, role)
1236 }.get(role, role)
1238
1237
1239 @property
1238 @property
1240 def role_humanized(self):
1239 def role_humanized(self):
1241 return self._get_role_name(self.role)
1240 return self._get_role_name(self.role)
1242
1241
1243 def _get_scope(self):
1242 def _get_scope(self):
1244 if self.repo:
1243 if self.repo:
1245 return 'Repository: {}'.format(self.repo.repo_name)
1244 return 'Repository: {}'.format(self.repo.repo_name)
1246 if self.repo_group:
1245 if self.repo_group:
1247 return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
1246 return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
1248 return 'Global'
1247 return 'Global'
1249
1248
1250 @property
1249 @property
1251 def scope_humanized(self):
1250 def scope_humanized(self):
1252 return self._get_scope()
1251 return self._get_scope()
1253
1252
1254 @property
1253 @property
1255 def token_obfuscated(self):
1254 def token_obfuscated(self):
1256 if self.api_key:
1255 if self.api_key:
1257 return self.api_key[:4] + "****"
1256 return self.api_key[:4] + "****"
1258
1257
1259
1258
1260 class UserEmailMap(Base, BaseModel):
1259 class UserEmailMap(Base, BaseModel):
1261 __tablename__ = 'user_email_map'
1260 __tablename__ = 'user_email_map'
1262 __table_args__ = (
1261 __table_args__ = (
1263 Index('uem_email_idx', 'email'),
1262 Index('uem_email_idx', 'email'),
1264 UniqueConstraint('email'),
1263 UniqueConstraint('email'),
1265 base_table_args
1264 base_table_args
1266 )
1265 )
1267 __mapper_args__ = {}
1266 __mapper_args__ = {}
1268
1267
1269 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1268 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1270 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1269 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1271 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1270 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1272 user = relationship('User', lazy='joined')
1271 user = relationship('User', lazy='joined')
1273
1272
1274 @validates('_email')
1273 @validates('_email')
1275 def validate_email(self, key, email):
1274 def validate_email(self, key, email):
1276 # check that this email is not already a user's main email
1275 # check that this email is not already a user's main email
1277 main_email = Session().query(User).filter(User.email == email).scalar()
1276 main_email = Session().query(User).filter(User.email == email).scalar()
1278 if main_email is not None:
1277 if main_email is not None:
1279 raise AttributeError('email %s is already present in the user table' % email)
1278 raise AttributeError('email %s is already present in the user table' % email)
1280 return email
1279 return email
1281
1280
1282 @hybrid_property
1281 @hybrid_property
1283 def email(self):
1282 def email(self):
1284 return self._email
1283 return self._email
1285
1284
1286 @email.setter
1285 @email.setter
1287 def email(self, val):
1286 def email(self, val):
1288 self._email = val.lower() if val else None
1287 self._email = val.lower() if val else None
1289
1288
1290
1289
1291 class UserIpMap(Base, BaseModel):
1290 class UserIpMap(Base, BaseModel):
1292 __tablename__ = 'user_ip_map'
1291 __tablename__ = 'user_ip_map'
1293 __table_args__ = (
1292 __table_args__ = (
1294 UniqueConstraint('user_id', 'ip_addr'),
1293 UniqueConstraint('user_id', 'ip_addr'),
1295 base_table_args
1294 base_table_args
1296 )
1295 )
1297 __mapper_args__ = {}
1296 __mapper_args__ = {}
1298
1297
1299 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1298 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1300 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1299 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1301 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1300 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1302 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1301 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1303 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1302 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1304 user = relationship('User', lazy='joined')
1303 user = relationship('User', lazy='joined')
1305
1304
1306 @hybrid_property
1305 @hybrid_property
1307 def description_safe(self):
1306 def description_safe(self):
1308 from rhodecode.lib import helpers as h
1307 from rhodecode.lib import helpers as h
1309 return h.escape(self.description)
1308 return h.escape(self.description)
1310
1309
1311 @classmethod
1310 @classmethod
1312 def _get_ip_range(cls, ip_addr):
1311 def _get_ip_range(cls, ip_addr):
1313 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1312 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1314 return [str(net.network_address), str(net.broadcast_address)]
1313 return [str(net.network_address), str(net.broadcast_address)]
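# Illustrative sketch (not part of the original model): the range above comes
# from the stdlib ipaddress module, so a CIDR entry expands to its network and
# broadcast addresses while a single address maps onto itself.
import ipaddress

def ip_range(ip_addr):
    net = ipaddress.ip_network(ip_addr, strict=False)
    return [str(net.network_address), str(net.broadcast_address)]

assert ip_range('192.168.1.0/24') == ['192.168.1.0', '192.168.1.255']
assert ip_range('10.0.0.5') == ['10.0.0.5', '10.0.0.5']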
1315
1314
1316 def __json__(self):
1315 def __json__(self):
1317 return {
1316 return {
1318 'ip_addr': self.ip_addr,
1317 'ip_addr': self.ip_addr,
1319 'ip_range': self._get_ip_range(self.ip_addr),
1318 'ip_range': self._get_ip_range(self.ip_addr),
1320 }
1319 }
1321
1320
1322 def __str__(self):
1321 def __str__(self):
1323 return "<%s('user_id:%s=>%s')>" % (self.__class__.__name__, self.user_id, self.ip_addr)
1322 return "<%s('user_id:%s=>%s')>" % (self.__class__.__name__, self.user_id, self.ip_addr)
1324
1323
1325
1324
1326 class UserSshKeys(Base, BaseModel):
1325 class UserSshKeys(Base, BaseModel):
1327 __tablename__ = 'user_ssh_keys'
1326 __tablename__ = 'user_ssh_keys'
1328 __table_args__ = (
1327 __table_args__ = (
1329 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1328 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1330
1329
1331 UniqueConstraint('ssh_key_fingerprint'),
1330 UniqueConstraint('ssh_key_fingerprint'),
1332
1331
1333 base_table_args
1332 base_table_args
1334 )
1333 )
1335 __mapper_args__ = {}
1334 __mapper_args__ = {}
1336
1335
1337 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1336 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1338 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1337 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1339 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1338 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1340
1339
1341 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1340 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1342
1341
1343 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1342 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1344 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1343 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1345 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1344 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1346
1345
1347 user = relationship('User', lazy='joined')
1346 user = relationship('User', lazy='joined')
1348
1347
1349 def __json__(self):
1348 def __json__(self):
1350 data = {
1349 data = {
1351 'ssh_fingerprint': self.ssh_key_fingerprint,
1350 'ssh_fingerprint': self.ssh_key_fingerprint,
1352 'description': self.description,
1351 'description': self.description,
1353 'created_on': self.created_on
1352 'created_on': self.created_on
1354 }
1353 }
1355 return data
1354 return data
1356
1355
1357 def get_api_data(self):
1356 def get_api_data(self):
1358 data = self.__json__()
1357 data = self.__json__()
1359 return data
1358 return data
1360
1359
1361
1360
1362 class UserLog(Base, BaseModel):
1361 class UserLog(Base, BaseModel):
1363 __tablename__ = 'user_logs'
1362 __tablename__ = 'user_logs'
1364 __table_args__ = (
1363 __table_args__ = (
1365 base_table_args,
1364 base_table_args,
1366 )
1365 )
1367
1366
1368 VERSION_1 = 'v1'
1367 VERSION_1 = 'v1'
1369 VERSION_2 = 'v2'
1368 VERSION_2 = 'v2'
1370 VERSIONS = [VERSION_1, VERSION_2]
1369 VERSIONS = [VERSION_1, VERSION_2]
1371
1370
1372 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1371 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1373 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1372 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1374 username = Column("username", String(255), nullable=True, unique=None, default=None)
1373 username = Column("username", String(255), nullable=True, unique=None, default=None)
1375 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1374 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1376 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1375 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1377 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1376 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1378 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1377 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1379 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1378 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1380
1379
1381 version = Column("version", String(255), nullable=True, default=VERSION_1)
1380 version = Column("version", String(255), nullable=True, default=VERSION_1)
1382 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1381 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1383 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1382 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1384
1383
1385
1384
1386 def __str__(self):
1385 def __str__(self):
1387 return f"<{self.__class__.__name__}('id:{self.repository_name}:{self.action}')>"
1386 return f"<{self.__class__.__name__}('id:{self.repository_name}:{self.action}')>"
1388
1387
1389 def __json__(self):
1388 def __json__(self):
1390 return {
1389 return {
1391 'user_id': self.user_id,
1390 'user_id': self.user_id,
1392 'username': self.username,
1391 'username': self.username,
1393 'repository_id': self.repository_id,
1392 'repository_id': self.repository_id,
1394 'repository_name': self.repository_name,
1393 'repository_name': self.repository_name,
1395 'user_ip': self.user_ip,
1394 'user_ip': self.user_ip,
1396 'action_date': self.action_date,
1395 'action_date': self.action_date,
1397 'action': self.action,
1396 'action': self.action,
1398 }
1397 }
1399
1398
1400 @hybrid_property
1399 @hybrid_property
1401 def entry_id(self):
1400 def entry_id(self):
1402 return self.user_log_id
1401 return self.user_log_id
1403
1402
1404 @property
1403 @property
1405 def action_as_day(self):
1404 def action_as_day(self):
1406 return datetime.date(*self.action_date.timetuple()[:3])
1405 return datetime.date(*self.action_date.timetuple()[:3])
1407
1406
1408 user = relationship('User')
1407 user = relationship('User')
1409 repository = relationship('Repository', cascade='')
1408 repository = relationship('Repository', cascade='')
1410
1409
1411
1410
1412 class UserGroup(Base, BaseModel):
1411 class UserGroup(Base, BaseModel):
1413 __tablename__ = 'users_groups'
1412 __tablename__ = 'users_groups'
1414 __table_args__ = (
1413 __table_args__ = (
1415 base_table_args,
1414 base_table_args,
1416 )
1415 )
1417
1416
1418 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1417 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1419 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1418 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1420 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1419 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1421 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1420 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1422 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1421 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1423 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1422 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1424 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1423 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1425 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1424 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1426
1425
1427 members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined")
1426 members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined")
1428 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1427 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1429 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1428 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1430 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1429 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1431 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1430 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1432 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1431 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1433
1432
1434 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1433 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1435 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1434 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1436
1435
1437 @classmethod
1436 @classmethod
1438 def _load_group_data(cls, column):
1437 def _load_group_data(cls, column):
1439 if not column:
1438 if not column:
1440 return {}
1439 return {}
1441
1440
1442 try:
1441 try:
1443 return json.loads(column) or {}
1442 return json.loads(column) or {}
1444 except TypeError:
1443 except TypeError:
1445 return {}
1444 return {}
1446
1445
1447 @hybrid_property
1446 @hybrid_property
1448 def description_safe(self):
1447 def description_safe(self):
1449 from rhodecode.lib import helpers as h
1448 from rhodecode.lib import helpers as h
1450 return h.escape(self.user_group_description)
1449 return h.escape(self.user_group_description)
1451
1450
1452 @hybrid_property
1451 @hybrid_property
1453 def group_data(self):
1452 def group_data(self):
1454 return self._load_group_data(self._group_data)
1453 return self._load_group_data(self._group_data)
1455
1454
1456 @group_data.expression
1455 @group_data.expression
1457 def group_data(self, **kwargs):
1456 def group_data(self, **kwargs):
1458 return self._group_data
1457 return self._group_data
1459
1458
1460 @group_data.setter
1459 @group_data.setter
1461 def group_data(self, val):
1460 def group_data(self, val):
1462 try:
1461 try:
1463 self._group_data = json.dumps(val)
1462 self._group_data = json.dumps(val)
1464 except Exception:
1463 except Exception:
1465 log.error(traceback.format_exc())
1464 log.error(traceback.format_exc())
1466
1465
1467 @classmethod
1466 @classmethod
1468 def _load_sync(cls, group_data):
1467 def _load_sync(cls, group_data):
1469 if group_data:
1468 if group_data:
1470 return group_data.get('extern_type')
1469 return group_data.get('extern_type')
1471
1470
1472 @property
1471 @property
1473 def sync(self):
1472 def sync(self):
1474 return self._load_sync(self.group_data)
1473 return self._load_sync(self.group_data)
1475
1474
1476 def __str__(self):
1475 def __str__(self):
1477 return f"<{self.__class__.__name__}('id:{self.users_group_id}:{self.users_group_name}')>"
1476 return f"<{self.__class__.__name__}('id:{self.users_group_id}:{self.users_group_name}')>"
1478
1477
1479 @classmethod
1478 @classmethod
1480 def get_by_group_name(cls, group_name, cache=False,
1479 def get_by_group_name(cls, group_name, cache=False,
1481 case_insensitive=False):
1480 case_insensitive=False):
1482 if case_insensitive:
1481 if case_insensitive:
1483 q = cls.query().filter(func.lower(cls.users_group_name) ==
1482 q = cls.query().filter(func.lower(cls.users_group_name) ==
1484 func.lower(group_name))
1483 func.lower(group_name))
1485
1484
1486 else:
1485 else:
1487 q = cls.query().filter(cls.users_group_name == group_name)
1486 q = cls.query().filter(cls.users_group_name == group_name)
1488 if cache:
1487 if cache:
1489 name_key = _hash_key(group_name)
1488 name_key = _hash_key(group_name)
1490 q = q.options(
1489 q = q.options(
1491 FromCache("sql_cache_short", f"get_group_{name_key}"))
1490 FromCache("sql_cache_short", f"get_group_{name_key}"))
1492 return q.scalar()
1491 return q.scalar()
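# Illustrative usage sketch (not part of the original model). Assumes an
# initialized RhodeCode session and a hypothetical group named 'devops'.
def example_group_lookup():
    exact = UserGroup.get_by_group_name('devops')
    relaxed = UserGroup.get_by_group_name('DevOps', case_insensitive=True)
    cached = UserGroup.get_by_group_name('devops', cache=True)  # uses sql_cache_short
    return exact, relaxed, cached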
1493
1492
1494 @classmethod
1493 @classmethod
1495 def get(cls, user_group_id, cache=False):
1494 def get(cls, user_group_id, cache=False):
1496 if not user_group_id:
1495 if not user_group_id:
1497 return
1496 return
1498
1497
1499 user_group = cls.query()
1498 user_group = cls.query()
1500 if cache:
1499 if cache:
1501 user_group = user_group.options(
1500 user_group = user_group.options(
1502 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1501 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1503 return user_group.get(user_group_id)
1502 return user_group.get(user_group_id)
1504
1503
1505 def permissions(self, with_admins=True, with_owner=True,
1504 def permissions(self, with_admins=True, with_owner=True,
1506 expand_from_user_groups=False):
1505 expand_from_user_groups=False):
1507 """
1506 """
1508 Permissions for user groups
1507 Permissions for user groups
1509 """
1508 """
1510 _admin_perm = 'usergroup.admin'
1509 _admin_perm = 'usergroup.admin'
1511
1510
1512 owner_row = []
1511 owner_row = []
1513 if with_owner:
1512 if with_owner:
1514 usr = AttributeDict(self.user.get_dict())
1513 usr = AttributeDict(self.user.get_dict())
1515 usr.owner_row = True
1514 usr.owner_row = True
1516 usr.permission = _admin_perm
1515 usr.permission = _admin_perm
1517 owner_row.append(usr)
1516 owner_row.append(usr)
1518
1517
1519 super_admin_ids = []
1518 super_admin_ids = []
1520 super_admin_rows = []
1519 super_admin_rows = []
1521 if with_admins:
1520 if with_admins:
1522 for usr in User.get_all_super_admins():
1521 for usr in User.get_all_super_admins():
1523 super_admin_ids.append(usr.user_id)
1522 super_admin_ids.append(usr.user_id)
1524 # if this admin is also owner, don't double the record
1523 # if this admin is also owner, don't double the record
1525 if usr.user_id == owner_row[0].user_id:
1524 if usr.user_id == owner_row[0].user_id:
1526 owner_row[0].admin_row = True
1525 owner_row[0].admin_row = True
1527 else:
1526 else:
1528 usr = AttributeDict(usr.get_dict())
1527 usr = AttributeDict(usr.get_dict())
1529 usr.admin_row = True
1528 usr.admin_row = True
1530 usr.permission = _admin_perm
1529 usr.permission = _admin_perm
1531 super_admin_rows.append(usr)
1530 super_admin_rows.append(usr)
1532
1531
1533 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1532 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1534 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1533 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1535 joinedload(UserUserGroupToPerm.user),
1534 joinedload(UserUserGroupToPerm.user),
1536 joinedload(UserUserGroupToPerm.permission),)
1535 joinedload(UserUserGroupToPerm.permission),)
1537
1536
1538 # get owners, admins and their permissions. We rewrite the sqlalchemy
1537 # get owners, admins and their permissions. We rewrite the sqlalchemy
1539 # objects into plain AttributeDict copies because the sqlalchemy session
1538 # objects into plain AttributeDict copies because the sqlalchemy session
1540 # keeps a global reference, so changing one object would propagate to all
1539 # keeps a global reference, so changing one object would propagate to all
1541 # others. This means that if an admin is also the owner, a change to
1540 # others. This means that if an admin is also the owner, a change to
1542 # admin_row would otherwise affect both objects
1541 # admin_row would otherwise affect both objects
1543 perm_rows = []
1542 perm_rows = []
1544 for _usr in q.all():
1543 for _usr in q.all():
1545 usr = AttributeDict(_usr.user.get_dict())
1544 usr = AttributeDict(_usr.user.get_dict())
1546 # if this user is also owner/admin, mark as duplicate record
1545 # if this user is also owner/admin, mark as duplicate record
1547 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
1546 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
1548 usr.duplicate_perm = True
1547 usr.duplicate_perm = True
1549 usr.permission = _usr.permission.permission_name
1548 usr.permission = _usr.permission.permission_name
1550 perm_rows.append(usr)
1549 perm_rows.append(usr)
1551
1550
1552 # sort the perm rows: the 'default' user comes first, then rows are
1551 # sort the perm rows: the 'default' user comes first, then rows are
1553 # ordered by admin, write, read, none permission level and
1552 # ordered by admin, write, read, none permission level and
1554 # alphabetically within each group
1553 # alphabetically within each group
1555 perm_rows = sorted(perm_rows, key=display_user_sort)
1554 perm_rows = sorted(perm_rows, key=display_user_sort)
1556
1555
1557 user_groups_rows = []
1556 user_groups_rows = []
1558 if expand_from_user_groups:
1557 if expand_from_user_groups:
1559 for ug in self.permission_user_groups(with_members=True):
1558 for ug in self.permission_user_groups(with_members=True):
1560 for user_data in ug.members:
1559 for user_data in ug.members:
1561 user_groups_rows.append(user_data)
1560 user_groups_rows.append(user_data)
1562
1561
1563 return super_admin_rows + owner_row + perm_rows + user_groups_rows
1562 return super_admin_rows + owner_row + perm_rows + user_groups_rows
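# Illustrative sketch (not part of the original model): the returned rows are
# AttributeDict copies ordered super-admins, owner, explicit grants, and
# optionally members expanded from user groups. Assumes an initialized
# RhodeCode session; `group` is any UserGroup instance.
def example_permission_rows(group):
    rows = group.permissions(with_admins=True, with_owner=True,
                             expand_from_user_groups=False)
    return [(row.get('username'), row.get('permission')) for row in rows]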
1564
1563
1565 def permission_user_groups(self, with_members=False):
1564 def permission_user_groups(self, with_members=False):
1566 q = UserGroupUserGroupToPerm.query()\
1565 q = UserGroupUserGroupToPerm.query()\
1567 .filter(UserGroupUserGroupToPerm.target_user_group == self)
1566 .filter(UserGroupUserGroupToPerm.target_user_group == self)
1568 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1567 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1569 joinedload(UserGroupUserGroupToPerm.target_user_group),
1568 joinedload(UserGroupUserGroupToPerm.target_user_group),
1570 joinedload(UserGroupUserGroupToPerm.permission),)
1569 joinedload(UserGroupUserGroupToPerm.permission),)
1571
1570
1572 perm_rows = []
1571 perm_rows = []
1573 for _user_group in q.all():
1572 for _user_group in q.all():
1574 entry = AttributeDict(_user_group.user_group.get_dict())
1573 entry = AttributeDict(_user_group.user_group.get_dict())
1575 entry.permission = _user_group.permission.permission_name
1574 entry.permission = _user_group.permission.permission_name
1576 if with_members:
1575 if with_members:
1577 entry.members = [x.user.get_dict()
1576 entry.members = [x.user.get_dict()
1578 for x in _user_group.user_group.members]
1577 for x in _user_group.user_group.members]
1579 perm_rows.append(entry)
1578 perm_rows.append(entry)
1580
1579
1581 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1580 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1582 return perm_rows
1581 return perm_rows
1583
1582
1584 def _get_default_perms(self, user_group, suffix=''):
1583 def _get_default_perms(self, user_group, suffix=''):
1585 from rhodecode.model.permission import PermissionModel
1584 from rhodecode.model.permission import PermissionModel
1586 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1585 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1587
1586
1588 def get_default_perms(self, suffix=''):
1587 def get_default_perms(self, suffix=''):
1589 return self._get_default_perms(self, suffix)
1588 return self._get_default_perms(self, suffix)
1590
1589
1591 def get_api_data(self, with_group_members=True, include_secrets=False):
1590 def get_api_data(self, with_group_members=True, include_secrets=False):
1592 """
1591 """
1593 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1592 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1594 basically forwarded.
1593 basically forwarded.
1595
1594
1596 """
1595 """
1597 user_group = self
1596 user_group = self
1598 data = {
1597 data = {
1599 'users_group_id': user_group.users_group_id,
1598 'users_group_id': user_group.users_group_id,
1600 'group_name': user_group.users_group_name,
1599 'group_name': user_group.users_group_name,
1601 'group_description': user_group.user_group_description,
1600 'group_description': user_group.user_group_description,
1602 'active': user_group.users_group_active,
1601 'active': user_group.users_group_active,
1603 'owner': user_group.user.username,
1602 'owner': user_group.user.username,
1604 'sync': user_group.sync,
1603 'sync': user_group.sync,
1605 'owner_email': user_group.user.email,
1604 'owner_email': user_group.user.email,
1606 }
1605 }
1607
1606
1608 if with_group_members:
1607 if with_group_members:
1609 users = []
1608 users = []
1610 for user in user_group.members:
1609 for user in user_group.members:
1611 user = user.user
1610 user = user.user
1612 users.append(user.get_api_data(include_secrets=include_secrets))
1611 users.append(user.get_api_data(include_secrets=include_secrets))
1613 data['users'] = users
1612 data['users'] = users
1614
1613
1615 return data
1614 return data
1616
1615
1617
1616
1618 class UserGroupMember(Base, BaseModel):
1617 class UserGroupMember(Base, BaseModel):
1619 __tablename__ = 'users_groups_members'
1618 __tablename__ = 'users_groups_members'
1620 __table_args__ = (
1619 __table_args__ = (
1621 base_table_args,
1620 base_table_args,
1622 )
1621 )
1623
1622
1624 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1623 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1625 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1624 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1626 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1625 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1627
1626
1628 user = relationship('User', lazy='joined')
1627 user = relationship('User', lazy='joined')
1629 users_group = relationship('UserGroup')
1628 users_group = relationship('UserGroup')
1630
1629
1631 def __init__(self, gr_id='', u_id=''):
1630 def __init__(self, gr_id='', u_id=''):
1632 self.users_group_id = gr_id
1631 self.users_group_id = gr_id
1633 self.user_id = u_id
1632 self.user_id = u_id
1634
1633
1635
1634
1636 class RepositoryField(Base, BaseModel):
1635 class RepositoryField(Base, BaseModel):
1637 __tablename__ = 'repositories_fields'
1636 __tablename__ = 'repositories_fields'
1638 __table_args__ = (
1637 __table_args__ = (
1639 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1638 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1640 base_table_args,
1639 base_table_args,
1641 )
1640 )
1642
1641
1643 PREFIX = 'ex_' # prefix used in forms so custom fields do not conflict with already existing fields
1642 PREFIX = 'ex_' # prefix used in forms so custom fields do not conflict with already existing fields
1644
1643
1645 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1644 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1646 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1645 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1647 field_key = Column("field_key", String(250))
1646 field_key = Column("field_key", String(250))
1648 field_label = Column("field_label", String(1024), nullable=False)
1647 field_label = Column("field_label", String(1024), nullable=False)
1649 field_value = Column("field_value", String(10000), nullable=False)
1648 field_value = Column("field_value", String(10000), nullable=False)
1650 field_desc = Column("field_desc", String(1024), nullable=False)
1649 field_desc = Column("field_desc", String(1024), nullable=False)
1651 field_type = Column("field_type", String(255), nullable=False, unique=None)
1650 field_type = Column("field_type", String(255), nullable=False, unique=None)
1652 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1651 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1653
1652
1654 repository = relationship('Repository')
1653 repository = relationship('Repository')
1655
1654
1656 @property
1655 @property
1657 def field_key_prefixed(self):
1656 def field_key_prefixed(self):
1658 return 'ex_%s' % self.field_key
1657 return 'ex_%s' % self.field_key
1659
1658
1660 @classmethod
1659 @classmethod
1661 def un_prefix_key(cls, key):
1660 def un_prefix_key(cls, key):
1662 if key.startswith(cls.PREFIX):
1661 if key.startswith(cls.PREFIX):
1663 return key[len(cls.PREFIX):]
1662 return key[len(cls.PREFIX):]
1664 return key
1663 return key
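# Illustrative sketch (not part of the original model): the 'ex_' prefix keeps
# custom field names from clashing with built-in form fields, and is stripped
# again when reading submitted form data. Standalone mirror of the logic above;
# 'tracker_url' is a hypothetical field key.
def un_prefix_demo(key, prefix='ex_'):
    return key[len(prefix):] if key.startswith(prefix) else key

assert un_prefix_demo('ex_tracker_url') == 'tracker_url'
assert un_prefix_demo('tracker_url') == 'tracker_url'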
1665
1664
1666 @classmethod
1665 @classmethod
1667 def get_by_key_name(cls, key, repo):
1666 def get_by_key_name(cls, key, repo):
1668 row = cls.query()\
1667 row = cls.query()\
1669 .filter(cls.repository == repo)\
1668 .filter(cls.repository == repo)\
1670 .filter(cls.field_key == key).scalar()
1669 .filter(cls.field_key == key).scalar()
1671 return row
1670 return row
1672
1671
1673
1672
1674 class Repository(Base, BaseModel):
1673 class Repository(Base, BaseModel):
1675 __tablename__ = 'repositories'
1674 __tablename__ = 'repositories'
1676 __table_args__ = (
1675 __table_args__ = (
1677 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1676 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1678 base_table_args,
1677 base_table_args,
1679 )
1678 )
1680 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1679 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1681 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1680 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1682 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1681 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
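# Illustrative sketch (not part of the original model): the templates above
# are plain str.format patterns; the values below are hypothetical.
_demo_clone_uri = '{scheme}://{user}@{netloc}/{repo}'.format(
    scheme='https', user='john', netloc='code.example.com', repo='group/my-repo')
assert _demo_clone_uri == 'https://john@code.example.com/group/my-repo'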
1683
1682
1684 STATE_CREATED = 'repo_state_created'
1683 STATE_CREATED = 'repo_state_created'
1685 STATE_PENDING = 'repo_state_pending'
1684 STATE_PENDING = 'repo_state_pending'
1686 STATE_ERROR = 'repo_state_error'
1685 STATE_ERROR = 'repo_state_error'
1687
1686
1688 LOCK_AUTOMATIC = 'lock_auto'
1687 LOCK_AUTOMATIC = 'lock_auto'
1689 LOCK_API = 'lock_api'
1688 LOCK_API = 'lock_api'
1690 LOCK_WEB = 'lock_web'
1689 LOCK_WEB = 'lock_web'
1691 LOCK_PULL = 'lock_pull'
1690 LOCK_PULL = 'lock_pull'
1692
1691
1693 NAME_SEP = URL_SEP
1692 NAME_SEP = URL_SEP
1694
1693
1695 repo_id = Column(
1694 repo_id = Column(
1696 "repo_id", Integer(), nullable=False, unique=True, default=None,
1695 "repo_id", Integer(), nullable=False, unique=True, default=None,
1697 primary_key=True)
1696 primary_key=True)
1698 _repo_name = Column(
1697 _repo_name = Column(
1699 "repo_name", Text(), nullable=False, default=None)
1698 "repo_name", Text(), nullable=False, default=None)
1700 repo_name_hash = Column(
1699 repo_name_hash = Column(
1701 "repo_name_hash", String(255), nullable=False, unique=True)
1700 "repo_name_hash", String(255), nullable=False, unique=True)
1702 repo_state = Column("repo_state", String(255), nullable=True)
1701 repo_state = Column("repo_state", String(255), nullable=True)
1703
1702
1704 clone_uri = Column(
1703 clone_uri = Column(
1705 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1704 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1706 default=None)
1705 default=None)
1707 push_uri = Column(
1706 push_uri = Column(
1708 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1707 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1709 default=None)
1708 default=None)
1710 repo_type = Column(
1709 repo_type = Column(
1711 "repo_type", String(255), nullable=False, unique=False, default=None)
1710 "repo_type", String(255), nullable=False, unique=False, default=None)
1712 user_id = Column(
1711 user_id = Column(
1713 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1712 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1714 unique=False, default=None)
1713 unique=False, default=None)
1715 private = Column(
1714 private = Column(
1716 "private", Boolean(), nullable=True, unique=None, default=None)
1715 "private", Boolean(), nullable=True, unique=None, default=None)
1717 archived = Column(
1716 archived = Column(
1718 "archived", Boolean(), nullable=True, unique=None, default=None)
1717 "archived", Boolean(), nullable=True, unique=None, default=None)
1719 enable_statistics = Column(
1718 enable_statistics = Column(
1720 "statistics", Boolean(), nullable=True, unique=None, default=True)
1719 "statistics", Boolean(), nullable=True, unique=None, default=True)
1721 enable_downloads = Column(
1720 enable_downloads = Column(
1722 "downloads", Boolean(), nullable=True, unique=None, default=True)
1721 "downloads", Boolean(), nullable=True, unique=None, default=True)
1723 description = Column(
1722 description = Column(
1724 "description", String(10000), nullable=True, unique=None, default=None)
1723 "description", String(10000), nullable=True, unique=None, default=None)
1725 created_on = Column(
1724 created_on = Column(
1726 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1725 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1727 default=datetime.datetime.now)
1726 default=datetime.datetime.now)
1728 updated_on = Column(
1727 updated_on = Column(
1729 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1728 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1730 default=datetime.datetime.now)
1729 default=datetime.datetime.now)
1731 _landing_revision = Column(
1730 _landing_revision = Column(
1732 "landing_revision", String(255), nullable=False, unique=False,
1731 "landing_revision", String(255), nullable=False, unique=False,
1733 default=None)
1732 default=None)
1734 enable_locking = Column(
1733 enable_locking = Column(
1735 "enable_locking", Boolean(), nullable=False, unique=None,
1734 "enable_locking", Boolean(), nullable=False, unique=None,
1736 default=False)
1735 default=False)
1737 _locked = Column(
1736 _locked = Column(
1738 "locked", String(255), nullable=True, unique=False, default=None)
1737 "locked", String(255), nullable=True, unique=False, default=None)
1739 _changeset_cache = Column(
1738 _changeset_cache = Column(
1740 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1739 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1741
1740
1742 fork_id = Column(
1741 fork_id = Column(
1743 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1742 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1744 nullable=True, unique=False, default=None)
1743 nullable=True, unique=False, default=None)
1745 group_id = Column(
1744 group_id = Column(
1746 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1745 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1747 unique=False, default=None)
1746 unique=False, default=None)
1748
1747
1749 user = relationship('User', lazy='joined')
1748 user = relationship('User', lazy='joined')
1750 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1749 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1751 group = relationship('RepoGroup', lazy='joined')
1750 group = relationship('RepoGroup', lazy='joined')
1752 repo_to_perm = relationship(
1751 repo_to_perm = relationship(
1753 'UserRepoToPerm', cascade='all',
1752 'UserRepoToPerm', cascade='all',
1754 order_by='UserRepoToPerm.repo_to_perm_id')
1753 order_by='UserRepoToPerm.repo_to_perm_id')
1755 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1754 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1756 stats = relationship('Statistics', cascade='all', uselist=False)
1755 stats = relationship('Statistics', cascade='all', uselist=False)
1757
1756
1758 followers = relationship(
1757 followers = relationship(
1759 'UserFollowing',
1758 'UserFollowing',
1760 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1759 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1761 cascade='all')
1760 cascade='all')
1762 extra_fields = relationship(
1761 extra_fields = relationship(
1763 'RepositoryField', cascade="all, delete-orphan")
1762 'RepositoryField', cascade="all, delete-orphan")
1764 logs = relationship('UserLog')
1763 logs = relationship('UserLog')
1765 comments = relationship(
1764 comments = relationship(
1766 'ChangesetComment', cascade="all, delete-orphan")
1765 'ChangesetComment', cascade="all, delete-orphan")
1767 pull_requests_source = relationship(
1766 pull_requests_source = relationship(
1768 'PullRequest',
1767 'PullRequest',
1769 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1768 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1770 cascade="all, delete-orphan")
1769 cascade="all, delete-orphan")
1771 pull_requests_target = relationship(
1770 pull_requests_target = relationship(
1772 'PullRequest',
1771 'PullRequest',
1773 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1772 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1774 cascade="all, delete-orphan")
1773 cascade="all, delete-orphan")
1775 ui = relationship('RepoRhodeCodeUi', cascade="all")
1774 ui = relationship('RepoRhodeCodeUi', cascade="all")
1776 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1775 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1777 integrations = relationship('Integration', cascade="all, delete-orphan")
1776 integrations = relationship('Integration', cascade="all, delete-orphan")
1778
1777
1779 scoped_tokens = relationship('UserApiKeys', cascade="all")
1778 scoped_tokens = relationship('UserApiKeys', cascade="all")
1780
1779
1781 # no cascade, set NULL
1780 # no cascade, set NULL
1782 artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id')
1781 artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id')
1783
1782
1784
1783
1785 def __str__(self):
1784 def __str__(self):
1786 return "<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id, self.repo_name)
1785 return "<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id, self.repo_name)
1787
1786
1788 @hybrid_property
1787 @hybrid_property
1789 def description_safe(self):
1788 def description_safe(self):
1790 from rhodecode.lib import helpers as h
1789 from rhodecode.lib import helpers as h
1791 return h.escape(self.description)
1790 return h.escape(self.description)
1792
1791
1793 @hybrid_property
1792 @hybrid_property
1794 def landing_rev(self):
1793 def landing_rev(self):
1795 # should always return [rev_type, rev], e.g. ['branch', 'master']
1794 # should always return [rev_type, rev], e.g. ['branch', 'master']
1796 if self._landing_revision:
1795 if self._landing_revision:
1797 _rev_info = self._landing_revision.split(':')
1796 _rev_info = self._landing_revision.split(':')
1798 if len(_rev_info) < 2:
1797 if len(_rev_info) < 2:
1799 _rev_info.insert(0, 'rev')
1798 _rev_info.insert(0, 'rev')
1800 return [_rev_info[0], _rev_info[1]]
1799 return [_rev_info[0], _rev_info[1]]
1801 return [None, None]
1800 return [None, None]
1802
1801
1803 @property
1802 @property
1804 def landing_ref_type(self):
1803 def landing_ref_type(self):
1805 return self.landing_rev[0]
1804 return self.landing_rev[0]
1806
1805
1807 @property
1806 @property
1808 def landing_ref_name(self):
1807 def landing_ref_name(self):
1809 return self.landing_rev[1]
1808 return self.landing_rev[1]
1810
1809
1811 @landing_rev.setter
1810 @landing_rev.setter
1812 def landing_rev(self, val):
1811 def landing_rev(self, val):
1813 if ':' not in val:
1812 if ':' not in val:
1814 raise ValueError('value must be delimited with `:` and consist '
1813 raise ValueError('value must be delimited with `:` and consist '
1815 'of <rev_type>:<rev>, got %s instead' % val)
1814 'of <rev_type>:<rev>, got %s instead' % val)
1816 self._landing_revision = val
1815 self._landing_revision = val
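# Illustrative sketch (not part of the original model): the landing revision
# is persisted as '<rev_type>:<rev>' and read back as a two-element list.
# Assumes `repo` is any Repository instance in an initialized session.
def example_landing_rev(repo):
    repo.landing_rev = 'branch:master'          # accepted, contains ':'
    assert repo.landing_rev == ['branch', 'master']
    # repo.landing_rev = 'master'               # would raise ValueError (no ':')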
1817
1816
1818 @hybrid_property
1817 @hybrid_property
1819 def locked(self):
1818 def locked(self):
1820 if self._locked:
1819 if self._locked:
1821 user_id, timelocked, reason = self._locked.split(':')
1820 user_id, timelocked, reason = self._locked.split(':')
1822 lock_values = int(user_id), timelocked, reason
1821 lock_values = int(user_id), timelocked, reason
1823 else:
1822 else:
1824 lock_values = [None, None, None]
1823 lock_values = [None, None, None]
1825 return lock_values
1824 return lock_values
1826
1825
1827 @locked.setter
1826 @locked.setter
1828 def locked(self, val):
1827 def locked(self, val):
1829 if val and isinstance(val, (list, tuple)):
1828 if val and isinstance(val, (list, tuple)):
1830 self._locked = ':'.join(map(str, val))
1829 self._locked = ':'.join(map(str, val))
1831 else:
1830 else:
1832 self._locked = None
1831 self._locked = None
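# Illustrative sketch (not part of the original model): lock state is stored
# as 'user_id:timestamp:reason' and read back as (user_id, timestamp, reason);
# assigning a falsy value clears the lock. Assumes an initialized session.
def example_lock_cycle(repo, user_id):
    repo.locked = [user_id, time.time(), Repository.LOCK_WEB]
    locked_by, locked_at, reason = repo.locked   # locked_by comes back as int
    repo.locked = None                           # unlock
    return locked_by, locked_at, reason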
1833
1832
1834 @classmethod
1833 @classmethod
1835 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
1834 def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
1836 from rhodecode.lib.vcs.backends.base import EmptyCommit
1835 from rhodecode.lib.vcs.backends.base import EmptyCommit
1837 dummy = EmptyCommit().__json__()
1836 dummy = EmptyCommit().__json__()
1838 if not changeset_cache_raw:
1837 if not changeset_cache_raw:
1839 dummy['source_repo_id'] = repo_id
1838 dummy['source_repo_id'] = repo_id
1840 return json.loads(json.dumps(dummy))
1839 return json.loads(json.dumps(dummy))
1841
1840
1842 try:
1841 try:
1843 return json.loads(changeset_cache_raw)
1842 return json.loads(changeset_cache_raw)
1844 except TypeError:
1843 except TypeError:
1845 return dummy
1844 return dummy
1846 except Exception:
1845 except Exception:
1847 log.error(traceback.format_exc())
1846 log.error(traceback.format_exc())
1848 return dummy
1847 return dummy
1849
1848
1850 @hybrid_property
1849 @hybrid_property
1851 def changeset_cache(self):
1850 def changeset_cache(self):
1852 return self._load_changeset_cache(self.repo_id, self._changeset_cache)
1851 return self._load_changeset_cache(self.repo_id, self._changeset_cache)
1853
1852
1854 @changeset_cache.setter
1853 @changeset_cache.setter
1855 def changeset_cache(self, val):
1854 def changeset_cache(self, val):
1856 try:
1855 try:
1857 self._changeset_cache = json.dumps(val)
1856 self._changeset_cache = json.dumps(val)
1858 except Exception:
1857 except Exception:
1859 log.error(traceback.format_exc())
1858 log.error(traceback.format_exc())
1860
1859
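# Illustrative sketch (not part of the original source): the changeset cache
# is a JSON blob shaped like `EmptyCommit().__json__()`, roughly the keys
# listed in the `update_commit_cache` docstring further down, e.g.:
#
#   {"source_repo_id": 123, "raw_id": "...", "short_id": "...",
#    "revision": -1, "parents": [], "message": "", "date": "...",
#    "author": "", "updated_on": 1600000000.0}
#
# The getter always falls back to that dummy structure, so callers never
# have to guard against a missing or unparsable cache value.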
1861 @hybrid_property
1860 @hybrid_property
1862 def repo_name(self):
1861 def repo_name(self):
1863 return self._repo_name
1862 return self._repo_name
1864
1863
1865 @repo_name.setter
1864 @repo_name.setter
1866 def repo_name(self, value):
1865 def repo_name(self, value):
1867 self._repo_name = value
1866 self._repo_name = value
1868 self.repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1867 self.repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1869
1868
1870 @classmethod
1869 @classmethod
1871 def normalize_repo_name(cls, repo_name):
1870 def normalize_repo_name(cls, repo_name):
1872 """
1871 """
1873 Normalizes os specific repo_name to the format internally stored inside
1872 Normalizes os specific repo_name to the format internally stored inside
1874 database using URL_SEP
1873 database using URL_SEP
1875
1874
1876 :param cls:
1875 :param cls:
1877 :param repo_name:
1876 :param repo_name:
1878 """
1877 """
1879 return cls.NAME_SEP.join(repo_name.split(os.sep))
1878 return cls.NAME_SEP.join(repo_name.split(os.sep))
1880
1879
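# Illustrative sketch (not part of the original source), assuming NAME_SEP is
# the URL-style '/' separator:
#
#   # on Windows (os.sep == '\\'):
#   Repository.normalize_repo_name('group\\sub\\repo')   # -> 'group/sub/repo'
#   # on POSIX (os.sep == '/') the name is returned unchanged:
#   Repository.normalize_repo_name('group/sub/repo')     # -> 'group/sub/repo'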
1881 @classmethod
1880 @classmethod
1882 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1881 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1883 session = Session()
1882 session = Session()
1884 q = session.query(cls).filter(cls.repo_name == repo_name)
1883 q = session.query(cls).filter(cls.repo_name == repo_name)
1885
1884
1886 if cache:
1885 if cache:
1887 if identity_cache:
1886 if identity_cache:
1888 val = cls.identity_cache(session, 'repo_name', repo_name)
1887 val = cls.identity_cache(session, 'repo_name', repo_name)
1889 if val:
1888 if val:
1890 return val
1889 return val
1891 else:
1890 else:
1892 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1891 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1893 q = q.options(
1892 q = q.options(
1894 FromCache("sql_cache_short", cache_key))
1893 FromCache("sql_cache_short", cache_key))
1895
1894
1896 return q.scalar()
1895 return q.scalar()
1897
1896
1898 @classmethod
1897 @classmethod
1899 def get_by_id_or_repo_name(cls, repoid):
1898 def get_by_id_or_repo_name(cls, repoid):
1900 if isinstance(repoid, int):
1899 if isinstance(repoid, int):
1901 try:
1900 try:
1902 repo = cls.get(repoid)
1901 repo = cls.get(repoid)
1903 except ValueError:
1902 except ValueError:
1904 repo = None
1903 repo = None
1905 else:
1904 else:
1906 repo = cls.get_by_repo_name(repoid)
1905 repo = cls.get_by_repo_name(repoid)
1907 return repo
1906 return repo
1908
1907
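# Illustrative sketch (not part of the original source): the two lookups above
# let API code accept either a numeric primary key or a repository name, e.g.:
#
#   Repository.get_by_id_or_repo_name(42)                     # lookup by repo_id
#   Repository.get_by_id_or_repo_name('group/my-repo')        # lookup by repo_name
#   Repository.get_by_repo_name('group/my-repo', cache=True)  # SQL-cached lookup
#
# All of them return None when no repository matches.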
1909 @classmethod
1908 @classmethod
1910 def get_by_full_path(cls, repo_full_path):
1909 def get_by_full_path(cls, repo_full_path):
1911 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1910 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1912 repo_name = cls.normalize_repo_name(repo_name)
1911 repo_name = cls.normalize_repo_name(repo_name)
1913 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1912 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1914
1913
1915 @classmethod
1914 @classmethod
1916 def get_repo_forks(cls, repo_id):
1915 def get_repo_forks(cls, repo_id):
1917 return cls.query().filter(Repository.fork_id == repo_id)
1916 return cls.query().filter(Repository.fork_id == repo_id)
1918
1917
1919 @classmethod
1918 @classmethod
1920 def base_path(cls):
1919 def base_path(cls):
1921 """
1920 """
1922 Returns base path where all repos are stored
1921 Returns base path where all repos are stored
1923
1922
1924 :param cls:
1923 :param cls:
1925 """
1924 """
1926 q = Session().query(RhodeCodeUi)\
1925 q = Session().query(RhodeCodeUi)\
1927 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1926 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1928 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1927 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1929 return q.one().ui_value
1928 return q.one().ui_value
1930
1929
1931 @classmethod
1930 @classmethod
1932 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1931 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1933 case_insensitive=True, archived=False):
1932 case_insensitive=True, archived=False):
1934 q = Repository.query()
1933 q = Repository.query()
1935
1934
1936 if not archived:
1935 if not archived:
1937 q = q.filter(Repository.archived.isnot(true()))
1936 q = q.filter(Repository.archived.isnot(true()))
1938
1937
1939 if not isinstance(user_id, Optional):
1938 if not isinstance(user_id, Optional):
1940 q = q.filter(Repository.user_id == user_id)
1939 q = q.filter(Repository.user_id == user_id)
1941
1940
1942 if not isinstance(group_id, Optional):
1941 if not isinstance(group_id, Optional):
1943 q = q.filter(Repository.group_id == group_id)
1942 q = q.filter(Repository.group_id == group_id)
1944
1943
1945 if case_insensitive:
1944 if case_insensitive:
1946 q = q.order_by(func.lower(Repository.repo_name))
1945 q = q.order_by(func.lower(Repository.repo_name))
1947 else:
1946 else:
1948 q = q.order_by(Repository.repo_name)
1947 q = q.order_by(Repository.repo_name)
1949
1948
1950 return q.all()
1949 return q.all()
1951
1950
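# Illustrative sketch (not part of the original source): `Optional` acts as a
# "not passed" sentinel so that None stays a legal filter value; only the
# arguments a caller actually supplies narrow the query, e.g.:
#
#   Repository.get_all_repos()                  # all non-archived repositories
#   Repository.get_all_repos(user_id=5)         # repositories owned by user 5
#   Repository.get_all_repos(group_id=None)     # repositories outside any group
#   Repository.get_all_repos(archived=True)     # include archived repositories too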
1952 @property
1951 @property
1953 def repo_uid(self):
1952 def repo_uid(self):
1954 return '_{}'.format(self.repo_id)
1953 return '_{}'.format(self.repo_id)
1955
1954
1956 @property
1955 @property
1957 def forks(self):
1956 def forks(self):
1958 """
1957 """
1959 Return forks of this repo
1958 Return forks of this repo
1960 """
1959 """
1961 return Repository.get_repo_forks(self.repo_id)
1960 return Repository.get_repo_forks(self.repo_id)
1962
1961
1963 @property
1962 @property
1964 def parent(self):
1963 def parent(self):
1965 """
1964 """
1966 Returns fork parent
1965 Returns fork parent
1967 """
1966 """
1968 return self.fork
1967 return self.fork
1969
1968
1970 @property
1969 @property
1971 def just_name(self):
1970 def just_name(self):
1972 return self.repo_name.split(self.NAME_SEP)[-1]
1971 return self.repo_name.split(self.NAME_SEP)[-1]
1973
1972
1974 @property
1973 @property
1975 def groups_with_parents(self):
1974 def groups_with_parents(self):
1976 groups = []
1975 groups = []
1977 if self.group is None:
1976 if self.group is None:
1978 return groups
1977 return groups
1979
1978
1980 cur_gr = self.group
1979 cur_gr = self.group
1981 groups.insert(0, cur_gr)
1980 groups.insert(0, cur_gr)
1982 while 1:
1981 while 1:
1983 gr = getattr(cur_gr, 'parent_group', None)
1982 gr = getattr(cur_gr, 'parent_group', None)
1984 cur_gr = cur_gr.parent_group
1983 cur_gr = cur_gr.parent_group
1985 if gr is None:
1984 if gr is None:
1986 break
1985 break
1987 groups.insert(0, gr)
1986 groups.insert(0, gr)
1988
1987
1989 return groups
1988 return groups
1990
1989
1991 @property
1990 @property
1992 def groups_and_repo(self):
1991 def groups_and_repo(self):
1993 return self.groups_with_parents, self
1992 return self.groups_with_parents, self
1994
1993
1995 @LazyProperty
1994 @LazyProperty
1996 def repo_path(self):
1995 def repo_path(self):
1997 """
1996 """
1998 Returns the base full path for this repository, i.e. where it actually
1997 Returns the base full path for this repository, i.e. where it actually
1999 exists on the filesystem
1998 exists on the filesystem
2000 """
1999 """
2001 q = Session().query(RhodeCodeUi).filter(
2000 q = Session().query(RhodeCodeUi).filter(
2002 RhodeCodeUi.ui_key == self.NAME_SEP)
2001 RhodeCodeUi.ui_key == self.NAME_SEP)
2003 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
2002 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
2004 return q.one().ui_value
2003 return q.one().ui_value
2005
2004
2006 @property
2005 @property
2007 def repo_full_path(self):
2006 def repo_full_path(self):
2008 p = [self.repo_path]
2007 p = [self.repo_path]
2009 # we need to split the name by / since this is how we store the
2008 # we need to split the name by / since this is how we store the
2010 # names in the database, but that eventually needs to be converted
2009 # names in the database, but that eventually needs to be converted
2011 # into a valid system path
2010 # into a valid system path
2012 p += self.repo_name.split(self.NAME_SEP)
2011 p += self.repo_name.split(self.NAME_SEP)
2013 return os.path.join(*map(safe_str, p))
2012 return os.path.join(*map(safe_str, p))
2014
2013
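# Illustrative sketch (not part of the original source): the filesystem
# location is the configured base path joined with the stored name split on
# NAME_SEP; e.g. assuming the base path is '/srv/repos':
#
#   repo.repo_name         # 'group/sub/my-repo'
#   repo.repo_full_path    # '/srv/repos/group/sub/my-repo'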
2015 @property
2014 @property
2016 def cache_keys(self):
2015 def cache_keys(self):
2017 """
2016 """
2018 Returns associated cache keys for that repo
2017 Returns associated cache keys for that repo
2019 """
2018 """
2020 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2019 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2021 repo_id=self.repo_id)
2020 repo_id=self.repo_id)
2022 return CacheKey.query()\
2021 return CacheKey.query()\
2023 .filter(CacheKey.cache_args == invalidation_namespace)\
2022 .filter(CacheKey.cache_args == invalidation_namespace)\
2024 .order_by(CacheKey.cache_key)\
2023 .order_by(CacheKey.cache_key)\
2025 .all()
2024 .all()
2026
2025
2027 @property
2026 @property
2028 def cached_diffs_relative_dir(self):
2027 def cached_diffs_relative_dir(self):
2029 """
2028 """
2030 Return the cached diffs directory as a path relative to the repository
2029 Return the cached diffs directory as a path relative to the repository
2031 store, safe to display to users, who shouldn't learn the absolute store
2030 store, safe to display to users, who shouldn't learn the absolute store
2032 path
2031 path
2033 """
2032 """
2034 return os.path.join(
2033 return os.path.join(
2035 os.path.dirname(self.repo_name),
2034 os.path.dirname(self.repo_name),
2036 self.cached_diffs_dir.split(os.path.sep)[-1])
2035 self.cached_diffs_dir.split(os.path.sep)[-1])
2037
2036
2038 @property
2037 @property
2039 def cached_diffs_dir(self):
2038 def cached_diffs_dir(self):
2040 path = self.repo_full_path
2039 path = self.repo_full_path
2041 return os.path.join(
2040 return os.path.join(
2042 os.path.dirname(path),
2041 os.path.dirname(path),
2043 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
2042 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
2044
2043
2045 def cached_diffs(self):
2044 def cached_diffs(self):
2046 diff_cache_dir = self.cached_diffs_dir
2045 diff_cache_dir = self.cached_diffs_dir
2047 if os.path.isdir(diff_cache_dir):
2046 if os.path.isdir(diff_cache_dir):
2048 return os.listdir(diff_cache_dir)
2047 return os.listdir(diff_cache_dir)
2049 return []
2048 return []
2050
2049
2051 def shadow_repos(self):
2050 def shadow_repos(self):
2052 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
2051 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
2053 return [
2052 return [
2054 x for x in os.listdir(os.path.dirname(self.repo_full_path))
2053 x for x in os.listdir(os.path.dirname(self.repo_full_path))
2055 if x.startswith(shadow_repos_pattern)]
2054 if x.startswith(shadow_repos_pattern)]
2056
2055
2057 def get_new_name(self, repo_name):
2056 def get_new_name(self, repo_name):
2058 """
2057 """
2059 returns new full repository name based on assigned group and new name
2058 returns new full repository name based on assigned group and new name
2060 
2059 
2061 :param repo_name:
2060 :param repo_name:
2062 """
2061 """
2063 path_prefix = self.group.full_path_splitted if self.group else []
2062 path_prefix = self.group.full_path_splitted if self.group else []
2064 return self.NAME_SEP.join(path_prefix + [repo_name])
2063 return self.NAME_SEP.join(path_prefix + [repo_name])
2065
2064
2066 @property
2065 @property
2067 def _config(self):
2066 def _config(self):
2068 """
2067 """
2069 Returns db based config object.
2068 Returns db based config object.
2070 """
2069 """
2071 from rhodecode.lib.utils import make_db_config
2070 from rhodecode.lib.utils import make_db_config
2072 return make_db_config(clear_session=False, repo=self)
2071 return make_db_config(clear_session=False, repo=self)
2073
2072
2074 def permissions(self, with_admins=True, with_owner=True,
2073 def permissions(self, with_admins=True, with_owner=True,
2075 expand_from_user_groups=False):
2074 expand_from_user_groups=False):
2076 """
2075 """
2077 Permissions for repositories
2076 Permissions for repositories
2078 """
2077 """
2079 _admin_perm = 'repository.admin'
2078 _admin_perm = 'repository.admin'
2080
2079
2081 owner_row = []
2080 owner_row = []
2082 if with_owner:
2081 if with_owner:
2083 usr = AttributeDict(self.user.get_dict())
2082 usr = AttributeDict(self.user.get_dict())
2084 usr.owner_row = True
2083 usr.owner_row = True
2085 usr.permission = _admin_perm
2084 usr.permission = _admin_perm
2086 usr.permission_id = None
2085 usr.permission_id = None
2087 owner_row.append(usr)
2086 owner_row.append(usr)
2088
2087
2089 super_admin_ids = []
2088 super_admin_ids = []
2090 super_admin_rows = []
2089 super_admin_rows = []
2091 if with_admins:
2090 if with_admins:
2092 for usr in User.get_all_super_admins():
2091 for usr in User.get_all_super_admins():
2093 super_admin_ids.append(usr.user_id)
2092 super_admin_ids.append(usr.user_id)
2094 # if this admin is also owner, don't double the record
2093 # if this admin is also owner, don't double the record
2095 if usr.user_id == owner_row[0].user_id:
2094 if usr.user_id == owner_row[0].user_id:
2096 owner_row[0].admin_row = True
2095 owner_row[0].admin_row = True
2097 else:
2096 else:
2098 usr = AttributeDict(usr.get_dict())
2097 usr = AttributeDict(usr.get_dict())
2099 usr.admin_row = True
2098 usr.admin_row = True
2100 usr.permission = _admin_perm
2099 usr.permission = _admin_perm
2101 usr.permission_id = None
2100 usr.permission_id = None
2102 super_admin_rows.append(usr)
2101 super_admin_rows.append(usr)
2103
2102
2104 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
2103 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
2105 q = q.options(joinedload(UserRepoToPerm.repository),
2104 q = q.options(joinedload(UserRepoToPerm.repository),
2106 joinedload(UserRepoToPerm.user),
2105 joinedload(UserRepoToPerm.user),
2107 joinedload(UserRepoToPerm.permission),)
2106 joinedload(UserRepoToPerm.permission),)
2108
2107
2109 # get owners, admins and their permissions. We re-write the sqlalchemy
2108 # get owners, admins and their permissions. We re-write the sqlalchemy
2110 # objects into plain dict-like rows because the sqlalchemy session holds
2109 # objects into plain dict-like rows because the sqlalchemy session holds
2111 # a global reference, and changing one object would propagate to all
2110 # a global reference, and changing one object would propagate to all
2112 # others. E.g. if an admin is also the owner, setting admin_row on one
2111 # others. E.g. if an admin is also the owner, setting admin_row on one
2113 # record would otherwise change both objects
2112 # record would otherwise change both objects
2114 perm_rows = []
2113 perm_rows = []
2115 for _usr in q.all():
2114 for _usr in q.all():
2116 usr = AttributeDict(_usr.user.get_dict())
2115 usr = AttributeDict(_usr.user.get_dict())
2117 # if this user is also owner/admin, mark as duplicate record
2116 # if this user is also owner/admin, mark as duplicate record
2118 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2117 if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
2119 usr.duplicate_perm = True
2118 usr.duplicate_perm = True
2120 # also check if this permission is maybe used by branch_permissions
2119 # also check if this permission is maybe used by branch_permissions
2121 if _usr.branch_perm_entry:
2120 if _usr.branch_perm_entry:
2122 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
2121 usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]
2123
2122
2124 usr.permission = _usr.permission.permission_name
2123 usr.permission = _usr.permission.permission_name
2125 usr.permission_id = _usr.repo_to_perm_id
2124 usr.permission_id = _usr.repo_to_perm_id
2126 perm_rows.append(usr)
2125 perm_rows.append(usr)
2127
2126
2128 # filter the perm rows by 'default' first and then sort them by
2127 # filter the perm rows by 'default' first and then sort them by
2129 # admin,write,read,none permissions sorted again alphabetically in
2128 # admin,write,read,none permissions sorted again alphabetically in
2130 # each group
2129 # each group
2131 perm_rows = sorted(perm_rows, key=display_user_sort)
2130 perm_rows = sorted(perm_rows, key=display_user_sort)
2132
2131
2133 user_groups_rows = []
2132 user_groups_rows = []
2134 if expand_from_user_groups:
2133 if expand_from_user_groups:
2135 for ug in self.permission_user_groups(with_members=True):
2134 for ug in self.permission_user_groups(with_members=True):
2136 for user_data in ug.members:
2135 for user_data in ug.members:
2137 user_groups_rows.append(user_data)
2136 user_groups_rows.append(user_data)
2138
2137
2139 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2138 return super_admin_rows + owner_row + perm_rows + user_groups_rows
2140
2139
2141 def permission_user_groups(self, with_members=True):
2140 def permission_user_groups(self, with_members=True):
2142 q = UserGroupRepoToPerm.query()\
2141 q = UserGroupRepoToPerm.query()\
2143 .filter(UserGroupRepoToPerm.repository == self)
2142 .filter(UserGroupRepoToPerm.repository == self)
2144 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2143 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2145 joinedload(UserGroupRepoToPerm.users_group),
2144 joinedload(UserGroupRepoToPerm.users_group),
2146 joinedload(UserGroupRepoToPerm.permission),)
2145 joinedload(UserGroupRepoToPerm.permission),)
2147
2146
2148 perm_rows = []
2147 perm_rows = []
2149 for _user_group in q.all():
2148 for _user_group in q.all():
2150 entry = AttributeDict(_user_group.users_group.get_dict())
2149 entry = AttributeDict(_user_group.users_group.get_dict())
2151 entry.permission = _user_group.permission.permission_name
2150 entry.permission = _user_group.permission.permission_name
2152 if with_members:
2151 if with_members:
2153 entry.members = [x.user.get_dict()
2152 entry.members = [x.user.get_dict()
2154 for x in _user_group.users_group.members]
2153 for x in _user_group.users_group.members]
2155 perm_rows.append(entry)
2154 perm_rows.append(entry)
2156
2155
2157 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2156 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2158 return perm_rows
2157 return perm_rows
2159
2158
2160 def get_api_data(self, include_secrets=False):
2159 def get_api_data(self, include_secrets=False):
2161 """
2160 """
2162 Common function for generating repo api data
2161 Common function for generating repo api data
2163
2162
2164 :param include_secrets: See :meth:`User.get_api_data`.
2163 :param include_secrets: See :meth:`User.get_api_data`.
2165
2164
2166 """
2165 """
2167 # TODO: mikhail: Here there is an anti-pattern, we probably need to
2166 # TODO: mikhail: Here there is an anti-pattern, we probably need to
2168 # move these methods to the models level.
2167 # move these methods to the models level.
2169 from rhodecode.model.settings import SettingsModel
2168 from rhodecode.model.settings import SettingsModel
2170 from rhodecode.model.repo import RepoModel
2169 from rhodecode.model.repo import RepoModel
2171
2170
2172 repo = self
2171 repo = self
2173 _user_id, _time, _reason = self.locked
2172 _user_id, _time, _reason = self.locked
2174
2173
2175 data = {
2174 data = {
2176 'repo_id': repo.repo_id,
2175 'repo_id': repo.repo_id,
2177 'repo_name': repo.repo_name,
2176 'repo_name': repo.repo_name,
2178 'repo_type': repo.repo_type,
2177 'repo_type': repo.repo_type,
2179 'clone_uri': repo.clone_uri or '',
2178 'clone_uri': repo.clone_uri or '',
2180 'push_uri': repo.push_uri or '',
2179 'push_uri': repo.push_uri or '',
2181 'url': RepoModel().get_url(self),
2180 'url': RepoModel().get_url(self),
2182 'private': repo.private,
2181 'private': repo.private,
2183 'created_on': repo.created_on,
2182 'created_on': repo.created_on,
2184 'description': repo.description_safe,
2183 'description': repo.description_safe,
2185 'landing_rev': repo.landing_rev,
2184 'landing_rev': repo.landing_rev,
2186 'owner': repo.user.username,
2185 'owner': repo.user.username,
2187 'fork_of': repo.fork.repo_name if repo.fork else None,
2186 'fork_of': repo.fork.repo_name if repo.fork else None,
2188 'fork_of_id': repo.fork.repo_id if repo.fork else None,
2187 'fork_of_id': repo.fork.repo_id if repo.fork else None,
2189 'enable_statistics': repo.enable_statistics,
2188 'enable_statistics': repo.enable_statistics,
2190 'enable_locking': repo.enable_locking,
2189 'enable_locking': repo.enable_locking,
2191 'enable_downloads': repo.enable_downloads,
2190 'enable_downloads': repo.enable_downloads,
2192 'last_changeset': repo.changeset_cache,
2191 'last_changeset': repo.changeset_cache,
2193 'locked_by': User.get(_user_id).get_api_data(
2192 'locked_by': User.get(_user_id).get_api_data(
2194 include_secrets=include_secrets) if _user_id else None,
2193 include_secrets=include_secrets) if _user_id else None,
2195 'locked_date': time_to_datetime(_time) if _time else None,
2194 'locked_date': time_to_datetime(_time) if _time else None,
2196 'lock_reason': _reason if _reason else None,
2195 'lock_reason': _reason if _reason else None,
2197 }
2196 }
2198
2197
2199 # TODO: mikhail: should be per-repo settings here
2198 # TODO: mikhail: should be per-repo settings here
2200 rc_config = SettingsModel().get_all_settings()
2199 rc_config = SettingsModel().get_all_settings()
2201 repository_fields = str2bool(
2200 repository_fields = str2bool(
2202 rc_config.get('rhodecode_repository_fields'))
2201 rc_config.get('rhodecode_repository_fields'))
2203 if repository_fields:
2202 if repository_fields:
2204 for f in self.extra_fields:
2203 for f in self.extra_fields:
2205 data[f.field_key_prefixed] = f.field_value
2204 data[f.field_key_prefixed] = f.field_value
2206
2205
2207 return data
2206 return data
2208
2207
2209 @classmethod
2208 @classmethod
2210 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2209 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2211 if not lock_time:
2210 if not lock_time:
2212 lock_time = time.time()
2211 lock_time = time.time()
2213 if not lock_reason:
2212 if not lock_reason:
2214 lock_reason = cls.LOCK_AUTOMATIC
2213 lock_reason = cls.LOCK_AUTOMATIC
2215 repo.locked = [user_id, lock_time, lock_reason]
2214 repo.locked = [user_id, lock_time, lock_reason]
2216 Session().add(repo)
2215 Session().add(repo)
2217 Session().commit()
2216 Session().commit()
2218
2217
2219 @classmethod
2218 @classmethod
2220 def unlock(cls, repo):
2219 def unlock(cls, repo):
2221 repo.locked = None
2220 repo.locked = None
2222 Session().add(repo)
2221 Session().add(repo)
2223 Session().commit()
2222 Session().commit()
2224
2223
2225 @classmethod
2224 @classmethod
2226 def getlock(cls, repo):
2225 def getlock(cls, repo):
2227 return repo.locked
2226 return repo.locked
2228
2227
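# Illustrative sketch (not part of the original source): the three class-level
# helpers above wrap the `locked` property and commit the session right away,
# e.g. assuming a Repository instance `repo`:
#
#   Repository.lock(repo, user_id=2, lock_reason=Repository.LOCK_AUTOMATIC)
#   Repository.getlock(repo)    # -> (2, '<timestamp>', '<lock reason>')
#   Repository.unlock(repo)     # lock cleared, `locked` is [None, None, None] again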
2229 def is_user_lock(self, user_id):
2228 def is_user_lock(self, user_id):
2230 if self.locked[0]:
2229 if self.locked[0]:
2231 lock_user_id = safe_int(self.locked[0])
2230 lock_user_id = safe_int(self.locked[0])
2232 user_id = safe_int(user_id)
2231 user_id = safe_int(user_id)
2233 # both are ints, and they are equal
2232 # both are ints, and they are equal
2234 return all([lock_user_id, user_id]) and lock_user_id == user_id
2233 return all([lock_user_id, user_id]) and lock_user_id == user_id
2235
2234
2236 return False
2235 return False
2237
2236
2238 def get_locking_state(self, action, user_id, only_when_enabled=True):
2237 def get_locking_state(self, action, user_id, only_when_enabled=True):
2239 """
2238 """
2240 Checks locking on this repository. If locking is enabled and a lock is
2239 Checks locking on this repository. If locking is enabled and a lock is
2241 present, returns a tuple of (make_lock, locked, locked_by).
2240 present, returns a tuple of (make_lock, locked, locked_by).
2242 make_lock has 3 states: None (do nothing), True (make a lock) and
2241 make_lock has 3 states: None (do nothing), True (make a lock) and
2243 False (release the lock). This value is later propagated to hooks, which
2242 False (release the lock). This value is later propagated to hooks, which
2244 do the locking. Think of it as a signal telling the hooks what to do.
2243 do the locking. Think of it as a signal telling the hooks what to do.
2245
2244
2246 """
2245 """
2247 # TODO: johbo: This is part of the business logic and should be moved
2246 # TODO: johbo: This is part of the business logic and should be moved
2248 # into the RepositoryModel.
2247 # into the RepositoryModel.
2249
2248
2250 if action not in ('push', 'pull'):
2249 if action not in ('push', 'pull'):
2251 raise ValueError("Invalid action value: %s" % repr(action))
2250 raise ValueError("Invalid action value: %s" % repr(action))
2252
2251
2253 # defines if locked error should be thrown to user
2252 # defines if locked error should be thrown to user
2254 currently_locked = False
2253 currently_locked = False
2255 # defines if new lock should be made, tri-state
2254 # defines if new lock should be made, tri-state
2256 make_lock = None
2255 make_lock = None
2257 repo = self
2256 repo = self
2258 user = User.get(user_id)
2257 user = User.get(user_id)
2259
2258
2260 lock_info = repo.locked
2259 lock_info = repo.locked
2261
2260
2262 if repo and (repo.enable_locking or not only_when_enabled):
2261 if repo and (repo.enable_locking or not only_when_enabled):
2263 if action == 'push':
2262 if action == 'push':
2264 # check if it's already locked; if it is, compare users
2263 # check if it's already locked; if it is, compare users
2265 locked_by_user_id = lock_info[0]
2264 locked_by_user_id = lock_info[0]
2266 if user.user_id == locked_by_user_id:
2265 if user.user_id == locked_by_user_id:
2267 log.debug(
2266 log.debug(
2268 'Got `push` action from user %s, now unlocking', user)
2267 'Got `push` action from user %s, now unlocking', user)
2269 # unlock if we have push from user who locked
2268 # unlock if we have push from user who locked
2270 make_lock = False
2269 make_lock = False
2271 else:
2270 else:
2272 # we're not the user who locked the repo; reject with the
2271 # we're not the user who locked the repo; reject with the
2273 # code defined in settings (default is HTTP 423 Locked)
2272 # code defined in settings (default is HTTP 423 Locked)
2274 log.debug('Repo %s is currently locked by %s', repo, user)
2273 log.debug('Repo %s is currently locked by %s', repo, user)
2275 currently_locked = True
2274 currently_locked = True
2276 elif action == 'pull':
2275 elif action == 'pull':
2277 # [0] user [1] date
2276 # [0] user [1] date
2278 if lock_info[0] and lock_info[1]:
2277 if lock_info[0] and lock_info[1]:
2279 log.debug('Repo %s is currently locked by %s', repo, user)
2278 log.debug('Repo %s is currently locked by %s', repo, user)
2280 currently_locked = True
2279 currently_locked = True
2281 else:
2280 else:
2282 log.debug('Setting lock on repo %s by %s', repo, user)
2281 log.debug('Setting lock on repo %s by %s', repo, user)
2283 make_lock = True
2282 make_lock = True
2284
2283
2285 else:
2284 else:
2286 log.debug('Repository %s does not have locking enabled', repo)
2285 log.debug('Repository %s does not have locking enabled', repo)
2287
2286
2288 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2287 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2289 make_lock, currently_locked, lock_info)
2288 make_lock, currently_locked, lock_info)
2290
2289
2291 from rhodecode.lib.auth import HasRepoPermissionAny
2290 from rhodecode.lib.auth import HasRepoPermissionAny
2292 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2291 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2293 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2292 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2294 # if we don't have at least write permission we cannot make a lock
2293 # if we don't have at least write permission we cannot make a lock
2295 log.debug('lock state reset back to FALSE due to lack '
2294 log.debug('lock state reset back to FALSE due to lack '
2296 'of at least write permission')
2295 'of at least write permission')
2297 make_lock = False
2296 make_lock = False
2298
2297
2299 return make_lock, currently_locked, lock_info
2298 return make_lock, currently_locked, lock_info
2300
2299
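# Illustrative sketch (not part of the original source): callers typically
# unpack the tri-state result and hand it to the VCS hooks, e.g.:
#
#   make_lock, currently_locked, lock_info = repo.get_locking_state(
#       'push', user_id=2)
#   # make_lock is None  -> leave the lock state untouched
#   # make_lock is True  -> hooks should create a lock for this user
#   # make_lock is False -> hooks should release the existing lock
#   # currently_locked   -> the operation should be rejected (HTTP 423 by default)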
2301 @property
2300 @property
2302 def last_commit_cache_update_diff(self):
2301 def last_commit_cache_update_diff(self):
2303 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2302 return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)
2304
2303
2305 @classmethod
2304 @classmethod
2306 def _load_commit_change(cls, last_commit_cache):
2305 def _load_commit_change(cls, last_commit_cache):
2307 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2306 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2308 empty_date = datetime.datetime.fromtimestamp(0)
2307 empty_date = datetime.datetime.fromtimestamp(0)
2309 date_latest = last_commit_cache.get('date', empty_date)
2308 date_latest = last_commit_cache.get('date', empty_date)
2310 try:
2309 try:
2311 return parse_datetime(date_latest)
2310 return parse_datetime(date_latest)
2312 except Exception:
2311 except Exception:
2313 return empty_date
2312 return empty_date
2314
2313
2315 @property
2314 @property
2316 def last_commit_change(self):
2315 def last_commit_change(self):
2317 return self._load_commit_change(self.changeset_cache)
2316 return self._load_commit_change(self.changeset_cache)
2318
2317
2319 @property
2318 @property
2320 def last_db_change(self):
2319 def last_db_change(self):
2321 return self.updated_on
2320 return self.updated_on
2322
2321
2323 @property
2322 @property
2324 def clone_uri_hidden(self):
2323 def clone_uri_hidden(self):
2325 clone_uri = self.clone_uri
2324 clone_uri = self.clone_uri
2326 if clone_uri:
2325 if clone_uri:
2327 import urlobject
2326 import urlobject
2328 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2327 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2329 if url_obj.password:
2328 if url_obj.password:
2330 clone_uri = url_obj.with_password('*****')
2329 clone_uri = url_obj.with_password('*****')
2331 return clone_uri
2330 return clone_uri
2332
2331
2333 @property
2332 @property
2334 def push_uri_hidden(self):
2333 def push_uri_hidden(self):
2335 push_uri = self.push_uri
2334 push_uri = self.push_uri
2336 if push_uri:
2335 if push_uri:
2337 import urlobject
2336 import urlobject
2338 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2337 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2339 if url_obj.password:
2338 if url_obj.password:
2340 push_uri = url_obj.with_password('*****')
2339 push_uri = url_obj.with_password('*****')
2341 return push_uri
2340 return push_uri
2342
2341
2343 def clone_url(self, **override):
2342 def clone_url(self, **override):
2344 from rhodecode.model.settings import SettingsModel
2343 from rhodecode.model.settings import SettingsModel
2345
2344
2346 uri_tmpl = None
2345 uri_tmpl = None
2347 if 'with_id' in override:
2346 if 'with_id' in override:
2348 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2347 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2349 del override['with_id']
2348 del override['with_id']
2350
2349
2351 if 'uri_tmpl' in override:
2350 if 'uri_tmpl' in override:
2352 uri_tmpl = override['uri_tmpl']
2351 uri_tmpl = override['uri_tmpl']
2353 del override['uri_tmpl']
2352 del override['uri_tmpl']
2354
2353
2355 ssh = False
2354 ssh = False
2356 if 'ssh' in override:
2355 if 'ssh' in override:
2357 ssh = True
2356 ssh = True
2358 del override['ssh']
2357 del override['ssh']
2359
2358
2360 # we didn't override our tmpl from **overrides
2359 # we didn't override our tmpl from **overrides
2361 request = get_current_request()
2360 request = get_current_request()
2362 if not uri_tmpl:
2361 if not uri_tmpl:
2363 if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
2362 if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
2364 rc_config = request.call_context.rc_config
2363 rc_config = request.call_context.rc_config
2365 else:
2364 else:
2366 rc_config = SettingsModel().get_all_settings(cache=True)
2365 rc_config = SettingsModel().get_all_settings(cache=True)
2367
2366
2368 if ssh:
2367 if ssh:
2369 uri_tmpl = rc_config.get(
2368 uri_tmpl = rc_config.get(
2370 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2369 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2371
2370
2372 else:
2371 else:
2373 uri_tmpl = rc_config.get(
2372 uri_tmpl = rc_config.get(
2374 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2373 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2375
2374
2376 return get_clone_url(request=request,
2375 return get_clone_url(request=request,
2377 uri_tmpl=uri_tmpl,
2376 uri_tmpl=uri_tmpl,
2378 repo_name=self.repo_name,
2377 repo_name=self.repo_name,
2379 repo_id=self.repo_id,
2378 repo_id=self.repo_id,
2380 repo_type=self.repo_type,
2379 repo_type=self.repo_type,
2381 **override)
2380 **override)
2382
2381
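# Illustrative sketch (not part of the original source): the recognised
# `**override` keys switch which URI template is rendered, e.g.:
#
#   repo.clone_url()                  # http(s) clone URL from rc_config
#   repo.clone_url(ssh=True)          # SSH clone template
#   repo.clone_url(with_id=True)      # DEFAULT_CLONE_URI_ID, addresses repo by id
#   repo.clone_url(uri_tmpl='...')    # caller-supplied template string
#
# Any remaining override keys are passed straight through to get_clone_url().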
2383 def set_state(self, state):
2382 def set_state(self, state):
2384 self.repo_state = state
2383 self.repo_state = state
2385 Session().add(self)
2384 Session().add(self)
2386 #==========================================================================
2385 #==========================================================================
2387 # SCM PROPERTIES
2386 # SCM PROPERTIES
2388 #==========================================================================
2387 #==========================================================================
2389
2388
2390 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False, reference_obj=None):
2389 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False, reference_obj=None):
2391 return get_commit_safe(
2390 return get_commit_safe(
2392 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
2391 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
2393 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
2392 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
2394
2393
2395 def get_changeset(self, rev=None, pre_load=None):
2394 def get_changeset(self, rev=None, pre_load=None):
2396 warnings.warn("Use get_commit", DeprecationWarning)
2395 warnings.warn("Use get_commit", DeprecationWarning)
2397 commit_id = None
2396 commit_id = None
2398 commit_idx = None
2397 commit_idx = None
2399 if isinstance(rev, str):
2398 if isinstance(rev, str):
2400 commit_id = rev
2399 commit_id = rev
2401 else:
2400 else:
2402 commit_idx = rev
2401 commit_idx = rev
2403 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2402 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2404 pre_load=pre_load)
2403 pre_load=pre_load)
2405
2404
2406 def get_landing_commit(self):
2405 def get_landing_commit(self):
2407 """
2406 """
2408 Returns landing commit, or if that doesn't exist returns the tip
2407 Returns landing commit, or if that doesn't exist returns the tip
2409 """
2408 """
2410 _rev_type, _rev = self.landing_rev
2409 _rev_type, _rev = self.landing_rev
2411 commit = self.get_commit(_rev)
2410 commit = self.get_commit(_rev)
2412 if isinstance(commit, EmptyCommit):
2411 if isinstance(commit, EmptyCommit):
2413 return self.get_commit()
2412 return self.get_commit()
2414 return commit
2413 return commit
2415
2414
2416 def flush_commit_cache(self):
2415 def flush_commit_cache(self):
2417 self.update_commit_cache(cs_cache={'raw_id': '0'})
2416 self.update_commit_cache(cs_cache={'raw_id': '0'})
2418 self.update_commit_cache()
2417 self.update_commit_cache()
2419
2418
2420 def update_commit_cache(self, cs_cache=None, config=None):
2419 def update_commit_cache(self, cs_cache=None, config=None):
2421 """
2420 """
2422 Update cache of last commit for repository
2421 Update cache of last commit for repository
2423 cache_keys should be::
2422 cache_keys should be::
2424
2423
2425 source_repo_id
2424 source_repo_id
2426 short_id
2425 short_id
2427 raw_id
2426 raw_id
2428 revision
2427 revision
2429 parents
2428 parents
2430 message
2429 message
2431 date
2430 date
2432 author
2431 author
2433 updated_on
2432 updated_on
2434
2433
2435 """
2434 """
2436 from rhodecode.lib.vcs.backends.base import BaseChangeset
2435 from rhodecode.lib.vcs.backends.base import BaseChangeset
2437 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2436 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2438 empty_date = datetime.datetime.fromtimestamp(0)
2437 empty_date = datetime.datetime.fromtimestamp(0)
2439
2438
2440 if cs_cache is None:
2439 if cs_cache is None:
2441 # use no-cache version here
2440 # use no-cache version here
2442 try:
2441 try:
2443 scm_repo = self.scm_instance(cache=False, config=config)
2442 scm_repo = self.scm_instance(cache=False, config=config)
2444 except VCSError:
2443 except VCSError:
2445 scm_repo = None
2444 scm_repo = None
2446 empty = scm_repo is None or scm_repo.is_empty()
2445 empty = scm_repo is None or scm_repo.is_empty()
2447
2446
2448 if not empty:
2447 if not empty:
2449 cs_cache = scm_repo.get_commit(
2448 cs_cache = scm_repo.get_commit(
2450 pre_load=["author", "date", "message", "parents", "branch"])
2449 pre_load=["author", "date", "message", "parents", "branch"])
2451 else:
2450 else:
2452 cs_cache = EmptyCommit()
2451 cs_cache = EmptyCommit()
2453
2452
2454 if isinstance(cs_cache, BaseChangeset):
2453 if isinstance(cs_cache, BaseChangeset):
2455 cs_cache = cs_cache.__json__()
2454 cs_cache = cs_cache.__json__()
2456
2455
2457 def is_outdated(new_cs_cache):
2456 def is_outdated(new_cs_cache):
2458 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2457 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2459 new_cs_cache['revision'] != self.changeset_cache['revision']):
2458 new_cs_cache['revision'] != self.changeset_cache['revision']):
2460 return True
2459 return True
2461 return False
2460 return False
2462
2461
2463 # check if we maybe already have the latest cached revision
2462 # check if we maybe already have the latest cached revision
2464 if is_outdated(cs_cache) or not self.changeset_cache:
2463 if is_outdated(cs_cache) or not self.changeset_cache:
2465 _current_datetime = datetime.datetime.utcnow()
2464 _current_datetime = datetime.datetime.utcnow()
2466 last_change = cs_cache.get('date') or _current_datetime
2465 last_change = cs_cache.get('date') or _current_datetime
2467 # we check if last update is newer than the new value
2466 # we check if last update is newer than the new value
2468 # if yes, we use the current timestamp instead. Imagine you get
2467 # if yes, we use the current timestamp instead. Imagine you get
2469 # an old commit pushed 1y ago, we'd set the last update to 1y ago.
2468 # an old commit pushed 1y ago, we'd set the last update to 1y ago.
2470 last_change_timestamp = datetime_to_time(last_change)
2469 last_change_timestamp = datetime_to_time(last_change)
2471 current_timestamp = datetime_to_time(last_change)
2470 current_timestamp = datetime_to_time(last_change)
2472 if last_change_timestamp > current_timestamp and not empty:
2471 if last_change_timestamp > current_timestamp and not empty:
2473 cs_cache['date'] = _current_datetime
2472 cs_cache['date'] = _current_datetime
2474
2473
2475 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2474 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2476 cs_cache['updated_on'] = time.time()
2475 cs_cache['updated_on'] = time.time()
2477 self.changeset_cache = cs_cache
2476 self.changeset_cache = cs_cache
2478 self.updated_on = last_change
2477 self.updated_on = last_change
2479 Session().add(self)
2478 Session().add(self)
2480 Session().commit()
2479 Session().commit()
2481
2480
2482 else:
2481 else:
2483 if empty:
2482 if empty:
2484 cs_cache = EmptyCommit().__json__()
2483 cs_cache = EmptyCommit().__json__()
2485 else:
2484 else:
2486 cs_cache = self.changeset_cache
2485 cs_cache = self.changeset_cache
2487
2486
2488 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2487 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2489
2488
2490 cs_cache['updated_on'] = time.time()
2489 cs_cache['updated_on'] = time.time()
2491 self.changeset_cache = cs_cache
2490 self.changeset_cache = cs_cache
2492 self.updated_on = _date_latest
2491 self.updated_on = _date_latest
2493 Session().add(self)
2492 Session().add(self)
2494 Session().commit()
2493 Session().commit()
2495
2494
2496 log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
2495 log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
2497 self.repo_name, cs_cache, _date_latest)
2496 self.repo_name, cs_cache, _date_latest)
2498
2497
2499 @property
2498 @property
2500 def tip(self):
2499 def tip(self):
2501 return self.get_commit('tip')
2500 return self.get_commit('tip')
2502
2501
2503 @property
2502 @property
2504 def author(self):
2503 def author(self):
2505 return self.tip.author
2504 return self.tip.author
2506
2505
2507 @property
2506 @property
2508 def last_change(self):
2507 def last_change(self):
2509 return self.scm_instance().last_change
2508 return self.scm_instance().last_change
2510
2509
2511 def get_comments(self, revisions=None):
2510 def get_comments(self, revisions=None):
2512 """
2511 """
2513 Returns comments for this repository grouped by revisions
2512 Returns comments for this repository grouped by revisions
2514
2513
2515 :param revisions: filter query by revisions only
2514 :param revisions: filter query by revisions only
2516 """
2515 """
2517 cmts = ChangesetComment.query()\
2516 cmts = ChangesetComment.query()\
2518 .filter(ChangesetComment.repo == self)
2517 .filter(ChangesetComment.repo == self)
2519 if revisions:
2518 if revisions:
2520 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2519 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2521 grouped = collections.defaultdict(list)
2520 grouped = collections.defaultdict(list)
2522 for cmt in cmts.all():
2521 for cmt in cmts.all():
2523 grouped[cmt.revision].append(cmt)
2522 grouped[cmt.revision].append(cmt)
2524 return grouped
2523 return grouped
2525
2524
2526 def statuses(self, revisions=None):
2525 def statuses(self, revisions=None):
2527 """
2526 """
2528 Returns statuses for this repository
2527 Returns statuses for this repository
2529
2528
2530 :param revisions: list of revisions to get statuses for
2529 :param revisions: list of revisions to get statuses for
2531 """
2530 """
2532 statuses = ChangesetStatus.query()\
2531 statuses = ChangesetStatus.query()\
2533 .filter(ChangesetStatus.repo == self)\
2532 .filter(ChangesetStatus.repo == self)\
2534 .filter(ChangesetStatus.version == 0)
2533 .filter(ChangesetStatus.version == 0)
2535
2534
2536 if revisions:
2535 if revisions:
2537 # Try doing the filtering in chunks to avoid hitting limits
2536 # Try doing the filtering in chunks to avoid hitting limits
2538 size = 500
2537 size = 500
2539 status_results = []
2538 status_results = []
2540 for chunk in range(0, len(revisions), size):
2539 for chunk in range(0, len(revisions), size):
2541 status_results += statuses.filter(
2540 status_results += statuses.filter(
2542 ChangesetStatus.revision.in_(
2541 ChangesetStatus.revision.in_(
2543 revisions[chunk: chunk+size])
2542 revisions[chunk: chunk+size])
2544 ).all()
2543 ).all()
2545 else:
2544 else:
2546 status_results = statuses.all()
2545 status_results = statuses.all()
2547
2546
2548 grouped = {}
2547 grouped = {}
2549
2548
2550 # maybe we have open new pullrequest without a status?
2549 # maybe we have open new pullrequest without a status?
2551 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2550 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2552 status_lbl = ChangesetStatus.get_status_lbl(stat)
2551 status_lbl = ChangesetStatus.get_status_lbl(stat)
2553 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2552 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2554 for rev in pr.revisions:
2553 for rev in pr.revisions:
2555 pr_id = pr.pull_request_id
2554 pr_id = pr.pull_request_id
2556 pr_repo = pr.target_repo.repo_name
2555 pr_repo = pr.target_repo.repo_name
2557 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2556 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2558
2557
2559 for stat in status_results:
2558 for stat in status_results:
2560 pr_id = pr_repo = None
2559 pr_id = pr_repo = None
2561 if stat.pull_request:
2560 if stat.pull_request:
2562 pr_id = stat.pull_request.pull_request_id
2561 pr_id = stat.pull_request.pull_request_id
2563 pr_repo = stat.pull_request.target_repo.repo_name
2562 pr_repo = stat.pull_request.target_repo.repo_name
2564 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2563 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2565 pr_id, pr_repo]
2564 pr_id, pr_repo]
2566 return grouped
2565 return grouped
2567
2566
2568 # ==========================================================================
2567 # ==========================================================================
2569 # SCM CACHE INSTANCE
2568 # SCM CACHE INSTANCE
2570 # ==========================================================================
2569 # ==========================================================================
2571
2570
2572 def scm_instance(self, **kwargs):
2571 def scm_instance(self, **kwargs):
2573 import rhodecode
2572 import rhodecode
2574
2573
2575 # Passing a config will not hit the cache; currently this is only used
2574 # Passing a config will not hit the cache; currently this is only used
2576 # for repo2dbmapper
2575 # for repo2dbmapper
2577 config = kwargs.pop('config', None)
2576 config = kwargs.pop('config', None)
2578 cache = kwargs.pop('cache', None)
2577 cache = kwargs.pop('cache', None)
2579 vcs_full_cache = kwargs.pop('vcs_full_cache', None)
2578 vcs_full_cache = kwargs.pop('vcs_full_cache', None)
2580 if vcs_full_cache is not None:
2579 if vcs_full_cache is not None:
2581 # allows override global config
2580 # allows override global config
2582 full_cache = vcs_full_cache
2581 full_cache = vcs_full_cache
2583 else:
2582 else:
2584 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2583 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2585 # if cache is NOT defined use default global, else we have full
2584 # if cache is NOT defined use default global, else we have full
2586 # control over cache behaviour
2585 # control over cache behaviour
2587 if cache is None and full_cache and not config:
2586 if cache is None and full_cache and not config:
2588 log.debug('Initializing pure cached instance for %s', self.repo_path)
2587 log.debug('Initializing pure cached instance for %s', self.repo_path)
2589 return self._get_instance_cached()
2588 return self._get_instance_cached()
2590
2589
2591 # cache here is sent to the "vcs server"
2590 # cache here is sent to the "vcs server"
2592 return self._get_instance(cache=bool(cache), config=config)
2591 return self._get_instance(cache=bool(cache), config=config)
2593
2592
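# Illustrative sketch (not part of the original source): the kwargs popped
# above select the code path, e.g.:
#
#   repo.scm_instance()                  # long-term cached instance (when
#                                        # vcs_full_cache is enabled)
#   repo.scm_instance(cache=False)       # fresh instance, cache=False is sent
#                                        # to the vcs backend
#   repo.scm_instance(config=db_config)  # `db_config` is hypothetical; passing a
#                                        # config never uses the long-term cache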
2594 def _get_instance_cached(self):
2593 def _get_instance_cached(self):
2595 from rhodecode.lib import rc_cache
2594 from rhodecode.lib import rc_cache
2596
2595
2597 cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
2596 cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
2598 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2597 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
2599 repo_id=self.repo_id)
2598 repo_id=self.repo_id)
2600 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2599 region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)
2601
2600
2602 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2601 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
2603 def get_instance_cached(repo_id, context_id, _cache_state_uid):
2602 def get_instance_cached(repo_id, context_id, _cache_state_uid):
2604 return self._get_instance(repo_state_uid=_cache_state_uid)
2603 return self._get_instance(repo_state_uid=_cache_state_uid)
2605
2604
2606 # we must use thread scoped cache here,
2605 # we must use thread scoped cache here,
2607 # because each gevent thread needs its own, non-shared connection and cache
2606 # because each gevent thread needs its own, non-shared connection and cache
2608 # we also alter `args` so the cache key is individual for every green thread.
2607 # we also alter `args` so the cache key is individual for every green thread.
2609 inv_context_manager = rc_cache.InvalidationContext(
2608 inv_context_manager = rc_cache.InvalidationContext(
2610 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
2609 uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
2611 thread_scoped=True)
2610 thread_scoped=True)
2612 with inv_context_manager as invalidation_context:
2611 with inv_context_manager as invalidation_context:
2613 cache_state_uid = invalidation_context.cache_data['cache_state_uid']
2612 cache_state_uid = invalidation_context.cache_data['cache_state_uid']
2614 args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid)
2613 args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid)
2615
2614
2616 # re-compute and store cache if we get invalidate signal
2615 # re-compute and store cache if we get invalidate signal
2617 if invalidation_context.should_invalidate():
2616 if invalidation_context.should_invalidate():
2618 instance = get_instance_cached.refresh(*args)
2617 instance = get_instance_cached.refresh(*args)
2619 else:
2618 else:
2620 instance = get_instance_cached(*args)
2619 instance = get_instance_cached(*args)
2621
2620
2622 log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
2621 log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
2623 return instance
2622 return instance
2624
2623
2625 def _get_instance(self, cache=True, config=None, repo_state_uid=None):
2624 def _get_instance(self, cache=True, config=None, repo_state_uid=None):
2626 log.debug('Initializing %s instance `%s` with cache flag set to: %s',
2625 log.debug('Initializing %s instance `%s` with cache flag set to: %s',
2627 self.repo_type, self.repo_path, cache)
2626 self.repo_type, self.repo_path, cache)
2628 config = config or self._config
2627 config = config or self._config
2629 custom_wire = {
2628 custom_wire = {
2630 'cache': cache, # controls the vcs.remote cache
2629 'cache': cache, # controls the vcs.remote cache
2631 'repo_state_uid': repo_state_uid
2630 'repo_state_uid': repo_state_uid
2632 }
2631 }
2633 repo = get_vcs_instance(
2632 repo = get_vcs_instance(
2634 repo_path=safe_str(self.repo_full_path),
2633 repo_path=safe_str(self.repo_full_path),
2635 config=config,
2634 config=config,
2636 with_wire=custom_wire,
2635 with_wire=custom_wire,
2637 create=False,
2636 create=False,
2638 _vcs_alias=self.repo_type)
2637 _vcs_alias=self.repo_type)
2639 if repo is not None:
2638 if repo is not None:
2640 repo.count() # cache rebuild
2639 repo.count() # cache rebuild
2641 return repo
2640 return repo
2642
2641
2643 def get_shadow_repository_path(self, workspace_id):
2642 def get_shadow_repository_path(self, workspace_id):
2644 from rhodecode.lib.vcs.backends.base import BaseRepository
2643 from rhodecode.lib.vcs.backends.base import BaseRepository
2645 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2644 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2646 self.repo_full_path, self.repo_id, workspace_id)
2645 self.repo_full_path, self.repo_id, workspace_id)
2647 return shadow_repo_path
2646 return shadow_repo_path
2648
2647
2649 def __json__(self):
2648 def __json__(self):
2650 return {'landing_rev': self.landing_rev}
2649 return {'landing_rev': self.landing_rev}
2651
2650
2652 def get_dict(self):
2651 def get_dict(self):
2653
2652
2654 # Since we transformed `repo_name` to a hybrid property, we need to
2653 # Since we transformed `repo_name` to a hybrid property, we need to
2655 # keep compatibility with the code which uses `repo_name` field.
2654 # keep compatibility with the code which uses `repo_name` field.
2656
2655
2657 result = super(Repository, self).get_dict()
2656 result = super(Repository, self).get_dict()
2658 result['repo_name'] = result.pop('_repo_name', None)
2657 result['repo_name'] = result.pop('_repo_name', None)
2659 return result
2658 return result
2660
2659
2661
2660
2662 class RepoGroup(Base, BaseModel):
2661 class RepoGroup(Base, BaseModel):
2663 __tablename__ = 'groups'
2662 __tablename__ = 'groups'
2664 __table_args__ = (
2663 __table_args__ = (
2665 UniqueConstraint('group_name', 'group_parent_id'),
2664 UniqueConstraint('group_name', 'group_parent_id'),
2666 base_table_args,
2665 base_table_args,
2667 )
2666 )
2668 __mapper_args__ = {
2667 __mapper_args__ = {
2669 # TODO: this is now deprecated ?!
2668 # TODO: this is now deprecated ?!
2670 # 'order_by': 'group_name'
2669 # 'order_by': 'group_name'
2671 }
2670 }
2672
2671
2673 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2672 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2674
2673
    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True)  # JSON data

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User')
    integrations = relationship('Integration', cascade="all, delete-orphan")

    # no cascade, set NULL
    scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id')

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __str__(self):
        return "<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.group_id, self.group_name)

    @hybrid_property
    def group_name(self):
        return self._group_name

    @group_name.setter
    def group_name(self, value):
        self._group_name = value
        self.group_name_hash = self.hash_repo_group_name(value)

    @classmethod
    def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not changeset_cache_raw:
            dummy['source_repo_id'] = repo_id
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(changeset_cache_raw)
        except TypeError:
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy

    @hybrid_property
    def changeset_cache(self):
        return self._load_changeset_cache('', self._changeset_cache)

    @changeset_cache.setter
    def changeset_cache(self, val):
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @validates('group_parent_id')
    def validate_group_parent_id(self, key, val):
        """
        Check cycle references for a parent group to self
        """
        if self.group_id and val:
            assert val != self.group_id

        return val

    @hybrid_property
    def description_safe(self):
        from rhodecode.lib import helpers as h
        return h.escape(self.group_description)

    @classmethod
    def hash_repo_group_name(cls, repo_group_name):
        val = remove_formatting(repo_group_name)
        val = safe_str(val).lower()
        chars = []
        for c in val:
            if c not in string.ascii_letters:
                c = str(ord(c))
            chars.append(c)

        return ''.join(chars)

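    # Illustrative sketch of the hashing above (an assumption, not part of the
    # model): ASCII letters are kept and every other character is replaced by
    # its ordinal, so a hypothetical group name 'web/projects-1' would map to:
    #
    #   RepoGroup.hash_repo_group_name('web/projects-1')
    #   # -> 'web47projects4549'
    #
    # assuming remove_formatting() leaves plain ASCII input unchanged.
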
    @classmethod
    def _generate_choice(cls, repo_group):
        from webhelpers2.html import literal as _literal
        _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
        return repo_group.group_id, _name(repo_group.full_path_splitted)

    @classmethod
    def groups_choices(cls, groups=None, show_empty_group=True):
        if not groups:
            groups = cls.query().all()

        repo_groups = []
        if show_empty_group:
            repo_groups = [(-1, u'-- %s --' % _('No parent'))]

        repo_groups.extend([cls._generate_choice(x) for x in groups])

        repo_groups = sorted(
            repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
        return repo_groups

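    # Illustrative sketch (assumed data, not part of the model): for groups
    # named 'docs' and 'web/projects', groups_choices() would return roughly:
    #
    #   RepoGroup.groups_choices()
    #   # -> [(-1, u'-- No parent --'),
    #   #     (4, u'docs'),
    #   #     (7, u'web/projects')]
    #
    # i.e. (group_id, full path joined by CHOICES_SEPARATOR), sorted by the
    # first path segment, suitable for feeding a select2 widget.
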
    @classmethod
    def url_sep(cls):
        return URL_SEP

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        if case_insensitive:
            gr = cls.query().filter(func.lower(cls.group_name)
                                    == func.lower(group_name))
        else:
            gr = cls.query().filter(cls.group_name == group_name)
        if cache:
            name_key = _hash_key(group_name)
            gr = gr.options(
                FromCache("sql_cache_short", "get_group_%s" % name_key))
        return gr.scalar()

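    # Illustrative usage (hypothetical group name): lookups go through the
    # regular query unless cache=True, which wraps the query in the
    # short-lived "sql_cache_short" region keyed by the hashed name.
    #
    #   gr = RepoGroup.get_by_group_name('web/projects', cache=True)
    #   gr_ci = RepoGroup.get_by_group_name('WEB/Projects', case_insensitive=True)
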
    @classmethod
    def get_user_personal_repo_group(cls, user_id):
        user = User.get(user_id)
        if user.username == User.DEFAULT_USER:
            return None

        return cls.query()\
            .filter(cls.personal == true()) \
            .filter(cls.user == user) \
            .order_by(cls.group_id.asc()) \
            .first()

    @classmethod
    def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
                            case_insensitive=True):
        q = RepoGroup.query()

        if not isinstance(user_id, Optional):
            q = q.filter(RepoGroup.user_id == user_id)

        if not isinstance(group_id, Optional):
            q = q.filter(RepoGroup.group_parent_id == group_id)

        if case_insensitive:
            q = q.order_by(func.lower(RepoGroup.group_name))
        else:
            q = q.order_by(RepoGroup.group_name)
        return q.all()

    @property
    def parents(self, parents_recursion_limit=10):
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this prevents accidental infinite loops
                log.error('more than %s parents found for group %s, stopping '
                          'recursive parent fetching', parents_recursion_limit, self)
                break

            groups.insert(0, gr)
        return groups

    @property
    def last_commit_cache_update_diff(self):
        return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)

    @classmethod
    def _load_commit_change(cls, last_commit_cache):
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)
        date_latest = last_commit_cache.get('date', empty_date)
        try:
            return parse_datetime(date_latest)
        except Exception:
            return empty_date

    @property
    def last_commit_change(self):
        return self._load_commit_change(self.changeset_cache)

    @property
    def last_db_change(self):
        return self.updated_on

    @property
    def children(self):
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    @property
    def name(self):
        return self.group_name.split(RepoGroup.url_sep())[-1]

    @property
    def full_path(self):
        return self.group_name

    @property
    def full_path_splitted(self):
        return self.group_name.split(RepoGroup.url_sep())

    @property
    def repositories(self):
        return Repository.query()\
            .filter(Repository.group == self)\
            .order_by(Repository.repo_name)

    @property
    def repositories_recursive_count(self):
        cnt = self.repositories.count()

        def children_count(group):
            cnt = 0
            for child in group.children:
                cnt += child.repositories.count()
                cnt += children_count(child)
            return cnt

        return cnt + children_count(self)

    def _recursive_objects(self, include_repos=True, include_groups=True):
        all_ = []

        def _get_members(root_gr):
            if include_repos:
                for r in root_gr.repositories:
                    all_.append(r)
            childs = root_gr.children.all()
            if childs:
                for gr in childs:
                    if include_groups:
                        all_.append(gr)
                    _get_members(gr)

        root_group = []
        if include_groups:
            root_group = [self]

        _get_members(self)
        return root_group + all_

    def recursive_groups_and_repos(self):
        """
        Recursively return all groups, with the repositories in those groups
        """
        return self._recursive_objects()

    def recursive_groups(self):
        """
        Returns all child groups of this group, including children of children
        """
        return self._recursive_objects(include_repos=False)

    def recursive_repos(self):
        """
        Returns all child repositories of this group
        """
        return self._recursive_objects(include_groups=False)

    def get_new_name(self, group_name):
        """
        returns new full group name based on parent and new name

        :param group_name:
        """
        path_prefix = (self.parent_group.full_path_splitted if
                       self.parent_group else [])
        return RepoGroup.url_sep().join(path_prefix + [group_name])

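    # Illustrative sketch (hypothetical names): for a group whose parent is
    # 'web', renaming it to 'services' yields the full path 'web/services',
    # while a top-level group simply returns the new name unchanged.
    #
    #   group.get_new_name('services')   # -> 'web/services'
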
    def update_commit_cache(self, config=None):
        """
        Update cache of last commit for newest repository inside this repository group.
        cache_keys should be::

            source_repo_id
            short_id
            raw_id
            revision
            parents
            message
            date
            author

        """
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)

        def repo_groups_and_repos(root_gr):
            for _repo in root_gr.repositories:
                yield _repo
            for child_group in root_gr.children.all():
                yield child_group

        latest_repo_cs_cache = {}
        for obj in repo_groups_and_repos(self):
            repo_cs_cache = obj.changeset_cache
            date_latest = latest_repo_cs_cache.get('date', empty_date)
            date_current = repo_cs_cache.get('date', empty_date)
            current_timestamp = datetime_to_time(parse_datetime(date_latest))
            if current_timestamp < datetime_to_time(parse_datetime(date_current)):
                latest_repo_cs_cache = repo_cs_cache
                if hasattr(obj, 'repo_id'):
                    latest_repo_cs_cache['source_repo_id'] = obj.repo_id
                else:
                    latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')

        _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)

        latest_repo_cs_cache['updated_on'] = time.time()
        self.changeset_cache = latest_repo_cs_cache
        self.updated_on = _date_latest
        Session().add(self)
        Session().commit()

        log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
                  self.group_name, latest_repo_cs_cache, _date_latest)

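    # Rough usage sketch (illustrative; the calling context is an assumption,
    # not documented here): walking the group picks the newest cached commit
    # date among its repositories and direct child groups, then persists it.
    #
    #   group.update_commit_cache()
    #   group.last_commit_change    # newest commit date found below this group
    #
    # Note that this commits the SQLAlchemy session as a side effect.
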
    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for repository groups
        """
        _admin_perm = 'group.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserRepoGroupToPerm.group),
                      joinedload(UserRepoGroupToPerm.user),
                      joinedload(UserRepoGroupToPerm.permission),)

        # get owners, admins and their permissions. We rewrite the SQLAlchemy
        # objects into plain AttributeDicts because the SQLAlchemy session
        # keeps a global reference and changing one object would propagate to
        # all others; without this, marking an admin who is also the owner
        # with admin_row would change both records.
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # sort the perm rows: the 'default' user first, then by
        # admin/write/read/none permission, alphabetically within each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows

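    # Illustrative sketch of the returned rows (the field values are assumed):
    # each entry is an AttributeDict of user fields plus a `permission`
    # attribute, with optional `owner_row`, `admin_row` or `duplicate_perm`
    # markers, e.g.:
    #
    #   group.permissions(with_admins=True, with_owner=True)
    #   # -> [AttributeDict(username='admin', permission='group.admin', admin_row=True),
    #   #     AttributeDict(username='bob', permission='group.admin', owner_row=True),
    #   #     AttributeDict(username='alice', permission='group.read'), ...]
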
    def permission_user_groups(self, with_members=False):
        q = UserGroupRepoGroupToPerm.query()\
            .filter(UserGroupRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
                      joinedload(UserGroupRepoGroupToPerm.users_group),
                      joinedload(UserGroupRepoGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.users_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.users_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def get_api_data(self):
        """
        Common function for generating api data

        """
        group = self
        data = {
            'group_id': group.group_id,
            'group_name': group.group_name,
            'group_description': group.description_safe,
            'parent_group': group.parent_group.group_name if group.parent_group else None,
            'repositories': [x.repo_name for x in group.repositories],
            'owner': group.user.username,
        }
        return data

    def get_dict(self):
        # Since we transformed `group_name` to a hybrid property, we need to
        # keep compatibility with the code which uses `group_name` field.
        result = super(RepoGroup, self).get_dict()
        result['group_name'] = result.pop('_group_name', None)
        return result


class Permission(Base, BaseModel):
    __tablename__ = 'permissions'
    __table_args__ = (
        Index('p_perm_name_idx', 'permission_name'),
        base_table_args,
    )

    PERMS = [
        ('hg.admin', _('RhodeCode Super Administrator')),

        ('repository.none', _('Repository no access')),
        ('repository.read', _('Repository read access')),
        ('repository.write', _('Repository write access')),
        ('repository.admin', _('Repository admin access')),

        ('group.none', _('Repository group no access')),
        ('group.read', _('Repository group read access')),
        ('group.write', _('Repository group write access')),
        ('group.admin', _('Repository group admin access')),

        ('usergroup.none', _('User group no access')),
        ('usergroup.read', _('User group read access')),
        ('usergroup.write', _('User group write access')),
        ('usergroup.admin', _('User group admin access')),

        ('branch.none', _('Branch no permissions')),
        ('branch.merge', _('Branch access by web merge')),
        ('branch.push', _('Branch access by push')),
        ('branch.push_force', _('Branch access by push with force')),

        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
        ('hg.repogroup.create.true', _('Repository Group creation enabled')),

        ('hg.usergroup.create.false', _('User Group creation disabled')),
        ('hg.usergroup.create.true', _('User Group creation enabled')),

        ('hg.create.none', _('Repository creation disabled')),
        ('hg.create.repository', _('Repository creation enabled')),
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),

        ('hg.fork.none', _('Repository forking disabled')),
        ('hg.fork.repository', _('Repository forking enabled')),

        ('hg.register.none', _('Registration disabled')),
        ('hg.register.manual_activate', _('User Registration with manual account activation')),
        ('hg.register.auto_activate', _('User Registration with automatic account activation')),

        ('hg.password_reset.enabled', _('Password reset enabled')),
        ('hg.password_reset.hidden', _('Password reset hidden')),
        ('hg.password_reset.disabled', _('Password reset disabled')),

        ('hg.extern_activate.manual', _('Manual activation of external account')),
        ('hg.extern_activate.auto', _('Automatic activation of external account')),

        ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
        ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
    ]

    # definition of system default permissions for DEFAULT user, created on
    # system setup
    DEFAULT_USER_PERMISSIONS = [
        # object perms
        'repository.read',
        'group.read',
        'usergroup.read',
        # branch; for backward compat we keep the same value as before, i.e. force push
        'branch.push_force',
        # global
        'hg.create.repository',
        'hg.repogroup.create.false',
        'hg.usergroup.create.false',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.password_reset.enabled',
        'hg.extern_activate.auto',
        'hg.inherit_default_perms.true',
    ]

    # Weight defines which permissions are more important;
    # the higher the number, the more important the permission.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,

        'branch.none': 0,
        'branch.merge': 1,
        'branch.push': 3,
        'branch.push_force': 4,

        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,

        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    }

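    # Illustrative sketch (not part of the model): the weights make permission
    # strings comparable, so "which grant is stronger" becomes a dict lookup:
    #
    #   Permission.PERM_WEIGHTS['repository.write'] > Permission.PERM_WEIGHTS['repository.read']
    #   # -> True (3 > 1), so 'repository.write' wins when both apply
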
    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)

    def __str__(self):
        return "<%s('%s:%s')>" % (
            self.__class__.__name__, self.permission_id, self.permission_name
        )

    @classmethod
    def get_by_key(cls, key):
        return cls.query().filter(cls.permission_name == key).scalar()

    @classmethod
    def get_default_repo_perms(cls, user_id, repo_id=None):
        q = Session().query(UserRepoToPerm, Repository, Permission)\
            .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
            .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
            .filter(UserRepoToPerm.user_id == user_id)
        if repo_id:
            q = q.filter(UserRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_repo_branch_perms(cls, user_id, repo_id=None):
        q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
            .join(
                Permission,
                UserToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserRepoToPerm,
                UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
            .filter(UserRepoToPerm.user_id == user_id)

        if repo_id:
            q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserToRepoBranchPermission.rule_order).all()

    @classmethod
    def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
        q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
            .join(
                Permission,
                UserGroupRepoToPerm.permission_id == Permission.permission_id)\
            .join(
                Repository,
                UserGroupRepoToPerm.repository_id == Repository.repo_id)\
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_id:
            q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
        q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
            .join(
                Permission,
                UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserGroupRepoToPerm,
                UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())

        if repo_id:
            q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()

    @classmethod
    def get_default_group_perms(cls, user_id, repo_group_id=None):
        q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserRepoGroupToPerm.permission_id == Permission.permission_id)\
            .join(
                RepoGroup,
                UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .filter(UserRepoGroupToPerm.user_id == user_id)
        if repo_group_id:
            q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_group_perms_from_user_group(
            cls, user_id, repo_group_id=None):
        q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserGroupRepoGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                RepoGroup,
                UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .join(
                UserGroup,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_group_id:
            q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms(cls, user_id, user_group_id=None):
        q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
            .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
            .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
            .filter(UserUserGroupToPerm.user_id == user_id)
        if user_group_id:
            q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms_from_user_group(
            cls, user_id, user_group_id=None):
        TargetUserGroup = aliased(UserGroup, name='target_user_group')
        q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
            .join(
                Permission,
                UserGroupUserGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                TargetUserGroup,
                UserGroupUserGroupToPerm.target_user_group_id ==
                TargetUserGroup.users_group_id)\
            .join(
                UserGroup,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if user_group_id:
            q = q.filter(
                UserGroupUserGroupToPerm.user_group_id == user_group_id)

        return q.all()


class UserRepoToPerm(Base, BaseModel):
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        base_table_args
    )

    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined')

    @classmethod
    def create(cls, user, repository, permission):
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __str__(self):
        return f'<{self.user} => {self.repository} >'


class UserUserGroupToPerm(Base, BaseModel):
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        base_table_args
    )

    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    user_group = relationship('UserGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        n = cls()
        n.user = user
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __str__(self):
        return f'<{self.user} => {self.user_group} >'


class UserToPerm(Base, BaseModel):
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        base_table_args
    )

    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    permission = relationship('Permission', lazy='joined')

    def __str__(self):
        return f'<{self.user} => {self.permission} >'


class UserGroupRepoToPerm(Base, BaseModel):
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')
    user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')

    @classmethod
    def create(cls, users_group, repository, permission):
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __str__(self):
        return '<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)


class UserGroupUserGroupToPerm(Base, BaseModel):
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        CheckConstraint('target_user_group_id != user_group_id'),
        base_table_args
    )

    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __str__(self):
        return '<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)


3534 class UserGroupToPerm(Base, BaseModel):
3533 class UserGroupToPerm(Base, BaseModel):
3535 __tablename__ = 'users_group_to_perm'
3534 __tablename__ = 'users_group_to_perm'
3536 __table_args__ = (
3535 __table_args__ = (
3537 UniqueConstraint('users_group_id', 'permission_id',),
3536 UniqueConstraint('users_group_id', 'permission_id',),
3538 base_table_args
3537 base_table_args
3539 )
3538 )
3540
3539
3541 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3540 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3542 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3541 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3543 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3542 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3544
3543
3545 users_group = relationship('UserGroup')
3544 users_group = relationship('UserGroup')
3546 permission = relationship('Permission')
3545 permission = relationship('Permission')
3547
3546
3548
3547
3549 class UserRepoGroupToPerm(Base, BaseModel):
3548 class UserRepoGroupToPerm(Base, BaseModel):
3550 __tablename__ = 'user_repo_group_to_perm'
3549 __tablename__ = 'user_repo_group_to_perm'
3551 __table_args__ = (
3550 __table_args__ = (
3552 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3551 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3553 base_table_args
3552 base_table_args
3554 )
3553 )
3555
3554
3556 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3555 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3557 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3556 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3558 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3557 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3559 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3558 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3560
3559
3561 user = relationship('User')
3560 user = relationship('User')
3562 group = relationship('RepoGroup')
3561 group = relationship('RepoGroup')
3563 permission = relationship('Permission')
3562 permission = relationship('Permission')
3564
3563
3565 @classmethod
3564 @classmethod
3566 def create(cls, user, repository_group, permission):
3565 def create(cls, user, repository_group, permission):
3567 n = cls()
3566 n = cls()
3568 n.user = user
3567 n.user = user
3569 n.group = repository_group
3568 n.group = repository_group
3570 n.permission = permission
3569 n.permission = permission
3571 Session().add(n)
3570 Session().add(n)
3572 return n
3571 return n
3573
3572
3574
3573
class UserGroupRepoGroupToPerm(Base, BaseModel):
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'group_id'),
        base_table_args
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        n = cls()
        n.users_group = user_group
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __str__(self):
        return '<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)


class Statistics(Base, BaseModel):
    __tablename__ = 'statistics'
    __table_args__ = (
        base_table_args
    )

    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data

    repository = relationship('Repository', single_parent=True)

class UserFollowing(Base, BaseModel):
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        base_table_args
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        return cls.query().filter(cls.follows_repo_id == repo_id)

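# Illustrative note: get_repo_followers() above returns an unevaluated
# SQLAlchemy query, so callers can compose it further. A hypothetical example,
# assuming `repo_id` holds an existing repository id:
#
#     follower_count = UserFollowing.get_repo_followers(repo_id).count()
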
class CacheKey(Base, BaseModel):
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        Index('cache_args_idx', 'cache_args'),
        base_table_args,
    )

    CACHE_TYPE_FEED = 'FEED'

    # namespaces used to register process/thread aware caches
    REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args='', cache_state_uid=None):
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False
        # first key should be same for all entries, since all workers should share it
        self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()

    def __str__(self):
        return "<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.mako.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def generate_new_state_uid(cls, based_on=None):
        if based_on:
            return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
        else:
            return str(uuid.uuid4())

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def set_invalidate(cls, cache_uid, delete=False):
        """
        Mark all caches of a repo as invalid in the database.
        """

        try:
            qry = Session().query(cls).filter(cls.cache_args == cache_uid)
            if delete:
                qry.delete()
                log.debug('cache objects deleted for cache args %s',
                          safe_str(cache_uid))
            else:
                qry.update({"cache_active": False,
                            "cache_state_uid": cls.generate_new_state_uid()})
                log.debug('cache objects marked as invalid for cache args %s',
                          safe_str(cache_uid))

            Session().commit()
        except Exception:
            log.exception(
                'Cache key invalidation failed for cache args %s',
                safe_str(cache_uid))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None

    @classmethod
    def get_namespace_map(cls, namespace):
        return {
            x.cache_key: x
            for x in cls.query().filter(cls.cache_args == namespace)}

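# Illustrative sketch of how the invalidation entries above are typically
# driven (the local variable names are assumptions, not part of this module).
# A repo-scoped namespace is built from REPO_INVALIDATION_NAMESPACE, then every
# key registered under it is either flagged stale or dropped:
#
#     invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo_id)
#     CacheKey.set_invalidate(invalidation_namespace)               # mark stale, rotate state uid
#     CacheKey.set_invalidate(invalidation_namespace, delete=True)  # or delete the rows entirely
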
class ChangesetComment(Base, BaseModel):
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        base_table_args,
    )

    COMMENT_OUTDATED = u'comment_outdated'
    COMMENT_TYPE_NOTE = u'note'
    COMMENT_TYPE_TODO = u'todo'
    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]

    OP_IMMUTABLE = u'immutable'
    OP_CHANGEABLE = u'changeable'

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)
    immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)
    draft = Column('draft', Boolean(), nullable=True, default=False)

    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)

    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
    resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')

    author = relationship('User', lazy='select')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='select')
    pull_request = relationship('PullRequest', lazy='select')
    pull_request_version = relationship('PullRequestVersion', lazy='select')
    history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='select', order_by='ChangesetCommentHistory.version')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns users associated with this ChangesetComment, i.e. those
        who actually commented.

        :param cls:
        :param revision:
        """
        q = Session().query(User)\
            .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    @classmethod
    def get_index_from_version(cls, pr_version, versions=None, num_versions=None):

        if versions is not None:
            num_versions = [x.pull_request_version_id for x in versions]

        num_versions = num_versions or []
        try:
            return num_versions.index(pr_version) + 1
        except (IndexError, ValueError):
            return

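    # Worked example (illustrative only): if a pull request has version rows
    # with ids [11, 15, 20], then get_index_from_version(15, num_versions=[11, 15, 20])
    # returns 2 (a 1-based index), while an id that is not in the list returns
    # None via the except branch above.
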
    @property
    def outdated(self):
        return self.display_state == self.COMMENT_OUTDATED

    @property
    def outdated_js(self):
        return json.dumps(self.display_state == self.COMMENT_OUTDATED)

    @property
    def immutable(self):
        return self.immutable_state == self.OP_IMMUTABLE

    def outdated_at_version(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        def version_check():
            return self.pull_request_version_id and self.pull_request_version_id != version

        if self.is_inline:
            return self.outdated and version_check()
        else:
            # general comments don't have .outdated set, also latest don't have a version
            return version_check()

    def outdated_at_version_js(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        return json.dumps(self.outdated_at_version(version))

    def older_than_version(self, version):
        """
        Checks if the comment was made on an earlier version than the given one
        """
        if version is None:
            return self.pull_request_version != version

        return self.pull_request_version < version

    def older_than_version_js(self, version):
        """
        Checks if the comment was made on an earlier version than the given one
        """
        return json.dumps(self.older_than_version(version))

    @property
    def commit_id(self):
        """New style naming to stop using .revision"""
        return self.revision

    @property
    def resolved(self):
        return self.resolved_by[0] if self.resolved_by else None

    @property
    def is_todo(self):
        return self.comment_type == self.COMMENT_TYPE_TODO

    @property
    def is_inline(self):
        if self.line_no and self.f_path:
            return True
        return False

    @property
    def last_version(self):
        version = 0
        if self.history:
            version = self.history[-1].version
        return version

    def get_index_version(self, versions):
        return self.get_index_from_version(
            self.pull_request_version_id, versions)

    @property
    def review_status(self):
        if self.status_change:
            return self.status_change[0].status

    @property
    def review_status_lbl(self):
        if self.status_change:
            return self.status_change[0].status_lbl

    def __str__(self):
        if self.comment_id:
            return f'<DB:Comment #{self.comment_id}>'
        else:
            return f'<DB:Comment at {id(self)!r}>'

    def get_api_data(self):
        comment = self

        data = {
            'comment_id': comment.comment_id,
            'comment_type': comment.comment_type,
            'comment_text': comment.text,
            'comment_status': comment.status_change,
            'comment_f_path': comment.f_path,
            'comment_lineno': comment.line_no,
            'comment_author': comment.author,
            'comment_created_on': comment.created_on,
            'comment_resolved_by': self.resolved,
            'comment_commit_id': comment.revision,
            'comment_pull_request_id': comment.pull_request_id,
            'comment_last_version': self.last_version
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data

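# Illustrative note: __json__()/get_api_data() above build the dictionary used
# when a comment is serialized. Hypothetical usage, assuming `comment` is a
# loaded ChangesetComment instance:
#
#     payload = comment.get_api_data()
#     payload['comment_type']   # 'note' or 'todo'
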
class ChangesetCommentHistory(Base, BaseModel):
    __tablename__ = 'changeset_comments_history'
    __table_args__ = (
        Index('cch_comment_id_idx', 'comment_id'),
        base_table_args,
    )

    comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True)
    comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
    version = Column("version", Integer(), nullable=False, default=0)
    created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    deleted = Column('deleted', Boolean(), default=False)

    author = relationship('User', lazy='joined')
    comment = relationship('ChangesetComment', cascade="all, delete")

    @classmethod
    def get_version(cls, comment_id):
        q = Session().query(ChangesetCommentHistory).filter(
            ChangesetCommentHistory.comment_id == comment_id).order_by(ChangesetCommentHistory.version.desc())
        if q.count() == 0:
            return 1
        elif q.count() >= q[0].version:
            return q.count() + 1
        else:
            return q[0].version + 1

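# Worked example (illustrative only) for get_version() above: with no history
# rows the next version is 1; with 3 rows whose highest stored version is 3 it
# returns 4 (count + 1); if versions were skipped so that the highest stored
# version exceeds the row count, it returns that highest version + 1 instead.
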
class ChangesetStatus(Base, BaseModel):
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        base_table_args
    )

    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'
    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='select')
    repo = relationship('Repository', lazy='select')
    comment = relationship('ChangesetComment', lazy='select')
    pull_request = relationship('PullRequest', lazy='select')

    def __str__(self):
        return f"<{self.__class__.__name__}('{self.status}[v{self.version}]:{self.author}')>"

    @classmethod
    def get_status_lbl(cls, value):
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        return ChangesetStatus.get_status_lbl(self.status)

    def get_api_data(self):
        status = self
        data = {
            'status_id': status.changeset_status_id,
            'status': status.status,
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data

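# Illustrative example for the status helpers above:
#
#     ChangesetStatus.get_status_lbl(ChangesetStatus.STATUS_APPROVED)
#     # -> "Approved" (translated string in the active locale)
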
class _SetState(object):
    """
    Context manager that changes the pull request state for sensitive
    operations such as a pull request update or merge.
    """

    def __init__(self, pull_request, pr_state, back_state=None):
        self._pr = pull_request
        self._org_state = back_state or pull_request.pull_request_state
        self._pr_state = pr_state
        self._current_state = None

    def __enter__(self):
        log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
                  self._pr, self._pr_state)
        self.set_pr_state(self._pr_state)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_val is not None or exc_type is not None:
            # log the full traceback of the exception that aborted the state change
            log.error(''.join(traceback.format_exception(exc_type, exc_val, exc_tb)))
            return None

        self.set_pr_state(self._org_state)
        log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
                  self._pr, self._org_state)

    @property
    def state(self):
        return self._current_state

    def set_pr_state(self, pr_state):
        try:
            self._pr.pull_request_state = pr_state
            Session().add(self._pr)
            Session().commit()
            self._current_state = pr_state
        except Exception:
            log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
            raise

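# Illustrative sketch of the context manager above (hypothetical usage; the
# `pull_request` variable is an assumption, not something defined here):
#
#     with _SetState(pull_request, _PullRequestBase.STATE_UPDATING):
#         ...  # perform the sensitive update
#
# On a clean exit the original state is restored; if the block raises, the
# traceback is logged, the temporary state is left in place and the exception
# propagates.
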
class _PullRequestBase(BaseModel):
    """
    Common attributes of pull request and version entries.
    """

    # .status values
    STATUS_NEW = u'new'
    STATUS_OPEN = u'open'
    STATUS_CLOSED = u'closed'

    # available states
    STATE_CREATING = u'creating'
    STATE_UPDATING = u'updating'
    STATE_MERGING = u'merging'
    STATE_CREATED = u'created'

    title = Column('title', Unicode(255), nullable=True)
    description = Column(
        'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
        nullable=True)
    description_renderer = Column('description_renderer', Unicode(64), nullable=True)

    # new/open/closed status of pull request (not approve/reject/etc)
    status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)

    pull_request_state = Column("pull_request_state", String(255), nullable=True)

    @declared_attr
    def user_id(cls):
        return Column(
            "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
            unique=None)

    # 500 revisions max
    _revisions = Column(
        'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))

    common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)

    @declared_attr
    def source_repo_id(cls):
        # TODO: dan: rename column to source_repo_id
        return Column(
            'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    _source_ref = Column('org_ref', Unicode(255), nullable=False)

    @hybrid_property
    def source_ref(self):
        return self._source_ref

    @source_ref.setter
    def source_ref(self, val):
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._source_ref = safe_str(val)

    _target_ref = Column('other_ref', Unicode(255), nullable=False)

    @hybrid_property
    def target_ref(self):
        return self._target_ref

    @target_ref.setter
    def target_ref(self, val):
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._target_ref = safe_str(val)

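    # Illustrative note on the X:Y:Z reference format enforced by the two
    # setters above: the stored string is "<type>:<name>:<commit_id>", for
    # example (values are made up) 'branch:default:1e4f1c2...'; any value with
    # more or fewer than three colon-separated parts raises ValueError.
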
    @declared_attr
    def target_repo_id(cls):
        # TODO: dan: rename column to target_repo_id
        return Column(
            'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)

    # TODO: dan: rename column to last_merge_source_rev
    _last_merge_source_rev = Column(
        'last_merge_org_rev', String(40), nullable=True)
    # TODO: dan: rename column to last_merge_target_rev
    _last_merge_target_rev = Column(
        'last_merge_other_rev', String(40), nullable=True)
    _last_merge_status = Column('merge_status', Integer(), nullable=True)
    last_merge_metadata = Column(
        'last_merge_metadata', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    merge_rev = Column('merge_rev', String(40), nullable=True)

    reviewer_data = Column(
        'reviewer_data_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    @property
    def reviewer_data_json(self):
        return json.dumps(self.reviewer_data)

    @property
    def last_merge_metadata_parsed(self):
        metadata = {}
        if not self.last_merge_metadata:
            return metadata

        if hasattr(self.last_merge_metadata, 'de_coerce'):
            for k, v in self.last_merge_metadata.de_coerce().items():
                if k in ['target_ref', 'source_ref']:
                    metadata[k] = Reference(v['type'], v['name'], v['commit_id'])
                else:
                    if hasattr(v, 'de_coerce'):
                        metadata[k] = v.de_coerce()
                    else:
                        metadata[k] = v
        return metadata

    @property
    def work_in_progress(self):
        """checks if pull request is work in progress by checking the title"""
        title = self.title.upper()
        if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
            return True
        return False

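    # Illustrative examples for work_in_progress above (titles are made up):
    # "WIP: add feature", "[WIP] add feature" and "WIP add feature" all match
    # the regex (matching is done on the upper-cased title), while
    # "Add feature (WIP)" does not, because the marker must be at the start.
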
    @property
    def title_safe(self):
        return self.title\
            .replace('{', '{{')\
            .replace('}', '}}')

    @hybrid_property
    def description_safe(self):
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @hybrid_property
    def revisions(self):
        return self._revisions.split(':') if self._revisions else []

    @revisions.setter
    def revisions(self, val):
        self._revisions = u':'.join(val)

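    # Illustrative note: the revisions hybrid property above stores the commit
    # ids as a single colon-joined string in the `revisions` column, e.g.
    # (hypothetical values) setting ['aaa', 'bbb'] persists 'aaa:bbb' and
    # reading it back yields ['aaa', 'bbb'] again.
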
4232 @hybrid_property
4231 @hybrid_property
4233 def last_merge_status(self):
4232 def last_merge_status(self):
4234 return safe_int(self._last_merge_status)
4233 return safe_int(self._last_merge_status)
4235
4234
4236 @last_merge_status.setter
4235 @last_merge_status.setter
4237 def last_merge_status(self, val):
4236 def last_merge_status(self, val):
4238 self._last_merge_status = val
4237 self._last_merge_status = val
4239
4238
4240 @declared_attr
4239 @declared_attr
4241 def author(cls):
4240 def author(cls):
4242 return relationship('User', lazy='joined')
4241 return relationship('User', lazy='joined')
4243
4242
4244 @declared_attr
4243 @declared_attr
4245 def source_repo(cls):
4244 def source_repo(cls):
4246 return relationship(
4245 return relationship(
4247 'Repository',
4246 'Repository',
4248 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
4247 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
4249
4248
4250 @property
4249 @property
4251 def source_ref_parts(self):
4250 def source_ref_parts(self):
4252 return self.unicode_to_reference(self.source_ref)
4251 return self.unicode_to_reference(self.source_ref)
4253
4252
4254 @declared_attr
4253 @declared_attr
4255 def target_repo(cls):
4254 def target_repo(cls):
4256 return relationship(
4255 return relationship(
4257 'Repository',
4256 'Repository',
4258 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
4257 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
4259
4258
4260 @property
4259 @property
4261 def target_ref_parts(self):
4260 def target_ref_parts(self):
4262 return self.unicode_to_reference(self.target_ref)
4261 return self.unicode_to_reference(self.target_ref)
4263
4262
4264 @property
4263 @property
4265 def shadow_merge_ref(self):
4264 def shadow_merge_ref(self):
4266 return self.unicode_to_reference(self._shadow_merge_ref)
4265 return self.unicode_to_reference(self._shadow_merge_ref)
4267
4266
4268 @shadow_merge_ref.setter
4267 @shadow_merge_ref.setter
4269 def shadow_merge_ref(self, ref):
4268 def shadow_merge_ref(self, ref):
4270 self._shadow_merge_ref = self.reference_to_unicode(ref)
4269 self._shadow_merge_ref = self.reference_to_unicode(ref)
4271
4270
4272 @staticmethod
4271 @staticmethod
4273 def unicode_to_reference(raw):
4272 def unicode_to_reference(raw):
4274 return unicode_to_reference(raw)
4273 return unicode_to_reference(raw)
4275
4274
4276 @staticmethod
4275 @staticmethod
4277 def reference_to_unicode(ref):
4276 def reference_to_unicode(ref):
4278 return reference_to_unicode(ref)
4277 return reference_to_unicode(ref)
4279
4278
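    # Illustrative sketch (an assumption, not shown in this class): the `*_ref`
    # columns hold a flat string that `unicode_to_reference()` parses into a
    # Reference exposing the `type`, `name` and `commit_id` attributes used by
    # `get_api_data()` below, e.g. assuming the usual 'type:name:commit_id'
    # encoding:
    #
    #   ref = PullRequest.unicode_to_reference('branch:default:deadbeef')
    #   # -> ref.type == 'branch', ref.name == 'default', ref.commit_id == 'deadbeef'
    #   # and reference_to_unicode(ref) gives back the original string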
4280 def get_api_data(self, with_merge_state=True):
4279 def get_api_data(self, with_merge_state=True):
4281 from rhodecode.model.pull_request import PullRequestModel
4280 from rhodecode.model.pull_request import PullRequestModel
4282
4281
4283 pull_request = self
4282 pull_request = self
4284 if with_merge_state:
4283 if with_merge_state:
4285 merge_response, merge_status, msg = \
4284 merge_response, merge_status, msg = \
4286 PullRequestModel().merge_status(pull_request)
4285 PullRequestModel().merge_status(pull_request)
4287 merge_state = {
4286 merge_state = {
4288 'status': merge_status,
4287 'status': merge_status,
4289 'message': safe_str(msg),
4288 'message': safe_str(msg),
4290 }
4289 }
4291 else:
4290 else:
4292 merge_state = {'status': 'not_available',
4291 merge_state = {'status': 'not_available',
4293 'message': 'not_available'}
4292 'message': 'not_available'}
4294
4293
4295 merge_data = {
4294 merge_data = {
4296 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
4295 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
4297 'reference': (
4296 'reference': (
4298 pull_request.shadow_merge_ref._asdict()
4297 pull_request.shadow_merge_ref._asdict()
4299 if pull_request.shadow_merge_ref else None),
4298 if pull_request.shadow_merge_ref else None),
4300 }
4299 }
4301
4300
4302 data = {
4301 data = {
4303 'pull_request_id': pull_request.pull_request_id,
4302 'pull_request_id': pull_request.pull_request_id,
4304 'url': PullRequestModel().get_url(pull_request),
4303 'url': PullRequestModel().get_url(pull_request),
4305 'title': pull_request.title,
4304 'title': pull_request.title,
4306 'description': pull_request.description,
4305 'description': pull_request.description,
4307 'status': pull_request.status,
4306 'status': pull_request.status,
4308 'state': pull_request.pull_request_state,
4307 'state': pull_request.pull_request_state,
4309 'created_on': pull_request.created_on,
4308 'created_on': pull_request.created_on,
4310 'updated_on': pull_request.updated_on,
4309 'updated_on': pull_request.updated_on,
4311 'commit_ids': pull_request.revisions,
4310 'commit_ids': pull_request.revisions,
4312 'review_status': pull_request.calculated_review_status(),
4311 'review_status': pull_request.calculated_review_status(),
4313 'mergeable': merge_state,
4312 'mergeable': merge_state,
4314 'source': {
4313 'source': {
4315 'clone_url': pull_request.source_repo.clone_url(),
4314 'clone_url': pull_request.source_repo.clone_url(),
4316 'repository': pull_request.source_repo.repo_name,
4315 'repository': pull_request.source_repo.repo_name,
4317 'reference': {
4316 'reference': {
4318 'name': pull_request.source_ref_parts.name,
4317 'name': pull_request.source_ref_parts.name,
4319 'type': pull_request.source_ref_parts.type,
4318 'type': pull_request.source_ref_parts.type,
4320 'commit_id': pull_request.source_ref_parts.commit_id,
4319 'commit_id': pull_request.source_ref_parts.commit_id,
4321 },
4320 },
4322 },
4321 },
4323 'target': {
4322 'target': {
4324 'clone_url': pull_request.target_repo.clone_url(),
4323 'clone_url': pull_request.target_repo.clone_url(),
4325 'repository': pull_request.target_repo.repo_name,
4324 'repository': pull_request.target_repo.repo_name,
4326 'reference': {
4325 'reference': {
4327 'name': pull_request.target_ref_parts.name,
4326 'name': pull_request.target_ref_parts.name,
4328 'type': pull_request.target_ref_parts.type,
4327 'type': pull_request.target_ref_parts.type,
4329 'commit_id': pull_request.target_ref_parts.commit_id,
4328 'commit_id': pull_request.target_ref_parts.commit_id,
4330 },
4329 },
4331 },
4330 },
4332 'merge': merge_data,
4331 'merge': merge_data,
4333 'author': pull_request.author.get_api_data(include_secrets=False,
4332 'author': pull_request.author.get_api_data(include_secrets=False,
4334 details='basic'),
4333 details='basic'),
4335 'reviewers': [
4334 'reviewers': [
4336 {
4335 {
4337 'user': reviewer.get_api_data(include_secrets=False,
4336 'user': reviewer.get_api_data(include_secrets=False,
4338 details='basic'),
4337 details='basic'),
4339 'reasons': reasons,
4338 'reasons': reasons,
4340 'review_status': st[0][1].status if st else 'not_reviewed',
4339 'review_status': st[0][1].status if st else 'not_reviewed',
4341 }
4340 }
4342 for obj, reviewer, reasons, mandatory, st in
4341 for obj, reviewer, reasons, mandatory, st in
4343 pull_request.reviewers_statuses()
4342 pull_request.reviewers_statuses()
4344 ]
4343 ]
4345 }
4344 }
4346
4345
4347 return data
4346 return data
4348
4347
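    # Illustrative usage (not part of the model; `pull_request` is a loaded
    # instance): `get_api_data()` builds the dict handed out to API consumers;
    # computing the live merge state is relatively expensive, so it can be
    # skipped:
    #
    #   data = pull_request.get_api_data(with_merge_state=False)
    #   data['mergeable']   # -> {'status': 'not_available', 'message': 'not_available'}
    #   data['commit_ids']  # -> the list of revisions in this pull request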
    def set_state(self, pull_request_state, final_state=None):
        """
        Returns a context manager that switches this pull request into
        `pull_request_state` and restores a state when the block exits.
        By default the state active before entering is restored; a different
        one can be forced via `final_state` (passed on as ``back_state``)::

            # goes from the initial state to `updating` and back to the
            # initial state; the restored state can be changed by
            # specifying back_state=
            with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
                pull_request.merge()

        :param pull_request_state: state to hold while the block is running
        :param final_state: optional state to restore on exit instead of the
            previous one
        """

        return _SetState(self, pull_request_state, back_state=final_state)
4362
4361
4363
4362
4364 class PullRequest(Base, _PullRequestBase):
4363 class PullRequest(Base, _PullRequestBase):
4365 __tablename__ = 'pull_requests'
4364 __tablename__ = 'pull_requests'
4366 __table_args__ = (
4365 __table_args__ = (
4367 base_table_args,
4366 base_table_args,
4368 )
4367 )
4369 LATEST_VER = 'latest'
4368 LATEST_VER = 'latest'
4370
4369
4371 pull_request_id = Column(
4370 pull_request_id = Column(
4372 'pull_request_id', Integer(), nullable=False, primary_key=True)
4371 'pull_request_id', Integer(), nullable=False, primary_key=True)
4373
4372
    def __str__(self):
        if self.pull_request_id:
            return f'<DB:PullRequest #{self.pull_request_id}>'
        else:
            return f'<DB:PullRequest at {id(self)!r}>'

    reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan")
4379 statuses = relationship('ChangesetStatus', cascade="all, delete-orphan")
4378 statuses = relationship('ChangesetStatus', cascade="all, delete-orphan")
4380 comments = relationship('ChangesetComment', cascade="all, delete-orphan")
4379 comments = relationship('ChangesetComment', cascade="all, delete-orphan")
4381 versions = relationship('PullRequestVersion', cascade="all, delete-orphan",
4380 versions = relationship('PullRequestVersion', cascade="all, delete-orphan",
4382 lazy='dynamic')
4381 lazy='dynamic')
4383
4382
4384 @classmethod
4383 @classmethod
4385 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
4384 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
4386 internal_methods=None):
4385 internal_methods=None):
4387
4386
4388 class PullRequestDisplay(object):
4387 class PullRequestDisplay(object):
4389 """
4388 """
4390 Special object wrapper for showing PullRequest data via Versions
4389 Special object wrapper for showing PullRequest data via Versions
            It mimics the PR object as closely as possible. This is a
            read-only object, just for display
4393 """
4392 """
4394
4393
4395 def __init__(self, attrs, internal=None):
4394 def __init__(self, attrs, internal=None):
4396 self.attrs = attrs
4395 self.attrs = attrs
                # internal attributes take priority over the ones given via attrs
                self.internal = internal or ['versions']
4399
4398
4400 def __getattr__(self, item):
4399 def __getattr__(self, item):
4401 if item in self.internal:
4400 if item in self.internal:
4402 return getattr(self, item)
4401 return getattr(self, item)
4403 try:
4402 try:
4404 return self.attrs[item]
4403 return self.attrs[item]
4405 except KeyError:
4404 except KeyError:
4406 raise AttributeError(
4405 raise AttributeError(
4407 '%s object has no attribute %s' % (self, item))
4406 '%s object has no attribute %s' % (self, item))
4408
4407
4409 def __str__(self):
4408 def __str__(self):
4410 pr_id = self.attrs.get('pull_request_id')
4409 pr_id = self.attrs.get('pull_request_id')
4411 return f'<DB:PullRequestDisplay #{pr_id}>'
4410 return f'<DB:PullRequestDisplay #{pr_id}>'
4412
4411
4413 def versions(self):
4412 def versions(self):
4414 return pull_request_obj.versions.order_by(
4413 return pull_request_obj.versions.order_by(
4415 PullRequestVersion.pull_request_version_id).all()
4414 PullRequestVersion.pull_request_version_id).all()
4416
4415
4417 def is_closed(self):
4416 def is_closed(self):
4418 return pull_request_obj.is_closed()
4417 return pull_request_obj.is_closed()
4419
4418
4420 def is_state_changing(self):
4419 def is_state_changing(self):
4421 return pull_request_obj.is_state_changing()
4420 return pull_request_obj.is_state_changing()
4422
4421
4423 @property
4422 @property
4424 def pull_request_version_id(self):
4423 def pull_request_version_id(self):
4425 return getattr(pull_request_obj, 'pull_request_version_id', None)
4424 return getattr(pull_request_obj, 'pull_request_version_id', None)
4426
4425
4427 @property
4426 @property
4428 def pull_request_last_version(self):
4427 def pull_request_last_version(self):
4429 return pull_request_obj.pull_request_last_version
4428 return pull_request_obj.pull_request_last_version
4430
4429
4431 attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))
4430 attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))
4432
4431
4433 attrs.author = StrictAttributeDict(
4432 attrs.author = StrictAttributeDict(
4434 pull_request_obj.author.get_api_data())
4433 pull_request_obj.author.get_api_data())
4435 if pull_request_obj.target_repo:
4434 if pull_request_obj.target_repo:
4436 attrs.target_repo = StrictAttributeDict(
4435 attrs.target_repo = StrictAttributeDict(
4437 pull_request_obj.target_repo.get_api_data())
4436 pull_request_obj.target_repo.get_api_data())
4438 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
4437 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
4439
4438
4440 if pull_request_obj.source_repo:
4439 if pull_request_obj.source_repo:
4441 attrs.source_repo = StrictAttributeDict(
4440 attrs.source_repo = StrictAttributeDict(
4442 pull_request_obj.source_repo.get_api_data())
4441 pull_request_obj.source_repo.get_api_data())
4443 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
4442 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
4444
4443
4445 attrs.source_ref_parts = pull_request_obj.source_ref_parts
4444 attrs.source_ref_parts = pull_request_obj.source_ref_parts
4446 attrs.target_ref_parts = pull_request_obj.target_ref_parts
4445 attrs.target_ref_parts = pull_request_obj.target_ref_parts
4447 attrs.revisions = pull_request_obj.revisions
4446 attrs.revisions = pull_request_obj.revisions
4448 attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
4447 attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
4449 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
4448 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
4450 attrs.reviewer_data = org_pull_request_obj.reviewer_data
4449 attrs.reviewer_data = org_pull_request_obj.reviewer_data
4451 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
4450 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
4452
4451
4453 return PullRequestDisplay(attrs, internal=internal_methods)
4452 return PullRequestDisplay(attrs, internal=internal_methods)
4454
4453
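    # Illustrative sketch (not part of the model; `pr_version`/`pr` are assumed
    # loaded objects): the display object answers attribute access from the
    # captured `attrs` snapshot, except for names listed in `internal`, which
    # stay live methods on the wrapped pull request:
    #
    #   display = PullRequest.get_pr_display_object(pr_version, pr)
    #   display.title          # -> looked up in the attrs snapshot
    #   display.versions()     # -> 'internal', delegates to the real object
    #   display.no_such_field  # -> raises AttributeError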
4455 def is_closed(self):
4454 def is_closed(self):
4456 return self.status == self.STATUS_CLOSED
4455 return self.status == self.STATUS_CLOSED
4457
4456
4458 def is_state_changing(self):
4457 def is_state_changing(self):
4459 return self.pull_request_state != PullRequest.STATE_CREATED
4458 return self.pull_request_state != PullRequest.STATE_CREATED
4460
4459
4461 def __json__(self):
4460 def __json__(self):
4462 return {
4461 return {
4463 'revisions': self.revisions,
4462 'revisions': self.revisions,
4464 'versions': self.versions_count
4463 'versions': self.versions_count
4465 }
4464 }
4466
4465
4467 def calculated_review_status(self):
4466 def calculated_review_status(self):
4468 from rhodecode.model.changeset_status import ChangesetStatusModel
4467 from rhodecode.model.changeset_status import ChangesetStatusModel
4469 return ChangesetStatusModel().calculated_review_status(self)
4468 return ChangesetStatusModel().calculated_review_status(self)
4470
4469
4471 def reviewers_statuses(self, user=None):
4470 def reviewers_statuses(self, user=None):
4472 from rhodecode.model.changeset_status import ChangesetStatusModel
4471 from rhodecode.model.changeset_status import ChangesetStatusModel
4473 return ChangesetStatusModel().reviewers_statuses(self, user=user)
4472 return ChangesetStatusModel().reviewers_statuses(self, user=user)
4474
4473
4475 def get_pull_request_reviewers(self, role=None):
4474 def get_pull_request_reviewers(self, role=None):
4476 qry = PullRequestReviewers.query()\
4475 qry = PullRequestReviewers.query()\
4477 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)
4476 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)
4478 if role:
4477 if role:
4479 qry = qry.filter(PullRequestReviewers.role == role)
4478 qry = qry.filter(PullRequestReviewers.role == role)
4480
4479
4481 return qry.all()
4480 return qry.all()
4482
4481
4483 @property
4482 @property
4484 def reviewers_count(self):
4483 def reviewers_count(self):
4485 qry = PullRequestReviewers.query()\
4484 qry = PullRequestReviewers.query()\
4486 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4485 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4487 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER)
4486 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER)
4488 return qry.count()
4487 return qry.count()
4489
4488
4490 @property
4489 @property
4491 def observers_count(self):
4490 def observers_count(self):
4492 qry = PullRequestReviewers.query()\
4491 qry = PullRequestReviewers.query()\
4493 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4492 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4494 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)
4493 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)
4495 return qry.count()
4494 return qry.count()
4496
4495
4497 def observers(self):
4496 def observers(self):
4498 qry = PullRequestReviewers.query()\
4497 qry = PullRequestReviewers.query()\
4499 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4498 .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
4500 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)\
4499 .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)\
4501 .all()
4500 .all()
4502
4501
4503 for entry in qry:
4502 for entry in qry:
4504 yield entry, entry.user
4503 yield entry, entry.user
4505
4504
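    # Illustrative usage (not part of the model; `pr` is a loaded instance):
    # reviewer rows are filtered by role, so the same association table serves
    # both reviewers and observers:
    #
    #   pr.get_pull_request_reviewers(role=PullRequestReviewers.ROLE_REVIEWER)
    #   pr.reviewers_count    # count of ROLE_REVIEWER rows
    #   pr.observers_count    # count of ROLE_OBSERVER rows
    #   for entry, user in pr.observers():
    #       ...               # (entry, User) pairs for every observer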
4506 @property
4505 @property
4507 def workspace_id(self):
4506 def workspace_id(self):
4508 from rhodecode.model.pull_request import PullRequestModel
4507 from rhodecode.model.pull_request import PullRequestModel
4509 return PullRequestModel()._workspace_id(self)
4508 return PullRequestModel()._workspace_id(self)
4510
4509
4511 def get_shadow_repo(self):
4510 def get_shadow_repo(self):
4512 workspace_id = self.workspace_id
4511 workspace_id = self.workspace_id
4513 shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
4512 shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
4514 if os.path.isdir(shadow_repository_path):
4513 if os.path.isdir(shadow_repository_path):
4515 vcs_obj = self.target_repo.scm_instance()
4514 vcs_obj = self.target_repo.scm_instance()
4516 return vcs_obj.get_shadow_instance(shadow_repository_path)
4515 return vcs_obj.get_shadow_instance(shadow_repository_path)
4517
4516
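    # Illustrative sketch (not part of the model): the shadow repository only
    # exists on disk once a merge simulation has created it; when the directory
    # is missing this method falls through and implicitly returns None:
    #
    #   shadow = pr.get_shadow_repo()
    #   if shadow is None:
    #       # no shadow repository has been materialized for this PR yet
    #       ...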
4518 @property
4517 @property
4519 def versions_count(self):
4518 def versions_count(self):
        """
        Return the number of versions this PR has; e.g. a PR that has been
        updated once will have 2 versions
        """
4524 return self.versions.count() + 1
4523 return self.versions.count() + 1
4525
4524
4526 @property
4525 @property
4527 def pull_request_last_version(self):
4526 def pull_request_last_version(self):
4528 return self.versions_count
4527 return self.versions_count
4529
4528
4530
4529
4531 class PullRequestVersion(Base, _PullRequestBase):
4530 class PullRequestVersion(Base, _PullRequestBase):
4532 __tablename__ = 'pull_request_versions'
4531 __tablename__ = 'pull_request_versions'
4533 __table_args__ = (
4532 __table_args__ = (
4534 base_table_args,
4533 base_table_args,
4535 )
4534 )
4536
4535
4537 pull_request_version_id = Column(
4536 pull_request_version_id = Column(
4538 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
4537 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
4539 pull_request_id = Column(
4538 pull_request_id = Column(
4540 'pull_request_id', Integer(),
4539 'pull_request_id', Integer(),
4541 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4540 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4542 pull_request = relationship('PullRequest')
4541 pull_request = relationship('PullRequest')
4543
4542
4544 def __str__(self):
4543 def __str__(self):
4545 if self.pull_request_version_id:
4544 if self.pull_request_version_id:
4546 return f'<DB:PullRequestVersion #{self.pull_request_version_id}>'
4545 return f'<DB:PullRequestVersion #{self.pull_request_version_id}>'
4547 else:
4546 else:
4548 return f'<DB:PullRequestVersion at {id(self)!r}>'
4547 return f'<DB:PullRequestVersion at {id(self)!r}>'
4549
4548
4550 @property
4549 @property
4551 def reviewers(self):
4550 def reviewers(self):
4552 return self.pull_request.reviewers
4551 return self.pull_request.reviewers
4553
4552
4558 @property
4557 @property
4559 def versions(self):
4558 def versions(self):
4560 return self.pull_request.versions
4559 return self.pull_request.versions
4561
4560
4562 def is_closed(self):
4561 def is_closed(self):
4563 # calculate from original
4562 # calculate from original
4564 return self.pull_request.status == self.STATUS_CLOSED
4563 return self.pull_request.status == self.STATUS_CLOSED
4565
4564
4566 def is_state_changing(self):
4565 def is_state_changing(self):
4567 return self.pull_request.pull_request_state != PullRequest.STATE_CREATED
4566 return self.pull_request.pull_request_state != PullRequest.STATE_CREATED
4568
4567
4569 def calculated_review_status(self):
4568 def calculated_review_status(self):
4570 return self.pull_request.calculated_review_status()
4569 return self.pull_request.calculated_review_status()
4571
4570
4572 def reviewers_statuses(self):
4571 def reviewers_statuses(self):
4573 return self.pull_request.reviewers_statuses()
4572 return self.pull_request.reviewers_statuses()
4574
4573
4575 def observers(self):
4574 def observers(self):
4576 return self.pull_request.observers()
4575 return self.pull_request.observers()
4577
4576
4578
4577
4579 class PullRequestReviewers(Base, BaseModel):
4578 class PullRequestReviewers(Base, BaseModel):
4580 __tablename__ = 'pull_request_reviewers'
4579 __tablename__ = 'pull_request_reviewers'
4581 __table_args__ = (
4580 __table_args__ = (
4582 base_table_args,
4581 base_table_args,
4583 )
4582 )
4584 ROLE_REVIEWER = u'reviewer'
4583 ROLE_REVIEWER = u'reviewer'
4585 ROLE_OBSERVER = u'observer'
4584 ROLE_OBSERVER = u'observer'
4586 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
4585 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
4587
4586
4588 @hybrid_property
4587 @hybrid_property
4589 def reasons(self):
4588 def reasons(self):
4590 if not self._reasons:
4589 if not self._reasons:
4591 return []
4590 return []
4592 return self._reasons
4591 return self._reasons
4593
4592
4594 @reasons.setter
4593 @reasons.setter
4595 def reasons(self, val):
4594 def reasons(self, val):
4596 val = val or []
4595 val = val or []
4597 if any(not isinstance(x, str) for x in val):
4596 if any(not isinstance(x, str) for x in val):
4598 raise Exception('invalid reasons type, must be list of strings')
4597 raise Exception('invalid reasons type, must be list of strings')
4599 self._reasons = val
4598 self._reasons = val
4600
4599
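    # Illustrative sketch (not part of the model; `reviewer` is an instance of
    # this class): the setter normalizes a missing value to [] and rejects
    # anything that is not a list of strings:
    #
    #   reviewer.reasons = None                        # stored as []
    #   reviewer.reasons = ['added by rule "default"'] # ok
    #   reviewer.reasons = [b'bytes']                  # raises Exception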
4601 pull_requests_reviewers_id = Column(
4600 pull_requests_reviewers_id = Column(
4602 'pull_requests_reviewers_id', Integer(), nullable=False,
4601 'pull_requests_reviewers_id', Integer(), nullable=False,
4603 primary_key=True)
4602 primary_key=True)
4604 pull_request_id = Column(
4603 pull_request_id = Column(
4605 "pull_request_id", Integer(),
4604 "pull_request_id", Integer(),
4606 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4605 ForeignKey('pull_requests.pull_request_id'), nullable=False)
4607 user_id = Column(
4606 user_id = Column(
4608 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
4607 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
4609 _reasons = Column(
4608 _reasons = Column(
4610 'reason', MutationList.as_mutable(
4609 'reason', MutationList.as_mutable(
4611 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
4610 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
4612
4611
4613 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4612 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4614 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
4613 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
4615
4614
4616 user = relationship('User')
4615 user = relationship('User')
4617 pull_request = relationship('PullRequest')
4616 pull_request = relationship('PullRequest')
4618
4617
4619 rule_data = Column(
4618 rule_data = Column(
4620 'rule_data_json',
4619 'rule_data_json',
4621 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
4620 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
4622
4621
4623 def rule_user_group_data(self):
4622 def rule_user_group_data(self):
4624 """
4623 """
4625 Returns the voting user group rule data for this reviewer
4624 Returns the voting user group rule data for this reviewer
4626 """
4625 """
4627
4626
4628 if self.rule_data and 'vote_rule' in self.rule_data:
4627 if self.rule_data and 'vote_rule' in self.rule_data:
4629 user_group_data = {}
4628 user_group_data = {}
4630 if 'rule_user_group_entry_id' in self.rule_data:
4629 if 'rule_user_group_entry_id' in self.rule_data:
4631 # means a group with voting rules !
4630 # means a group with voting rules !
4632 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
4631 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
4633 user_group_data['name'] = self.rule_data['rule_name']
4632 user_group_data['name'] = self.rule_data['rule_name']
4634 user_group_data['vote_rule'] = self.rule_data['vote_rule']
4633 user_group_data['vote_rule'] = self.rule_data['vote_rule']
4635
4634
4636 return user_group_data
4635 return user_group_data
4637
4636
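    # Illustrative sketch (not part of the model; the payload values are
    # assumed samples): when the reviewer entry came from a user-group rule
    # with voting enabled, `rule_data` carries the group entry and
    # `rule_user_group_data()` extracts just the voting bits:
    #
    #   reviewer.rule_data = {
    #       'vote_rule': 2,
    #       'rule_name': 'core-devs',
    #       'rule_user_group_entry_id': 7,
    #   }
    #   reviewer.rule_user_group_data()
    #   # -> {'id': 7, 'name': 'core-devs', 'vote_rule': 2}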
4638 @classmethod
4637 @classmethod
4639 def get_pull_request_reviewers(cls, pull_request_id, role=None):
4638 def get_pull_request_reviewers(cls, pull_request_id, role=None):
4640 qry = PullRequestReviewers.query()\
4639 qry = PullRequestReviewers.query()\
4641 .filter(PullRequestReviewers.pull_request_id == pull_request_id)
4640 .filter(PullRequestReviewers.pull_request_id == pull_request_id)
4642 if role:
4641 if role:
4643 qry = qry.filter(PullRequestReviewers.role == role)
4642 qry = qry.filter(PullRequestReviewers.role == role)
4644
4643
4645 return qry.all()
4644 return qry.all()
4646
4645
4647 def __str__(self):
4646 def __str__(self):
4648 return f"<{self.__class__.__name__}('id:{self.pull_requests_reviewers_id}')>"
4647 return f"<{self.__class__.__name__}('id:{self.pull_requests_reviewers_id}')>"
4649
4648
4650
4649
4651 class Notification(Base, BaseModel):
4650 class Notification(Base, BaseModel):
4652 __tablename__ = 'notifications'
4651 __tablename__ = 'notifications'
4653 __table_args__ = (
4652 __table_args__ = (
4654 Index('notification_type_idx', 'type'),
4653 Index('notification_type_idx', 'type'),
4655 base_table_args,
4654 base_table_args,
4656 )
4655 )
4657
4656
4658 TYPE_CHANGESET_COMMENT = u'cs_comment'
4657 TYPE_CHANGESET_COMMENT = u'cs_comment'
4659 TYPE_MESSAGE = u'message'
4658 TYPE_MESSAGE = u'message'
4660 TYPE_MENTION = u'mention'
4659 TYPE_MENTION = u'mention'
4661 TYPE_REGISTRATION = u'registration'
4660 TYPE_REGISTRATION = u'registration'
4662 TYPE_PULL_REQUEST = u'pull_request'
4661 TYPE_PULL_REQUEST = u'pull_request'
4663 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
4662 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
4664 TYPE_PULL_REQUEST_UPDATE = u'pull_request_update'
4663 TYPE_PULL_REQUEST_UPDATE = u'pull_request_update'
4665
4664
4666 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
4665 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
4667 subject = Column('subject', Unicode(512), nullable=True)
4666 subject = Column('subject', Unicode(512), nullable=True)
4668 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4667 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4669 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
4668 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
4670 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4669 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4671 type_ = Column('type', Unicode(255))
4670 type_ = Column('type', Unicode(255))
4672
4671
4673 created_by_user = relationship('User')
4672 created_by_user = relationship('User')
4674 notifications_to_users = relationship('UserNotification', lazy='joined',
4673 notifications_to_users = relationship('UserNotification', lazy='joined',
4675 cascade="all, delete-orphan")
4674 cascade="all, delete-orphan")
4676
4675
4677 @property
4676 @property
4678 def recipients(self):
4677 def recipients(self):
4679 return [x.user for x in UserNotification.query()\
4678 return [x.user for x in UserNotification.query()\
4680 .filter(UserNotification.notification == self)\
4679 .filter(UserNotification.notification == self)\
4681 .order_by(UserNotification.user_id.asc()).all()]
4680 .order_by(UserNotification.user_id.asc()).all()]
4682
4681
4683 @classmethod
4682 @classmethod
4684 def create(cls, created_by, subject, body, recipients, type_=None):
4683 def create(cls, created_by, subject, body, recipients, type_=None):
4685 if type_ is None:
4684 if type_ is None:
4686 type_ = Notification.TYPE_MESSAGE
4685 type_ = Notification.TYPE_MESSAGE
4687
4686
4688 notification = cls()
4687 notification = cls()
4689 notification.created_by_user = created_by
4688 notification.created_by_user = created_by
4690 notification.subject = subject
4689 notification.subject = subject
4691 notification.body = body
4690 notification.body = body
4692 notification.type_ = type_
4691 notification.type_ = type_
4693 notification.created_on = datetime.datetime.now()
4692 notification.created_on = datetime.datetime.now()
4694
4693
4695 # For each recipient link the created notification to his account
4694 # For each recipient link the created notification to his account
4696 for u in recipients:
4695 for u in recipients:
4697 assoc = UserNotification()
4696 assoc = UserNotification()
4698 assoc.user_id = u.user_id
4697 assoc.user_id = u.user_id
4699 assoc.notification = notification
4698 assoc.notification = notification
4700
4699
4701 # if created_by is inside recipients mark his notification
4700 # if created_by is inside recipients mark his notification
4702 # as read
4701 # as read
4703 if u.user_id == created_by.user_id:
4702 if u.user_id == created_by.user_id:
4704 assoc.read = True
4703 assoc.read = True
4705 Session().add(assoc)
4704 Session().add(assoc)
4706
4705
4707 Session().add(notification)
4706 Session().add(notification)
4708
4707
4709 return notification
4708 return notification
4710
4709
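    # Illustrative usage (not part of the model; `admin_user`/`other_user` are
    # hypothetical User objects): creating a message-type notification for a
    # set of users; the sender's own copy is pre-marked as read and the caller
    # is expected to commit the session:
    #
    #   notification = Notification.create(
    #       created_by=admin_user, subject='Maintenance window',
    #       body='...', recipients=[admin_user, other_user])
    #   Session().commit()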
4711
4710
4712 class UserNotification(Base, BaseModel):
4711 class UserNotification(Base, BaseModel):
4713 __tablename__ = 'user_to_notification'
4712 __tablename__ = 'user_to_notification'
4714 __table_args__ = (
4713 __table_args__ = (
4715 UniqueConstraint('user_id', 'notification_id'),
4714 UniqueConstraint('user_id', 'notification_id'),
4716 base_table_args
4715 base_table_args
4717 )
4716 )
4718
4717
4719 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4718 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4720 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
4719 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
4721 read = Column('read', Boolean, default=False)
4720 read = Column('read', Boolean, default=False)
4722 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
4721 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
4723
4722
4724 user = relationship('User', lazy="joined")
4723 user = relationship('User', lazy="joined")
4725 notification = relationship('Notification', lazy="joined",
4724 notification = relationship('Notification', lazy="joined",
4726 order_by=lambda: Notification.created_on.desc(),)
4725 order_by=lambda: Notification.created_on.desc(),)
4727
4726
4728 def mark_as_read(self):
4727 def mark_as_read(self):
4729 self.read = True
4728 self.read = True
4730 Session().add(self)
4729 Session().add(self)
4731
4730
4732
4731
4733 class UserNotice(Base, BaseModel):
4732 class UserNotice(Base, BaseModel):
4734 __tablename__ = 'user_notices'
4733 __tablename__ = 'user_notices'
4735 __table_args__ = (
4734 __table_args__ = (
4736 base_table_args
4735 base_table_args
4737 )
4736 )
4738
4737
4739 NOTIFICATION_TYPE_MESSAGE = 'message'
4738 NOTIFICATION_TYPE_MESSAGE = 'message'
4740 NOTIFICATION_TYPE_NOTICE = 'notice'
4739 NOTIFICATION_TYPE_NOTICE = 'notice'
4741
4740
4742 NOTIFICATION_LEVEL_INFO = 'info'
4741 NOTIFICATION_LEVEL_INFO = 'info'
4743 NOTIFICATION_LEVEL_WARNING = 'warning'
4742 NOTIFICATION_LEVEL_WARNING = 'warning'
4744 NOTIFICATION_LEVEL_ERROR = 'error'
4743 NOTIFICATION_LEVEL_ERROR = 'error'
4745
4744
4746 user_notice_id = Column('gist_id', Integer(), primary_key=True)
4745 user_notice_id = Column('gist_id', Integer(), primary_key=True)
4747
4746
4748 notice_subject = Column('notice_subject', Unicode(512), nullable=True)
4747 notice_subject = Column('notice_subject', Unicode(512), nullable=True)
4749 notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4748 notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
4750
4749
4751 notice_read = Column('notice_read', Boolean, default=False)
4750 notice_read = Column('notice_read', Boolean, default=False)
4752
4751
4753 notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
4752 notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
4754 notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)
4753 notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)
4755
4754
4756 notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
4755 notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
4757 notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4756 notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4758
4757
4759 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
4758 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
4760 user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')
4759 user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')
4761
4760
4762 @classmethod
4761 @classmethod
4763 def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
4762 def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
4764
4763
4765 if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
4764 if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
4766 cls.NOTIFICATION_LEVEL_WARNING,
4765 cls.NOTIFICATION_LEVEL_WARNING,
4767 cls.NOTIFICATION_LEVEL_INFO]:
4766 cls.NOTIFICATION_LEVEL_INFO]:
4768 return
4767 return
4769
4768
4770 from rhodecode.model.user import UserModel
4769 from rhodecode.model.user import UserModel
4771 user = UserModel().get_user(user)
4770 user = UserModel().get_user(user)
4772
4771
4773 new_notice = UserNotice()
4772 new_notice = UserNotice()
4774 if not allow_duplicate:
4773 if not allow_duplicate:
4775 existing_msg = UserNotice().query() \
4774 existing_msg = UserNotice().query() \
4776 .filter(UserNotice.user == user) \
4775 .filter(UserNotice.user == user) \
4777 .filter(UserNotice.notice_body == body) \
4776 .filter(UserNotice.notice_body == body) \
4778 .filter(UserNotice.notice_read == false()) \
4777 .filter(UserNotice.notice_read == false()) \
4779 .scalar()
4778 .scalar()
4780 if existing_msg:
4779 if existing_msg:
4781 log.warning('Ignoring duplicate notice for user %s', user)
4780 log.warning('Ignoring duplicate notice for user %s', user)
4782 return
4781 return
4783
4782
4784 new_notice.user = user
4783 new_notice.user = user
4785 new_notice.notice_subject = subject
4784 new_notice.notice_subject = subject
4786 new_notice.notice_body = body
4785 new_notice.notice_body = body
4787 new_notice.notification_level = notice_level
4786 new_notice.notification_level = notice_level
4788 Session().add(new_notice)
4787 Session().add(new_notice)
4789 Session().commit()
4788 Session().commit()
4790
4789
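    # Illustrative usage (not part of the model; `user` is a User object or
    # user id): unread notices with an identical body are de-duplicated unless
    # allow_duplicate=True, and an unknown notice_level is silently ignored:
    #
    #   UserNotice.create_for_user(
    #       user, subject='License expires soon', body='...',
    #       notice_level=UserNotice.NOTIFICATION_LEVEL_WARNING)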
4791
4790
4792 class Gist(Base, BaseModel):
4791 class Gist(Base, BaseModel):
4793 __tablename__ = 'gists'
4792 __tablename__ = 'gists'
4794 __table_args__ = (
4793 __table_args__ = (
4795 Index('g_gist_access_id_idx', 'gist_access_id'),
4794 Index('g_gist_access_id_idx', 'gist_access_id'),
4796 Index('g_created_on_idx', 'created_on'),
4795 Index('g_created_on_idx', 'created_on'),
4797 base_table_args
4796 base_table_args
4798 )
4797 )
4799
4798
4800 GIST_PUBLIC = u'public'
4799 GIST_PUBLIC = u'public'
4801 GIST_PRIVATE = u'private'
4800 GIST_PRIVATE = u'private'
4802 DEFAULT_FILENAME = u'gistfile1.txt'
4801 DEFAULT_FILENAME = u'gistfile1.txt'
4803
4802
4804 ACL_LEVEL_PUBLIC = u'acl_public'
4803 ACL_LEVEL_PUBLIC = u'acl_public'
4805 ACL_LEVEL_PRIVATE = u'acl_private'
4804 ACL_LEVEL_PRIVATE = u'acl_private'
4806
4805
4807 gist_id = Column('gist_id', Integer(), primary_key=True)
4806 gist_id = Column('gist_id', Integer(), primary_key=True)
4808 gist_access_id = Column('gist_access_id', Unicode(250))
4807 gist_access_id = Column('gist_access_id', Unicode(250))
4809 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
4808 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
4810 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
4809 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
4811 gist_expires = Column('gist_expires', Float(53), nullable=False)
4810 gist_expires = Column('gist_expires', Float(53), nullable=False)
4812 gist_type = Column('gist_type', Unicode(128), nullable=False)
4811 gist_type = Column('gist_type', Unicode(128), nullable=False)
4813 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4812 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4814 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4813 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4815 acl_level = Column('acl_level', Unicode(128), nullable=True)
4814 acl_level = Column('acl_level', Unicode(128), nullable=True)
4816
4815
4817 owner = relationship('User')
4816 owner = relationship('User')
4818
4817
4819 def __str__(self):
4818 def __str__(self):
4820 return f'<Gist:[{self.gist_type}]{self.gist_access_id}>'
4819 return f'<Gist:[{self.gist_type}]{self.gist_access_id}>'
4821
4820
4822 @hybrid_property
4821 @hybrid_property
4823 def description_safe(self):
4822 def description_safe(self):
4824 from rhodecode.lib import helpers as h
4823 from rhodecode.lib import helpers as h
4825 return h.escape(self.gist_description)
4824 return h.escape(self.gist_description)
4826
4825
4827 @classmethod
4826 @classmethod
4828 def get_or_404(cls, id_):
4827 def get_or_404(cls, id_):
4829 from pyramid.httpexceptions import HTTPNotFound
4828 from pyramid.httpexceptions import HTTPNotFound
4830
4829
4831 res = cls.query().filter(cls.gist_access_id == id_).scalar()
4830 res = cls.query().filter(cls.gist_access_id == id_).scalar()
4832 if not res:
4831 if not res:
4833 log.debug('WARN: No DB entry with id %s', id_)
4832 log.debug('WARN: No DB entry with id %s', id_)
4834 raise HTTPNotFound()
4833 raise HTTPNotFound()
4835 return res
4834 return res
4836
4835
4837 @classmethod
4836 @classmethod
4838 def get_by_access_id(cls, gist_access_id):
4837 def get_by_access_id(cls, gist_access_id):
4839 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
4838 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
4840
4839
4841 def gist_url(self):
4840 def gist_url(self):
4842 from rhodecode.model.gist import GistModel
4841 from rhodecode.model.gist import GistModel
4843 return GistModel().get_url(self)
4842 return GistModel().get_url(self)
4844
4843
4845 @classmethod
4844 @classmethod
4846 def base_path(cls):
4845 def base_path(cls):
4847 """
4846 """
        Returns the base path where all gists are stored
4849
4848
4850 :param cls:
4849 :param cls:
4851 """
4850 """
4852 from rhodecode.model.gist import GIST_STORE_LOC
4851 from rhodecode.model.gist import GIST_STORE_LOC
4853 q = Session().query(RhodeCodeUi)\
4852 q = Session().query(RhodeCodeUi)\
4854 .filter(RhodeCodeUi.ui_key == URL_SEP)
4853 .filter(RhodeCodeUi.ui_key == URL_SEP)
4855 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4854 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4856 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4855 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4857
4856
4858 def get_api_data(self):
4857 def get_api_data(self):
4859 """
4858 """
4860 Common function for generating gist related data for API
4859 Common function for generating gist related data for API
4861 """
4860 """
4862 gist = self
4861 gist = self
4863 data = {
4862 data = {
4864 'gist_id': gist.gist_id,
4863 'gist_id': gist.gist_id,
4865 'type': gist.gist_type,
4864 'type': gist.gist_type,
4866 'access_id': gist.gist_access_id,
4865 'access_id': gist.gist_access_id,
4867 'description': gist.gist_description,
4866 'description': gist.gist_description,
4868 'url': gist.gist_url(),
4867 'url': gist.gist_url(),
4869 'expires': gist.gist_expires,
4868 'expires': gist.gist_expires,
4870 'created_on': gist.created_on,
4869 'created_on': gist.created_on,
4871 'modified_at': gist.modified_at,
4870 'modified_at': gist.modified_at,
4872 'content': None,
4871 'content': None,
4873 'acl_level': gist.acl_level,
4872 'acl_level': gist.acl_level,
4874 }
4873 }
4875 return data
4874 return data
4876
4875
4877 def __json__(self):
4876 def __json__(self):
4878 data = dict(
4877 data = dict(
4879 )
4878 )
4880 data.update(self.get_api_data())
4879 data.update(self.get_api_data())
4881 return data
4880 return data
4882 # SCM functions
4881 # SCM functions
4883
4882
4884 def scm_instance(self, **kwargs):
4883 def scm_instance(self, **kwargs):
4885 """
4884 """
4886 Get an instance of VCS Repository
4885 Get an instance of VCS Repository
4887
4886
4888 :param kwargs:
4887 :param kwargs:
4889 """
4888 """
4890 from rhodecode.model.gist import GistModel
4889 from rhodecode.model.gist import GistModel
4891 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4890 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4892 return get_vcs_instance(
4891 return get_vcs_instance(
4893 repo_path=safe_str(full_repo_path), create=False,
4892 repo_path=safe_str(full_repo_path), create=False,
4894 _vcs_alias=GistModel.vcs_backend)
4893 _vcs_alias=GistModel.vcs_backend)
4895
4894
4896
4895
4897 class ExternalIdentity(Base, BaseModel):
4896 class ExternalIdentity(Base, BaseModel):
4898 __tablename__ = 'external_identities'
4897 __tablename__ = 'external_identities'
4899 __table_args__ = (
4898 __table_args__ = (
4900 Index('local_user_id_idx', 'local_user_id'),
4899 Index('local_user_id_idx', 'local_user_id'),
4901 Index('external_id_idx', 'external_id'),
4900 Index('external_id_idx', 'external_id'),
4902 base_table_args
4901 base_table_args
4903 )
4902 )
4904
4903
4905 external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
4904 external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
4906 external_username = Column('external_username', Unicode(1024), default=u'')
4905 external_username = Column('external_username', Unicode(1024), default=u'')
4907 local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4906 local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
4908 provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
4907 provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
4909 access_token = Column('access_token', String(1024), default=u'')
4908 access_token = Column('access_token', String(1024), default=u'')
4910 alt_token = Column('alt_token', String(1024), default=u'')
4909 alt_token = Column('alt_token', String(1024), default=u'')
4911 token_secret = Column('token_secret', String(1024), default=u'')
4910 token_secret = Column('token_secret', String(1024), default=u'')
4912
4911
4913 @classmethod
4912 @classmethod
4914 def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
4913 def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
4915 """
4914 """
4916 Returns ExternalIdentity instance based on search params
4915 Returns ExternalIdentity instance based on search params
4917
4916
4918 :param external_id:
4917 :param external_id:
4919 :param provider_name:
4918 :param provider_name:
4920 :return: ExternalIdentity
4919 :return: ExternalIdentity
4921 """
4920 """
4922 query = cls.query()
4921 query = cls.query()
4923 query = query.filter(cls.external_id == external_id)
4922 query = query.filter(cls.external_id == external_id)
4924 query = query.filter(cls.provider_name == provider_name)
4923 query = query.filter(cls.provider_name == provider_name)
4925 if local_user_id:
4924 if local_user_id:
4926 query = query.filter(cls.local_user_id == local_user_id)
4925 query = query.filter(cls.local_user_id == local_user_id)
4927 return query.first()
4926 return query.first()
4928
4927
4929 @classmethod
4928 @classmethod
4930 def user_by_external_id_and_provider(cls, external_id, provider_name):
4929 def user_by_external_id_and_provider(cls, external_id, provider_name):
4931 """
4930 """
4932 Returns User instance based on search params
4931 Returns User instance based on search params
4933
4932
4934 :param external_id:
4933 :param external_id:
4935 :param provider_name:
4934 :param provider_name:
4936 :return: User
4935 :return: User
4937 """
4936 """
4938 query = User.query()
4937 query = User.query()
4939 query = query.filter(cls.external_id == external_id)
4938 query = query.filter(cls.external_id == external_id)
4940 query = query.filter(cls.provider_name == provider_name)
4939 query = query.filter(cls.provider_name == provider_name)
4941 query = query.filter(User.user_id == cls.local_user_id)
4940 query = query.filter(User.user_id == cls.local_user_id)
4942 return query.first()
4941 return query.first()
4943
4942
4944 @classmethod
4943 @classmethod
4945 def by_local_user_id(cls, local_user_id):
4944 def by_local_user_id(cls, local_user_id):
4946 """
4945 """
4947 Returns all tokens for user
4946 Returns all tokens for user
4948
4947
4949 :param local_user_id:
4948 :param local_user_id:
4950 :return: ExternalIdentity
4949 :return: ExternalIdentity
4951 """
4950 """
4952 query = cls.query()
4951 query = cls.query()
4953 query = query.filter(cls.local_user_id == local_user_id)
4952 query = query.filter(cls.local_user_id == local_user_id)
4954 return query
4953 return query
4955
4954
4956 @classmethod
4955 @classmethod
4957 def load_provider_plugin(cls, plugin_id):
4956 def load_provider_plugin(cls, plugin_id):
4958 from rhodecode.authentication.base import loadplugin
4957 from rhodecode.authentication.base import loadplugin
4959 _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
4958 _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
4960 auth_plugin = loadplugin(_plugin_id)
4959 auth_plugin = loadplugin(_plugin_id)
4961 return auth_plugin
4960 return auth_plugin
4962
4961
4963
4962
4964 class Integration(Base, BaseModel):
4963 class Integration(Base, BaseModel):
4965 __tablename__ = 'integrations'
4964 __tablename__ = 'integrations'
4966 __table_args__ = (
4965 __table_args__ = (
4967 base_table_args
4966 base_table_args
4968 )
4967 )
4969
4968
4970 integration_id = Column('integration_id', Integer(), primary_key=True)
4969 integration_id = Column('integration_id', Integer(), primary_key=True)
4971 integration_type = Column('integration_type', String(255))
4970 integration_type = Column('integration_type', String(255))
4972 enabled = Column('enabled', Boolean(), nullable=False)
4971 enabled = Column('enabled', Boolean(), nullable=False)
4973 name = Column('name', String(255), nullable=False)
4972 name = Column('name', String(255), nullable=False)
4974 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4973 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4975 default=False)
4974 default=False)
4976
4975
4977 settings = Column(
4976 settings = Column(
4978 'settings_json', MutationObj.as_mutable(
4977 'settings_json', MutationObj.as_mutable(
4979 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4978 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4980 repo_id = Column(
4979 repo_id = Column(
4981 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4980 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4982 nullable=True, unique=None, default=None)
4981 nullable=True, unique=None, default=None)
4983 repo = relationship('Repository', lazy='joined')
4982 repo = relationship('Repository', lazy='joined')
4984
4983
4985 repo_group_id = Column(
4984 repo_group_id = Column(
4986 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4985 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4987 nullable=True, unique=None, default=None)
4986 nullable=True, unique=None, default=None)
4988 repo_group = relationship('RepoGroup', lazy='joined')
4987 repo_group = relationship('RepoGroup', lazy='joined')
4989
4988
4990 @property
4989 @property
4991 def scope(self):
4990 def scope(self):
4992 if self.repo:
4991 if self.repo:
4993 return repr(self.repo)
4992 return repr(self.repo)
4994 if self.repo_group:
4993 if self.repo_group:
4995 if self.child_repos_only:
4994 if self.child_repos_only:
4996 return repr(self.repo_group) + ' (child repos only)'
4995 return repr(self.repo_group) + ' (child repos only)'
4997 else:
4996 else:
4998 return repr(self.repo_group) + ' (recursive)'
4997 return repr(self.repo_group) + ' (recursive)'
4999 if self.child_repos_only:
4998 if self.child_repos_only:
5000 return 'root_repos'
4999 return 'root_repos'
5001 return 'global'
5000 return 'global'
5002
5001
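    # Illustrative sketch (not part of the model): how `scope` resolves,
    # depending on which columns are set:
    #
    #   repo set                          -> repr(repo)
    #   repo_group set, child_repos_only  -> repr(repo_group) + ' (child repos only)'
    #   repo_group set, otherwise         -> repr(repo_group) + ' (recursive)'
    #   neither set, child_repos_only     -> 'root_repos'
    #   neither set                       -> 'global'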
5003 def __str__(self):
5002 def __str__(self):
5004 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
5003 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
5005
5004
5006
5005
5007 class RepoReviewRuleUser(Base, BaseModel):
5006 class RepoReviewRuleUser(Base, BaseModel):
5008 __tablename__ = 'repo_review_rules_users'
5007 __tablename__ = 'repo_review_rules_users'
5009 __table_args__ = (
5008 __table_args__ = (
5010 base_table_args
5009 base_table_args
5011 )
5010 )
5012 ROLE_REVIEWER = u'reviewer'
5011 ROLE_REVIEWER = u'reviewer'
5013 ROLE_OBSERVER = u'observer'
5012 ROLE_OBSERVER = u'observer'
5014 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5013 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5015
5014
5016 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
5015 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
5017 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5016 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5018 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
5017 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
5019 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5018 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5020 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5019 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5021 user = relationship('User')
5020 user = relationship('User')
5022
5021
5023 def rule_data(self):
5022 def rule_data(self):
5024 return {
5023 return {
5025 'mandatory': self.mandatory,
5024 'mandatory': self.mandatory,
5026 'role': self.role,
5025 'role': self.role,
5027 }
5026 }
5028
5027
5029
5028
5030 class RepoReviewRuleUserGroup(Base, BaseModel):
5029 class RepoReviewRuleUserGroup(Base, BaseModel):
5031 __tablename__ = 'repo_review_rules_users_groups'
5030 __tablename__ = 'repo_review_rules_users_groups'
5032 __table_args__ = (
5031 __table_args__ = (
5033 base_table_args
5032 base_table_args
5034 )
5033 )
5035
5034
5036 VOTE_RULE_ALL = -1
5035 VOTE_RULE_ALL = -1
5037 ROLE_REVIEWER = u'reviewer'
5036 ROLE_REVIEWER = u'reviewer'
5038 ROLE_OBSERVER = u'observer'
5037 ROLE_OBSERVER = u'observer'
5039 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5038 ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]
5040
5039
5041 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
5040 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
5042 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5041 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
5043 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
5042 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
5044 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5043 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
5045 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5044 role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
5046 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
5045 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
5047 users_group = relationship('UserGroup')
5046 users_group = relationship('UserGroup')
5048
5047
5049 def rule_data(self):
5048 def rule_data(self):
5050 return {
5049 return {
5051 'mandatory': self.mandatory,
5050 'mandatory': self.mandatory,
5052 'role': self.role,
5051 'role': self.role,
5053 'vote_rule': self.vote_rule
5052 'vote_rule': self.vote_rule
5054 }
5053 }
5055
5054
5056 @property
5055 @property
5057 def vote_rule_label(self):
5056 def vote_rule_label(self):
5058 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
5057 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
5059 return 'all must vote'
5058 return 'all must vote'
5060 else:
5059 else:
5061 return 'min. vote {}'.format(self.vote_rule)
5060 return 'min. vote {}'.format(self.vote_rule)
5062
5061
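    # Illustrative sketch (not part of the model; `rule` is an instance of this
    # class): a vote_rule of -1 (VOTE_RULE_ALL) or an unset value means every
    # group member has to vote, any positive number is a minimum vote count:
    #
    #   rule.vote_rule = RepoReviewRuleUserGroup.VOTE_RULE_ALL
    #   rule.vote_rule_label   # -> 'all must vote'
    #   rule.vote_rule = 2
    #   rule.vote_rule_label   # -> 'min. vote 2'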
5063
5062
5064 class RepoReviewRule(Base, BaseModel):
5063 class RepoReviewRule(Base, BaseModel):
5065 __tablename__ = 'repo_review_rules'
5064 __tablename__ = 'repo_review_rules'
5066 __table_args__ = (
5065 __table_args__ = (
5067 base_table_args
5066 base_table_args
5068 )
5067 )
5069
5068
5070 repo_review_rule_id = Column(
5069 repo_review_rule_id = Column(
5071 'repo_review_rule_id', Integer(), primary_key=True)
5070 'repo_review_rule_id', Integer(), primary_key=True)
5072 repo_id = Column(
5071 repo_id = Column(
5073 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
5072 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
5074 repo = relationship('Repository', backref='review_rules')
5073 repo = relationship('Repository', backref='review_rules')
5075
5074
5076 review_rule_name = Column('review_rule_name', String(255))
5075 review_rule_name = Column('review_rule_name', String(255))
5077 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
5076 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
5078 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
5077 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
5079 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
5078 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
5080
5079
5081 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
5080 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
5082
5081
5083 # Legacy fields, just for backward compat
5082 # Legacy fields, just for backward compat
5084 _forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
5083 _forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
5085 _forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
5084 _forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
5086
5085
5087 pr_author = Column("pr_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5086 pr_author = Column("pr_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5088 commit_author = Column("commit_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5087 commit_author = Column("commit_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
5089
5088
5090 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
5089 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
5091
5090
5092 rule_users = relationship('RepoReviewRuleUser')
5091 rule_users = relationship('RepoReviewRuleUser')
5093 rule_user_groups = relationship('RepoReviewRuleUserGroup')
5092 rule_user_groups = relationship('RepoReviewRuleUserGroup')
5094
5093
5095 def _validate_pattern(self, value):
5094 def _validate_pattern(self, value):
5096 re.compile('^' + glob2re(value) + '$')
5095 re.compile('^' + glob2re(value) + '$')
5097
5096
5098 @hybrid_property
5097 @hybrid_property
5099 def source_branch_pattern(self):
5098 def source_branch_pattern(self):
5100 return self._branch_pattern or '*'
5099 return self._branch_pattern or '*'
5101
5100
5102 @source_branch_pattern.setter
5101 @source_branch_pattern.setter
5103 def source_branch_pattern(self, value):
5102 def source_branch_pattern(self, value):
5104 self._validate_pattern(value)
5103 self._validate_pattern(value)
5105 self._branch_pattern = value or '*'
5104 self._branch_pattern = value or '*'
5106
5105
5107 @hybrid_property
5106 @hybrid_property
5108 def target_branch_pattern(self):
5107 def target_branch_pattern(self):
5109 return self._target_branch_pattern or '*'
5108 return self._target_branch_pattern or '*'
5110
5109
5111 @target_branch_pattern.setter
5110 @target_branch_pattern.setter
5112 def target_branch_pattern(self, value):
5111 def target_branch_pattern(self, value):
5113 self._validate_pattern(value)
5112 self._validate_pattern(value)
5114 self._target_branch_pattern = value or '*'
5113 self._target_branch_pattern = value or '*'
5115
5114
5116 @hybrid_property
5115 @hybrid_property
5117 def file_pattern(self):
5116 def file_pattern(self):
5118 return self._file_pattern or '*'
5117 return self._file_pattern or '*'
5119
5118
5120 @file_pattern.setter
5119 @file_pattern.setter
5121 def file_pattern(self, value):
5120 def file_pattern(self, value):
5122 self._validate_pattern(value)
5121 self._validate_pattern(value)
5123 self._file_pattern = value or '*'
5122 self._file_pattern = value or '*'
5124
5123
5125 @hybrid_property
5124 @hybrid_property
5126 def forbid_pr_author_to_review(self):
5125 def forbid_pr_author_to_review(self):
5127 return self.pr_author == 'forbid_pr_author'
5126 return self.pr_author == 'forbid_pr_author'
5128
5127
5129 @hybrid_property
5128 @hybrid_property
5130 def include_pr_author_to_review(self):
5129 def include_pr_author_to_review(self):
5131 return self.pr_author == 'include_pr_author'
5130 return self.pr_author == 'include_pr_author'
5132
5131
5133 @hybrid_property
5132 @hybrid_property
5134 def forbid_commit_author_to_review(self):
5133 def forbid_commit_author_to_review(self):
5135 return self.commit_author == 'forbid_commit_author'
5134 return self.commit_author == 'forbid_commit_author'
5136
5135
5137 @hybrid_property
5136 @hybrid_property
5138 def include_commit_author_to_review(self):
5137 def include_commit_author_to_review(self):
5139 return self.commit_author == 'include_commit_author'
5138 return self.commit_author == 'include_commit_author'
5140
5139
5141 def matches(self, source_branch, target_branch, files_changed):
5140 def matches(self, source_branch, target_branch, files_changed):
5142 """
5141 """
5143 Check if this review rule matches a branch/files in a pull request
5142 Check if this review rule matches a branch/files in a pull request
5144
5143
5145 :param source_branch: source branch name for the commit
5144 :param source_branch: source branch name for the commit
5146 :param target_branch: target branch name for the commit
5145 :param target_branch: target branch name for the commit
5147 :param files_changed: list of file paths changed in the pull request
5146 :param files_changed: list of file paths changed in the pull request
5148 """
5147 """
5149
5148
5150 source_branch = source_branch or ''
5149 source_branch = source_branch or ''
5151 target_branch = target_branch or ''
5150 target_branch = target_branch or ''
5152 files_changed = files_changed or []
5151 files_changed = files_changed or []
5153
5152
5154 branch_matches = True
5153 branch_matches = True
5155 if source_branch or target_branch:
5154 if source_branch or target_branch:
5156 if self.source_branch_pattern == '*':
5155 if self.source_branch_pattern == '*':
5157 source_branch_match = True
5156 source_branch_match = True
5158 else:
5157 else:
5159 if self.source_branch_pattern.startswith('re:'):
5158 if self.source_branch_pattern.startswith('re:'):
5160 source_pattern = self.source_branch_pattern[3:]
5159 source_pattern = self.source_branch_pattern[3:]
5161 else:
5160 else:
5162 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
5161 source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
5163 source_branch_regex = re.compile(source_pattern)
5162 source_branch_regex = re.compile(source_pattern)
5164 source_branch_match = bool(source_branch_regex.search(source_branch))
5163 source_branch_match = bool(source_branch_regex.search(source_branch))
5165 if self.target_branch_pattern == '*':
5164 if self.target_branch_pattern == '*':
5166 target_branch_match = True
5165 target_branch_match = True
5167 else:
5166 else:
5168 if self.target_branch_pattern.startswith('re:'):
5167 if self.target_branch_pattern.startswith('re:'):
5169 target_pattern = self.target_branch_pattern[3:]
5168 target_pattern = self.target_branch_pattern[3:]
5170 else:
5169 else:
5171 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
5170 target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
5172 target_branch_regex = re.compile(target_pattern)
5171 target_branch_regex = re.compile(target_pattern)
5173 target_branch_match = bool(target_branch_regex.search(target_branch))
5172 target_branch_match = bool(target_branch_regex.search(target_branch))
5174
5173
5175 branch_matches = source_branch_match and target_branch_match
5174 branch_matches = source_branch_match and target_branch_match
5176
5175
5177 files_matches = True
5176 files_matches = True
5178 if self.file_pattern != '*':
5177 if self.file_pattern != '*':
5179 files_matches = False
5178 files_matches = False
5180 if self.file_pattern.startswith('re:'):
5179 if self.file_pattern.startswith('re:'):
5181 file_pattern = self.file_pattern[3:]
5180 file_pattern = self.file_pattern[3:]
5182 else:
5181 else:
5183 file_pattern = glob2re(self.file_pattern)
5182 file_pattern = glob2re(self.file_pattern)
5184 file_regex = re.compile(file_pattern)
5183 file_regex = re.compile(file_pattern)
5185 for file_data in files_changed:
5184 for file_data in files_changed:
5186 filename = file_data.get('filename')
5185 filename = file_data.get('filename')
5187
5186
5188 if file_regex.search(filename):
5187 if file_regex.search(filename):
5189 files_matches = True
5188 files_matches = True
5190 break
5189 break
5191
5190
5192 return branch_matches and files_matches
5191 return branch_matches and files_matches
5193
5192
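A brief editor-added sketch of how this matching could be exercised (not part of the changeset; the rule instance and file list below are hypothetical, and the exact glob semantics come from glob2re):

rule = RepoReviewRule()
rule.source_branch_pattern = 'feature/*'               # glob, validated through glob2re
rule.target_branch_pattern = 're:^(default|stable)$'   # explicit regex via the `re:` prefix
rule.file_pattern = '*.py'

files_changed = [{'filename': 'rhodecode/model/db.py'}]  # hypothetical PR file list
print(rule.matches('feature/login', 'default', files_changed))
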
5194 @property
5193 @property
5195 def review_users(self):
5194 def review_users(self):
5196 """ Returns the users which this rule applies to """
5195 """ Returns the users which this rule applies to """
5197
5196
5198 users = collections.OrderedDict()
5197 users = collections.OrderedDict()
5199
5198
5200 for rule_user in self.rule_users:
5199 for rule_user in self.rule_users:
5201 if rule_user.user.active:
5200 if rule_user.user.active:
5202 if rule_user.user not in users:
5201 if rule_user.user not in users:
5203 users[rule_user.user.username] = {
5202 users[rule_user.user.username] = {
5204 'user': rule_user.user,
5203 'user': rule_user.user,
5205 'source': 'user',
5204 'source': 'user',
5206 'source_data': {},
5205 'source_data': {},
5207 'data': rule_user.rule_data()
5206 'data': rule_user.rule_data()
5208 }
5207 }
5209
5208
5210 for rule_user_group in self.rule_user_groups:
5209 for rule_user_group in self.rule_user_groups:
5211 source_data = {
5210 source_data = {
5212 'user_group_id': rule_user_group.users_group.users_group_id,
5211 'user_group_id': rule_user_group.users_group.users_group_id,
5213 'name': rule_user_group.users_group.users_group_name,
5212 'name': rule_user_group.users_group.users_group_name,
5214 'members': len(rule_user_group.users_group.members)
5213 'members': len(rule_user_group.users_group.members)
5215 }
5214 }
5216 for member in rule_user_group.users_group.members:
5215 for member in rule_user_group.users_group.members:
5217 if member.user.active:
5216 if member.user.active:
5218 key = member.user.username
5217 key = member.user.username
5219 if key in users:
5218 if key in users:
5220 # skip this member as we already have them;
5219 # skip this member as we already have them;
5221 # this prevents overriding the "first" matched
5220 # this prevents overriding the "first" matched
5222 # users with duplicates across multiple groups
5221 # users with duplicates across multiple groups
5223 continue
5222 continue
5224
5223
5225 users[key] = {
5224 users[key] = {
5226 'user': member.user,
5225 'user': member.user,
5227 'source': 'user_group',
5226 'source': 'user_group',
5228 'source_data': source_data,
5227 'source_data': source_data,
5229 'data': rule_user_group.rule_data()
5228 'data': rule_user_group.rule_data()
5230 }
5229 }
5231
5230
5232 return users
5231 return users
5233
5232
5234 def user_group_vote_rule(self, user_id):
5233 def user_group_vote_rule(self, user_id):
5235
5234
5236 rules = []
5235 rules = []
5237 if not self.rule_user_groups:
5236 if not self.rule_user_groups:
5238 return rules
5237 return rules
5239
5238
5240 for user_group in self.rule_user_groups:
5239 for user_group in self.rule_user_groups:
5241 user_group_members = [x.user_id for x in user_group.users_group.members]
5240 user_group_members = [x.user_id for x in user_group.users_group.members]
5242 if user_id in user_group_members:
5241 if user_id in user_group_members:
5243 rules.append(user_group)
5242 rules.append(user_group)
5244 return rules
5243 return rules
5245
5244
5246 def __str__(self):
5245 def __str__(self):
5247 return '<RepoReviewRule(id=%r, repo=%r)>' % (
5246 return '<RepoReviewRule(id=%r, repo=%r)>' % (
5248 self.repo_review_rule_id, self.repo)
5247 self.repo_review_rule_id, self.repo)
5249
5248
5250
5249
5251 class ScheduleEntry(Base, BaseModel):
5250 class ScheduleEntry(Base, BaseModel):
5252 __tablename__ = 'schedule_entries'
5251 __tablename__ = 'schedule_entries'
5253 __table_args__ = (
5252 __table_args__ = (
5254 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
5253 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
5255 UniqueConstraint('task_uid', name='s_task_uid_idx'),
5254 UniqueConstraint('task_uid', name='s_task_uid_idx'),
5256 base_table_args,
5255 base_table_args,
5257 )
5256 )
5258
5257
5259 schedule_types = ['crontab', 'timedelta', 'integer']
5258 schedule_types = ['crontab', 'timedelta', 'integer']
5260 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
5259 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
5261
5260
5262 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
5261 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
5263 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
5262 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
5264 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
5263 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
5265
5264
5266 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
5265 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
5267 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
5266 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
5268
5267
5269 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
5268 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
5270 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
5269 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
5271
5270
5272 # task
5271 # task
5273 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
5272 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
5274 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
5273 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
5275 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
5274 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
5276 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
5275 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
5277
5276
5278 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5277 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5279 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
5278 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
5280
5279
5281 @hybrid_property
5280 @hybrid_property
5282 def schedule_type(self):
5281 def schedule_type(self):
5283 return self._schedule_type
5282 return self._schedule_type
5284
5283
5285 @schedule_type.setter
5284 @schedule_type.setter
5286 def schedule_type(self, val):
5285 def schedule_type(self, val):
5287 if val not in self.schedule_types:
5286 if val not in self.schedule_types:
5288 raise ValueError('Value must be one of `{}` and got `{}`'.format(
5287 raise ValueError('Value must be one of `{}` and got `{}`'.format(
5289 self.schedule_types, val))
5288 self.schedule_types, val))
5290
5289
5291 self._schedule_type = val
5290 self._schedule_type = val
5292
5291
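A quick editor-added sketch of the validation above (in-memory instance, hypothetical values; not part of the changeset):

entry = ScheduleEntry()
entry.schedule_type = 'crontab'        # accepted, listed in ScheduleEntry.schedule_types
try:
    entry.schedule_type = 'weekly'     # rejected: not one of ['crontab', 'timedelta', 'integer']
except ValueError as exc:
    print(exc)
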
5293 @classmethod
5292 @classmethod
5294 def get_uid(cls, obj):
5293 def get_uid(cls, obj):
5295 args = obj.task_args
5294 args = obj.task_args
5296 kwargs = obj.task_kwargs
5295 kwargs = obj.task_kwargs
5297 if isinstance(args, JsonRaw):
5296 if isinstance(args, JsonRaw):
5298 try:
5297 try:
5299 args = json.loads(args)
5298 args = json.loads(args)
5300 except ValueError:
5299 except ValueError:
5301 args = tuple()
5300 args = tuple()
5302
5301
5303 if isinstance(kwargs, JsonRaw):
5302 if isinstance(kwargs, JsonRaw):
5304 try:
5303 try:
5305 kwargs = json.loads(kwargs)
5304 kwargs = json.loads(kwargs)
5306 except ValueError:
5305 except ValueError:
5307 kwargs = dict()
5306 kwargs = dict()
5308
5307
5309 dot_notation = obj.task_dot_notation
5308 dot_notation = obj.task_dot_notation
5310 val = '.'.join(map(safe_str, [
5309 val = '.'.join(map(safe_str, [
5311 sorted(dot_notation), args, sorted(kwargs.items())]))
5310 sorted(dot_notation), args, sorted(kwargs.items())]))
5312 return hashlib.sha1(val).hexdigest()
5311 return hashlib.sha1(val).hexdigest()
5313
5312
5314 @classmethod
5313 @classmethod
5315 def get_by_schedule_name(cls, schedule_name):
5314 def get_by_schedule_name(cls, schedule_name):
5316 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
5315 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
5317
5316
5318 @classmethod
5317 @classmethod
5319 def get_by_schedule_id(cls, schedule_id):
5318 def get_by_schedule_id(cls, schedule_id):
5320 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
5319 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
5321
5320
5322 @property
5321 @property
5323 def task(self):
5322 def task(self):
5324 return self.task_dot_notation
5323 return self.task_dot_notation
5325
5324
5326 @property
5325 @property
5327 def schedule(self):
5326 def schedule(self):
5328 from rhodecode.lib.celerylib.utils import raw_2_schedule
5327 from rhodecode.lib.celerylib.utils import raw_2_schedule
5329 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
5328 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
5330 return schedule
5329 return schedule
5331
5330
5332 @property
5331 @property
5333 def args(self):
5332 def args(self):
5334 try:
5333 try:
5335 return list(self.task_args or [])
5334 return list(self.task_args or [])
5336 except ValueError:
5335 except ValueError:
5337 return list()
5336 return list()
5338
5337
5339 @property
5338 @property
5340 def kwargs(self):
5339 def kwargs(self):
5341 try:
5340 try:
5342 return dict(self.task_kwargs or {})
5341 return dict(self.task_kwargs or {})
5343 except ValueError:
5342 except ValueError:
5344 return dict()
5343 return dict()
5345
5344
5346 def _as_raw(self, val, indent=None):
5345 def _as_raw(self, val, indent=None):
5347 if hasattr(val, 'de_coerce'):
5346 if hasattr(val, 'de_coerce'):
5348 val = val.de_coerce()
5347 val = val.de_coerce()
5349 if val:
5348 if val:
5350 val = json.dumps(val, indent=indent, sort_keys=True)
5349 val = json.dumps(val, indent=indent, sort_keys=True)
5351
5350
5352 return val
5351 return val
5353
5352
5354 @property
5353 @property
5355 def schedule_definition_raw(self):
5354 def schedule_definition_raw(self):
5356 return self._as_raw(self.schedule_definition)
5355 return self._as_raw(self.schedule_definition)
5357
5356
5358 def args_raw(self, indent=None):
5357 def args_raw(self, indent=None):
5359 return self._as_raw(self.task_args, indent)
5358 return self._as_raw(self.task_args, indent)
5360
5359
5361 def kwargs_raw(self, indent=None):
5360 def kwargs_raw(self, indent=None):
5362 return self._as_raw(self.task_kwargs, indent)
5361 return self._as_raw(self.task_kwargs, indent)
5363
5362
5364 def __str__(self):
5363 def __str__(self):
5365 return '<DB:ScheduleEntry({}:{})>'.format(
5364 return '<DB:ScheduleEntry({}:{})>'.format(
5366 self.schedule_entry_id, self.schedule_name)
5365 self.schedule_entry_id, self.schedule_name)
5367
5366
5368
5367
5369 @event.listens_for(ScheduleEntry, 'before_update')
5368 @event.listens_for(ScheduleEntry, 'before_update')
5370 def update_task_uid(mapper, connection, target):
5369 def update_task_uid(mapper, connection, target):
5371 target.task_uid = ScheduleEntry.get_uid(target)
5370 target.task_uid = ScheduleEntry.get_uid(target)
5372
5371
5373
5372
5374 @event.listens_for(ScheduleEntry, 'before_insert')
5373 @event.listens_for(ScheduleEntry, 'before_insert')
5375 def set_task_uid(mapper, connection, target):
5374 def set_task_uid(mapper, connection, target):
5376 target.task_uid = ScheduleEntry.get_uid(target)
5375 target.task_uid = ScheduleEntry.get_uid(target)
5377
5376
5378
5377
5379 class _BaseBranchPerms(BaseModel):
5378 class _BaseBranchPerms(BaseModel):
5380 @classmethod
5379 @classmethod
5381 def compute_hash(cls, value):
5380 def compute_hash(cls, value):
5382 return sha1_safe(value)
5381 return sha1_safe(value)
5383
5382
5384 @hybrid_property
5383 @hybrid_property
5385 def branch_pattern(self):
5384 def branch_pattern(self):
5386 return self._branch_pattern or '*'
5385 return self._branch_pattern or '*'
5387
5386
5388 @hybrid_property
5387 @hybrid_property
5389 def branch_hash(self):
5388 def branch_hash(self):
5390 return self._branch_hash
5389 return self._branch_hash
5391
5390
5392 def _validate_glob(self, value):
5391 def _validate_glob(self, value):
5393 re.compile('^' + glob2re(value) + '$')
5392 re.compile('^' + glob2re(value) + '$')
5394
5393
5395 @branch_pattern.setter
5394 @branch_pattern.setter
5396 def branch_pattern(self, value):
5395 def branch_pattern(self, value):
5397 self._validate_glob(value)
5396 self._validate_glob(value)
5398 self._branch_pattern = value or '*'
5397 self._branch_pattern = value or '*'
5399 # set the Hash when setting the branch pattern
5398 # set the Hash when setting the branch pattern
5400 self._branch_hash = self.compute_hash(self._branch_pattern)
5399 self._branch_hash = self.compute_hash(self._branch_pattern)
5401
5400
5402 def matches(self, branch):
5401 def matches(self, branch):
5403 """
5402 """
5404 Check if the given branch matches this entry
5403 Check if the given branch matches this entry
5405
5404
5406 :param branch: branch name for the commit
5405 :param branch: branch name for the commit
5407 """
5406 """
5408
5407
5409 branch = branch or ''
5408 branch = branch or ''
5410
5409
5411 branch_matches = True
5410 branch_matches = True
5412 if branch:
5411 if branch:
5413 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
5412 branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
5414 branch_matches = bool(branch_regex.search(branch))
5413 branch_matches = bool(branch_regex.search(branch))
5415
5414
5416 return branch_matches
5415 return branch_matches
5417
5416
5418
5417
5419 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
5418 class UserToRepoBranchPermission(Base, _BaseBranchPerms):
5420 __tablename__ = 'user_to_repo_branch_permissions'
5419 __tablename__ = 'user_to_repo_branch_permissions'
5421 __table_args__ = (
5420 __table_args__ = (
5422 base_table_args
5421 base_table_args
5423 )
5422 )
5424
5423
5425 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5424 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5426
5425
5427 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5426 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5428 repo = relationship('Repository', backref='user_branch_perms')
5427 repo = relationship('Repository', backref='user_branch_perms')
5429
5428
5430 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5429 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5431 permission = relationship('Permission')
5430 permission = relationship('Permission')
5432
5431
5433 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
5432 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
5434 user_repo_to_perm = relationship('UserRepoToPerm')
5433 user_repo_to_perm = relationship('UserRepoToPerm')
5435
5434
5436 rule_order = Column('rule_order', Integer(), nullable=False)
5435 rule_order = Column('rule_order', Integer(), nullable=False)
5437 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
5436 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
5438 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5437 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5439
5438
5440 def __str__(self):
5439 def __str__(self):
5441 return f'<UserBranchPermission({self.user_repo_to_perm} => {self.branch_pattern!r})>'
5440 return f'<UserBranchPermission({self.user_repo_to_perm} => {self.branch_pattern!r})>'
5442
5441
5443
5442
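An editor-added sketch of the branch-pattern handling (hypothetical values; not part of the changeset). Setting branch_pattern runs the glob validation from _BaseBranchPerms and stores the computed hash alongside it:

perm = UserToRepoBranchPermission()
perm.branch_pattern = 'release/*'
print(perm.branch_hash)                # sha1_safe('release/*'), kept in sync by the setter
print(perm.matches('release/4.27.0'))  # glob converted via glob2re, anchored with ^...$
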
5444 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
5443 class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
5445 __tablename__ = 'user_group_to_repo_branch_permissions'
5444 __tablename__ = 'user_group_to_repo_branch_permissions'
5446 __table_args__ = (
5445 __table_args__ = (
5447 base_table_args
5446 base_table_args
5448 )
5447 )
5449
5448
5450 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5449 branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)
5451
5450
5452 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5451 repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
5453 repo = relationship('Repository', backref='user_group_branch_perms')
5452 repo = relationship('Repository', backref='user_group_branch_perms')
5454
5453
5455 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5454 permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
5456 permission = relationship('Permission')
5455 permission = relationship('Permission')
5457
5456
5458 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
5457 rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
5459 user_group_repo_to_perm = relationship('UserGroupRepoToPerm')
5458 user_group_repo_to_perm = relationship('UserGroupRepoToPerm')
5460
5459
5461 rule_order = Column('rule_order', Integer(), nullable=False)
5460 rule_order = Column('rule_order', Integer(), nullable=False)
5462 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
5461 _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*') # glob
5463 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5462 _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))
5464
5463
5465 def __str__(self):
5464 def __str__(self):
5466 return f'<UserGroupBranchPermission({self.user_group_repo_to_perm} => {self.branch_pattern!r})>'
5465 return f'<UserGroupBranchPermission({self.user_group_repo_to_perm} => {self.branch_pattern!r})>'
5467
5466
5468
5467
5469 class UserBookmark(Base, BaseModel):
5468 class UserBookmark(Base, BaseModel):
5470 __tablename__ = 'user_bookmarks'
5469 __tablename__ = 'user_bookmarks'
5471 __table_args__ = (
5470 __table_args__ = (
5472 UniqueConstraint('user_id', 'bookmark_repo_id'),
5471 UniqueConstraint('user_id', 'bookmark_repo_id'),
5473 UniqueConstraint('user_id', 'bookmark_repo_group_id'),
5472 UniqueConstraint('user_id', 'bookmark_repo_group_id'),
5474 UniqueConstraint('user_id', 'bookmark_position'),
5473 UniqueConstraint('user_id', 'bookmark_position'),
5475 base_table_args
5474 base_table_args
5476 )
5475 )
5477
5476
5478 user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
5477 user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
5479 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
5478 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
5480 position = Column("bookmark_position", Integer(), nullable=False)
5479 position = Column("bookmark_position", Integer(), nullable=False)
5481 title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
5480 title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
5482 redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
5481 redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
5483 created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5482 created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5484
5483
5485 bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
5484 bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
5486 bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
5485 bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)
5487
5486
5488 user = relationship("User")
5487 user = relationship("User")
5489
5488
5490 repository = relationship("Repository")
5489 repository = relationship("Repository")
5491 repository_group = relationship("RepoGroup")
5490 repository_group = relationship("RepoGroup")
5492
5491
5493 @classmethod
5492 @classmethod
5494 def get_by_position_for_user(cls, position, user_id):
5493 def get_by_position_for_user(cls, position, user_id):
5495 return cls.query() \
5494 return cls.query() \
5496 .filter(UserBookmark.user_id == user_id) \
5495 .filter(UserBookmark.user_id == user_id) \
5497 .filter(UserBookmark.position == position).scalar()
5496 .filter(UserBookmark.position == position).scalar()
5498
5497
5499 @classmethod
5498 @classmethod
5500 def get_bookmarks_for_user(cls, user_id, cache=True):
5499 def get_bookmarks_for_user(cls, user_id, cache=True):
5501 bookmarks = cls.query() \
5500 bookmarks = cls.query() \
5502 .filter(UserBookmark.user_id == user_id) \
5501 .filter(UserBookmark.user_id == user_id) \
5503 .options(joinedload(UserBookmark.repository)) \
5502 .options(joinedload(UserBookmark.repository)) \
5504 .options(joinedload(UserBookmark.repository_group)) \
5503 .options(joinedload(UserBookmark.repository_group)) \
5505 .order_by(UserBookmark.position.asc())
5504 .order_by(UserBookmark.position.asc())
5506
5505
5507 if cache:
5506 if cache:
5508 bookmarks = bookmarks.options(
5507 bookmarks = bookmarks.options(
5509 FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
5508 FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
5510 )
5509 )
5511
5510
5512 return bookmarks.all()
5511 return bookmarks.all()
5513
5512
5514 def __str__(self):
5513 def __str__(self):
5515 return f'<UserBookmark({self.position} @ {self.redirect_url!r})>'
5514 return f'<UserBookmark({self.position} @ {self.redirect_url!r})>'
5516
5515
5517
5516
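An editor-added usage sketch for the cached lookup above (the user id is hypothetical and an initialized database session is assumed; not part of the changeset):

for bookmark in UserBookmark.get_bookmarks_for_user(user_id=2, cache=True):
    print(bookmark.position, bookmark.title, bookmark.redirect_url)
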
5518 class FileStore(Base, BaseModel):
5517 class FileStore(Base, BaseModel):
5519 __tablename__ = 'file_store'
5518 __tablename__ = 'file_store'
5520 __table_args__ = (
5519 __table_args__ = (
5521 base_table_args
5520 base_table_args
5522 )
5521 )
5523
5522
5524 file_store_id = Column('file_store_id', Integer(), primary_key=True)
5523 file_store_id = Column('file_store_id', Integer(), primary_key=True)
5525 file_uid = Column('file_uid', String(1024), nullable=False)
5524 file_uid = Column('file_uid', String(1024), nullable=False)
5526 file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
5525 file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
5527 file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
5526 file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
5528 file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
5527 file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)
5529
5528
5530 # sha256 hash
5529 # sha256 hash
5531 file_hash = Column('file_hash', String(512), nullable=False)
5530 file_hash = Column('file_hash', String(512), nullable=False)
5532 file_size = Column('file_size', BigInteger(), nullable=False)
5531 file_size = Column('file_size', BigInteger(), nullable=False)
5533
5532
5534 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5533 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
5535 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
5534 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
5536 accessed_count = Column('accessed_count', Integer(), default=0)
5535 accessed_count = Column('accessed_count', Integer(), default=0)
5537
5536
5538 enabled = Column('enabled', Boolean(), nullable=False, default=True)
5537 enabled = Column('enabled', Boolean(), nullable=False, default=True)
5539
5538
5540 # if repo/repo_group reference is set, check for permissions
5539 # if repo/repo_group reference is set, check for permissions
5541 check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
5540 check_acl = Column('check_acl', Boolean(), nullable=False, default=True)
5542
5541
5543 # hidden marks an attachment that should not be shown in the artifact listing
5542 # hidden marks an attachment that should not be shown in the artifact listing
5544 hidden = Column('hidden', Boolean(), nullable=False, default=False)
5543 hidden = Column('hidden', Boolean(), nullable=False, default=False)
5545
5544
5546 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
5545 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
5547 upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')
5546 upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')
5548
5547
5549 file_metadata = relationship('FileStoreMetadata', lazy='joined')
5548 file_metadata = relationship('FileStoreMetadata', lazy='joined')
5550
5549
5551 # scope limited to a user that the requester has access to
5550 # scope limited to a user that the requester has access to
5552 scope_user_id = Column(
5551 scope_user_id = Column(
5553 'scope_user_id', Integer(), ForeignKey('users.user_id'),
5552 'scope_user_id', Integer(), ForeignKey('users.user_id'),
5554 nullable=True, unique=None, default=None)
5553 nullable=True, unique=None, default=None)
5555 user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')
5554 user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')
5556
5555
5557 # scope limited to a user group that the requester has access to
5556 # scope limited to a user group that the requester has access to
5558 scope_user_group_id = Column(
5557 scope_user_group_id = Column(
5559 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
5558 'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
5560 nullable=True, unique=None, default=None)
5559 nullable=True, unique=None, default=None)
5561 user_group = relationship('UserGroup', lazy='joined')
5560 user_group = relationship('UserGroup', lazy='joined')
5562
5561
5563 # scope limited to a repo that the requester has access to
5562 # scope limited to a repo that the requester has access to
5564 scope_repo_id = Column(
5563 scope_repo_id = Column(
5565 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
5564 'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
5566 nullable=True, unique=None, default=None)
5565 nullable=True, unique=None, default=None)
5567 repo = relationship('Repository', lazy='joined')
5566 repo = relationship('Repository', lazy='joined')
5568
5567
5569 # scope limited to a repo group that the requester has access to
5568 # scope limited to a repo group that the requester has access to
5570 scope_repo_group_id = Column(
5569 scope_repo_group_id = Column(
5571 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
5570 'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
5572 nullable=True, unique=None, default=None)
5571 nullable=True, unique=None, default=None)
5573 repo_group = relationship('RepoGroup', lazy='joined')
5572 repo_group = relationship('RepoGroup', lazy='joined')
5574
5573
5575 @classmethod
5574 @classmethod
5576 def get_by_store_uid(cls, file_store_uid, safe=False):
5575 def get_by_store_uid(cls, file_store_uid, safe=False):
5577 if safe:
5576 if safe:
5578 return FileStore.query().filter(FileStore.file_uid == file_store_uid).first()
5577 return FileStore.query().filter(FileStore.file_uid == file_store_uid).first()
5579 else:
5578 else:
5580 return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
5579 return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
5581
5580
5582 @classmethod
5581 @classmethod
5583 def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
5582 def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
5584 file_description='', enabled=True, hidden=False, check_acl=True,
5583 file_description='', enabled=True, hidden=False, check_acl=True,
5585 user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
5584 user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
5586
5585
5587 store_entry = FileStore()
5586 store_entry = FileStore()
5588 store_entry.file_uid = file_uid
5587 store_entry.file_uid = file_uid
5589 store_entry.file_display_name = file_display_name
5588 store_entry.file_display_name = file_display_name
5590 store_entry.file_org_name = filename
5589 store_entry.file_org_name = filename
5591 store_entry.file_size = file_size
5590 store_entry.file_size = file_size
5592 store_entry.file_hash = file_hash
5591 store_entry.file_hash = file_hash
5593 store_entry.file_description = file_description
5592 store_entry.file_description = file_description
5594
5593
5595 store_entry.check_acl = check_acl
5594 store_entry.check_acl = check_acl
5596 store_entry.enabled = enabled
5595 store_entry.enabled = enabled
5597 store_entry.hidden = hidden
5596 store_entry.hidden = hidden
5598
5597
5599 store_entry.user_id = user_id
5598 store_entry.user_id = user_id
5600 store_entry.scope_user_id = scope_user_id
5599 store_entry.scope_user_id = scope_user_id
5601 store_entry.scope_repo_id = scope_repo_id
5600 store_entry.scope_repo_id = scope_repo_id
5602 store_entry.scope_repo_group_id = scope_repo_group_id
5601 store_entry.scope_repo_group_id = scope_repo_group_id
5603
5602
5604 return store_entry
5603 return store_entry
5605
5604
5606 @classmethod
5605 @classmethod
5607 def store_metadata(cls, file_store_id, args, commit=True):
5606 def store_metadata(cls, file_store_id, args, commit=True):
5608 file_store = FileStore.get(file_store_id)
5607 file_store = FileStore.get(file_store_id)
5609 if file_store is None:
5608 if file_store is None:
5610 return
5609 return
5611
5610
5612 for section, key, value, value_type in args:
5611 for section, key, value, value_type in args:
5613 has_key = FileStoreMetadata().query() \
5612 has_key = FileStoreMetadata().query() \
5614 .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
5613 .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
5615 .filter(FileStoreMetadata.file_store_meta_section == section) \
5614 .filter(FileStoreMetadata.file_store_meta_section == section) \
5616 .filter(FileStoreMetadata.file_store_meta_key == key) \
5615 .filter(FileStoreMetadata.file_store_meta_key == key) \
5617 .scalar()
5616 .scalar()
5618 if has_key:
5617 if has_key:
5619 msg = 'key `{}` already defined under section `{}` for this file.'\
5618 msg = 'key `{}` already defined under section `{}` for this file.'\
5620 .format(key, section)
5619 .format(key, section)
5621 raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)
5620 raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)
5622
5621
5623 # NOTE(marcink): raises ArtifactMetadataBadValueType
5622 # NOTE(marcink): raises ArtifactMetadataBadValueType
5624 FileStoreMetadata.valid_value_type(value_type)
5623 FileStoreMetadata.valid_value_type(value_type)
5625
5624
5626 meta_entry = FileStoreMetadata()
5625 meta_entry = FileStoreMetadata()
5627 meta_entry.file_store = file_store
5626 meta_entry.file_store = file_store
5628 meta_entry.file_store_meta_section = section
5627 meta_entry.file_store_meta_section = section
5629 meta_entry.file_store_meta_key = key
5628 meta_entry.file_store_meta_key = key
5630 meta_entry.file_store_meta_value_type = value_type
5629 meta_entry.file_store_meta_value_type = value_type
5631 meta_entry.file_store_meta_value = value
5630 meta_entry.file_store_meta_value = value
5632
5631
5633 Session().add(meta_entry)
5632 Session().add(meta_entry)
5634
5633
5635 try:
5634 try:
5636 if commit:
5635 if commit:
5637 Session().commit()
5636 Session().commit()
5638 except IntegrityError:
5637 except IntegrityError:
5639 Session().rollback()
5638 Session().rollback()
5640 raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')
5639 raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')
5641
5640
5642 @classmethod
5641 @classmethod
5643 def bump_access_counter(cls, file_uid, commit=True):
5642 def bump_access_counter(cls, file_uid, commit=True):
5644 FileStore().query()\
5643 FileStore().query()\
5645 .filter(FileStore.file_uid == file_uid)\
5644 .filter(FileStore.file_uid == file_uid)\
5646 .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
5645 .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
5647 FileStore.accessed_on: datetime.datetime.now()})
5646 FileStore.accessed_on: datetime.datetime.now()})
5648 if commit:
5647 if commit:
5649 Session().commit()
5648 Session().commit()
5650
5649
5651 def __json__(self):
5650 def __json__(self):
5652 data = {
5651 data = {
5653 'filename': self.file_display_name,
5652 'filename': self.file_display_name,
5654 'filename_org': self.file_org_name,
5653 'filename_org': self.file_org_name,
5655 'file_uid': self.file_uid,
5654 'file_uid': self.file_uid,
5656 'description': self.file_description,
5655 'description': self.file_description,
5657 'hidden': self.hidden,
5656 'hidden': self.hidden,
5658 'size': self.file_size,
5657 'size': self.file_size,
5659 'created_on': self.created_on,
5658 'created_on': self.created_on,
5660 'uploaded_by': self.upload_user.get_api_data(details='basic'),
5659 'uploaded_by': self.upload_user.get_api_data(details='basic'),
5661 'downloaded_times': self.accessed_count,
5660 'downloaded_times': self.accessed_count,
5662 'sha256': self.file_hash,
5661 'sha256': self.file_hash,
5663 'metadata': self.file_metadata,
5662 'metadata': self.file_metadata,
5664 }
5663 }
5665
5664
5666 return data
5665 return data
5667
5666
5668 def __str__(self):
5667 def __str__(self):
5669 return f'<FileStore({self.file_store_id})>'
5668 return f'<FileStore({self.file_store_id})>'
5670
5669
5671
5670
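An editor-added sketch of registering an artifact and attaching metadata (the uid, hash, size and the `uploader` user are hypothetical placeholders; not part of the changeset):

artifact = FileStore.create(
    file_uid='deadbeef-uid',
    filename='build-log.txt',
    file_hash='e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
    file_size=2048,
    user_id=uploader.user_id,          # assumes an existing User row
)
Session().add(artifact)
Session().commit()

# metadata entries are (section, key, value, value_type) tuples
FileStore.store_metadata(
    artifact.file_store_id,
    [('ci', 'pipeline', 'nightly', 'unicode')],
)
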
5672 class FileStoreMetadata(Base, BaseModel):
5671 class FileStoreMetadata(Base, BaseModel):
5673 __tablename__ = 'file_store_metadata'
5672 __tablename__ = 'file_store_metadata'
5674 __table_args__ = (
5673 __table_args__ = (
5675 UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
5674 UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
5676 Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
5675 Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
5677 Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
5676 Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
5678 base_table_args
5677 base_table_args
5679 )
5678 )
5680 SETTINGS_TYPES = {
5679 SETTINGS_TYPES = {
5681 'str': safe_str,
5680 'str': safe_str,
5682 'int': safe_int,
5681 'int': safe_int,
5683 'unicode': safe_str,
5682 'unicode': safe_str,
5684 'bool': str2bool,
5683 'bool': str2bool,
5685 'list': functools.partial(aslist, sep=',')
5684 'list': functools.partial(aslist, sep=',')
5686 }
5685 }
5687
5686
5688 file_store_meta_id = Column(
5687 file_store_meta_id = Column(
5689 "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
5688 "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
5690 primary_key=True)
5689 primary_key=True)
5691 _file_store_meta_section = Column(
5690 _file_store_meta_section = Column(
5692 "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5691 "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5693 nullable=True, unique=None, default=None)
5692 nullable=True, unique=None, default=None)
5694 _file_store_meta_section_hash = Column(
5693 _file_store_meta_section_hash = Column(
5695 "file_store_meta_section_hash", String(255),
5694 "file_store_meta_section_hash", String(255),
5696 nullable=True, unique=None, default=None)
5695 nullable=True, unique=None, default=None)
5697 _file_store_meta_key = Column(
5696 _file_store_meta_key = Column(
5698 "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5697 "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5699 nullable=True, unique=None, default=None)
5698 nullable=True, unique=None, default=None)
5700 _file_store_meta_key_hash = Column(
5699 _file_store_meta_key_hash = Column(
5701 "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
5700 "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
5702 _file_store_meta_value = Column(
5701 _file_store_meta_value = Column(
5703 "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
5702 "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
5704 nullable=True, unique=None, default=None)
5703 nullable=True, unique=None, default=None)
5705 _file_store_meta_value_type = Column(
5704 _file_store_meta_value_type = Column(
5706 "file_store_meta_value_type", String(255), nullable=True, unique=None,
5705 "file_store_meta_value_type", String(255), nullable=True, unique=None,
5707 default='unicode')
5706 default='unicode')
5708
5707
5709 file_store_id = Column(
5708 file_store_id = Column(
5710 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
5709 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
5711 nullable=True, unique=None, default=None)
5710 nullable=True, unique=None, default=None)
5712
5711
5713 file_store = relationship('FileStore', lazy='joined')
5712 file_store = relationship('FileStore', lazy='joined')
5714
5713
5715 @classmethod
5714 @classmethod
5716 def valid_value_type(cls, value):
5715 def valid_value_type(cls, value):
5717 if value.split('.')[0] not in cls.SETTINGS_TYPES:
5716 if value.split('.')[0] not in cls.SETTINGS_TYPES:
5718 raise ArtifactMetadataBadValueType(
5717 raise ArtifactMetadataBadValueType(
5719 'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))
5718 'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))
5720
5719
5721 @hybrid_property
5720 @hybrid_property
5722 def file_store_meta_section(self):
5721 def file_store_meta_section(self):
5723 return self._file_store_meta_section
5722 return self._file_store_meta_section
5724
5723
5725 @file_store_meta_section.setter
5724 @file_store_meta_section.setter
5726 def file_store_meta_section(self, value):
5725 def file_store_meta_section(self, value):
5727 self._file_store_meta_section = value
5726 self._file_store_meta_section = value
5728 self._file_store_meta_section_hash = _hash_key(value)
5727 self._file_store_meta_section_hash = _hash_key(value)
5729
5728
5730 @hybrid_property
5729 @hybrid_property
5731 def file_store_meta_key(self):
5730 def file_store_meta_key(self):
5732 return self._file_store_meta_key
5731 return self._file_store_meta_key
5733
5732
5734 @file_store_meta_key.setter
5733 @file_store_meta_key.setter
5735 def file_store_meta_key(self, value):
5734 def file_store_meta_key(self, value):
5736 self._file_store_meta_key = value
5735 self._file_store_meta_key = value
5737 self._file_store_meta_key_hash = _hash_key(value)
5736 self._file_store_meta_key_hash = _hash_key(value)
5738
5737
5739 @hybrid_property
5738 @hybrid_property
5740 def file_store_meta_value(self):
5739 def file_store_meta_value(self):
5741 val = self._file_store_meta_value
5740 val = self._file_store_meta_value
5742
5741
5743 if self._file_store_meta_value_type:
5742 if self._file_store_meta_value_type:
5744 # e.g. 'unicode.encrypted' has the base type 'unicode'
5743 # e.g. 'unicode.encrypted' has the base type 'unicode'
5745 _type = self._file_store_meta_value_type.split('.')[0]
5744 _type = self._file_store_meta_value_type.split('.')[0]
5746 # decode the encrypted value if it's an encrypted field type
5745 # decode the encrypted value if it's an encrypted field type
5747 if '.encrypted' in self._file_store_meta_value_type:
5746 if '.encrypted' in self._file_store_meta_value_type:
5748 cipher = EncryptedTextValue()
5747 cipher = EncryptedTextValue()
5749 val = safe_str(cipher.process_result_value(val, None))
5748 val = safe_str(cipher.process_result_value(val, None))
5750 # do final type conversion
5749 # do final type conversion
5751 converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
5750 converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
5752 val = converter(val)
5751 val = converter(val)
5753
5752
5754 return val
5753 return val
5755
5754
5756 @file_store_meta_value.setter
5755 @file_store_meta_value.setter
5757 def file_store_meta_value(self, val):
5756 def file_store_meta_value(self, val):
5758 val = safe_str(val)
5757 val = safe_str(val)
5759 # encrypt the value if it's an encrypted field type
5758 # encrypt the value if it's an encrypted field type
5760 if '.encrypted' in self.file_store_meta_value_type:
5759 if '.encrypted' in self.file_store_meta_value_type:
5761 cipher = EncryptedTextValue()
5760 cipher = EncryptedTextValue()
5762 val = safe_str(cipher.process_bind_param(val, None))
5761 val = safe_str(cipher.process_bind_param(val, None))
5763 self._file_store_meta_value = val
5762 self._file_store_meta_value = val
5764
5763
5765 @hybrid_property
5764 @hybrid_property
5766 def file_store_meta_value_type(self):
5765 def file_store_meta_value_type(self):
5767 return self._file_store_meta_value_type
5766 return self._file_store_meta_value_type
5768
5767
5769 @file_store_meta_value_type.setter
5768 @file_store_meta_value_type.setter
5770 def file_store_meta_value_type(self, val):
5769 def file_store_meta_value_type(self, val):
5771 # e.g. unicode.encrypted
5770 # e.g. unicode.encrypted
5772 self.valid_value_type(val)
5771 self.valid_value_type(val)
5773 self._file_store_meta_value_type = val
5772 self._file_store_meta_value_type = val
5774
5773
5775 def __json__(self):
5774 def __json__(self):
5776 data = {
5775 data = {
5777 'artifact': self.file_store.file_uid,
5776 'artifact': self.file_store.file_uid,
5778 'section': self.file_store_meta_section,
5777 'section': self.file_store_meta_section,
5779 'key': self.file_store_meta_key,
5778 'key': self.file_store_meta_key,
5780 'value': self.file_store_meta_value,
5779 'value': self.file_store_meta_value,
5781 }
5780 }
5782
5781
5783 return data
5782 return data
5784
5783
5785 def __str__(self):
5784 def __str__(self):
5786 return '<%s[%s]%s=>%s>' % (self.__class__.__name__, self.file_store_meta_section,
5785 return '<%s[%s]%s=>%s>' % (self.__class__.__name__, self.file_store_meta_section,
5787 self.file_store_meta_key, self.file_store_meta_value)
5786 self.file_store_meta_key, self.file_store_meta_value)
5788
5787
5789
5788
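An editor-added sketch of the value-type handling above (assumes an encryption key is configured; not part of the changeset):

meta = FileStoreMetadata()
meta.file_store_meta_section = 'secrets'
meta.file_store_meta_key = 'token'
meta.file_store_meta_value_type = 'unicode.encrypted'  # base type 'unicode', stored encrypted
meta.file_store_meta_value = 'abc123'                  # encrypted via EncryptedTextValue on write
print(meta.file_store_meta_value)                      # decrypted and coerced back on read
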
5790 class DbMigrateVersion(Base, BaseModel):
5789 class DbMigrateVersion(Base, BaseModel):
5791 __tablename__ = 'db_migrate_version'
5790 __tablename__ = 'db_migrate_version'
5792 __table_args__ = (
5791 __table_args__ = (
5793 base_table_args,
5792 base_table_args,
5794 )
5793 )
5795
5794
5796 repository_id = Column('repository_id', String(250), primary_key=True)
5795 repository_id = Column('repository_id', String(250), primary_key=True)
5797 repository_path = Column('repository_path', Text)
5796 repository_path = Column('repository_path', Text)
5798 version = Column('version', Integer)
5797 version = Column('version', Integer)
5799
5798
5800 @classmethod
5799 @classmethod
5801 def set_version(cls, version):
5800 def set_version(cls, version):
5802 """
5801 """
5803 Helper for forcing a different version, usually for debugging purposes via ishell.
5802 Helper for forcing a different version, usually for debugging purposes via ishell.
5804 """
5803 """
5805 ver = DbMigrateVersion.query().first()
5804 ver = DbMigrateVersion.query().first()
5806 ver.version = version
5805 ver.version = version
5807 Session().commit()
5806 Session().commit()
5808
5807
5809
5808
5810 class DbSession(Base, BaseModel):
5809 class DbSession(Base, BaseModel):
5811 __tablename__ = 'db_session'
5810 __tablename__ = 'db_session'
5812 __table_args__ = (
5811 __table_args__ = (
5813 base_table_args,
5812 base_table_args,
5814 )
5813 )
5815
5814
5816 def __str__(self):
5815 def __str__(self):
5817 return f'<DB:DbSession({self.id})>'
5816 return f'<DB:DbSession({self.id})>'
5818
5817
5819 id = Column('id', Integer())
5818 id = Column('id', Integer())
5820 namespace = Column('namespace', String(255), primary_key=True)
5819 namespace = Column('namespace', String(255), primary_key=True)
5821 accessed = Column('accessed', DateTime, nullable=False)
5820 accessed = Column('accessed', DateTime, nullable=False)
5822 created = Column('created', DateTime, nullable=False)
5821 created = Column('created', DateTime, nullable=False)
5823 data = Column('data', PickleType, nullable=False)
5822 data = Column('data', PickleType, nullable=False)
@@ -1,641 +1,640 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 """
20 """
22 these are the form validation classes
21 these are the form validation classes
23 http://formencode.org/module-formencode.validators.html
22 http://formencode.org/module-formencode.validators.html
24 for a list of all available validators
23 for a list of all available validators
25
24
26 we can create our own validators
25 we can create our own validators
27
26
28 The table below outlines the options which can be used in a schema in addition to the validators themselves
27 The table below outlines the options which can be used in a schema in addition to the validators themselves
29 pre_validators [] These validators will be applied before the schema
28 pre_validators [] These validators will be applied before the schema
30 chained_validators [] These validators will be applied after the schema
29 chained_validators [] These validators will be applied after the schema
31 allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present
30 allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present
32 filter_extra_fields False If True, then keys that aren't associated with a validator are removed
31 filter_extra_fields False If True, then keys that aren't associated with a validator are removed
33 if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default value has been specified and therefore missing keys shouldn't take a default value.
32 if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default value has been specified and therefore missing keys shouldn't take a default value.
34 ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already
33 ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already
35
34
36
35
37 <name> = formencode.validators.<name of validator>
36 <name> = formencode.validators.<name of validator>
38 <name> must equal form name
37 <name> must equal form name
39 list=[1,2,3,4,5]
38 list=[1,2,3,4,5]
40 for SELECT use formencode.All(OneOf(list), Int())
39 for SELECT use formencode.All(OneOf(list), Int())
41
40
42 """
41 """
43
42
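
A minimal sketch of how the schema options listed above combine in practice;
the schema and field names below are illustrative, not taken from RhodeCode:

    import formencode
    from formencode import validators

    class _ExampleForm(formencode.Schema):
        allow_extra_fields = True    # unknown keys are not an error...
        filter_extra_fields = True   # ...but they are dropped from the result
        username = validators.UnicodeString(strip=True, not_empty=True)
        age = validators.Int(min=0, if_missing=None)

    # returns the cleaned dict, or raises formencode.Invalid on bad input
    clean = _ExampleForm().to_python({'username': ' joe ', 'age': '30', 'junk': 'x'})
    # clean == {'username': 'joe', 'age': 30}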
44 import deform
43 import deform
45 import logging
44 import logging
46 import formencode
45 import formencode
47
46
48 from pkg_resources import resource_filename
47 from pkg_resources import resource_filename
49 from formencode import All, Pipe
48 from formencode import All, Pipe
50
49
51 from pyramid.threadlocal import get_current_request
50 from pyramid.threadlocal import get_current_request
52
51
53 from rhodecode import BACKENDS
52 from rhodecode import BACKENDS
54 from rhodecode.lib import helpers
53 from rhodecode.lib import helpers
55 from rhodecode.model import validators as v
54 from rhodecode.model import validators as v
56
55
57 log = logging.getLogger(__name__)
56 log = logging.getLogger(__name__)
58
57
59
58
60 deform_templates = resource_filename('deform', 'templates')
59 deform_templates = resource_filename('deform', 'templates')
61 rhodecode_templates = resource_filename('rhodecode', 'templates/forms')
60 rhodecode_templates = resource_filename('rhodecode', 'templates/forms')
62 search_path = (rhodecode_templates, deform_templates)
61 search_path = (rhodecode_templates, deform_templates)
63
62
64
63
65 class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory):
64 class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory):
66 """ Subclass of ZPTRendererFactory to add rhodecode context variables """
65 """ Subclass of ZPTRendererFactory to add rhodecode context variables """
67 def __call__(self, template_name, **kw):
66 def __call__(self, template_name, **kw):
68 kw['h'] = helpers
67 kw['h'] = helpers
69 kw['request'] = get_current_request()
68 kw['request'] = get_current_request()
70 return self.load(template_name)(**kw)
69 return self.load(template_name)(**kw)
71
70
72
71
73 form_renderer = RhodecodeFormZPTRendererFactory(search_path)
72 form_renderer = RhodecodeFormZPTRendererFactory(search_path)
74 deform.Form.set_default_renderer(form_renderer)
73 deform.Form.set_default_renderer(form_renderer)
75
74
76
75
77 def LoginForm(localizer):
76 def LoginForm(localizer):
78 _ = localizer
77 _ = localizer
79
78
80 class _LoginForm(formencode.Schema):
79 class _LoginForm(formencode.Schema):
81 allow_extra_fields = True
80 allow_extra_fields = True
82 filter_extra_fields = True
81 filter_extra_fields = True
83 username = v.UnicodeString(
82 username = v.UnicodeString(
84 strip=True,
83 strip=True,
85 min=1,
84 min=1,
86 not_empty=True,
85 not_empty=True,
87 messages={
86 messages={
88 'empty': _(u'Please enter a login'),
87 'empty': _(u'Please enter a login'),
89 'tooShort': _(u'Enter a value %(min)i characters long or more')
88 'tooShort': _(u'Enter a value %(min)i characters long or more')
90 }
89 }
91 )
90 )
92
91
93 password = v.UnicodeString(
92 password = v.UnicodeString(
94 strip=False,
93 strip=False,
95 min=3,
94 min=3,
96 max=72,
95 max=72,
97 not_empty=True,
96 not_empty=True,
98 messages={
97 messages={
99 'empty': _(u'Please enter a password'),
98 'empty': _(u'Please enter a password'),
100 'tooShort': _(u'Enter %(min)i characters or more')}
99 'tooShort': _(u'Enter %(min)i characters or more')}
101 )
100 )
102
101
103 remember = v.StringBoolean(if_missing=False)
102 remember = v.StringBoolean(if_missing=False)
104
103
105 chained_validators = [v.ValidAuth(localizer)]
104 chained_validators = [v.ValidAuth(localizer)]
106 return _LoginForm
105 return _LoginForm
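
A sketch of how these schema factories are typically consumed by a view; the
localizer wiring and the error handler below are assumptions for illustration,
not part of this module:

    import formencode

    _ = request.translate          # assumed localizer callable on the request
    login_form = LoginForm(_)()    # the factory returns a Schema class
    try:
        form_result = login_form.to_python(dict(request.POST))
    except formencode.Invalid as errors:
        render_errors(errors)      # hypothetical helper; errors.error_dict
                                   # carries the per-field messages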
107
106
108
107
109 def UserForm(localizer, edit=False, available_languages=None, old_data=None):
108 def UserForm(localizer, edit=False, available_languages=None, old_data=None):
110 old_data = old_data or {}
109 old_data = old_data or {}
111 available_languages = available_languages or []
110 available_languages = available_languages or []
112 _ = localizer
111 _ = localizer
113
112
114 class _UserForm(formencode.Schema):
113 class _UserForm(formencode.Schema):
115 allow_extra_fields = True
114 allow_extra_fields = True
116 filter_extra_fields = True
115 filter_extra_fields = True
117 username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
116 username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
118 v.ValidUsername(localizer, edit, old_data))
117 v.ValidUsername(localizer, edit, old_data))
119 if edit:
118 if edit:
120 new_password = All(
119 new_password = All(
121 v.ValidPassword(localizer),
120 v.ValidPassword(localizer),
122 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
121 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
123 )
122 )
124 password_confirmation = All(
123 password_confirmation = All(
125 v.ValidPassword(localizer),
124 v.ValidPassword(localizer),
126 v.UnicodeString(strip=False, min=6, max=72, not_empty=False),
125 v.UnicodeString(strip=False, min=6, max=72, not_empty=False),
127 )
126 )
128 admin = v.StringBoolean(if_missing=False)
127 admin = v.StringBoolean(if_missing=False)
129 else:
128 else:
130 password = All(
129 password = All(
131 v.ValidPassword(localizer),
130 v.ValidPassword(localizer),
132 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
131 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
133 )
132 )
134 password_confirmation = All(
133 password_confirmation = All(
135 v.ValidPassword(localizer),
134 v.ValidPassword(localizer),
136 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
135 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
137 )
136 )
138
137
139 password_change = v.StringBoolean(if_missing=False)
138 password_change = v.StringBoolean(if_missing=False)
140 create_repo_group = v.StringBoolean(if_missing=False)
139 create_repo_group = v.StringBoolean(if_missing=False)
141
140
142 active = v.StringBoolean(if_missing=False)
141 active = v.StringBoolean(if_missing=False)
143 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
142 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
144 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
143 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
145 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
144 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
146 description = v.UnicodeString(strip=True, min=1, max=250, not_empty=False,
145 description = v.UnicodeString(strip=True, min=1, max=250, not_empty=False,
147 if_missing='')
146 if_missing='')
148 extern_name = v.UnicodeString(strip=True)
147 extern_name = v.UnicodeString(strip=True)
149 extern_type = v.UnicodeString(strip=True)
148 extern_type = v.UnicodeString(strip=True)
150 language = v.OneOf(available_languages, hideList=False,
149 language = v.OneOf(available_languages, hideList=False,
151 testValueList=True, if_missing=None)
150 testValueList=True, if_missing=None)
152 chained_validators = [v.ValidPasswordsMatch(localizer)]
151 chained_validators = [v.ValidPasswordsMatch(localizer)]
153 return _UserForm
152 return _UserForm
154
153
155
154
156 def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False):
155 def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False):
157 old_data = old_data or {}
156 old_data = old_data or {}
158 _ = localizer
157 _ = localizer
159
158
160 class _UserGroupForm(formencode.Schema):
159 class _UserGroupForm(formencode.Schema):
161 allow_extra_fields = True
160 allow_extra_fields = True
162 filter_extra_fields = True
161 filter_extra_fields = True
163
162
164 users_group_name = All(
163 users_group_name = All(
165 v.UnicodeString(strip=True, min=1, not_empty=True),
164 v.UnicodeString(strip=True, min=1, not_empty=True),
166 v.ValidUserGroup(localizer, edit, old_data)
165 v.ValidUserGroup(localizer, edit, old_data)
167 )
166 )
168 user_group_description = v.UnicodeString(strip=True, min=1,
167 user_group_description = v.UnicodeString(strip=True, min=1,
169 not_empty=False)
168 not_empty=False)
170
169
171 users_group_active = v.StringBoolean(if_missing=False)
170 users_group_active = v.StringBoolean(if_missing=False)
172
171
173 if edit:
172 if edit:
174 # this is user group owner
173 # this is user group owner
175 user = All(
174 user = All(
176 v.UnicodeString(not_empty=True),
175 v.UnicodeString(not_empty=True),
177 v.ValidRepoUser(localizer, allow_disabled))
176 v.ValidRepoUser(localizer, allow_disabled))
178 return _UserGroupForm
177 return _UserGroupForm
179
178
180
179
181 def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None,
180 def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None,
182 can_create_in_root=False, allow_disabled=False):
181 can_create_in_root=False, allow_disabled=False):
183 _ = localizer
182 _ = localizer
184 old_data = old_data or {}
183 old_data = old_data or {}
185 available_groups = available_groups or []
184 available_groups = available_groups or []
186
185
187 class _RepoGroupForm(formencode.Schema):
186 class _RepoGroupForm(formencode.Schema):
188 allow_extra_fields = True
187 allow_extra_fields = True
189 filter_extra_fields = False
188 filter_extra_fields = False
190
189
191 group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
190 group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
192 v.SlugifyName(localizer),)
191 v.SlugifyName(localizer),)
193 group_description = v.UnicodeString(strip=True, min=1,
192 group_description = v.UnicodeString(strip=True, min=1,
194 not_empty=False)
193 not_empty=False)
195 group_copy_permissions = v.StringBoolean(if_missing=False)
194 group_copy_permissions = v.StringBoolean(if_missing=False)
196
195
197 group_parent_id = v.OneOf(available_groups, hideList=False,
196 group_parent_id = v.OneOf(available_groups, hideList=False,
198 testValueList=True, not_empty=True)
197 testValueList=True, not_empty=True)
199 enable_locking = v.StringBoolean(if_missing=False)
198 enable_locking = v.StringBoolean(if_missing=False)
200 chained_validators = [
199 chained_validators = [
201 v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)]
200 v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)]
202
201
203 if edit:
202 if edit:
204 # this is repo group owner
203 # this is repo group owner
205 user = All(
204 user = All(
206 v.UnicodeString(not_empty=True),
205 v.UnicodeString(not_empty=True),
207 v.ValidRepoUser(localizer, allow_disabled))
206 v.ValidRepoUser(localizer, allow_disabled))
208 return _RepoGroupForm
207 return _RepoGroupForm
209
208
210
209
211 def RegisterForm(localizer, edit=False, old_data=None):
210 def RegisterForm(localizer, edit=False, old_data=None):
212 _ = localizer
211 _ = localizer
213 old_data = old_data or {}
212 old_data = old_data or {}
214
213
215 class _RegisterForm(formencode.Schema):
214 class _RegisterForm(formencode.Schema):
216 allow_extra_fields = True
215 allow_extra_fields = True
217 filter_extra_fields = True
216 filter_extra_fields = True
218 username = All(
217 username = All(
219 v.ValidUsername(localizer, edit, old_data),
218 v.ValidUsername(localizer, edit, old_data),
220 v.UnicodeString(strip=True, min=1, not_empty=True)
219 v.UnicodeString(strip=True, min=1, not_empty=True)
221 )
220 )
222 password = All(
221 password = All(
223 v.ValidPassword(localizer),
222 v.ValidPassword(localizer),
224 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
223 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
225 )
224 )
226 password_confirmation = All(
225 password_confirmation = All(
227 v.ValidPassword(localizer),
226 v.ValidPassword(localizer),
228 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
227 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
229 )
228 )
230 active = v.StringBoolean(if_missing=False)
229 active = v.StringBoolean(if_missing=False)
231 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
230 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
232 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
231 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
233 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
232 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
234
233
235 chained_validators = [v.ValidPasswordsMatch(localizer)]
234 chained_validators = [v.ValidPasswordsMatch(localizer)]
236 return _RegisterForm
235 return _RegisterForm
237
236
238
237
239 def PasswordResetForm(localizer):
238 def PasswordResetForm(localizer):
240 _ = localizer
239 _ = localizer
241
240
242 class _PasswordResetForm(formencode.Schema):
241 class _PasswordResetForm(formencode.Schema):
243 allow_extra_fields = True
242 allow_extra_fields = True
244 filter_extra_fields = True
243 filter_extra_fields = True
245 email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True))
244 email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True))
246 return _PasswordResetForm
245 return _PasswordResetForm
247
246
248
247
249 def RepoForm(localizer, edit=False, old_data=None, repo_groups=None, allow_disabled=False):
248 def RepoForm(localizer, edit=False, old_data=None, repo_groups=None, allow_disabled=False):
250 _ = localizer
249 _ = localizer
251 old_data = old_data or {}
250 old_data = old_data or {}
252 repo_groups = repo_groups or []
251 repo_groups = repo_groups or []
253 supported_backends = BACKENDS.keys()
252 supported_backends = BACKENDS.keys()
254
253
255 class _RepoForm(formencode.Schema):
254 class _RepoForm(formencode.Schema):
256 allow_extra_fields = True
255 allow_extra_fields = True
257 filter_extra_fields = False
256 filter_extra_fields = False
258 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
257 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
259 v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer))
258 v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer))
260 repo_group = All(v.CanWriteGroup(localizer, old_data),
259 repo_group = All(v.CanWriteGroup(localizer, old_data),
261 v.OneOf(repo_groups, hideList=True))
260 v.OneOf(repo_groups, hideList=True))
262 repo_type = v.OneOf(supported_backends, required=False,
261 repo_type = v.OneOf(supported_backends, required=False,
263 if_missing=old_data.get('repo_type'))
262 if_missing=old_data.get('repo_type'))
264 repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
263 repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
265 repo_private = v.StringBoolean(if_missing=False)
264 repo_private = v.StringBoolean(if_missing=False)
266 repo_copy_permissions = v.StringBoolean(if_missing=False)
265 repo_copy_permissions = v.StringBoolean(if_missing=False)
267 clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))
266 clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))
268
267
269 repo_enable_statistics = v.StringBoolean(if_missing=False)
268 repo_enable_statistics = v.StringBoolean(if_missing=False)
270 repo_enable_downloads = v.StringBoolean(if_missing=False)
269 repo_enable_downloads = v.StringBoolean(if_missing=False)
271 repo_enable_locking = v.StringBoolean(if_missing=False)
270 repo_enable_locking = v.StringBoolean(if_missing=False)
272
271
273 if edit:
272 if edit:
274 # this is repo owner
273 # this is repo owner
275 user = All(
274 user = All(
276 v.UnicodeString(not_empty=True),
275 v.UnicodeString(not_empty=True),
277 v.ValidRepoUser(localizer, allow_disabled))
276 v.ValidRepoUser(localizer, allow_disabled))
278 clone_uri_change = v.UnicodeString(
277 clone_uri_change = v.UnicodeString(
279 not_empty=False, if_missing=v.Missing)
278 not_empty=False, if_missing=v.Missing)
280
279
281 chained_validators = [v.ValidCloneUri(localizer),
280 chained_validators = [v.ValidCloneUri(localizer),
282 v.ValidRepoName(localizer, edit, old_data)]
281 v.ValidRepoName(localizer, edit, old_data)]
283 return _RepoForm
282 return _RepoForm
284
283
285
284
286 def RepoPermsForm(localizer):
285 def RepoPermsForm(localizer):
287 _ = localizer
286 _ = localizer
288
287
289 class _RepoPermsForm(formencode.Schema):
288 class _RepoPermsForm(formencode.Schema):
290 allow_extra_fields = True
289 allow_extra_fields = True
291 filter_extra_fields = False
290 filter_extra_fields = False
292 chained_validators = [v.ValidPerms(localizer, type_='repo')]
291 chained_validators = [v.ValidPerms(localizer, type_='repo')]
293 return _RepoPermsForm
292 return _RepoPermsForm
294
293
295
294
296 def RepoGroupPermsForm(localizer, valid_recursive_choices):
295 def RepoGroupPermsForm(localizer, valid_recursive_choices):
297 _ = localizer
296 _ = localizer
298
297
299 class _RepoGroupPermsForm(formencode.Schema):
298 class _RepoGroupPermsForm(formencode.Schema):
300 allow_extra_fields = True
299 allow_extra_fields = True
301 filter_extra_fields = False
300 filter_extra_fields = False
302 recursive = v.OneOf(valid_recursive_choices)
301 recursive = v.OneOf(valid_recursive_choices)
303 chained_validators = [v.ValidPerms(localizer, type_='repo_group')]
302 chained_validators = [v.ValidPerms(localizer, type_='repo_group')]
304 return _RepoGroupPermsForm
303 return _RepoGroupPermsForm
305
304
306
305
307 def UserGroupPermsForm(localizer):
306 def UserGroupPermsForm(localizer):
308 _ = localizer
307 _ = localizer
309
308
310 class _UserPermsForm(formencode.Schema):
309 class _UserPermsForm(formencode.Schema):
311 allow_extra_fields = True
310 allow_extra_fields = True
312 filter_extra_fields = False
311 filter_extra_fields = False
313 chained_validators = [v.ValidPerms(localizer, type_='user_group')]
312 chained_validators = [v.ValidPerms(localizer, type_='user_group')]
314 return _UserPermsForm
313 return _UserPermsForm
315
314
316
315
317 def RepoFieldForm(localizer):
316 def RepoFieldForm(localizer):
318 _ = localizer
317 _ = localizer
319
318
320 class _RepoFieldForm(formencode.Schema):
319 class _RepoFieldForm(formencode.Schema):
321 filter_extra_fields = True
320 filter_extra_fields = True
322 allow_extra_fields = True
321 allow_extra_fields = True
323
322
324 new_field_key = All(v.FieldKey(localizer),
323 new_field_key = All(v.FieldKey(localizer),
325 v.UnicodeString(strip=True, min=3, not_empty=True))
324 v.UnicodeString(strip=True, min=3, not_empty=True))
326 new_field_value = v.UnicodeString(not_empty=False, if_missing=u'')
325 new_field_value = v.UnicodeString(not_empty=False, if_missing=u'')
327 new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
326 new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
328 if_missing='str')
327 if_missing='str')
329 new_field_label = v.UnicodeString(not_empty=False)
328 new_field_label = v.UnicodeString(not_empty=False)
330 new_field_desc = v.UnicodeString(not_empty=False)
329 new_field_desc = v.UnicodeString(not_empty=False)
331 return _RepoFieldForm
330 return _RepoFieldForm
332
331
333
332
334 def RepoForkForm(localizer, edit=False, old_data=None,
333 def RepoForkForm(localizer, edit=False, old_data=None,
335 supported_backends=BACKENDS.keys(), repo_groups=None):
334 supported_backends=BACKENDS.keys(), repo_groups=None):
336 _ = localizer
335 _ = localizer
337 old_data = old_data or {}
336 old_data = old_data or {}
338 repo_groups = repo_groups or []
337 repo_groups = repo_groups or []
339
338
340 class _RepoForkForm(formencode.Schema):
339 class _RepoForkForm(formencode.Schema):
341 allow_extra_fields = True
340 allow_extra_fields = True
342 filter_extra_fields = False
341 filter_extra_fields = False
343 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
342 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
344 v.SlugifyName(localizer))
343 v.SlugifyName(localizer))
345 repo_group = All(v.CanWriteGroup(localizer, ),
344 repo_group = All(v.CanWriteGroup(localizer, ),
346 v.OneOf(repo_groups, hideList=True))
345 v.OneOf(repo_groups, hideList=True))
347 repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends))
346 repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends))
348 description = v.UnicodeString(strip=True, min=1, not_empty=True)
347 description = v.UnicodeString(strip=True, min=1, not_empty=True)
349 private = v.StringBoolean(if_missing=False)
348 private = v.StringBoolean(if_missing=False)
350 copy_permissions = v.StringBoolean(if_missing=False)
349 copy_permissions = v.StringBoolean(if_missing=False)
351 fork_parent_id = v.UnicodeString()
350 fork_parent_id = v.UnicodeString()
352 chained_validators = [v.ValidForkName(localizer, edit, old_data)]
351 chained_validators = [v.ValidForkName(localizer, edit, old_data)]
353 return _RepoForkForm
352 return _RepoForkForm
354
353
355
354
356 def ApplicationSettingsForm(localizer):
355 def ApplicationSettingsForm(localizer):
357 _ = localizer
356 _ = localizer
358
357
359 class _ApplicationSettingsForm(formencode.Schema):
358 class _ApplicationSettingsForm(formencode.Schema):
360 allow_extra_fields = True
359 allow_extra_fields = True
361 filter_extra_fields = False
360 filter_extra_fields = False
362 rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False)
361 rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False)
363 rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True)
362 rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True)
364 rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False)
363 rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False)
365 rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False)
364 rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False)
366 rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
365 rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
367 rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
366 rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
368 rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False)
367 rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False)
369 rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False)
368 rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False)
370 return _ApplicationSettingsForm
369 return _ApplicationSettingsForm
371
370
372
371
373 def ApplicationVisualisationForm(localizer):
372 def ApplicationVisualisationForm(localizer):
374 from rhodecode.model.db import Repository
373 from rhodecode.model.db import Repository
375 _ = localizer
374 _ = localizer
376
375
377 class _ApplicationVisualisationForm(formencode.Schema):
376 class _ApplicationVisualisationForm(formencode.Schema):
378 allow_extra_fields = True
377 allow_extra_fields = True
379 filter_extra_fields = False
378 filter_extra_fields = False
380 rhodecode_show_public_icon = v.StringBoolean(if_missing=False)
379 rhodecode_show_public_icon = v.StringBoolean(if_missing=False)
381 rhodecode_show_private_icon = v.StringBoolean(if_missing=False)
380 rhodecode_show_private_icon = v.StringBoolean(if_missing=False)
382 rhodecode_stylify_metatags = v.StringBoolean(if_missing=False)
381 rhodecode_stylify_metatags = v.StringBoolean(if_missing=False)
383
382
384 rhodecode_repository_fields = v.StringBoolean(if_missing=False)
383 rhodecode_repository_fields = v.StringBoolean(if_missing=False)
385 rhodecode_lightweight_journal = v.StringBoolean(if_missing=False)
384 rhodecode_lightweight_journal = v.StringBoolean(if_missing=False)
386 rhodecode_dashboard_items = v.Int(min=5, not_empty=True)
385 rhodecode_dashboard_items = v.Int(min=5, not_empty=True)
387 rhodecode_admin_grid_items = v.Int(min=5, not_empty=True)
386 rhodecode_admin_grid_items = v.Int(min=5, not_empty=True)
388 rhodecode_show_version = v.StringBoolean(if_missing=False)
387 rhodecode_show_version = v.StringBoolean(if_missing=False)
389 rhodecode_use_gravatar = v.StringBoolean(if_missing=False)
388 rhodecode_use_gravatar = v.StringBoolean(if_missing=False)
390 rhodecode_markup_renderer = v.OneOf(['markdown', 'rst'])
389 rhodecode_markup_renderer = v.OneOf(['markdown', 'rst'])
391 rhodecode_gravatar_url = v.UnicodeString(min=3)
390 rhodecode_gravatar_url = v.UnicodeString(min=3)
392 rhodecode_clone_uri_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI)
391 rhodecode_clone_uri_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI)
393 rhodecode_clone_uri_id_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_ID)
392 rhodecode_clone_uri_id_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_ID)
394 rhodecode_clone_uri_ssh_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_SSH)
393 rhodecode_clone_uri_ssh_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_SSH)
395 rhodecode_support_url = v.UnicodeString()
394 rhodecode_support_url = v.UnicodeString()
396 rhodecode_show_revision_number = v.StringBoolean(if_missing=False)
395 rhodecode_show_revision_number = v.StringBoolean(if_missing=False)
397 rhodecode_show_sha_length = v.Int(min=4, not_empty=True)
396 rhodecode_show_sha_length = v.Int(min=4, not_empty=True)
398 return _ApplicationVisualisationForm
397 return _ApplicationVisualisationForm
399
398
400
399
401 class _BaseVcsSettingsForm(formencode.Schema):
400 class _BaseVcsSettingsForm(formencode.Schema):
402
401
403 allow_extra_fields = True
402 allow_extra_fields = True
404 filter_extra_fields = False
403 filter_extra_fields = False
405 hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
404 hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
406 hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
405 hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
407 hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)
406 hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)
408
407
409 # PR/Code-review
408 # PR/Code-review
410 rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False)
409 rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False)
411 rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False)
410 rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False)
412
411
413 # hg
412 # hg
414 extensions_largefiles = v.StringBoolean(if_missing=False)
413 extensions_largefiles = v.StringBoolean(if_missing=False)
415 extensions_evolve = v.StringBoolean(if_missing=False)
414 extensions_evolve = v.StringBoolean(if_missing=False)
416 phases_publish = v.StringBoolean(if_missing=False)
415 phases_publish = v.StringBoolean(if_missing=False)
417
416
418 rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False)
417 rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False)
419 rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False)
418 rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False)
420
419
421 # git
420 # git
422 vcs_git_lfs_enabled = v.StringBoolean(if_missing=False)
421 vcs_git_lfs_enabled = v.StringBoolean(if_missing=False)
423 rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
422 rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
424 rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)
423 rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)
425
424
426 # svn
425 # svn
427 vcs_svn_proxy_http_requests_enabled = v.StringBoolean(if_missing=False)
426 vcs_svn_proxy_http_requests_enabled = v.StringBoolean(if_missing=False)
428 vcs_svn_proxy_http_server_url = v.UnicodeString(strip=True, if_missing=None)
427 vcs_svn_proxy_http_server_url = v.UnicodeString(strip=True, if_missing=None)
429
428
430 # cache
429 # cache
431 rhodecode_diff_cache = v.StringBoolean(if_missing=False)
430 rhodecode_diff_cache = v.StringBoolean(if_missing=False)
432
431
433
432
434 def ApplicationUiSettingsForm(localizer):
433 def ApplicationUiSettingsForm(localizer):
435 _ = localizer
434 _ = localizer
436
435
437 class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
436 class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
438 web_push_ssl = v.StringBoolean(if_missing=False)
437 web_push_ssl = v.StringBoolean(if_missing=False)
439 paths_root_path = All(
438 paths_root_path = All(
440 v.ValidPath(localizer),
439 v.ValidPath(localizer),
441 v.UnicodeString(strip=True, min=1, not_empty=True)
440 v.UnicodeString(strip=True, min=1, not_empty=True)
442 )
441 )
443 largefiles_usercache = All(
442 largefiles_usercache = All(
444 v.ValidPath(localizer),
443 v.ValidPath(localizer),
445 v.UnicodeString(strip=True, min=2, not_empty=True))
444 v.UnicodeString(strip=True, min=2, not_empty=True))
446 vcs_git_lfs_store_location = All(
445 vcs_git_lfs_store_location = All(
447 v.ValidPath(localizer),
446 v.ValidPath(localizer),
448 v.UnicodeString(strip=True, min=2, not_empty=True))
447 v.UnicodeString(strip=True, min=2, not_empty=True))
449 extensions_hgsubversion = v.StringBoolean(if_missing=False)
448 extensions_hgsubversion = v.StringBoolean(if_missing=False)
450 extensions_hggit = v.StringBoolean(if_missing=False)
449 extensions_hggit = v.StringBoolean(if_missing=False)
451 new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch')
450 new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch')
452 new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag')
451 new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag')
453 return _ApplicationUiSettingsForm
452 return _ApplicationUiSettingsForm
454
453
455
454
456 def RepoVcsSettingsForm(localizer, repo_name):
455 def RepoVcsSettingsForm(localizer, repo_name):
457 _ = localizer
456 _ = localizer
458
457
459 class _RepoVcsSettingsForm(_BaseVcsSettingsForm):
458 class _RepoVcsSettingsForm(_BaseVcsSettingsForm):
460 inherit_global_settings = v.StringBoolean(if_missing=False)
459 inherit_global_settings = v.StringBoolean(if_missing=False)
461 new_svn_branch = v.ValidSvnPattern(localizer,
460 new_svn_branch = v.ValidSvnPattern(localizer,
462 section='vcs_svn_branch', repo_name=repo_name)
461 section='vcs_svn_branch', repo_name=repo_name)
463 new_svn_tag = v.ValidSvnPattern(localizer,
462 new_svn_tag = v.ValidSvnPattern(localizer,
464 section='vcs_svn_tag', repo_name=repo_name)
463 section='vcs_svn_tag', repo_name=repo_name)
465 return _RepoVcsSettingsForm
464 return _RepoVcsSettingsForm
466
465
467
466
468 def LabsSettingsForm(localizer):
467 def LabsSettingsForm(localizer):
469 _ = localizer
468 _ = localizer
470
469
471 class _LabSettingsForm(formencode.Schema):
470 class _LabSettingsForm(formencode.Schema):
472 allow_extra_fields = True
471 allow_extra_fields = True
473 filter_extra_fields = False
472 filter_extra_fields = False
474 return _LabSettingsForm
473 return _LabSettingsForm
475
474
476
475
477 def ApplicationPermissionsForm(
476 def ApplicationPermissionsForm(
478 localizer, register_choices, password_reset_choices,
477 localizer, register_choices, password_reset_choices,
479 extern_activate_choices):
478 extern_activate_choices):
480 _ = localizer
479 _ = localizer
481
480
482 class _DefaultPermissionsForm(formencode.Schema):
481 class _DefaultPermissionsForm(formencode.Schema):
483 allow_extra_fields = True
482 allow_extra_fields = True
484 filter_extra_fields = True
483 filter_extra_fields = True
485
484
486 anonymous = v.StringBoolean(if_missing=False)
485 anonymous = v.StringBoolean(if_missing=False)
487 default_register = v.OneOf(register_choices)
486 default_register = v.OneOf(register_choices)
488 default_register_message = v.UnicodeString()
487 default_register_message = v.UnicodeString()
489 default_password_reset = v.OneOf(password_reset_choices)
488 default_password_reset = v.OneOf(password_reset_choices)
490 default_extern_activate = v.OneOf(extern_activate_choices)
489 default_extern_activate = v.OneOf(extern_activate_choices)
491 return _DefaultPermissionsForm
490 return _DefaultPermissionsForm
492
491
493
492
494 def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices,
493 def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices,
495 user_group_perms_choices):
494 user_group_perms_choices):
496 _ = localizer
495 _ = localizer
497
496
498 class _ObjectPermissionsForm(formencode.Schema):
497 class _ObjectPermissionsForm(formencode.Schema):
499 allow_extra_fields = True
498 allow_extra_fields = True
500 filter_extra_fields = True
499 filter_extra_fields = True
501 overwrite_default_repo = v.StringBoolean(if_missing=False)
500 overwrite_default_repo = v.StringBoolean(if_missing=False)
502 overwrite_default_group = v.StringBoolean(if_missing=False)
501 overwrite_default_group = v.StringBoolean(if_missing=False)
503 overwrite_default_user_group = v.StringBoolean(if_missing=False)
502 overwrite_default_user_group = v.StringBoolean(if_missing=False)
504
503
505 default_repo_perm = v.OneOf(repo_perms_choices)
504 default_repo_perm = v.OneOf(repo_perms_choices)
506 default_group_perm = v.OneOf(group_perms_choices)
505 default_group_perm = v.OneOf(group_perms_choices)
507 default_user_group_perm = v.OneOf(user_group_perms_choices)
506 default_user_group_perm = v.OneOf(user_group_perms_choices)
508
507
509 return _ObjectPermissionsForm
508 return _ObjectPermissionsForm
510
509
511
510
512 def BranchPermissionsForm(localizer, branch_perms_choices):
511 def BranchPermissionsForm(localizer, branch_perms_choices):
513 _ = localizer
512 _ = localizer
514
513
515 class _BranchPermissionsForm(formencode.Schema):
514 class _BranchPermissionsForm(formencode.Schema):
516 allow_extra_fields = True
515 allow_extra_fields = True
517 filter_extra_fields = True
516 filter_extra_fields = True
518 overwrite_default_branch = v.StringBoolean(if_missing=False)
517 overwrite_default_branch = v.StringBoolean(if_missing=False)
519 default_branch_perm = v.OneOf(branch_perms_choices)
518 default_branch_perm = v.OneOf(branch_perms_choices)
520
519
521 return _BranchPermissionsForm
520 return _BranchPermissionsForm
522
521
523
522
524 def UserPermissionsForm(localizer, create_choices, create_on_write_choices,
523 def UserPermissionsForm(localizer, create_choices, create_on_write_choices,
525 repo_group_create_choices, user_group_create_choices,
524 repo_group_create_choices, user_group_create_choices,
526 fork_choices, inherit_default_permissions_choices):
525 fork_choices, inherit_default_permissions_choices):
527 _ = localizer
526 _ = localizer
528
527
529 class _DefaultPermissionsForm(formencode.Schema):
528 class _DefaultPermissionsForm(formencode.Schema):
530 allow_extra_fields = True
529 allow_extra_fields = True
531 filter_extra_fields = True
530 filter_extra_fields = True
532
531
533 anonymous = v.StringBoolean(if_missing=False)
532 anonymous = v.StringBoolean(if_missing=False)
534
533
535 default_repo_create = v.OneOf(create_choices)
534 default_repo_create = v.OneOf(create_choices)
536 default_repo_create_on_write = v.OneOf(create_on_write_choices)
535 default_repo_create_on_write = v.OneOf(create_on_write_choices)
537 default_user_group_create = v.OneOf(user_group_create_choices)
536 default_user_group_create = v.OneOf(user_group_create_choices)
538 default_repo_group_create = v.OneOf(repo_group_create_choices)
537 default_repo_group_create = v.OneOf(repo_group_create_choices)
539 default_fork_create = v.OneOf(fork_choices)
538 default_fork_create = v.OneOf(fork_choices)
540 default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices)
539 default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices)
541 return _DefaultPermissionsForm
540 return _DefaultPermissionsForm
542
541
543
542
544 def UserIndividualPermissionsForm(localizer):
543 def UserIndividualPermissionsForm(localizer):
545 _ = localizer
544 _ = localizer
546
545
547 class _DefaultPermissionsForm(formencode.Schema):
546 class _DefaultPermissionsForm(formencode.Schema):
548 allow_extra_fields = True
547 allow_extra_fields = True
549 filter_extra_fields = True
548 filter_extra_fields = True
550
549
551 inherit_default_permissions = v.StringBoolean(if_missing=False)
550 inherit_default_permissions = v.StringBoolean(if_missing=False)
552 return _DefaultPermissionsForm
551 return _DefaultPermissionsForm
553
552
554
553
555 def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()):
554 def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()):
556 _ = localizer
555 _ = localizer
557 old_data = old_data or {}
556 old_data = old_data or {}
558
557
559 class _DefaultsForm(formencode.Schema):
558 class _DefaultsForm(formencode.Schema):
560 allow_extra_fields = True
559 allow_extra_fields = True
561 filter_extra_fields = True
560 filter_extra_fields = True
562 default_repo_type = v.OneOf(supported_backends)
561 default_repo_type = v.OneOf(supported_backends)
563 default_repo_private = v.StringBoolean(if_missing=False)
562 default_repo_private = v.StringBoolean(if_missing=False)
564 default_repo_enable_statistics = v.StringBoolean(if_missing=False)
563 default_repo_enable_statistics = v.StringBoolean(if_missing=False)
565 default_repo_enable_downloads = v.StringBoolean(if_missing=False)
564 default_repo_enable_downloads = v.StringBoolean(if_missing=False)
566 default_repo_enable_locking = v.StringBoolean(if_missing=False)
565 default_repo_enable_locking = v.StringBoolean(if_missing=False)
567 return _DefaultsForm
566 return _DefaultsForm
568
567
569
568
570 def AuthSettingsForm(localizer):
569 def AuthSettingsForm(localizer):
571 _ = localizer
570 _ = localizer
572
571
573 class _AuthSettingsForm(formencode.Schema):
572 class _AuthSettingsForm(formencode.Schema):
574 allow_extra_fields = True
573 allow_extra_fields = True
575 filter_extra_fields = True
574 filter_extra_fields = True
576 auth_plugins = All(v.ValidAuthPlugins(localizer),
575 auth_plugins = All(v.ValidAuthPlugins(localizer),
577 v.UniqueListFromString(localizer)(not_empty=True))
576 v.UniqueListFromString(localizer)(not_empty=True))
578 return _AuthSettingsForm
577 return _AuthSettingsForm
579
578
580
579
581 def UserExtraEmailForm(localizer):
580 def UserExtraEmailForm(localizer):
582 _ = localizer
581 _ = localizer
583
582
584 class _UserExtraEmailForm(formencode.Schema):
583 class _UserExtraEmailForm(formencode.Schema):
585 email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True))
584 email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True))
586 return _UserExtraEmailForm
585 return _UserExtraEmailForm
587
586
588
587
589 def UserExtraIpForm(localizer):
588 def UserExtraIpForm(localizer):
590 _ = localizer
589 _ = localizer
591
590
592 class _UserExtraIpForm(formencode.Schema):
591 class _UserExtraIpForm(formencode.Schema):
593 ip = v.ValidIp(localizer)(not_empty=True)
592 ip = v.ValidIp(localizer)(not_empty=True)
594 return _UserExtraIpForm
593 return _UserExtraIpForm
595
594
596
595
597 def PullRequestForm(localizer, repo_id):
596 def PullRequestForm(localizer, repo_id):
598 _ = localizer
597 _ = localizer
599
598
600 class ReviewerForm(formencode.Schema):
599 class ReviewerForm(formencode.Schema):
601 user_id = v.Int(not_empty=True)
600 user_id = v.Int(not_empty=True)
602 reasons = All()
601 reasons = All()
603 rules = All(v.UniqueList(localizer, convert=int)())
602 rules = All(v.UniqueList(localizer, convert=int)())
604 mandatory = v.StringBoolean()
603 mandatory = v.StringBoolean()
605 role = v.String(if_missing='reviewer')
604 role = v.String(if_missing='reviewer')
606
605
607 class ObserverForm(formencode.Schema):
606 class ObserverForm(formencode.Schema):
608 user_id = v.Int(not_empty=True)
607 user_id = v.Int(not_empty=True)
609 reasons = All()
608 reasons = All()
610 rules = All(v.UniqueList(localizer, convert=int)())
609 rules = All(v.UniqueList(localizer, convert=int)())
611 mandatory = v.StringBoolean()
610 mandatory = v.StringBoolean()
612 role = v.String(if_missing='observer')
611 role = v.String(if_missing='observer')
613
612
614 class _PullRequestForm(formencode.Schema):
613 class _PullRequestForm(formencode.Schema):
615 allow_extra_fields = True
614 allow_extra_fields = True
616 filter_extra_fields = True
615 filter_extra_fields = True
617
616
618 common_ancestor = v.UnicodeString(strip=True, required=True)
617 common_ancestor = v.UnicodeString(strip=True, required=True)
619 source_repo = v.UnicodeString(strip=True, required=True)
618 source_repo = v.UnicodeString(strip=True, required=True)
620 source_ref = v.UnicodeString(strip=True, required=True)
619 source_ref = v.UnicodeString(strip=True, required=True)
621 target_repo = v.UnicodeString(strip=True, required=True)
620 target_repo = v.UnicodeString(strip=True, required=True)
622 target_ref = v.UnicodeString(strip=True, required=True)
621 target_ref = v.UnicodeString(strip=True, required=True)
623 revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(),
622 revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(),
624 v.UniqueList(localizer)(not_empty=True))
623 v.UniqueList(localizer)(not_empty=True))
625 review_members = formencode.ForEach(ReviewerForm())
624 review_members = formencode.ForEach(ReviewerForm())
626 observer_members = formencode.ForEach(ObserverForm())
625 observer_members = formencode.ForEach(ObserverForm())
627 pullrequest_title = v.UnicodeString(strip=True, required=True, min=1, max=255)
626 pullrequest_title = v.UnicodeString(strip=True, required=True, min=1, max=255)
628 pullrequest_desc = v.UnicodeString(strip=True, required=False)
627 pullrequest_desc = v.UnicodeString(strip=True, required=False)
629 description_renderer = v.UnicodeString(strip=True, required=False)
628 description_renderer = v.UnicodeString(strip=True, required=False)
630
629
631 return _PullRequestForm
630 return _PullRequestForm
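
For illustration, the rough shape of data this schema validates once the
request has been decoded into Python structures; every value below is made up:

    pr_form = PullRequestForm(request.translate, repo_id=repo.repo_id)()
    data = pr_form.to_python({
        'common_ancestor': 'abcdef12',
        'source_repo': 'group/my-repo',
        'source_ref': 'branch:feature-x:abcdef12',
        'target_repo': 'group/my-repo',
        'target_ref': 'branch:default:12345678',
        'revisions': ['abcdef12'],
        'review_members': [
            {'user_id': 2, 'reasons': [], 'rules': [],
             'mandatory': 'false', 'role': 'reviewer'},
        ],
        'pullrequest_title': 'Add feature X',
        'pullrequest_desc': '',
    })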
632
631
633
632
634 def IssueTrackerPatternsForm(localizer):
633 def IssueTrackerPatternsForm(localizer):
635 _ = localizer
634 _ = localizer
636
635
637 class _IssueTrackerPatternsForm(formencode.Schema):
636 class _IssueTrackerPatternsForm(formencode.Schema):
638 allow_extra_fields = True
637 allow_extra_fields = True
639 filter_extra_fields = False
638 filter_extra_fields = False
640 chained_validators = [v.ValidPattern(localizer)]
639 chained_validators = [v.ValidPattern(localizer)]
641 return _IssueTrackerPatternsForm
640 return _IssueTrackerPatternsForm
@@ -1,256 +1,256 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2013-2020 RhodeCode GmbH
3 # Copyright (C) 2013-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 gist model for RhodeCode
22 gist model for RhodeCode
23 """
23 """
24
24
25 import os
25 import os
26 import time
26 import time
27 import logging
27 import logging
28 import traceback
28 import traceback
29 import shutil
29 import shutil
30
30
31 from pyramid.threadlocal import get_current_request
31 from pyramid.threadlocal import get_current_request
32
32
33 from rhodecode.lib.utils2 import (
33 from rhodecode.lib.utils2 import (
34 safe_unicode, unique_id, safe_int, time_to_datetime, AttributeDict)
34 safe_unicode, unique_id, safe_int, time_to_datetime, AttributeDict)
35 from rhodecode.lib.ext_json import json
35 from rhodecode.lib.ext_json import json
36 from rhodecode.lib.vcs import VCSError
36 from rhodecode.lib.vcs import VCSError
37 from rhodecode.model import BaseModel
37 from rhodecode.model import BaseModel
38 from rhodecode.model.db import Gist
38 from rhodecode.model.db import Gist
39 from rhodecode.model.repo import RepoModel
39 from rhodecode.model.repo import RepoModel
40 from rhodecode.model.scm import ScmModel
40 from rhodecode.model.scm import ScmModel
41
41
42 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
43
43
44 GIST_STORE_LOC = '.rc_gist_store'
44 GIST_STORE_LOC = '.rc_gist_store'
45 GIST_METADATA_FILE = '.rc_gist_metadata'
45 GIST_METADATA_FILE = '.rc_gist_metadata'
46
46
47
47
48 class GistModel(BaseModel):
48 class GistModel(BaseModel):
49 cls = Gist
49 cls = Gist
50 vcs_backend = 'hg'
50 vcs_backend = 'hg'
51
51
52 def _get_gist(self, gist):
52 def _get_gist(self, gist):
53 """
53 """
54 Helper method to get gist by ID, or gist_access_id as a fallback
54 Helper method to get gist by ID, or gist_access_id as a fallback
55
55
56 :param gist: GistID, gist_access_id, or Gist instance
56 :param gist: GistID, gist_access_id, or Gist instance
57 """
57 """
58 return self._get_instance(Gist, gist, callback=Gist.get_by_access_id)
58 return self._get_instance(Gist, gist, callback=Gist.get_by_access_id)
59
59
60 def __delete_gist(self, gist):
60 def __delete_gist(self, gist):
61 """
61 """
62 removes gist from filesystem
62 removes gist from filesystem
63
63
64 :param gist: gist object
64 :param gist: gist object
65 """
65 """
66 root_path = RepoModel().repos_path
66 root_path = RepoModel().repos_path
67 rm_path = os.path.join(root_path, GIST_STORE_LOC, gist.gist_access_id)
67 rm_path = os.path.join(root_path, GIST_STORE_LOC, gist.gist_access_id)
68 log.info("Removing %s", rm_path)
68 log.info("Removing %s", rm_path)
69 shutil.rmtree(rm_path)
69 shutil.rmtree(rm_path)
70
70
71 def _store_metadata(self, repo, gist_id, gist_access_id, user_id, username,
71 def _store_metadata(self, repo, gist_id, gist_access_id, user_id, username,
72 gist_type, gist_expires, gist_acl_level):
72 gist_type, gist_expires, gist_acl_level):
73 """
73 """
74 store metadata inside the gist repo; it can later be used for imports
74 store metadata inside the gist repo; it can later be used for imports
75 or gist identification. Currently we use this inside RhodeCode tools
75 or gist identification. Currently we use this inside RhodeCode tools
76 to do cleanup of gists that are in storage but not in the database.
76 to do cleanup of gists that are in storage but not in the database.
77 """
77 """
78 metadata = {
78 metadata = {
79 'metadata_version': '2',
79 'metadata_version': '2',
80 'gist_db_id': gist_id,
80 'gist_db_id': gist_id,
81 'gist_access_id': gist_access_id,
81 'gist_access_id': gist_access_id,
82 'gist_owner_id': user_id,
82 'gist_owner_id': user_id,
83 'gist_owner_username': username,
83 'gist_owner_username': username,
84 'gist_type': gist_type,
84 'gist_type': gist_type,
85 'gist_expires': gist_expires,
85 'gist_expires': gist_expires,
86 'gist_updated': time.time(),
86 'gist_updated': time.time(),
87 'gist_acl_level': gist_acl_level,
87 'gist_acl_level': gist_acl_level,
88 }
88 }
89 metadata_file = os.path.join(repo.path, '.hg', GIST_METADATA_FILE)
89 metadata_file = os.path.join(repo.path, '.hg', GIST_METADATA_FILE)
90 with open(metadata_file, 'wb') as f:
90 with open(metadata_file, 'wb') as f:
91 f.write(json.dumps(metadata))
91 f.write(json.dumps(metadata))
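
For illustration only, a sketch of how such a metadata file could be read back
by cleanup tooling; read_gist_metadata is a hypothetical helper, not part of
this model:

    import json
    import os

    def read_gist_metadata(repo_path):
        # the file sits next to the backing repository's .hg data (see above)
        metadata_file = os.path.join(repo_path, '.hg', GIST_METADATA_FILE)
        with open(metadata_file, 'rb') as f:
            return json.loads(f.read())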
92
92
93 def get_gist(self, gist):
93 def get_gist(self, gist):
94 return self._get_gist(gist)
94 return self._get_gist(gist)
95
95
96 def get_gist_files(self, gist_access_id, revision=None):
96 def get_gist_files(self, gist_access_id, revision=None):
97 """
97 """
98 Get files for given gist
98 Get files for given gist
99
99
100 :param gist_access_id:
100 :param gist_access_id:
101 """
101 """
102 repo = Gist.get_by_access_id(gist_access_id)
102 repo = Gist.get_by_access_id(gist_access_id)
103 vcs_repo = repo.scm_instance()
103 vcs_repo = repo.scm_instance()
104 if not vcs_repo:
104 if not vcs_repo:
105 raise VCSError('Failed to load gist repository for {}'.format(repo))
105 raise VCSError('Failed to load gist repository for {}'.format(repo))
106
106
107 commit = vcs_repo.get_commit(commit_id=revision)
107 commit = vcs_repo.get_commit(commit_id=revision)
108 return commit, [n for n in commit.get_node('/')]
108 return commit, [n for n in commit.get_node('/')]
109
109
110 def create(self, description, owner, gist_mapping,
110 def create(self, description, owner, gist_mapping,
111 gist_type=Gist.GIST_PUBLIC, lifetime=-1, gist_id=None,
111 gist_type=Gist.GIST_PUBLIC, lifetime=-1, gist_id=None,
112 gist_acl_level=Gist.ACL_LEVEL_PRIVATE):
112 gist_acl_level=Gist.ACL_LEVEL_PRIVATE):
113 """
113 """
114 Create a gist
114 Create a gist
115
115
116 :param description: description of the gist
116 :param description: description of the gist
117 :param owner: user who created this gist
117 :param owner: user who created this gist
118 :param gist_mapping: mapping of {'filename': {'content': content}, ...}
118 :param gist_mapping: mapping of {'filename': {'content': content}, ...}
119 :param gist_type: type of gist private/public
119 :param gist_type: type of gist private/public
120 :param lifetime: in minutes, -1 == forever
120 :param lifetime: in minutes, -1 == forever
121 :param gist_acl_level: acl level for this gist
121 :param gist_acl_level: acl level for this gist
122 """
122 """
123 owner = self._get_user(owner)
123 owner = self._get_user(owner)
124 gist_id = safe_unicode(gist_id or unique_id(20))
124 gist_id = safe_unicode(gist_id or unique_id(20))
125 lifetime = safe_int(lifetime, -1)
125 lifetime = safe_int(lifetime, -1)
126 gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
126 gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
127 expiration = (time_to_datetime(gist_expires)
127 expiration = (time_to_datetime(gist_expires)
128 if gist_expires != -1 else 'forever')
128 if gist_expires != -1 else 'forever')
129 log.debug('set GIST expiration date to: %s', expiration)
129 log.debug('set GIST expiration date to: %s', expiration)
130 # create the Database version
130 # create the Database version
131 gist = Gist()
131 gist = Gist()
132 gist.gist_description = description
132 gist.gist_description = description
133 gist.gist_access_id = gist_id
133 gist.gist_access_id = gist_id
134 gist.gist_owner = owner.user_id
134 gist.gist_owner = owner.user_id
135 gist.gist_expires = gist_expires
135 gist.gist_expires = gist_expires
136 gist.gist_type = safe_unicode(gist_type)
136 gist.gist_type = safe_unicode(gist_type)
137 gist.acl_level = gist_acl_level
137 gist.acl_level = gist_acl_level
138 self.sa.add(gist)
138 self.sa.add(gist)
139 self.sa.flush()
139 self.sa.flush()
140 if gist_type == Gist.GIST_PUBLIC:
140 if gist_type == Gist.GIST_PUBLIC:
141 # use DB ID for easy to use GIST ID
141 # use DB ID for easy to use GIST ID
142 gist_id = safe_unicode(gist.gist_id)
142 gist_id = safe_unicode(gist.gist_id)
143 gist.gist_access_id = gist_id
143 gist.gist_access_id = gist_id
144 self.sa.add(gist)
144 self.sa.add(gist)
145
145
146 gist_repo_path = os.path.join(GIST_STORE_LOC, gist_id)
146 gist_repo_path = os.path.join(GIST_STORE_LOC, gist_id)
147 log.debug('Creating new %s GIST repo in %s', gist_type, gist_repo_path)
147 log.debug('Creating new %s GIST repo in %s', gist_type, gist_repo_path)
148 repo = RepoModel()._create_filesystem_repo(
148 repo = RepoModel()._create_filesystem_repo(
149 repo_name=gist_id, repo_type=self.vcs_backend, repo_group=GIST_STORE_LOC,
149 repo_name=gist_id, repo_type=self.vcs_backend, repo_group=GIST_STORE_LOC,
150 use_global_config=True)
150 use_global_config=True)
151
151
152 # now create single multifile commit
152 # now create single multifile commit
153 message = 'added file'
153 message = 'added file'
154 message += 's: ' if len(gist_mapping) > 1 else ': '
154 message += 's: ' if len(gist_mapping) > 1 else ': '
155 message += ', '.join([x for x in gist_mapping])
155 message += ', '.join([x for x in gist_mapping])
156
156
157 # fake RhodeCode Repository object
157 # fake RhodeCode Repository object
158 fake_repo = AttributeDict({
158 fake_repo = AttributeDict({
159 'repo_name': gist_repo_path,
159 'repo_name': gist_repo_path,
160 'scm_instance': lambda *args, **kwargs: repo,
160 'scm_instance': lambda *args, **kwargs: repo,
161 })
161 })
162
162
163 ScmModel().create_nodes(
163 ScmModel().create_nodes(
164 user=owner.user_id, repo=fake_repo,
164 user=owner.user_id, repo=fake_repo,
165 message=message,
165 message=message,
166 nodes=gist_mapping,
166 nodes=gist_mapping,
167 trigger_push_hook=False
167 trigger_push_hook=False
168 )
168 )
169
169
170 self._store_metadata(repo, gist.gist_id, gist.gist_access_id,
170 self._store_metadata(repo, gist.gist_id, gist.gist_access_id,
171 owner.user_id, owner.username, gist.gist_type,
171 owner.user_id, owner.username, gist.gist_type,
172 gist.gist_expires, gist_acl_level)
172 gist.gist_expires, gist_acl_level)
173 return gist
173 return gist
174
174
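A minimal sketch of how the lifetime argument above maps to gist_expires, assuming only the arithmetic shown in create(); the helper name is illustrative and not part of the model::

    import time

    def compute_gist_expires(lifetime_minutes):
        # mirrors GistModel.create(): -1 is the "forever" sentinel, any other
        # value is converted from minutes into an absolute epoch timestamp
        if lifetime_minutes == -1:
            return -1
        return time.time() + (lifetime_minutes * 60)

    compute_gist_expires(60)   # expires one hour from now
    compute_gist_expires(-1)   # never expires, stored as -1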
175 def delete(self, gist, fs_remove=True):
175 def delete(self, gist, fs_remove=True):
176 gist = self._get_gist(gist)
176 gist = self._get_gist(gist)
177 try:
177 try:
178 self.sa.delete(gist)
178 self.sa.delete(gist)
179 if fs_remove:
179 if fs_remove:
180 self.__delete_gist(gist)
180 self.__delete_gist(gist)
181 else:
181 else:
182 log.debug('skipping removal from filesystem')
182 log.debug('skipping removal from filesystem')
183 except Exception:
183 except Exception:
184 log.error(traceback.format_exc())
184 log.error(traceback.format_exc())
185 raise
185 raise
186
186
187 def update(self, gist, description, owner, gist_mapping, lifetime,
187 def update(self, gist, description, owner, gist_mapping, lifetime,
188 gist_acl_level):
188 gist_acl_level):
189 gist = self._get_gist(gist)
189 gist = self._get_gist(gist)
190 gist_repo = gist.scm_instance()
190 gist_repo = gist.scm_instance()
191
191
192 if lifetime == 0: # preserve old value
192 if lifetime == 0: # preserve old value
193 gist_expires = gist.gist_expires
193 gist_expires = gist.gist_expires
194 else:
194 else:
195 gist_expires = (
195 gist_expires = (
196 time.time() + (lifetime * 60) if lifetime != -1 else -1)
196 time.time() + (lifetime * 60) if lifetime != -1 else -1)
197
197
198 # calculate operation type based on given data
198 # calculate operation type based on given data
199 gist_mapping_op = {}
199 gist_mapping_op = {}
200 for k, v in gist_mapping.items():
200 for k, v in gist_mapping.items():
201 # add, mod, del
201 # add, mod, del
202 if not v['filename_org'] and v['filename']:
202 if not v['filename_org'] and v['filename']:
203 op = 'add'
203 op = 'add'
204 elif v['filename_org'] and not v['filename']:
204 elif v['filename_org'] and not v['filename']:
205 op = 'del'
205 op = 'del'
206 else:
206 else:
207 op = 'mod'
207 op = 'mod'
208
208
209 v['op'] = op
209 v['op'] = op
210 gist_mapping_op[k] = v
210 gist_mapping_op[k] = v
211
211
212 gist.gist_description = description
212 gist.gist_description = description
213 gist.gist_expires = gist_expires
213 gist.gist_expires = gist_expires
214 gist.owner = owner
214 gist.owner = owner
215 gist.acl_level = gist_acl_level
215 gist.acl_level = gist_acl_level
216 self.sa.add(gist)
216 self.sa.add(gist)
217 self.sa.flush()
217 self.sa.flush()
218
218
219 message = 'updated file'
219 message = 'updated file'
220 message += 's: ' if len(gist_mapping) > 1 else ': '
220 message += 's: ' if len(gist_mapping) > 1 else ': '
221 message += ', '.join([x for x in gist_mapping])
221 message += ', '.join([x for x in gist_mapping])
222
222
223 # fake RhodeCode Repository object
223 # fake RhodeCode Repository object
224 fake_repo = AttributeDict({
224 fake_repo = AttributeDict({
225 'repo_name': gist_repo.path,
225 'repo_name': gist_repo.path,
226 'scm_instance': lambda *args, **kwargs: gist_repo,
226 'scm_instance': lambda *args, **kwargs: gist_repo,
227 })
227 })
228
228
229 self._store_metadata(gist_repo, gist.gist_id, gist.gist_access_id,
229 self._store_metadata(gist_repo, gist.gist_id, gist.gist_access_id,
230 owner.user_id, owner.username, gist.gist_type,
230 owner.user_id, owner.username, gist.gist_type,
231 gist.gist_expires, gist_acl_level)
231 gist.gist_expires, gist_acl_level)
232
232
233 # this can throw NodeNotChangedError if the changes we're trying to commit
233 # this can throw NodeNotChangedError if the changes we're trying to commit
234 # are not actually changes...
234 # are not actually changes...
235 ScmModel().update_nodes(
235 ScmModel().update_nodes(
236 user=owner.user_id,
236 user=owner.user_id,
237 repo=fake_repo,
237 repo=fake_repo,
238 message=message,
238 message=message,
239 nodes=gist_mapping_op,
239 nodes=gist_mapping_op,
240 trigger_push_hook=False
240 trigger_push_hook=False
241 )
241 )
242
242
243 return gist
243 return gist
244
244
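The add/mod/del detection in update() can be restated as a small, self-contained sketch; the mapping keys (filename, filename_org) come from the loop above, while the sample file names are invented::

    def classify_ops(gist_mapping):
        # same rule as GistModel.update(): no original name -> addition,
        # no new name -> deletion, otherwise it is a modification
        for node in gist_mapping.values():
            if not node['filename_org'] and node['filename']:
                node['op'] = 'add'
            elif node['filename_org'] and not node['filename']:
                node['op'] = 'del'
            else:
                node['op'] = 'mod'
        return gist_mapping

    mapping = {
        'new.txt': {'filename': 'new.txt', 'filename_org': None},
        'old.txt': {'filename': None, 'filename_org': 'old.txt'},
    }
    classify_ops(mapping)   # new.txt -> 'add', old.txt -> 'del'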
245 def get_url(self, gist, request=None):
245 def get_url(self, gist, request=None):
246 import rhodecode
246 import rhodecode
247
247
248 if not request:
248 if not request:
249 request = get_current_request()
249 request = get_current_request()
250
250
251 alias_url = rhodecode.CONFIG.get('gist_alias_url')
251 alias_url = rhodecode.CONFIG.get('gist_alias_url')
252 if alias_url:
252 if alias_url:
253 return alias_url.replace('{gistid}', gist.gist_access_id)
253 return alias_url.replace('{gistid}', gist.gist_access_id)
254
254
255 return request.route_url('gist_show', gist_id=gist.gist_access_id)
255 return request.route_url('gist_show', gist_id=gist.gist_access_id)
256
256
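get_url() prefers a configured gist_alias_url over the built-in gist_show route; a hedged sketch of the substitution it performs (the alias value is an assumption, not a shipped default)::

    alias_url = 'https://gist.example.com/{gistid}'   # hypothetical config value
    gist_access_id = 'a1b2c3d4e5'

    # the same replacement get_url() does when rhodecode.CONFIG['gist_alias_url'] is set
    url = alias_url.replace('{gistid}', gist_access_id)
    # -> 'https://gist.example.com/a1b2c3d4e5'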
@@ -1,241 +1,241 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Model for integrations
23 Model for integrations
24 """
24 """
25
25
26
26
27 import logging
27 import logging
28
28
29 from sqlalchemy import or_, and_
29 from sqlalchemy import or_, and_
30
30
31 import rhodecode
31 import rhodecode
32 from rhodecode import events
32 from rhodecode import events
33 from rhodecode.integrations.types.base import EEIntegration
33 from rhodecode.integrations.types.base import EEIntegration
34 from rhodecode.lib.caching_query import FromCache
34 from rhodecode.lib.caching_query import FromCache
35 from rhodecode.model import BaseModel
35 from rhodecode.model import BaseModel
36 from rhodecode.model.db import Integration, Repository, RepoGroup, true, false, case
36 from rhodecode.model.db import Integration, Repository, RepoGroup, true, false, case
37 from rhodecode.integrations import integration_type_registry
37 from rhodecode.integrations import integration_type_registry
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 class IntegrationModel(BaseModel):
42 class IntegrationModel(BaseModel):
43
43
44 cls = Integration
44 cls = Integration
45
45
46 def __get_integration(self, integration):
46 def __get_integration(self, integration):
47 if isinstance(integration, Integration):
47 if isinstance(integration, Integration):
48 return integration
48 return integration
49 elif isinstance(integration, int):
49 elif isinstance(integration, int):
50 return self.sa.query(Integration).get(integration)
50 return self.sa.query(Integration).get(integration)
51 else:
51 else:
52 if integration:
52 if integration:
53 raise Exception('integration must be an int or an instance'
53 raise Exception('integration must be an int or an instance'
54 ' of Integration, got %s' % type(integration))
54 ' of Integration, got %s' % type(integration))
55
55
56 def create(self, IntegrationType, name, enabled, repo, repo_group,
56 def create(self, IntegrationType, name, enabled, repo, repo_group,
57 child_repos_only, settings):
57 child_repos_only, settings):
58 """ Create an IntegrationType integration """
58 """ Create an IntegrationType integration """
59 integration = Integration()
59 integration = Integration()
60 integration.integration_type = IntegrationType.key
60 integration.integration_type = IntegrationType.key
61 self.sa.add(integration)
61 self.sa.add(integration)
62 self.update_integration(integration, name, enabled, repo, repo_group,
62 self.update_integration(integration, name, enabled, repo, repo_group,
63 child_repos_only, settings)
63 child_repos_only, settings)
64 self.sa.commit()
64 self.sa.commit()
65 return integration
65 return integration
66
66
67 def update_integration(self, integration, name, enabled, repo, repo_group,
67 def update_integration(self, integration, name, enabled, repo, repo_group,
68 child_repos_only, settings):
68 child_repos_only, settings):
69 integration = self.__get_integration(integration)
69 integration = self.__get_integration(integration)
70
70
71 integration.repo = repo
71 integration.repo = repo
72 integration.repo_group = repo_group
72 integration.repo_group = repo_group
73 integration.child_repos_only = child_repos_only
73 integration.child_repos_only = child_repos_only
74 integration.name = name
74 integration.name = name
75 integration.enabled = enabled
75 integration.enabled = enabled
76 integration.settings = settings
76 integration.settings = settings
77
77
78 return integration
78 return integration
79
79
80 def delete(self, integration):
80 def delete(self, integration):
81 integration = self.__get_integration(integration)
81 integration = self.__get_integration(integration)
82 if integration:
82 if integration:
83 self.sa.delete(integration)
83 self.sa.delete(integration)
84 return True
84 return True
85 return False
85 return False
86
86
87 def get_integration_handler(self, integration):
87 def get_integration_handler(self, integration):
88 TypeClass = integration_type_registry.get(integration.integration_type)
88 TypeClass = integration_type_registry.get(integration.integration_type)
89 if not TypeClass:
89 if not TypeClass:
90 log.error('No class could be found for integration type: {}'.format(
90 log.error('No class could be found for integration type: {}'.format(
91 integration.integration_type))
91 integration.integration_type))
92 return None
92 return None
93 elif isinstance(TypeClass, EEIntegration) or issubclass(TypeClass, EEIntegration):
93 elif isinstance(TypeClass, EEIntegration) or issubclass(TypeClass, EEIntegration):
94 log.error('EE integration cannot be '
94 log.error('EE integration cannot be '
95 'executed for integration type: {}'.format(
95 'executed for integration type: {}'.format(
96 integration.integration_type))
96 integration.integration_type))
97 return None
97 return None
98
98
99 return TypeClass(integration.settings)
99 return TypeClass(integration.settings)
100
100
101 def send_event(self, integration, event):
101 def send_event(self, integration, event):
102 """ Send an event to an integration """
102 """ Send an event to an integration """
103 handler = self.get_integration_handler(integration)
103 handler = self.get_integration_handler(integration)
104 if handler:
104 if handler:
105 log.debug(
105 log.debug(
106 'events: sending event %s on integration %s using handler %s',
106 'events: sending event %s on integration %s using handler %s',
107 event, integration, handler)
107 event, integration, handler)
108 handler.send_event(event)
108 handler.send_event(event)
109
109
110 def get_integrations(self, scope, IntegrationType=None):
110 def get_integrations(self, scope, IntegrationType=None):
111 """
111 """
112 Return integrations for a scope, which must be one of:
112 Return integrations for a scope, which must be one of:
113
113
114 'all' - every integration, global/repogroup/repo
114 'all' - every integration, global/repogroup/repo
115 'global' - global integrations only
115 'global' - global integrations only
116 <Repository> instance - integrations for this repo only
116 <Repository> instance - integrations for this repo only
117 <RepoGroup> instance - integrations for this repogroup only
117 <RepoGroup> instance - integrations for this repogroup only
118 """
118 """
119
119
120 if isinstance(scope, Repository):
120 if isinstance(scope, Repository):
121 query = self.sa.query(Integration).filter(
121 query = self.sa.query(Integration).filter(
122 Integration.repo == scope)
122 Integration.repo == scope)
123 elif isinstance(scope, RepoGroup):
123 elif isinstance(scope, RepoGroup):
124 query = self.sa.query(Integration).filter(
124 query = self.sa.query(Integration).filter(
125 Integration.repo_group == scope)
125 Integration.repo_group == scope)
126 elif scope == 'global':
126 elif scope == 'global':
127 # global integrations
127 # global integrations
128 query = self.sa.query(Integration).filter(
128 query = self.sa.query(Integration).filter(
129 and_(Integration.repo_id == None, Integration.repo_group_id == None)
129 and_(Integration.repo_id == None, Integration.repo_group_id == None)
130 )
130 )
131 elif scope == 'root-repos':
131 elif scope == 'root-repos':
132 query = self.sa.query(Integration).filter(
132 query = self.sa.query(Integration).filter(
133 and_(Integration.repo_id == None,
133 and_(Integration.repo_id == None,
134 Integration.repo_group_id == None,
134 Integration.repo_group_id == None,
135 Integration.child_repos_only == true())
135 Integration.child_repos_only == true())
136 )
136 )
137 elif scope == 'all':
137 elif scope == 'all':
138 query = self.sa.query(Integration)
138 query = self.sa.query(Integration)
139 else:
139 else:
140 raise Exception(
140 raise Exception(
141 "invalid `scope`, must be one of: "
141 "invalid `scope`, must be one of: "
142 "['global', 'all', <Repository>, <RepoGroup>]")
142 "['global', 'all', <Repository>, <RepoGroup>]")
143
143
144 if IntegrationType is not None:
144 if IntegrationType is not None:
145 query = query.filter(
145 query = query.filter(
146 Integration.integration_type==IntegrationType.key)
146 Integration.integration_type==IntegrationType.key)
147
147
148 result = []
148 result = []
149 for integration in query.all():
149 for integration in query.all():
150 IntType = integration_type_registry.get(integration.integration_type)
150 IntType = integration_type_registry.get(integration.integration_type)
151 result.append((IntType, integration))
151 result.append((IntType, integration))
152 return result
152 return result
153
153
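A short usage sketch for the scope values documented above; the session setup and the repo/repo_group objects are assumed to exist already (the code also accepts the string 'root-repos', which the docstring does not list)::

    model = IntegrationModel()

    everything = model.get_integrations('all')           # every integration
    only_global = model.get_integrations('global')       # repo_id and repo_group_id both NULL
    for_one_repo = model.get_integrations(repo)          # a Repository instance
    for_one_group = model.get_integrations(repo_group)   # a RepoGroup instance

    # each entry is an (IntegrationType, Integration) tuple
    for int_type, integration in only_global:
        print(int_type, integration.name, integration.enabled)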
154 def get_for_event(self, event, cache=False):
154 def get_for_event(self, event, cache=False):
155 """
155 """
156 Get integrations that match an event
156 Get integrations that match an event
157 """
157 """
158 # base query
158 # base query
159 query = self.sa.query(
159 query = self.sa.query(
160 Integration
160 Integration
161 ).filter(
161 ).filter(
162 Integration.enabled == true()
162 Integration.enabled == true()
163 )
163 )
164
164
165 global_integrations_filter = and_(
165 global_integrations_filter = and_(
166 Integration.repo_id == None,
166 Integration.repo_id == None,
167 Integration.repo_group_id == None,
167 Integration.repo_group_id == None,
168 Integration.child_repos_only == false(),
168 Integration.child_repos_only == false(),
169 )
169 )
170
170
171 if isinstance(event, events.RepoEvent):
171 if isinstance(event, events.RepoEvent):
172 root_repos_integrations_filter = and_(
172 root_repos_integrations_filter = and_(
173 Integration.repo_id == None,
173 Integration.repo_id == None,
174 Integration.repo_group_id == None,
174 Integration.repo_group_id == None,
175 Integration.child_repos_only == true(),
175 Integration.child_repos_only == true(),
176 )
176 )
177
177
178 clauses = [
178 clauses = [
179 global_integrations_filter,
179 global_integrations_filter,
180 ]
180 ]
181 cases = [
181 cases = [
182 (global_integrations_filter, 1),
182 (global_integrations_filter, 1),
183 (root_repos_integrations_filter, 2),
183 (root_repos_integrations_filter, 2),
184 ]
184 ]
185
185
186 # repo group integrations
186 # repo group integrations
187 if event.repo.group:
187 if event.repo.group:
188 # repo group with only root level repos
188 # repo group with only root level repos
189 group_child_repos_filter = and_(
189 group_child_repos_filter = and_(
190 Integration.repo_group_id == event.repo.group.group_id,
190 Integration.repo_group_id == event.repo.group.group_id,
191 Integration.child_repos_only == true()
191 Integration.child_repos_only == true()
192 )
192 )
193
193
194 clauses.append(group_child_repos_filter)
194 clauses.append(group_child_repos_filter)
195 cases.append(
195 cases.append(
196 (group_child_repos_filter, 3),
196 (group_child_repos_filter, 3),
197 )
197 )
198
198
199 # repo group integrations that cascade down to child repos
199 # repo group integrations that cascade down to child repos
200 group_recursive_repos_filter = and_(
200 group_recursive_repos_filter = and_(
201 Integration.repo_group_id.in_(
201 Integration.repo_group_id.in_(
202 [group.group_id for group in event.repo.groups_with_parents]
202 [group.group_id for group in event.repo.groups_with_parents]
203 ),
203 ),
204 Integration.child_repos_only == false()
204 Integration.child_repos_only == false()
205 )
205 )
206 clauses.append(group_recursive_repos_filter)
206 clauses.append(group_recursive_repos_filter)
207 cases.append(
207 cases.append(
208 (group_recursive_repos_filter, 4),
208 (group_recursive_repos_filter, 4),
209 )
209 )
210
210
211 if not event.repo.group: # root repo
211 if not event.repo.group: # root repo
212 clauses.append(root_repos_integrations_filter)
212 clauses.append(root_repos_integrations_filter)
213
213
214 # repo integrations
214 # repo integrations
215 if event.repo.repo_id: # pre-create events don't have a repo_id yet
215 if event.repo.repo_id: # pre-create events don't have a repo_id yet
216 specific_repo_filter = Integration.repo_id == event.repo.repo_id
216 specific_repo_filter = Integration.repo_id == event.repo.repo_id
217 clauses.append(specific_repo_filter)
217 clauses.append(specific_repo_filter)
218 cases.append(
218 cases.append(
219 (specific_repo_filter, 5),
219 (specific_repo_filter, 5),
220 )
220 )
221
221
222 order_by_criterion = case(cases)
222 order_by_criterion = case(cases)
223
223
224 query = query.filter(or_(*clauses))
224 query = query.filter(or_(*clauses))
225 query = query.order_by(order_by_criterion)
225 query = query.order_by(order_by_criterion)
226
226
227 if cache:
227 if cache:
228 cache_key = "get_enabled_repo_integrations_%i" % event.repo.repo_id
228 cache_key = "get_enabled_repo_integrations_%i" % event.repo.repo_id
229 query = query.options(
229 query = query.options(
230 FromCache("sql_cache_short", cache_key))
230 FromCache("sql_cache_short", cache_key))
231 else: # only global integrations
231 else: # only global integrations
232 order_by_criterion = Integration.integration_id
232 order_by_criterion = Integration.integration_id
233
233
234 query = query.filter(global_integrations_filter)
234 query = query.filter(global_integrations_filter)
235 query = query.order_by(order_by_criterion)
235 query = query.order_by(order_by_criterion)
236 if cache:
236 if cache:
237 query = query.options(
237 query = query.options(
238 FromCache("sql_cache_short", "get_enabled_global_integrations"))
238 FromCache("sql_cache_short", "get_enabled_global_integrations"))
239
239
240 result = query.all()
240 result = query.all()
241 return result
241 return result
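The case() built above orders matches from global (1) down to repository-specific (5) integrations; a hedged dispatch sketch, assuming event is a RepoEvent constructed elsewhere::

    model = IntegrationModel()

    # fetch enabled integrations matching the event, cached per repo,
    # then hand the event to each handler in the precedence order shown above
    for integration in model.get_for_event(event, cache=True):
        model.send_event(integration, event)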
@@ -1,56 +1,55 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 """
20 """
22 SQLAlchemy Metadata and Session object
21 SQLAlchemy Metadata and Session object
23 """
22 """
24
23
25 from sqlalchemy.orm import declarative_base
24 from sqlalchemy.orm import declarative_base
26 from sqlalchemy.orm import scoped_session, sessionmaker
25 from sqlalchemy.orm import scoped_session, sessionmaker
27 from sqlalchemy.orm import Session as SASession
26 from sqlalchemy.orm import Session as SASession
28 from rhodecode.lib.caching_query import ORMCache
27 from rhodecode.lib.caching_query import ORMCache
29
28
30
29
31 __all__ = ['Base', 'Session', 'raw_query_executor']
30 __all__ = ['Base', 'Session', 'raw_query_executor']
32
31
33 # scoped_session: apply our custom CachingQuery class to it,
32 # scoped_session: apply our custom CachingQuery class to it,
34 # using a callable that will associate the dictionary
33 # using a callable that will associate the dictionary
35 # of regions with the Query.
34 # of regions with the Query.
36 # to use the cache, use this on a query:
35 # to use the cache, use this on a query:
37 # .options(FromCache("sqlalchemy_cache_type", "cachekey"))
36 # .options(FromCache("sqlalchemy_cache_type", "cachekey"))
38 Session = scoped_session(
37 Session = scoped_session(
39 sessionmaker(
38 sessionmaker(
40 expire_on_commit=True,
39 expire_on_commit=True,
41 )
40 )
42 )
41 )
43
42
44 # pass empty regions so we can fetch them on demand inside ORMCache
43 # pass empty regions so we can fetch them on demand inside ORMCache
45 cache = ORMCache(regions={})
44 cache = ORMCache(regions={})
46 cache.listen_on_session(Session)
45 cache.listen_on_session(Session)
47
46
48
47
49 # The declarative Base
48 # The declarative Base
50 Base = declarative_base()
49 Base = declarative_base()
51
50
52
51
53 def raw_query_executor():
52 def raw_query_executor():
54 engine = Base.metadata.bind
53 engine = Base.metadata.bind
55 session = SASession(engine)
54 session = SASession(engine)
56 return session
55 return session
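The caching comment above can be made concrete; a minimal sketch, assuming the 'sql_cache_short' region is configured (the cache key below is illustrative)::

    from rhodecode.lib.caching_query import FromCache
    from rhodecode.model.db import Integration
    from rhodecode.model.meta import Session

    # attach a cache region/key to a query; identical queries are then
    # answered from the cache instead of hitting the database
    query = Session().query(Integration).options(
        FromCache("sql_cache_short", "all_integrations_key"))
    results = query.all()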
@@ -1,453 +1,453 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Model for notifications
23 Model for notifications
24 """
24 """
25
25
26 import logging
26 import logging
27 import traceback
27 import traceback
28
28
29 import premailer
29 import premailer
30 from pyramid.threadlocal import get_current_request
30 from pyramid.threadlocal import get_current_request
31 from sqlalchemy.sql.expression import false, true
31 from sqlalchemy.sql.expression import false, true
32
32
33 import rhodecode
33 import rhodecode
34 from rhodecode.lib import helpers as h
34 from rhodecode.lib import helpers as h
35 from rhodecode.model import BaseModel
35 from rhodecode.model import BaseModel
36 from rhodecode.model.db import Notification, User, UserNotification
36 from rhodecode.model.db import Notification, User, UserNotification
37 from rhodecode.model.meta import Session
37 from rhodecode.model.meta import Session
38 from rhodecode.translation import TranslationString
38 from rhodecode.translation import TranslationString
39
39
40 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
41
41
42
42
43 class NotificationModel(BaseModel):
43 class NotificationModel(BaseModel):
44
44
45 cls = Notification
45 cls = Notification
46
46
47 def __get_notification(self, notification):
47 def __get_notification(self, notification):
48 if isinstance(notification, Notification):
48 if isinstance(notification, Notification):
49 return notification
49 return notification
50 elif isinstance(notification, int):
50 elif isinstance(notification, int):
51 return Notification.get(notification)
51 return Notification.get(notification)
52 else:
52 else:
53 if notification:
53 if notification:
54 raise Exception('notification must be an int or an instance'
54 raise Exception('notification must be an int or an instance'
55 ' of Notification, got %s' % type(notification))
55 ' of Notification, got %s' % type(notification))
56
56
57 def create(
57 def create(
58 self, created_by, notification_subject='', notification_body='',
58 self, created_by, notification_subject='', notification_body='',
59 notification_type=Notification.TYPE_MESSAGE, recipients=None,
59 notification_type=Notification.TYPE_MESSAGE, recipients=None,
60 mention_recipients=None, with_email=True, email_kwargs=None):
60 mention_recipients=None, with_email=True, email_kwargs=None):
61 """
61 """
62
62
63 Creates a notification of the given type
63 Creates a notification of the given type
64
64
65 :param created_by: int, str or User instance. User who created this
65 :param created_by: int, str or User instance. User who created this
66 notification
66 notification
67 :param notification_subject: subject of the notification itself;
67 :param notification_subject: subject of the notification itself;
68 it will be generated automatically from notification_type if not specified
68 it will be generated automatically from notification_type if not specified
69 :param notification_body: body of the notification text;
69 :param notification_body: body of the notification text;
70 it will be generated automatically from notification_type if not specified
70 it will be generated automatically from notification_type if not specified
71 :param notification_type: type of notification, based on that we
71 :param notification_type: type of notification, based on that we
72 pick templates
72 pick templates
73 :param recipients: list of int, str or User objects; when None
73 :param recipients: list of int, str or User objects; when None
74 is given, send to all admins
74 is given, send to all admins
75 :param mention_recipients: list of int, str or User objects,
75 :param mention_recipients: list of int, str or User objects,
76 that were mentioned
76 that were mentioned
77 :param with_email: send email with this notification
77 :param with_email: send email with this notification
78 :param email_kwargs: dict with arguments to generate email
78 :param email_kwargs: dict with arguments to generate email
79 """
79 """
80
80
81 from rhodecode.lib.celerylib import tasks, run_task
81 from rhodecode.lib.celerylib import tasks, run_task
82
82
83 if recipients and not getattr(recipients, '__iter__', False):
83 if recipients and not getattr(recipients, '__iter__', False):
84 raise Exception('recipients must be an iterable object')
84 raise Exception('recipients must be an iterable object')
85
85
86 if not (notification_subject and notification_body) and not notification_type:
86 if not (notification_subject and notification_body) and not notification_type:
87 raise ValueError('notification_subject and notification_body '
87 raise ValueError('notification_subject and notification_body '
88 'cannot be empty when notification_type is not specified')
88 'cannot be empty when notification_type is not specified')
89
89
90 created_by_obj = self._get_user(created_by)
90 created_by_obj = self._get_user(created_by)
91
91
92 if not created_by_obj:
92 if not created_by_obj:
93 raise Exception('unknown user %s' % created_by)
93 raise Exception('unknown user %s' % created_by)
94
94
95 # default MAIN body if not given
95 # default MAIN body if not given
96 email_kwargs = email_kwargs or {'body': notification_body}
96 email_kwargs = email_kwargs or {'body': notification_body}
97 mention_recipients = mention_recipients or set()
97 mention_recipients = mention_recipients or set()
98
98
99 if recipients is None:
99 if recipients is None:
100 # recipients is None means to all admins
100 # recipients is None means to all admins
101 recipients_objs = User.query().filter(User.admin == true()).all()
101 recipients_objs = User.query().filter(User.admin == true()).all()
102 log.debug('sending notifications %s to admins: %s',
102 log.debug('sending notifications %s to admins: %s',
103 notification_type, recipients_objs)
103 notification_type, recipients_objs)
104 else:
104 else:
105 recipients_objs = set()
105 recipients_objs = set()
106 for u in recipients:
106 for u in recipients:
107 obj = self._get_user(u)
107 obj = self._get_user(u)
108 if obj:
108 if obj:
109 recipients_objs.add(obj)
109 recipients_objs.add(obj)
110 else: # we didn't find this user, log the error and carry on
110 else: # we didn't find this user, log the error and carry on
111 log.error('cannot notify unknown user %r', u)
111 log.error('cannot notify unknown user %r', u)
112
112
113 if not recipients_objs:
113 if not recipients_objs:
114 raise Exception('no valid recipients specified')
114 raise Exception('no valid recipients specified')
115
115
116 log.debug('sending notifications %s to %s',
116 log.debug('sending notifications %s to %s',
117 notification_type, recipients_objs)
117 notification_type, recipients_objs)
118
118
119 # add mentioned users into recipients
119 # add mentioned users into recipients
120 final_recipients = set(recipients_objs).union(mention_recipients)
120 final_recipients = set(recipients_objs).union(mention_recipients)
121
121
122 (subject, email_body, email_body_plaintext) = \
122 (subject, email_body, email_body_plaintext) = \
123 EmailNotificationModel().render_email(notification_type, **email_kwargs)
123 EmailNotificationModel().render_email(notification_type, **email_kwargs)
124
124
125 if not notification_subject:
125 if not notification_subject:
126 notification_subject = subject
126 notification_subject = subject
127
127
128 if not notification_body:
128 if not notification_body:
129 notification_body = email_body_plaintext
129 notification_body = email_body_plaintext
130
130
131 notification = Notification.create(
131 notification = Notification.create(
132 created_by=created_by_obj, subject=notification_subject,
132 created_by=created_by_obj, subject=notification_subject,
133 body=notification_body, recipients=final_recipients,
133 body=notification_body, recipients=final_recipients,
134 type_=notification_type
134 type_=notification_type
135 )
135 )
136
136
137 if not with_email: # skip sending email, and just create notification
137 if not with_email: # skip sending email, and just create notification
138 return notification
138 return notification
139
139
140 # don't send email to person who created this comment
140 # don't send email to person who created this comment
141 rec_objs = set(recipients_objs).difference({created_by_obj})
141 rec_objs = set(recipients_objs).difference({created_by_obj})
142
142
143 # now notify all recipients in question
143 # now notify all recipients in question
144
144
145 for recipient in rec_objs.union(mention_recipients):
145 for recipient in rec_objs.union(mention_recipients):
146 # inject current recipient
146 # inject current recipient
147 email_kwargs['recipient'] = recipient
147 email_kwargs['recipient'] = recipient
148 email_kwargs['mention'] = recipient in mention_recipients
148 email_kwargs['mention'] = recipient in mention_recipients
149 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
149 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
150 notification_type, **email_kwargs)
150 notification_type, **email_kwargs)
151
151
152 extra_headers = None
152 extra_headers = None
153 if 'thread_ids' in email_kwargs:
153 if 'thread_ids' in email_kwargs:
154 extra_headers = {'thread_ids': email_kwargs.pop('thread_ids')}
154 extra_headers = {'thread_ids': email_kwargs.pop('thread_ids')}
155
155
156 log.debug('Creating notification email task for user:`%s`', recipient)
156 log.debug('Creating notification email task for user:`%s`', recipient)
157 task = run_task(tasks.send_email, recipient.email, subject,
157 task = run_task(tasks.send_email, recipient.email, subject,
158 email_body_plaintext, email_body, extra_headers=extra_headers)
158 email_body_plaintext, email_body, extra_headers=extra_headers)
159 log.debug('Created email task: %s', task)
159 log.debug('Created email task: %s', task)
160
160
161 return notification
161 return notification
162
162
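A hedged usage sketch for create(), built only from the parameters documented above; the subject, body and recipients are made up, and committing the session is assumed to be the caller's responsibility::

    notification = NotificationModel().create(
        created_by='admin',                        # int, str or User instance
        notification_subject='Maintenance window',
        notification_body='The server restarts at 22:00 UTC.',
        notification_type=Notification.TYPE_MESSAGE,
        recipients=None,       # None means: send to all admins
        with_email=True,       # also queue one rendered email per recipient
    )
    Session().commit()         # assumption: the caller finalises the transaction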
163 def delete(self, user, notification):
163 def delete(self, user, notification):
164 # we don't want to remove actual notification just the assignment
164 # we don't want to remove actual notification just the assignment
165 try:
165 try:
166 notification = self.__get_notification(notification)
166 notification = self.__get_notification(notification)
167 user = self._get_user(user)
167 user = self._get_user(user)
168 if notification and user:
168 if notification and user:
169 obj = UserNotification.query()\
169 obj = UserNotification.query()\
170 .filter(UserNotification.user == user)\
170 .filter(UserNotification.user == user)\
171 .filter(UserNotification.notification == notification)\
171 .filter(UserNotification.notification == notification)\
172 .one()
172 .one()
173 Session().delete(obj)
173 Session().delete(obj)
174 return True
174 return True
175 except Exception:
175 except Exception:
176 log.error(traceback.format_exc())
176 log.error(traceback.format_exc())
177 raise
177 raise
178
178
179 def get_for_user(self, user, filter_=None):
179 def get_for_user(self, user, filter_=None):
180 """
180 """
181 Get mentions for given user, filter them if filter dict is given
181 Get mentions for given user, filter them if filter dict is given
182 """
182 """
183 user = self._get_user(user)
183 user = self._get_user(user)
184
184
185 q = UserNotification.query()\
185 q = UserNotification.query()\
186 .filter(UserNotification.user == user)\
186 .filter(UserNotification.user == user)\
187 .join((
187 .join((
188 Notification, UserNotification.notification_id ==
188 Notification, UserNotification.notification_id ==
189 Notification.notification_id))
189 Notification.notification_id))
190 if filter_ == ['all']:
190 if filter_ == ['all']:
191 q = q # no filter
191 q = q # no filter
192 elif filter_ == ['unread']:
192 elif filter_ == ['unread']:
193 q = q.filter(UserNotification.read == false())
193 q = q.filter(UserNotification.read == false())
194 elif filter_:
194 elif filter_:
195 q = q.filter(Notification.type_.in_(filter_))
195 q = q.filter(Notification.type_.in_(filter_))
196
196
197 return q
197 return q
198
198
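The filter_ argument accepts the literal ['all'] or ['unread'] markers, or a list of notification types; a brief sketch, with the user object assumed to exist::

    model = NotificationModel()

    everything = model.get_for_user(user)                        # no filtering
    unread_only = model.get_for_user(user, filter_=['unread'])
    comments = model.get_for_user(
        user, filter_=[Notification.TYPE_CHANGESET_COMMENT,
                       Notification.TYPE_PULL_REQUEST_COMMENT])

    # a query object is returned, so it can be refined or counted lazily
    unread_count = unread_only.count()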
199 def mark_read(self, user, notification):
199 def mark_read(self, user, notification):
200 try:
200 try:
201 notification = self.__get_notification(notification)
201 notification = self.__get_notification(notification)
202 user = self._get_user(user)
202 user = self._get_user(user)
203 if notification and user:
203 if notification and user:
204 obj = UserNotification.query()\
204 obj = UserNotification.query()\
205 .filter(UserNotification.user == user)\
205 .filter(UserNotification.user == user)\
206 .filter(UserNotification.notification == notification)\
206 .filter(UserNotification.notification == notification)\
207 .one()
207 .one()
208 obj.read = True
208 obj.read = True
209 Session().add(obj)
209 Session().add(obj)
210 return True
210 return True
211 except Exception:
211 except Exception:
212 log.error(traceback.format_exc())
212 log.error(traceback.format_exc())
213 raise
213 raise
214
214
215 def mark_all_read_for_user(self, user, filter_=None):
215 def mark_all_read_for_user(self, user, filter_=None):
216 user = self._get_user(user)
216 user = self._get_user(user)
217 q = UserNotification.query()\
217 q = UserNotification.query()\
218 .filter(UserNotification.user == user)\
218 .filter(UserNotification.user == user)\
219 .filter(UserNotification.read == false())\
219 .filter(UserNotification.read == false())\
220 .join((
220 .join((
221 Notification, UserNotification.notification_id ==
221 Notification, UserNotification.notification_id ==
222 Notification.notification_id))
222 Notification.notification_id))
223 if filter_ == ['unread']:
223 if filter_ == ['unread']:
224 q = q.filter(UserNotification.read == false())
224 q = q.filter(UserNotification.read == false())
225 elif filter_:
225 elif filter_:
226 q = q.filter(Notification.type_.in_(filter_))
226 q = q.filter(Notification.type_.in_(filter_))
227
227
228 # this is a little inefficient but sqlalchemy doesn't support
228 # this is a little inefficient but sqlalchemy doesn't support
229 # update on joined tables :(
229 # update on joined tables :(
230 for obj in q.all():
230 for obj in q.all():
231 obj.read = True
231 obj.read = True
232 Session().add(obj)
232 Session().add(obj)
233
233
234 def get_unread_cnt_for_user(self, user):
234 def get_unread_cnt_for_user(self, user):
235 user = self._get_user(user)
235 user = self._get_user(user)
236 return UserNotification.query()\
236 return UserNotification.query()\
237 .filter(UserNotification.read == false())\
237 .filter(UserNotification.read == false())\
238 .filter(UserNotification.user == user).count()
238 .filter(UserNotification.user == user).count()
239
239
240 def get_unread_for_user(self, user):
240 def get_unread_for_user(self, user):
241 user = self._get_user(user)
241 user = self._get_user(user)
242 return [x.notification for x in UserNotification.query()
242 return [x.notification for x in UserNotification.query()
243 .filter(UserNotification.read == false())
243 .filter(UserNotification.read == false())
244 .filter(UserNotification.user == user).all()]
244 .filter(UserNotification.user == user).all()]
245
245
246 def get_user_notification(self, user, notification):
246 def get_user_notification(self, user, notification):
247 user = self._get_user(user)
247 user = self._get_user(user)
248 notification = self.__get_notification(notification)
248 notification = self.__get_notification(notification)
249
249
250 return UserNotification.query()\
250 return UserNotification.query()\
251 .filter(UserNotification.notification == notification)\
251 .filter(UserNotification.notification == notification)\
252 .filter(UserNotification.user == user).scalar()
252 .filter(UserNotification.user == user).scalar()
253
253
254 def make_description(self, notification, translate, show_age=True):
254 def make_description(self, notification, translate, show_age=True):
255 """
255 """
256 Creates a human readable description based on properties
256 Creates a human readable description based on properties
257 of notification object
257 of notification object
258 """
258 """
259 _ = translate
259 _ = translate
260 _map = {
260 _map = {
261 notification.TYPE_CHANGESET_COMMENT: [
261 notification.TYPE_CHANGESET_COMMENT: [
262 _('%(user)s commented on commit %(date_or_age)s'),
262 _('%(user)s commented on commit %(date_or_age)s'),
263 _('%(user)s commented on commit at %(date_or_age)s'),
263 _('%(user)s commented on commit at %(date_or_age)s'),
264 ],
264 ],
265 notification.TYPE_MESSAGE: [
265 notification.TYPE_MESSAGE: [
266 _('%(user)s sent message %(date_or_age)s'),
266 _('%(user)s sent message %(date_or_age)s'),
267 _('%(user)s sent message at %(date_or_age)s'),
267 _('%(user)s sent message at %(date_or_age)s'),
268 ],
268 ],
269 notification.TYPE_MENTION: [
269 notification.TYPE_MENTION: [
270 _('%(user)s mentioned you %(date_or_age)s'),
270 _('%(user)s mentioned you %(date_or_age)s'),
271 _('%(user)s mentioned you at %(date_or_age)s'),
271 _('%(user)s mentioned you at %(date_or_age)s'),
272 ],
272 ],
273 notification.TYPE_REGISTRATION: [
273 notification.TYPE_REGISTRATION: [
274 _('%(user)s registered in RhodeCode %(date_or_age)s'),
274 _('%(user)s registered in RhodeCode %(date_or_age)s'),
275 _('%(user)s registered in RhodeCode at %(date_or_age)s'),
275 _('%(user)s registered in RhodeCode at %(date_or_age)s'),
276 ],
276 ],
277 notification.TYPE_PULL_REQUEST: [
277 notification.TYPE_PULL_REQUEST: [
278 _('%(user)s opened new pull request %(date_or_age)s'),
278 _('%(user)s opened new pull request %(date_or_age)s'),
279 _('%(user)s opened new pull request at %(date_or_age)s'),
279 _('%(user)s opened new pull request at %(date_or_age)s'),
280 ],
280 ],
281 notification.TYPE_PULL_REQUEST_UPDATE: [
281 notification.TYPE_PULL_REQUEST_UPDATE: [
282 _('%(user)s updated pull request %(date_or_age)s'),
282 _('%(user)s updated pull request %(date_or_age)s'),
283 _('%(user)s updated pull request at %(date_or_age)s'),
283 _('%(user)s updated pull request at %(date_or_age)s'),
284 ],
284 ],
285 notification.TYPE_PULL_REQUEST_COMMENT: [
285 notification.TYPE_PULL_REQUEST_COMMENT: [
286 _('%(user)s commented on pull request %(date_or_age)s'),
286 _('%(user)s commented on pull request %(date_or_age)s'),
287 _('%(user)s commented on pull request at %(date_or_age)s'),
287 _('%(user)s commented on pull request at %(date_or_age)s'),
288 ],
288 ],
289 }
289 }
290
290
291 templates = _map[notification.type_]
291 templates = _map[notification.type_]
292
292
293 if show_age:
293 if show_age:
294 template = templates[0]
294 template = templates[0]
295 date_or_age = h.age(notification.created_on)
295 date_or_age = h.age(notification.created_on)
296 if translate:
296 if translate:
297 date_or_age = translate(date_or_age)
297 date_or_age = translate(date_or_age)
298
298
299 if isinstance(date_or_age, TranslationString):
299 if isinstance(date_or_age, TranslationString):
300 date_or_age = date_or_age.interpolate()
300 date_or_age = date_or_age.interpolate()
301
301
302 else:
302 else:
303 template = templates[1]
303 template = templates[1]
304 date_or_age = h.format_date(notification.created_on)
304 date_or_age = h.format_date(notification.created_on)
305
305
306 return template % {
306 return template % {
307 'user': notification.created_by_user.username,
307 'user': notification.created_by_user.username,
308 'date_or_age': date_or_age,
308 'date_or_age': date_or_age,
309 }
309 }
310
310
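make_description() picks the first template of each pair when show_age is true and the second otherwise; a brief calling sketch, with the translator and notification objects assumed to exist::

    _ = request.translate   # any translation callable works here
    text = NotificationModel().make_description(notification, _, show_age=True)
    # e.g. "john commented on pull request 3 hours ago" (wording depends on type_)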
311
311
312 # Templates for titles that can be overwritten by rcextensions
312 # Templates for titles that can be overwritten by rcextensions
313 # Title of email for pull-request update
313 # Title of email for pull-request update
314 EMAIL_PR_UPDATE_SUBJECT_TEMPLATE = ''
314 EMAIL_PR_UPDATE_SUBJECT_TEMPLATE = ''
315 # Title of email for request for pull request review
315 # Title of email for request for pull request review
316 EMAIL_PR_REVIEW_SUBJECT_TEMPLATE = ''
316 EMAIL_PR_REVIEW_SUBJECT_TEMPLATE = ''
317
317
318 # Title of email for general comment on pull request
318 # Title of email for general comment on pull request
319 EMAIL_PR_COMMENT_SUBJECT_TEMPLATE = ''
319 EMAIL_PR_COMMENT_SUBJECT_TEMPLATE = ''
320 # Title of email for general comment which includes status change on pull request
320 # Title of email for general comment which includes status change on pull request
321 EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = ''
321 EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = ''
322 # Title of email for inline comment on a file in pull request
322 # Title of email for inline comment on a file in pull request
323 EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE = ''
323 EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE = ''
324
324
325 # Title of email for general comment on commit
325 # Title of email for general comment on commit
326 EMAIL_COMMENT_SUBJECT_TEMPLATE = ''
326 EMAIL_COMMENT_SUBJECT_TEMPLATE = ''
327 # Title of email for general comment which includes status change on commit
327 # Title of email for general comment which includes status change on commit
328 EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = ''
328 EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = ''
329 # Title of email for inline comment on a file in commit
329 # Title of email for inline comment on a file in commit
330 EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE = ''
330 EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE = ''
331
331
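These module-level subject templates default to empty strings and, per the comment above, can be overwritten from rcextensions; a purely illustrative override, assuming the module path rhodecode.model.notification and a {repo_name} placeholder that may or may not be supported::

    import rhodecode.model.notification as notification_mod

    # assumption: assigning a non-empty string here is picked up when
    # pull-request update emails are rendered
    notification_mod.EMAIL_PR_UPDATE_SUBJECT_TEMPLATE = (
        '[{repo_name}] pull request updated')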
332 import cssutils
332 import cssutils
333 # hijack the cssutils logger and replace it with ours
333 # hijack the cssutils logger and replace it with ours
334 log = logging.getLogger('rhodecode.cssutils.premailer')
334 log = logging.getLogger('rhodecode.cssutils.premailer')
335 cssutils.log.setLog(log)
335 cssutils.log.setLog(log)
336
336
337
337
338 class EmailNotificationModel(BaseModel):
338 class EmailNotificationModel(BaseModel):
339 TYPE_COMMIT_COMMENT = Notification.TYPE_CHANGESET_COMMENT
339 TYPE_COMMIT_COMMENT = Notification.TYPE_CHANGESET_COMMENT
340 TYPE_REGISTRATION = Notification.TYPE_REGISTRATION
340 TYPE_REGISTRATION = Notification.TYPE_REGISTRATION
341 TYPE_PULL_REQUEST = Notification.TYPE_PULL_REQUEST
341 TYPE_PULL_REQUEST = Notification.TYPE_PULL_REQUEST
342 TYPE_PULL_REQUEST_COMMENT = Notification.TYPE_PULL_REQUEST_COMMENT
342 TYPE_PULL_REQUEST_COMMENT = Notification.TYPE_PULL_REQUEST_COMMENT
343 TYPE_PULL_REQUEST_UPDATE = Notification.TYPE_PULL_REQUEST_UPDATE
343 TYPE_PULL_REQUEST_UPDATE = Notification.TYPE_PULL_REQUEST_UPDATE
344 TYPE_MAIN = Notification.TYPE_MESSAGE
344 TYPE_MAIN = Notification.TYPE_MESSAGE
345
345
346 TYPE_PASSWORD_RESET = 'password_reset'
346 TYPE_PASSWORD_RESET = 'password_reset'
347 TYPE_PASSWORD_RESET_CONFIRMATION = 'password_reset_confirmation'
347 TYPE_PASSWORD_RESET_CONFIRMATION = 'password_reset_confirmation'
348 TYPE_EMAIL_TEST = 'email_test'
348 TYPE_EMAIL_TEST = 'email_test'
349 TYPE_EMAIL_EXCEPTION = 'exception'
349 TYPE_EMAIL_EXCEPTION = 'exception'
350 TYPE_UPDATE_AVAILABLE = 'update_available'
350 TYPE_UPDATE_AVAILABLE = 'update_available'
351 TYPE_TEST = 'test'
351 TYPE_TEST = 'test'
352
352
353 email_types = {
353 email_types = {
354 TYPE_MAIN:
354 TYPE_MAIN:
355 'rhodecode:templates/email_templates/main.mako',
355 'rhodecode:templates/email_templates/main.mako',
356 TYPE_TEST:
356 TYPE_TEST:
357 'rhodecode:templates/email_templates/test.mako',
357 'rhodecode:templates/email_templates/test.mako',
358 TYPE_EMAIL_EXCEPTION:
358 TYPE_EMAIL_EXCEPTION:
359 'rhodecode:templates/email_templates/exception_tracker.mako',
359 'rhodecode:templates/email_templates/exception_tracker.mako',
360 TYPE_UPDATE_AVAILABLE:
360 TYPE_UPDATE_AVAILABLE:
361 'rhodecode:templates/email_templates/update_available.mako',
361 'rhodecode:templates/email_templates/update_available.mako',
362 TYPE_EMAIL_TEST:
362 TYPE_EMAIL_TEST:
363 'rhodecode:templates/email_templates/email_test.mako',
363 'rhodecode:templates/email_templates/email_test.mako',
364 TYPE_REGISTRATION:
364 TYPE_REGISTRATION:
365 'rhodecode:templates/email_templates/user_registration.mako',
365 'rhodecode:templates/email_templates/user_registration.mako',
366 TYPE_PASSWORD_RESET:
366 TYPE_PASSWORD_RESET:
367 'rhodecode:templates/email_templates/password_reset.mako',
367 'rhodecode:templates/email_templates/password_reset.mako',
368 TYPE_PASSWORD_RESET_CONFIRMATION:
368 TYPE_PASSWORD_RESET_CONFIRMATION:
369 'rhodecode:templates/email_templates/password_reset_confirmation.mako',
369 'rhodecode:templates/email_templates/password_reset_confirmation.mako',
370 TYPE_COMMIT_COMMENT:
370 TYPE_COMMIT_COMMENT:
371 'rhodecode:templates/email_templates/commit_comment.mako',
371 'rhodecode:templates/email_templates/commit_comment.mako',
372 TYPE_PULL_REQUEST:
372 TYPE_PULL_REQUEST:
373 'rhodecode:templates/email_templates/pull_request_review.mako',
373 'rhodecode:templates/email_templates/pull_request_review.mako',
374 TYPE_PULL_REQUEST_COMMENT:
374 TYPE_PULL_REQUEST_COMMENT:
375 'rhodecode:templates/email_templates/pull_request_comment.mako',
375 'rhodecode:templates/email_templates/pull_request_comment.mako',
376 TYPE_PULL_REQUEST_UPDATE:
376 TYPE_PULL_REQUEST_UPDATE:
377 'rhodecode:templates/email_templates/pull_request_update.mako',
377 'rhodecode:templates/email_templates/pull_request_update.mako',
378 }
378 }
379
379
380 premailer_instance = premailer.Premailer()
380 premailer_instance = premailer.Premailer()
381
381
382 def __init__(self):
382 def __init__(self):
383 """
383 """
384 Example usage::
384 Example usage::
385
385
386 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
386 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
387 EmailNotificationModel.TYPE_TEST, **email_kwargs)
387 EmailNotificationModel.TYPE_TEST, **email_kwargs)
388
388
389 """
389 """
390 super(EmailNotificationModel, self).__init__()
390 super(EmailNotificationModel, self).__init__()
391 self.rhodecode_instance_name = rhodecode.CONFIG.get('rhodecode_title')
391 self.rhodecode_instance_name = rhodecode.CONFIG.get('rhodecode_title')
392
392
393 def _update_kwargs_for_render(self, kwargs):
393 def _update_kwargs_for_render(self, kwargs):
394 """
394 """
395 Inject params required for Mako rendering
395 Inject params required for Mako rendering
396
396
397 :param kwargs:
397 :param kwargs:
398 """
398 """
399
399
400 kwargs['rhodecode_instance_name'] = self.rhodecode_instance_name
400 kwargs['rhodecode_instance_name'] = self.rhodecode_instance_name
401 kwargs['rhodecode_version'] = rhodecode.__version__
401 kwargs['rhodecode_version'] = rhodecode.__version__
402 instance_url = h.route_url('home')
402 instance_url = h.route_url('home')
403 _kwargs = {
403 _kwargs = {
404 'instance_url': instance_url,
404 'instance_url': instance_url,
405 'whitespace_filter': self.whitespace_filter,
405 'whitespace_filter': self.whitespace_filter,
406 'email_pr_update_subject_template': EMAIL_PR_UPDATE_SUBJECT_TEMPLATE,
406 'email_pr_update_subject_template': EMAIL_PR_UPDATE_SUBJECT_TEMPLATE,
407 'email_pr_review_subject_template': EMAIL_PR_REVIEW_SUBJECT_TEMPLATE,
407 'email_pr_review_subject_template': EMAIL_PR_REVIEW_SUBJECT_TEMPLATE,
408 'email_pr_comment_subject_template': EMAIL_PR_COMMENT_SUBJECT_TEMPLATE,
408 'email_pr_comment_subject_template': EMAIL_PR_COMMENT_SUBJECT_TEMPLATE,
409 'email_pr_comment_status_change_subject_template': EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE,
409 'email_pr_comment_status_change_subject_template': EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE,
410 'email_pr_comment_file_subject_template': EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE,
410 'email_pr_comment_file_subject_template': EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE,
411 'email_comment_subject_template': EMAIL_COMMENT_SUBJECT_TEMPLATE,
411 'email_comment_subject_template': EMAIL_COMMENT_SUBJECT_TEMPLATE,
412 'email_comment_status_change_subject_template': EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE,
412 'email_comment_status_change_subject_template': EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE,
413 'email_comment_file_subject_template': EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE,
413 'email_comment_file_subject_template': EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE,
414 }
414 }
415 _kwargs.update(kwargs)
415 _kwargs.update(kwargs)
416 return _kwargs
416 return _kwargs
417
417
418 def whitespace_filter(self, text):
418 def whitespace_filter(self, text):
419 return text.replace('\n', '').replace('\t', '')
419 return text.replace('\n', '').replace('\t', '')
420
420
421 def get_renderer(self, type_, request):
421 def get_renderer(self, type_, request):
422 template_name = self.email_types[type_]
422 template_name = self.email_types[type_]
423 return request.get_partial_renderer(template_name)
423 return request.get_partial_renderer(template_name)
424
424
425 def render_email(self, type_, **kwargs):
425 def render_email(self, type_, **kwargs):
426 """
426 """
427 renders the template for an email and returns a tuple of
427 renders the template for an email and returns a tuple of
428 (subject, email_html_body, email_plaintext_body)
428 (subject, email_html_body, email_plaintext_body)
429 """
429 """
430 request = get_current_request()
430 request = get_current_request()
431
431
432 # translator and helpers inject
432 # translator and helpers inject
433 _kwargs = self._update_kwargs_for_render(kwargs)
433 _kwargs = self._update_kwargs_for_render(kwargs)
434 email_template = self.get_renderer(type_, request=request)
434 email_template = self.get_renderer(type_, request=request)
435 subject = email_template.render('subject', **_kwargs)
435 subject = email_template.render('subject', **_kwargs)
436
436
437 try:
437 try:
438 body_plaintext = email_template.render('body_plaintext', **_kwargs)
438 body_plaintext = email_template.render('body_plaintext', **_kwargs)
439 except AttributeError:
439 except AttributeError:
440 # it's not defined in the template, so we can skip it
440 # it's not defined in the template, so we can skip it
441 body_plaintext = ''
441 body_plaintext = ''
442
442
443 # render WHOLE template
443 # render WHOLE template
444 body = email_template.render(None, **_kwargs)
444 body = email_template.render(None, **_kwargs)
445
445
446 try:
446 try:
447 # Inline CSS styles and conversion
447 # Inline CSS styles and conversion
448 body = self.premailer_instance.transform(body)
448 body = self.premailer_instance.transform(body)
449 except Exception:
449 except Exception:
450 log.exception('Failed to parse body with premailer')
450 log.exception('Failed to parse body with premailer')
451 pass
451 pass
452
452
453 return subject, body, body_plaintext
453 return subject, body, body_plaintext
@@ -1,600 +1,599 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 """
20 """
22 permissions model for RhodeCode
21 permissions model for RhodeCode
23 """
22 """
24 import collections
23 import collections
25 import logging
24 import logging
26 import traceback
25 import traceback
27
26
28 from sqlalchemy.exc import DatabaseError
27 from sqlalchemy.exc import DatabaseError
29
28
30 from rhodecode import events
29 from rhodecode import events
31 from rhodecode.model import BaseModel
30 from rhodecode.model import BaseModel
32 from rhodecode.model.db import (
31 from rhodecode.model.db import (
33 User, Permission, UserToPerm, UserRepoToPerm, UserRepoGroupToPerm,
32 User, Permission, UserToPerm, UserRepoToPerm, UserRepoGroupToPerm,
34 UserUserGroupToPerm, UserGroup, UserGroupToPerm, UserToRepoBranchPermission)
33 UserUserGroupToPerm, UserGroup, UserGroupToPerm, UserToRepoBranchPermission)
35 from rhodecode.lib.utils2 import str2bool, safe_int
34 from rhodecode.lib.utils2 import str2bool, safe_int
36
35
37 log = logging.getLogger(__name__)
36 log = logging.getLogger(__name__)
38
37
39
38
40 class PermissionModel(BaseModel):
39 class PermissionModel(BaseModel):
41 """
40 """
42 Permissions model for RhodeCode
41 Permissions model for RhodeCode
43 """
42 """
44 FORKING_DISABLED = 'hg.fork.none'
43 FORKING_DISABLED = 'hg.fork.none'
45 FORKING_ENABLED = 'hg.fork.repository'
44 FORKING_ENABLED = 'hg.fork.repository'
46
45
47 cls = Permission
46 cls = Permission
48 global_perms = {
47 global_perms = {
49 'default_repo_create': None,
48 'default_repo_create': None,
50 # special case for create repos on write access to group
49 # special case for create repos on write access to group
51 'default_repo_create_on_write': None,
50 'default_repo_create_on_write': None,
52 'default_repo_group_create': None,
51 'default_repo_group_create': None,
53 'default_user_group_create': None,
52 'default_user_group_create': None,
54 'default_fork_create': None,
53 'default_fork_create': None,
55 'default_inherit_default_permissions': None,
54 'default_inherit_default_permissions': None,
56 'default_register': None,
55 'default_register': None,
57 'default_password_reset': None,
56 'default_password_reset': None,
58 'default_extern_activate': None,
57 'default_extern_activate': None,
59
58
60 # object permissions below
59 # object permissions below
61 'default_repo_perm': None,
60 'default_repo_perm': None,
62 'default_group_perm': None,
61 'default_group_perm': None,
63 'default_user_group_perm': None,
62 'default_user_group_perm': None,
64
63
65 # branch
64 # branch
66 'default_branch_perm': None,
65 'default_branch_perm': None,
67 }
66 }
68
67
69 def set_global_permission_choices(self, c_obj, gettext_translator):
68 def set_global_permission_choices(self, c_obj, gettext_translator):
70 _ = gettext_translator
69 _ = gettext_translator
71
70
72 c_obj.repo_perms_choices = [
71 c_obj.repo_perms_choices = [
73 ('repository.none', _('None'),),
72 ('repository.none', _('None'),),
74 ('repository.read', _('Read'),),
73 ('repository.read', _('Read'),),
75 ('repository.write', _('Write'),),
74 ('repository.write', _('Write'),),
76 ('repository.admin', _('Admin'),)]
75 ('repository.admin', _('Admin'),)]
77
76
78 c_obj.group_perms_choices = [
77 c_obj.group_perms_choices = [
79 ('group.none', _('None'),),
78 ('group.none', _('None'),),
80 ('group.read', _('Read'),),
79 ('group.read', _('Read'),),
81 ('group.write', _('Write'),),
80 ('group.write', _('Write'),),
82 ('group.admin', _('Admin'),)]
81 ('group.admin', _('Admin'),)]
83
82
84 c_obj.user_group_perms_choices = [
83 c_obj.user_group_perms_choices = [
85 ('usergroup.none', _('None'),),
84 ('usergroup.none', _('None'),),
86 ('usergroup.read', _('Read'),),
85 ('usergroup.read', _('Read'),),
87 ('usergroup.write', _('Write'),),
86 ('usergroup.write', _('Write'),),
88 ('usergroup.admin', _('Admin'),)]
87 ('usergroup.admin', _('Admin'),)]
89
88
90 c_obj.branch_perms_choices = [
89 c_obj.branch_perms_choices = [
91 ('branch.none', _('Protected/No Access'),),
90 ('branch.none', _('Protected/No Access'),),
92 ('branch.merge', _('Web merge'),),
91 ('branch.merge', _('Web merge'),),
93 ('branch.push', _('Push'),),
92 ('branch.push', _('Push'),),
94 ('branch.push_force', _('Force Push'),)]
93 ('branch.push_force', _('Force Push'),)]
95
94
96 c_obj.register_choices = [
95 c_obj.register_choices = [
97 ('hg.register.none', _('Disabled')),
96 ('hg.register.none', _('Disabled')),
98 ('hg.register.manual_activate', _('Allowed with manual account activation')),
97 ('hg.register.manual_activate', _('Allowed with manual account activation')),
99 ('hg.register.auto_activate', _('Allowed with automatic account activation'))]
98 ('hg.register.auto_activate', _('Allowed with automatic account activation'))]
100
99
101 c_obj.password_reset_choices = [
100 c_obj.password_reset_choices = [
102 ('hg.password_reset.enabled', _('Allow password recovery')),
101 ('hg.password_reset.enabled', _('Allow password recovery')),
103 ('hg.password_reset.hidden', _('Hide password recovery link')),
102 ('hg.password_reset.hidden', _('Hide password recovery link')),
104 ('hg.password_reset.disabled', _('Disable password recovery'))]
103 ('hg.password_reset.disabled', _('Disable password recovery'))]
105
104
106 c_obj.extern_activate_choices = [
105 c_obj.extern_activate_choices = [
107 ('hg.extern_activate.manual', _('Manual activation of external account')),
106 ('hg.extern_activate.manual', _('Manual activation of external account')),
108 ('hg.extern_activate.auto', _('Automatic activation of external account'))]
107 ('hg.extern_activate.auto', _('Automatic activation of external account'))]
109
108
110 c_obj.repo_create_choices = [
109 c_obj.repo_create_choices = [
111 ('hg.create.none', _('Disabled')),
110 ('hg.create.none', _('Disabled')),
112 ('hg.create.repository', _('Enabled'))]
111 ('hg.create.repository', _('Enabled'))]
113
112
114 c_obj.repo_create_on_write_choices = [
113 c_obj.repo_create_on_write_choices = [
115 ('hg.create.write_on_repogroup.false', _('Disabled')),
114 ('hg.create.write_on_repogroup.false', _('Disabled')),
116 ('hg.create.write_on_repogroup.true', _('Enabled'))]
115 ('hg.create.write_on_repogroup.true', _('Enabled'))]
117
116
118 c_obj.user_group_create_choices = [
117 c_obj.user_group_create_choices = [
119 ('hg.usergroup.create.false', _('Disabled')),
118 ('hg.usergroup.create.false', _('Disabled')),
120 ('hg.usergroup.create.true', _('Enabled'))]
119 ('hg.usergroup.create.true', _('Enabled'))]
121
120
122 c_obj.repo_group_create_choices = [
121 c_obj.repo_group_create_choices = [
123 ('hg.repogroup.create.false', _('Disabled')),
122 ('hg.repogroup.create.false', _('Disabled')),
124 ('hg.repogroup.create.true', _('Enabled'))]
123 ('hg.repogroup.create.true', _('Enabled'))]
125
124
126 c_obj.fork_choices = [
125 c_obj.fork_choices = [
127 (self.FORKING_DISABLED, _('Disabled')),
126 (self.FORKING_DISABLED, _('Disabled')),
128 (self.FORKING_ENABLED, _('Enabled'))]
127 (self.FORKING_ENABLED, _('Enabled'))]
129
128
130 c_obj.inherit_default_permission_choices = [
129 c_obj.inherit_default_permission_choices = [
131 ('hg.inherit_default_perms.false', _('Disabled')),
130 ('hg.inherit_default_perms.false', _('Disabled')),
132 ('hg.inherit_default_perms.true', _('Enabled'))]
131 ('hg.inherit_default_perms.true', _('Enabled'))]
133
132
134 def get_default_perms(self, object_perms, suffix):
133 def get_default_perms(self, object_perms, suffix):
135 defaults = {}
134 defaults = {}
136 for perm in object_perms:
135 for perm in object_perms:
137 # perms
136 # perms
138 if perm.permission.permission_name.startswith('repository.'):
137 if perm.permission.permission_name.startswith('repository.'):
139 defaults['default_repo_perm' + suffix] = perm.permission.permission_name
138 defaults['default_repo_perm' + suffix] = perm.permission.permission_name
140
139
141 if perm.permission.permission_name.startswith('group.'):
140 if perm.permission.permission_name.startswith('group.'):
142 defaults['default_group_perm' + suffix] = perm.permission.permission_name
141 defaults['default_group_perm' + suffix] = perm.permission.permission_name
143
142
144 if perm.permission.permission_name.startswith('usergroup.'):
143 if perm.permission.permission_name.startswith('usergroup.'):
145 defaults['default_user_group_perm' + suffix] = perm.permission.permission_name
144 defaults['default_user_group_perm' + suffix] = perm.permission.permission_name
146
145
147 # branch
146 # branch
148 if perm.permission.permission_name.startswith('branch.'):
147 if perm.permission.permission_name.startswith('branch.'):
149 defaults['default_branch_perm' + suffix] = perm.permission.permission_name
148 defaults['default_branch_perm' + suffix] = perm.permission.permission_name
150
149
151 # creation of objects
150 # creation of objects
152 if perm.permission.permission_name.startswith('hg.create.write_on_repogroup'):
151 if perm.permission.permission_name.startswith('hg.create.write_on_repogroup'):
153 defaults['default_repo_create_on_write' + suffix] = perm.permission.permission_name
152 defaults['default_repo_create_on_write' + suffix] = perm.permission.permission_name
154
153
155 elif perm.permission.permission_name.startswith('hg.create.'):
154 elif perm.permission.permission_name.startswith('hg.create.'):
156 defaults['default_repo_create' + suffix] = perm.permission.permission_name
155 defaults['default_repo_create' + suffix] = perm.permission.permission_name
157
156
158 if perm.permission.permission_name.startswith('hg.fork.'):
157 if perm.permission.permission_name.startswith('hg.fork.'):
159 defaults['default_fork_create' + suffix] = perm.permission.permission_name
158 defaults['default_fork_create' + suffix] = perm.permission.permission_name
160
159
161 if perm.permission.permission_name.startswith('hg.inherit_default_perms.'):
160 if perm.permission.permission_name.startswith('hg.inherit_default_perms.'):
162 defaults['default_inherit_default_permissions' + suffix] = perm.permission.permission_name
161 defaults['default_inherit_default_permissions' + suffix] = perm.permission.permission_name
163
162
164 if perm.permission.permission_name.startswith('hg.repogroup.'):
163 if perm.permission.permission_name.startswith('hg.repogroup.'):
165 defaults['default_repo_group_create' + suffix] = perm.permission.permission_name
164 defaults['default_repo_group_create' + suffix] = perm.permission.permission_name
166
165
167 if perm.permission.permission_name.startswith('hg.usergroup.'):
166 if perm.permission.permission_name.startswith('hg.usergroup.'):
168 defaults['default_user_group_create' + suffix] = perm.permission.permission_name
167 defaults['default_user_group_create' + suffix] = perm.permission.permission_name
169
168
170 # registration and external account activation
169 # registration and external account activation
171 if perm.permission.permission_name.startswith('hg.register.'):
170 if perm.permission.permission_name.startswith('hg.register.'):
172 defaults['default_register' + suffix] = perm.permission.permission_name
171 defaults['default_register' + suffix] = perm.permission.permission_name
173
172
174 if perm.permission.permission_name.startswith('hg.password_reset.'):
173 if perm.permission.permission_name.startswith('hg.password_reset.'):
175 defaults['default_password_reset' + suffix] = perm.permission.permission_name
174 defaults['default_password_reset' + suffix] = perm.permission.permission_name
176
175
177 if perm.permission.permission_name.startswith('hg.extern_activate.'):
176 if perm.permission.permission_name.startswith('hg.extern_activate.'):
178 defaults['default_extern_activate' + suffix] = perm.permission.permission_name
177 defaults['default_extern_activate' + suffix] = perm.permission.permission_name
179
178
180 return defaults
179 return defaults
181
180
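The mapping performed by get_default_perms() above boils down to matching permission-name prefixes against the default_* keys of global_perms. A self-contained sketch of that idea, using made-up permission names rather than real Permission rows:

# prefix -> defaults key, taken from the branches of get_default_perms()
PREFIX_TO_DEFAULT_KEY = {
    'repository.': 'default_repo_perm',
    'group.': 'default_group_perm',
    'usergroup.': 'default_user_group_perm',
    'branch.': 'default_branch_perm',
    'hg.register.': 'default_register',
}

def defaults_from_names(permission_names, suffix=''):
    defaults = {}
    for name in permission_names:
        for prefix, key in PREFIX_TO_DEFAULT_KEY.items():
            if name.startswith(prefix):
                defaults[key + suffix] = name
    return defaults

print(defaults_from_names(['repository.read', 'branch.push']))
# {'default_repo_perm': 'repository.read', 'default_branch_perm': 'branch.push'}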
182 def _make_new_user_perm(self, user, perm_name):
181 def _make_new_user_perm(self, user, perm_name):
183 log.debug('Creating new user permission:%s', perm_name)
182 log.debug('Creating new user permission:%s', perm_name)
184 new = UserToPerm()
183 new = UserToPerm()
185 new.user = user
184 new.user = user
186 new.permission = Permission.get_by_key(perm_name)
185 new.permission = Permission.get_by_key(perm_name)
187 return new
186 return new
188
187
189 def _make_new_user_group_perm(self, user_group, perm_name):
188 def _make_new_user_group_perm(self, user_group, perm_name):
190 log.debug('Creating new user group permission:%s', perm_name)
189 log.debug('Creating new user group permission:%s', perm_name)
191 new = UserGroupToPerm()
190 new = UserGroupToPerm()
192 new.users_group = user_group
191 new.users_group = user_group
193 new.permission = Permission.get_by_key(perm_name)
192 new.permission = Permission.get_by_key(perm_name)
194 return new
193 return new
195
194
196 def _keep_perm(self, perm_name, keep_fields):
195 def _keep_perm(self, perm_name, keep_fields):
197 def get_pat(field_name):
196 def get_pat(field_name):
198 return {
197 return {
199 # global perms
198 # global perms
200 'default_repo_create': 'hg.create.',
199 'default_repo_create': 'hg.create.',
201 # special case for create repos on write access to group
200 # special case for create repos on write access to group
202 'default_repo_create_on_write': 'hg.create.write_on_repogroup.',
201 'default_repo_create_on_write': 'hg.create.write_on_repogroup.',
203 'default_repo_group_create': 'hg.repogroup.create.',
202 'default_repo_group_create': 'hg.repogroup.create.',
204 'default_user_group_create': 'hg.usergroup.create.',
203 'default_user_group_create': 'hg.usergroup.create.',
205 'default_fork_create': 'hg.fork.',
204 'default_fork_create': 'hg.fork.',
206 'default_inherit_default_permissions': 'hg.inherit_default_perms.',
205 'default_inherit_default_permissions': 'hg.inherit_default_perms.',
207
206
208 # application perms
207 # application perms
209 'default_register': 'hg.register.',
208 'default_register': 'hg.register.',
210 'default_password_reset': 'hg.password_reset.',
209 'default_password_reset': 'hg.password_reset.',
211 'default_extern_activate': 'hg.extern_activate.',
210 'default_extern_activate': 'hg.extern_activate.',
212
211
213 # object permissions below
212 # object permissions below
214 'default_repo_perm': 'repository.',
213 'default_repo_perm': 'repository.',
215 'default_group_perm': 'group.',
214 'default_group_perm': 'group.',
216 'default_user_group_perm': 'usergroup.',
215 'default_user_group_perm': 'usergroup.',
217 # branch
216 # branch
218 'default_branch_perm': 'branch.',
217 'default_branch_perm': 'branch.',
219
218
220 }[field_name]
219 }[field_name]
221 for field in keep_fields:
220 for field in keep_fields:
222 pat = get_pat(field)
221 pat = get_pat(field)
223 if perm_name.startswith(pat):
222 if perm_name.startswith(pat):
224 return True
223 return True
225 return False
224 return False
226
225
227 def _clear_object_perm(self, object_perms, preserve=None):
226 def _clear_object_perm(self, object_perms, preserve=None):
228 preserve = preserve or []
227 preserve = preserve or []
229 _deleted = []
228 _deleted = []
230 for perm in object_perms:
229 for perm in object_perms:
231 perm_name = perm.permission.permission_name
230 perm_name = perm.permission.permission_name
232 if not self._keep_perm(perm_name, keep_fields=preserve):
231 if not self._keep_perm(perm_name, keep_fields=preserve):
233 _deleted.append(perm_name)
232 _deleted.append(perm_name)
234 self.sa.delete(perm)
233 self.sa.delete(perm)
235 return _deleted
234 return _deleted
236
235
237 def _clear_user_perms(self, user_id, preserve=None):
236 def _clear_user_perms(self, user_id, preserve=None):
238 perms = self.sa.query(UserToPerm)\
237 perms = self.sa.query(UserToPerm)\
239 .filter(UserToPerm.user_id == user_id)\
238 .filter(UserToPerm.user_id == user_id)\
240 .all()
239 .all()
241 return self._clear_object_perm(perms, preserve=preserve)
240 return self._clear_object_perm(perms, preserve=preserve)
242
241
243 def _clear_user_group_perms(self, user_group_id, preserve=None):
242 def _clear_user_group_perms(self, user_group_id, preserve=None):
244 perms = self.sa.query(UserGroupToPerm)\
243 perms = self.sa.query(UserGroupToPerm)\
245 .filter(UserGroupToPerm.users_group_id == user_group_id)\
244 .filter(UserGroupToPerm.users_group_id == user_group_id)\
246 .all()
245 .all()
247 return self._clear_object_perm(perms, preserve=preserve)
246 return self._clear_object_perm(perms, preserve=preserve)
248
247
249 def _set_new_object_perms(self, obj_type, to_object, form_result, preserve=None):
248 def _set_new_object_perms(self, obj_type, to_object, form_result, preserve=None):
250 # clear current entries, to make this function idempotent
249 # clear current entries, to make this function idempotent
251 # this also repairs the state if more permissions get defined later or some
250 # this also repairs the state if more permissions get defined later or some
252 # permissions are somehow missing
251 # permissions are somehow missing
253 preserve = preserve or []
252 preserve = preserve or []
254 _global_perms = self.global_perms.copy()
253 _global_perms = self.global_perms.copy()
255 if obj_type not in ['user', 'user_group']:
254 if obj_type not in ['user', 'user_group']:
256 raise ValueError("obj_type must be one of 'user' or 'user_group'")
255 raise ValueError("obj_type must be one of 'user' or 'user_group'")
257 global_perms = len(_global_perms)
256 global_perms = len(_global_perms)
258 default_user_perms = len(Permission.DEFAULT_USER_PERMISSIONS)
257 default_user_perms = len(Permission.DEFAULT_USER_PERMISSIONS)
259 if global_perms != default_user_perms:
258 if global_perms != default_user_perms:
260 raise Exception(
259 raise Exception(
261 'Inconsistent permissions definition. Got {} vs {}'.format(
260 'Inconsistent permissions definition. Got {} vs {}'.format(
262 global_perms, default_user_perms))
261 global_perms, default_user_perms))
263
262
264 if obj_type == 'user':
263 if obj_type == 'user':
265 self._clear_user_perms(to_object.user_id, preserve)
264 self._clear_user_perms(to_object.user_id, preserve)
266 if obj_type == 'user_group':
265 if obj_type == 'user_group':
267 self._clear_user_group_perms(to_object.users_group_id, preserve)
266 self._clear_user_group_perms(to_object.users_group_id, preserve)
268
267
269 # now drop the keys we want to preserve, so they are not overwritten from the form
268 # now drop the keys we want to preserve, so they are not overwritten from the form
270 for key in preserve:
269 for key in preserve:
271 del _global_perms[key]
270 del _global_perms[key]
272
271
273 for k in _global_perms.copy():
272 for k in _global_perms.copy():
274 _global_perms[k] = form_result[k]
273 _global_perms[k] = form_result[k]
275
274
276 # at this stage we validate that all values were passed inside form_result
275 # at this stage we validate that all values were passed inside form_result
277 for _perm_key, perm_value in _global_perms.items():
276 for _perm_key, perm_value in _global_perms.items():
278 if perm_value is None:
277 if perm_value is None:
279 raise ValueError('Missing permission for %s' % (_perm_key,))
278 raise ValueError('Missing permission for %s' % (_perm_key,))
280
279
281 if obj_type == 'user':
280 if obj_type == 'user':
282 p = self._make_new_user_perm(to_object, perm_value)
281 p = self._make_new_user_perm(to_object, perm_value)
283 self.sa.add(p)
282 self.sa.add(p)
284 if obj_type == 'user_group':
283 if obj_type == 'user_group':
285 p = self._make_new_user_group_perm(to_object, perm_value)
284 p = self._make_new_user_group_perm(to_object, perm_value)
286 self.sa.add(p)
285 self.sa.add(p)
287
286
288 def _set_new_user_perms(self, user, form_result, preserve=None):
287 def _set_new_user_perms(self, user, form_result, preserve=None):
289 return self._set_new_object_perms(
288 return self._set_new_object_perms(
290 'user', user, form_result, preserve)
289 'user', user, form_result, preserve)
291
290
292 def _set_new_user_group_perms(self, user_group, form_result, preserve=None):
291 def _set_new_user_group_perms(self, user_group, form_result, preserve=None):
293 return self._set_new_object_perms(
292 return self._set_new_object_perms(
294 'user_group', user_group, form_result, preserve)
293 'user_group', user_group, form_result, preserve)
295
294
296 def set_new_user_perms(self, user, form_result):
295 def set_new_user_perms(self, user, form_result):
297 # calculate what to preserve from what is given in form_result
296 # calculate what to preserve from what is given in form_result
298 preserve = set(self.global_perms.keys()).difference(set(form_result.keys()))
297 preserve = set(self.global_perms.keys()).difference(set(form_result.keys()))
299 return self._set_new_user_perms(user, form_result, preserve)
298 return self._set_new_user_perms(user, form_result, preserve)
300
299
301 def set_new_user_group_perms(self, user_group, form_result):
300 def set_new_user_group_perms(self, user_group, form_result):
302 # calculate what to preserve from what is given in form_result
301 # calculate what to preserve from what is given in form_result
303 preserve = set(self.global_perms.keys()).difference(set(form_result.keys()))
302 preserve = set(self.global_perms.keys()).difference(set(form_result.keys()))
304 return self._set_new_user_group_perms(user_group, form_result, preserve)
303 return self._set_new_user_group_perms(user_group, form_result, preserve)
305
304
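A small sketch of the "preserve" calculation used by set_new_user_perms() and set_new_user_group_perms() above: every global permission key that the submitted form does not contain is preserved, i.e. left untouched when _set_new_object_perms() clears and re-creates the entries. The form values below are made up:

global_perm_keys = {
    'default_repo_perm', 'default_group_perm', 'default_user_group_perm',
    'default_register', 'default_password_reset', 'default_extern_activate',
}
# hypothetical form submission carrying only the application permissions
form_result = {
    'default_register': 'hg.register.manual_activate',
    'default_password_reset': 'hg.password_reset.enabled',
    'default_extern_activate': 'hg.extern_activate.auto',
}
preserve = set(global_perm_keys).difference(set(form_result.keys()))
print(sorted(preserve))
# ['default_group_perm', 'default_repo_perm', 'default_user_group_perm']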
306 def create_permissions(self):
305 def create_permissions(self):
307 """
306 """
308 Create permissions for whole system
307 Create permissions for whole system
309 """
308 """
310 for p in Permission.PERMS:
309 for p in Permission.PERMS:
311 if not Permission.get_by_key(p[0]):
310 if not Permission.get_by_key(p[0]):
312 new_perm = Permission()
311 new_perm = Permission()
313 new_perm.permission_name = p[0]
312 new_perm.permission_name = p[0]
314 new_perm.permission_longname = p[0] # translation err with p[1]
313 new_perm.permission_longname = p[0] # translation err with p[1]
315 self.sa.add(new_perm)
314 self.sa.add(new_perm)
316
315
317 def _create_default_object_permission(self, obj_type, obj, obj_perms,
316 def _create_default_object_permission(self, obj_type, obj, obj_perms,
318 force=False):
317 force=False):
319 if obj_type not in ['user', 'user_group']:
318 if obj_type not in ['user', 'user_group']:
320 raise ValueError("obj_type must be one of 'user' or 'user_group'")
319 raise ValueError("obj_type must be one of 'user' or 'user_group'")
321
320
322 def _get_group(perm_name):
321 def _get_group(perm_name):
323 return '.'.join(perm_name.split('.')[:1])
322 return '.'.join(perm_name.split('.')[:1])
324
323
325 defined_perms_groups = map(
324 defined_perms_groups = map(
326 _get_group, (x.permission.permission_name for x in obj_perms))
325 _get_group, (x.permission.permission_name for x in obj_perms))
327 log.debug('GOT ALREADY DEFINED:%s', obj_perms)
326 log.debug('GOT ALREADY DEFINED:%s', obj_perms)
328
327
329 if force:
328 if force:
330 self._clear_object_perm(obj_perms)
329 self._clear_object_perm(obj_perms)
331 self.sa.commit()
330 self.sa.commit()
332 defined_perms_groups = []
331 defined_perms_groups = []
333 # for every default permission that needs to be created, we check if
332 # for every default permission that needs to be created, we check if
334 # its group is already defined; if it's not, we create the default perm
333 # its group is already defined; if it's not, we create the default perm
335 for perm_name in Permission.DEFAULT_USER_PERMISSIONS:
334 for perm_name in Permission.DEFAULT_USER_PERMISSIONS:
336 gr = _get_group(perm_name)
335 gr = _get_group(perm_name)
337 if gr not in defined_perms_groups:
336 if gr not in defined_perms_groups:
338 log.debug('GR:%s not found, creating permission %s',
337 log.debug('GR:%s not found, creating permission %s',
339 gr, perm_name)
338 gr, perm_name)
340 if obj_type == 'user':
339 if obj_type == 'user':
341 new_perm = self._make_new_user_perm(obj, perm_name)
340 new_perm = self._make_new_user_perm(obj, perm_name)
342 self.sa.add(new_perm)
341 self.sa.add(new_perm)
343 if obj_type == 'user_group':
342 if obj_type == 'user_group':
344 new_perm = self._make_new_user_group_perm(obj, perm_name)
343 new_perm = self._make_new_user_group_perm(obj, perm_name)
345 self.sa.add(new_perm)
344 self.sa.add(new_perm)
346
345
347 def create_default_user_permissions(self, user, force=False):
346 def create_default_user_permissions(self, user, force=False):
348 """
347 """
349 Creates only the missing default permissions for a user; if force is set,
348 Creates only the missing default permissions for a user; if force is set,
350 it resets the default permissions for that user
349 it resets the default permissions for that user
351
350
352 :param user:
351 :param user:
353 :param force:
352 :param force:
354 """
353 """
355 user = self._get_user(user)
354 user = self._get_user(user)
356 obj_perms = UserToPerm.query().filter(UserToPerm.user == user).all()
355 obj_perms = UserToPerm.query().filter(UserToPerm.user == user).all()
357 return self._create_default_object_permission(
356 return self._create_default_object_permission(
358 'user', user, obj_perms, force)
357 'user', user, obj_perms, force)
359
358
360 def create_default_user_group_permissions(self, user_group, force=False):
359 def create_default_user_group_permissions(self, user_group, force=False):
361 """
360 """
362 Creates only the missing default permissions for a user group; if force
361 Creates only the missing default permissions for a user group; if force
363 is set, it resets the default permissions for that user group
362 is set, it resets the default permissions for that user group
364
363
365 :param user_group:
364 :param user_group:
366 :param force:
365 :param force:
367 """
366 """
368 user_group = self._get_user_group(user_group)
367 user_group = self._get_user_group(user_group)
369 obj_perms = UserGroupToPerm.query().filter(UserGroupToPerm.users_group == user_group).all()
368 obj_perms = UserGroupToPerm.query().filter(UserGroupToPerm.users_group == user_group).all()
370 return self._create_default_object_permission(
369 return self._create_default_object_permission(
371 'user_group', user_group, obj_perms, force)
370 'user_group', user_group, obj_perms, force)
372
371
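A rough sketch of how the two helpers above might be called from application code; it assumes this module is importable as rhodecode.model.permission and that the session commit is handled by the caller, as in the other model methods:

from rhodecode.model.meta import Session
from rhodecode.model.permission import PermissionModel

perm_model = PermissionModel()
# add only the missing default entries for a user, keep existing ones
perm_model.create_default_user_permissions('some_username')
# or wipe and re-create the defaults for a user group
perm_model.create_default_user_group_permissions('some_user_group', force=True)
Session().commit()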
373 def update_application_permissions(self, form_result):
372 def update_application_permissions(self, form_result):
374 if 'perm_user_id' in form_result:
373 if 'perm_user_id' in form_result:
375 perm_user = User.get(safe_int(form_result['perm_user_id']))
374 perm_user = User.get(safe_int(form_result['perm_user_id']))
376 else:
375 else:
377 # used mostly to do lookup for default user
376 # used mostly to do lookup for default user
378 perm_user = User.get_by_username(form_result['perm_user_name'])
377 perm_user = User.get_by_username(form_result['perm_user_name'])
379
378
380 try:
379 try:
381 # stage 1 set anonymous access
380 # stage 1 set anonymous access
382 if perm_user.username == User.DEFAULT_USER:
381 if perm_user.username == User.DEFAULT_USER:
383 perm_user.active = str2bool(form_result['anonymous'])
382 perm_user.active = str2bool(form_result['anonymous'])
384 self.sa.add(perm_user)
383 self.sa.add(perm_user)
385
384
386 # stage 2 reset defaults and set them from form data
385 # stage 2 reset defaults and set them from form data
387 self._set_new_user_perms(perm_user, form_result, preserve=[
386 self._set_new_user_perms(perm_user, form_result, preserve=[
388 'default_repo_perm',
387 'default_repo_perm',
389 'default_group_perm',
388 'default_group_perm',
390 'default_user_group_perm',
389 'default_user_group_perm',
391 'default_branch_perm',
390 'default_branch_perm',
392
391
393 'default_repo_group_create',
392 'default_repo_group_create',
394 'default_user_group_create',
393 'default_user_group_create',
395 'default_repo_create_on_write',
394 'default_repo_create_on_write',
396 'default_repo_create',
395 'default_repo_create',
397 'default_fork_create',
396 'default_fork_create',
398 'default_inherit_default_permissions'])
397 'default_inherit_default_permissions'])
399
398
400 self.sa.commit()
399 self.sa.commit()
401 except (DatabaseError,):
400 except (DatabaseError,):
402 log.error(traceback.format_exc())
401 log.error(traceback.format_exc())
403 self.sa.rollback()
402 self.sa.rollback()
404 raise
403 raise
405
404
406 def update_user_permissions(self, form_result):
405 def update_user_permissions(self, form_result):
407 if 'perm_user_id' in form_result:
406 if 'perm_user_id' in form_result:
408 perm_user = User.get(safe_int(form_result['perm_user_id']))
407 perm_user = User.get(safe_int(form_result['perm_user_id']))
409 else:
408 else:
410 # used mostly to do lookup for default user
409 # used mostly to do lookup for default user
411 perm_user = User.get_by_username(form_result['perm_user_name'])
410 perm_user = User.get_by_username(form_result['perm_user_name'])
412 try:
411 try:
413 # stage 2 reset defaults and set them from form data
412 # stage 2 reset defaults and set them from form data
414 self._set_new_user_perms(perm_user, form_result, preserve=[
413 self._set_new_user_perms(perm_user, form_result, preserve=[
415 'default_repo_perm',
414 'default_repo_perm',
416 'default_group_perm',
415 'default_group_perm',
417 'default_user_group_perm',
416 'default_user_group_perm',
418 'default_branch_perm',
417 'default_branch_perm',
419
418
420 'default_register',
419 'default_register',
421 'default_password_reset',
420 'default_password_reset',
422 'default_extern_activate'])
421 'default_extern_activate'])
423 self.sa.commit()
422 self.sa.commit()
424 except (DatabaseError,):
423 except (DatabaseError,):
425 log.error(traceback.format_exc())
424 log.error(traceback.format_exc())
426 self.sa.rollback()
425 self.sa.rollback()
427 raise
426 raise
428
427
429 def update_user_group_permissions(self, form_result):
428 def update_user_group_permissions(self, form_result):
430 if 'perm_user_group_id' in form_result:
429 if 'perm_user_group_id' in form_result:
431 perm_user_group = UserGroup.get(safe_int(form_result['perm_user_group_id']))
430 perm_user_group = UserGroup.get(safe_int(form_result['perm_user_group_id']))
432 else:
431 else:
433 # used mostly to do lookup for default user
432 # used mostly to do lookup for default user
434 perm_user_group = UserGroup.get_by_group_name(form_result['perm_user_group_name'])
433 perm_user_group = UserGroup.get_by_group_name(form_result['perm_user_group_name'])
435 try:
434 try:
436 # stage 2 reset defaults and set them from form data
435 # stage 2 reset defaults and set them from form data
437 self._set_new_user_group_perms(perm_user_group, form_result, preserve=[
436 self._set_new_user_group_perms(perm_user_group, form_result, preserve=[
438 'default_repo_perm',
437 'default_repo_perm',
439 'default_group_perm',
438 'default_group_perm',
440 'default_user_group_perm',
439 'default_user_group_perm',
441 'default_branch_perm',
440 'default_branch_perm',
442
441
443 'default_register',
442 'default_register',
444 'default_password_reset',
443 'default_password_reset',
445 'default_extern_activate'])
444 'default_extern_activate'])
446 self.sa.commit()
445 self.sa.commit()
447 except (DatabaseError,):
446 except (DatabaseError,):
448 log.error(traceback.format_exc())
447 log.error(traceback.format_exc())
449 self.sa.rollback()
448 self.sa.rollback()
450 raise
449 raise
451
450
452 def update_object_permissions(self, form_result):
451 def update_object_permissions(self, form_result):
453 if 'perm_user_id' in form_result:
452 if 'perm_user_id' in form_result:
454 perm_user = User.get(safe_int(form_result['perm_user_id']))
453 perm_user = User.get(safe_int(form_result['perm_user_id']))
455 else:
454 else:
456 # used mostly to do lookup for default user
455 # used mostly to do lookup for default user
457 perm_user = User.get_by_username(form_result['perm_user_name'])
456 perm_user = User.get_by_username(form_result['perm_user_name'])
458 try:
457 try:
459
458
460 # stage 2 reset defaults and set them from form data
459 # stage 2 reset defaults and set them from form data
461 self._set_new_user_perms(perm_user, form_result, preserve=[
460 self._set_new_user_perms(perm_user, form_result, preserve=[
462 'default_repo_group_create',
461 'default_repo_group_create',
463 'default_user_group_create',
462 'default_user_group_create',
464 'default_repo_create_on_write',
463 'default_repo_create_on_write',
465 'default_repo_create',
464 'default_repo_create',
466 'default_fork_create',
465 'default_fork_create',
467 'default_inherit_default_permissions',
466 'default_inherit_default_permissions',
468 'default_branch_perm',
467 'default_branch_perm',
469
468
470 'default_register',
469 'default_register',
471 'default_password_reset',
470 'default_password_reset',
472 'default_extern_activate'])
471 'default_extern_activate'])
473
472
474 # overwrite default repo permissions
473 # overwrite default repo permissions
475 if form_result['overwrite_default_repo']:
474 if form_result['overwrite_default_repo']:
476 _def_name = form_result['default_repo_perm'].split('repository.')[-1]
475 _def_name = form_result['default_repo_perm'].split('repository.')[-1]
477 _def = Permission.get_by_key('repository.' + _def_name)
476 _def = Permission.get_by_key('repository.' + _def_name)
478 for r2p in self.sa.query(UserRepoToPerm)\
477 for r2p in self.sa.query(UserRepoToPerm)\
479 .filter(UserRepoToPerm.user == perm_user)\
478 .filter(UserRepoToPerm.user == perm_user)\
480 .all():
479 .all():
481 # don't reset PRIVATE repositories
480 # don't reset PRIVATE repositories
482 if not r2p.repository.private:
481 if not r2p.repository.private:
483 r2p.permission = _def
482 r2p.permission = _def
484 self.sa.add(r2p)
483 self.sa.add(r2p)
485
484
486 # overwrite default repo group permissions
485 # overwrite default repo group permissions
487 if form_result['overwrite_default_group']:
486 if form_result['overwrite_default_group']:
488 _def_name = form_result['default_group_perm'].split('group.')[-1]
487 _def_name = form_result['default_group_perm'].split('group.')[-1]
489 _def = Permission.get_by_key('group.' + _def_name)
488 _def = Permission.get_by_key('group.' + _def_name)
490 for g2p in self.sa.query(UserRepoGroupToPerm)\
489 for g2p in self.sa.query(UserRepoGroupToPerm)\
491 .filter(UserRepoGroupToPerm.user == perm_user)\
490 .filter(UserRepoGroupToPerm.user == perm_user)\
492 .all():
491 .all():
493 g2p.permission = _def
492 g2p.permission = _def
494 self.sa.add(g2p)
493 self.sa.add(g2p)
495
494
496 # overwrite default user group permissions
495 # overwrite default user group permissions
497 if form_result['overwrite_default_user_group']:
496 if form_result['overwrite_default_user_group']:
498 _def_name = form_result['default_user_group_perm'].split('usergroup.')[-1]
497 _def_name = form_result['default_user_group_perm'].split('usergroup.')[-1]
499 # user groups
498 # user groups
500 _def = Permission.get_by_key('usergroup.' + _def_name)
499 _def = Permission.get_by_key('usergroup.' + _def_name)
501 for g2p in self.sa.query(UserUserGroupToPerm)\
500 for g2p in self.sa.query(UserUserGroupToPerm)\
502 .filter(UserUserGroupToPerm.user == perm_user)\
501 .filter(UserUserGroupToPerm.user == perm_user)\
503 .all():
502 .all():
504 g2p.permission = _def
503 g2p.permission = _def
505 self.sa.add(g2p)
504 self.sa.add(g2p)
506
505
507 # COMMIT
506 # COMMIT
508 self.sa.commit()
507 self.sa.commit()
509 except (DatabaseError,):
508 except (DatabaseError,):
510 log.exception('Failed to set default object permissions')
509 log.exception('Failed to set default object permissions')
511 self.sa.rollback()
510 self.sa.rollback()
512 raise
511 raise
513
512
514 def update_branch_permissions(self, form_result):
513 def update_branch_permissions(self, form_result):
515 if 'perm_user_id' in form_result:
514 if 'perm_user_id' in form_result:
516 perm_user = User.get(safe_int(form_result['perm_user_id']))
515 perm_user = User.get(safe_int(form_result['perm_user_id']))
517 else:
516 else:
518 # used mostly to do lookup for default user
517 # used mostly to do lookup for default user
519 perm_user = User.get_by_username(form_result['perm_user_name'])
518 perm_user = User.get_by_username(form_result['perm_user_name'])
520 try:
519 try:
521
520
522 # stage 2 reset defaults and set them from form data
521 # stage 2 reset defaults and set them from form data
523 self._set_new_user_perms(perm_user, form_result, preserve=[
522 self._set_new_user_perms(perm_user, form_result, preserve=[
524 'default_repo_perm',
523 'default_repo_perm',
525 'default_group_perm',
524 'default_group_perm',
526 'default_user_group_perm',
525 'default_user_group_perm',
527
526
528 'default_repo_group_create',
527 'default_repo_group_create',
529 'default_user_group_create',
528 'default_user_group_create',
530 'default_repo_create_on_write',
529 'default_repo_create_on_write',
531 'default_repo_create',
530 'default_repo_create',
532 'default_fork_create',
531 'default_fork_create',
533 'default_inherit_default_permissions',
532 'default_inherit_default_permissions',
534
533
535 'default_register',
534 'default_register',
536 'default_password_reset',
535 'default_password_reset',
537 'default_extern_activate'])
536 'default_extern_activate'])
538
537
539 # overwrite default branch permissions
538 # overwrite default branch permissions
540 if form_result['overwrite_default_branch']:
539 if form_result['overwrite_default_branch']:
541 _def_name = \
540 _def_name = \
542 form_result['default_branch_perm'].split('branch.')[-1]
541 form_result['default_branch_perm'].split('branch.')[-1]
543
542
544 _def = Permission.get_by_key('branch.' + _def_name)
543 _def = Permission.get_by_key('branch.' + _def_name)
545
544
546 user_perms = UserToRepoBranchPermission.query()\
545 user_perms = UserToRepoBranchPermission.query()\
547 .join(UserToRepoBranchPermission.user_repo_to_perm)\
546 .join(UserToRepoBranchPermission.user_repo_to_perm)\
548 .filter(UserRepoToPerm.user == perm_user).all()
547 .filter(UserRepoToPerm.user == perm_user).all()
549
548
550 for g2p in user_perms:
549 for g2p in user_perms:
551 g2p.permission = _def
550 g2p.permission = _def
552 self.sa.add(g2p)
551 self.sa.add(g2p)
553
552
554 # COMMIT
553 # COMMIT
555 self.sa.commit()
554 self.sa.commit()
556 except (DatabaseError,):
555 except (DatabaseError,):
557 log.exception('Failed to set default branch permissions')
556 log.exception('Failed to set default branch permissions')
558 self.sa.rollback()
557 self.sa.rollback()
559 raise
558 raise
560
559
561 def get_users_with_repo_write(self, db_repo):
560 def get_users_with_repo_write(self, db_repo):
562 write_plus = ['repository.write', 'repository.admin']
561 write_plus = ['repository.write', 'repository.admin']
563 default_user_id = User.get_default_user_id()
562 default_user_id = User.get_default_user_id()
564 user_write_permissions = collections.OrderedDict()
563 user_write_permissions = collections.OrderedDict()
565
564
566 # write or higher and DEFAULT user for inheritance
565 # write or higher and DEFAULT user for inheritance
567 for perm in db_repo.permissions():
566 for perm in db_repo.permissions():
568 if perm.permission in write_plus or perm.user_id == default_user_id:
567 if perm.permission in write_plus or perm.user_id == default_user_id:
569 user_write_permissions[perm.user_id] = perm
568 user_write_permissions[perm.user_id] = perm
570 return user_write_permissions
569 return user_write_permissions
571
570
572 def get_user_groups_with_repo_write(self, db_repo):
571 def get_user_groups_with_repo_write(self, db_repo):
573 write_plus = ['repository.write', 'repository.admin']
572 write_plus = ['repository.write', 'repository.admin']
574 user_group_write_permissions = collections.OrderedDict()
573 user_group_write_permissions = collections.OrderedDict()
575
574
576 # write or higher and DEFAULT user for inheritance
575 # write or higher and DEFAULT user for inheritance
577 for p in db_repo.permission_user_groups():
576 for p in db_repo.permission_user_groups():
578 if p.permission in write_plus:
577 if p.permission in write_plus:
579 user_group_write_permissions[p.users_group_id] = p
578 user_group_write_permissions[p.users_group_id] = p
580 return user_group_write_permissions
579 return user_group_write_permissions
581
580
582 def trigger_permission_flush(self, affected_user_ids=None):
581 def trigger_permission_flush(self, affected_user_ids=None):
583 affected_user_ids = affected_user_ids or User.get_all_user_ids()
582 affected_user_ids = affected_user_ids or User.get_all_user_ids()
584 events.trigger(events.UserPermissionsChange(affected_user_ids))
583 events.trigger(events.UserPermissionsChange(affected_user_ids))
585
584
586 def flush_user_permission_caches(self, changes, affected_user_ids=None):
585 def flush_user_permission_caches(self, changes, affected_user_ids=None):
587 affected_user_ids = affected_user_ids or []
586 affected_user_ids = affected_user_ids or []
588
587
589 for change in changes['added'] + changes['updated'] + changes['deleted']:
588 for change in changes['added'] + changes['updated'] + changes['deleted']:
590 if change['type'] == 'user':
589 if change['type'] == 'user':
591 affected_user_ids.append(change['id'])
590 affected_user_ids.append(change['id'])
592 if change['type'] == 'user_group':
591 if change['type'] == 'user_group':
593 user_group = UserGroup.get(safe_int(change['id']))
592 user_group = UserGroup.get(safe_int(change['id']))
594 if user_group:
593 if user_group:
595 group_members_ids = [x.user_id for x in user_group.members]
594 group_members_ids = [x.user_id for x in user_group.members]
596 affected_user_ids.extend(group_members_ids)
595 affected_user_ids.extend(group_members_ids)
597
596
598 self.trigger_permission_flush(affected_user_ids)
597 self.trigger_permission_flush(affected_user_ids)
599
598
600 return affected_user_ids
599 return affected_user_ids
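For reference, a sketch of the changes structure that flush_user_permission_caches() above iterates over; the ids are made up:

changes = {
    'added':   [{'type': 'user', 'id': 2}],
    'updated': [{'type': 'user_group', 'id': 5}],
    'deleted': [],
}
# PermissionModel().flush_user_permission_caches(changes) would collect user 2,
# expand user group 5 into its member user ids, and trigger a single
# UserPermissionsChange event for the combined set of affected users.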
@@ -1,2380 +1,2380 @@
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2012-2020 RhodeCode GmbH
3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26
26
27 import json
27 import json
28 import logging
28 import logging
29 import os
29 import os
30
30
31 import datetime
31 import datetime
32 import urllib.request, urllib.parse, urllib.error
32 import urllib.request, urllib.parse, urllib.error
33 import collections
33 import collections
34
34
35 from pyramid.threadlocal import get_current_request
35 from pyramid.threadlocal import get_current_request
36
36
37 from rhodecode.lib.vcs.nodes import FileNode
37 from rhodecode.lib.vcs.nodes import FileNode
38 from rhodecode.translation import lazy_ugettext
38 from rhodecode.translation import lazy_ugettext
39 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 from rhodecode.lib import helpers as h, hooks_utils, diffs
40 from rhodecode.lib import audit_logger
40 from rhodecode.lib import audit_logger
41 from collections import OrderedDict
41 from collections import OrderedDict
42 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
43 from rhodecode.lib.markup_renderer import (
43 from rhodecode.lib.markup_renderer import (
44 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
45 from rhodecode.lib.utils2 import (
45 from rhodecode.lib.utils2 import (
46 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
46 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
47 get_current_rhodecode_user)
47 get_current_rhodecode_user)
48 from rhodecode.lib.vcs.backends.base import (
48 from rhodecode.lib.vcs.backends.base import (
49 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
49 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
50 TargetRefMissing, SourceRefMissing)
50 TargetRefMissing, SourceRefMissing)
51 from rhodecode.lib.vcs.conf import settings as vcs_settings
51 from rhodecode.lib.vcs.conf import settings as vcs_settings
52 from rhodecode.lib.vcs.exceptions import (
52 from rhodecode.lib.vcs.exceptions import (
53 CommitDoesNotExistError, EmptyRepositoryError)
53 CommitDoesNotExistError, EmptyRepositoryError)
54 from rhodecode.model import BaseModel
54 from rhodecode.model import BaseModel
55 from rhodecode.model.changeset_status import ChangesetStatusModel
55 from rhodecode.model.changeset_status import ChangesetStatusModel
56 from rhodecode.model.comment import CommentsModel
56 from rhodecode.model.comment import CommentsModel
57 from rhodecode.model.db import (
57 from rhodecode.model.db import (
58 aliased, null, lazyload, and_, or_, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
58 aliased, null, lazyload, and_, or_, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
59 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
59 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
60 from rhodecode.model.meta import Session
60 from rhodecode.model.meta import Session
61 from rhodecode.model.notification import NotificationModel, \
61 from rhodecode.model.notification import NotificationModel, \
62 EmailNotificationModel
62 EmailNotificationModel
63 from rhodecode.model.scm import ScmModel
63 from rhodecode.model.scm import ScmModel
64 from rhodecode.model.settings import VcsSettingsModel
64 from rhodecode.model.settings import VcsSettingsModel
65
65
66
66
67 log = logging.getLogger(__name__)
67 log = logging.getLogger(__name__)
68
68
69
69
70 # Data structure to hold the response data when updating commits during a pull
70 # Data structure to hold the response data when updating commits during a pull
71 # request update.
71 # request update.
72 class UpdateResponse(object):
72 class UpdateResponse(object):
73
73
74 def __init__(self, executed, reason, new, old, common_ancestor_id,
74 def __init__(self, executed, reason, new, old, common_ancestor_id,
75 commit_changes, source_changed, target_changed):
75 commit_changes, source_changed, target_changed):
76
76
77 self.executed = executed
77 self.executed = executed
78 self.reason = reason
78 self.reason = reason
79 self.new = new
79 self.new = new
80 self.old = old
80 self.old = old
81 self.common_ancestor_id = common_ancestor_id
81 self.common_ancestor_id = common_ancestor_id
82 self.changes = commit_changes
82 self.changes = commit_changes
83 self.source_changed = source_changed
83 self.source_changed = source_changed
84 self.target_changed = target_changed
84 self.target_changed = target_changed
85
85
86
86
87 def get_diff_info(
87 def get_diff_info(
88 source_repo, source_ref, target_repo, target_ref, get_authors=False,
88 source_repo, source_ref, target_repo, target_ref, get_authors=False,
89 get_commit_authors=True):
89 get_commit_authors=True):
90 """
90 """
91 Calculates detailed diff information used to preview the creation of a pull request.
91 Calculates detailed diff information used to preview the creation of a pull request.
92 This is also used by the default reviewers logic.
92 This is also used by the default reviewers logic.
93 """
93 """
94
94
95 source_scm = source_repo.scm_instance()
95 source_scm = source_repo.scm_instance()
96 target_scm = target_repo.scm_instance()
96 target_scm = target_repo.scm_instance()
97
97
98 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
98 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
99 if not ancestor_id:
99 if not ancestor_id:
100 raise ValueError(
100 raise ValueError(
101 'cannot calculate diff info without a common ancestor. '
101 'cannot calculate diff info without a common ancestor. '
102 'Make sure both repositories are related, and have a common forking commit.')
102 'Make sure both repositories are related, and have a common forking commit.')
103
103
104 # the case here is that we want a simple diff without incoming commits,
104 # the case here is that we want a simple diff without incoming commits,
105 # previewing what will be merged based only on commits in the source.
105 # previewing what will be merged based only on commits in the source.
106 log.debug('Using ancestor %s as source_ref instead of %s',
106 log.debug('Using ancestor %s as source_ref instead of %s',
107 ancestor_id, source_ref)
107 ancestor_id, source_ref)
108
108
109 # source of changes now is the common ancestor
109 # source of changes now is the common ancestor
110 source_commit = source_scm.get_commit(commit_id=ancestor_id)
110 source_commit = source_scm.get_commit(commit_id=ancestor_id)
111 # target commit becomes the source ref as it is the last commit
111 # target commit becomes the source ref as it is the last commit
112 # for diff generation this logic gives proper diff
112 # for diff generation this logic gives proper diff
113 target_commit = source_scm.get_commit(commit_id=source_ref)
113 target_commit = source_scm.get_commit(commit_id=source_ref)
114
114
115 vcs_diff = \
115 vcs_diff = \
116 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
116 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
117 ignore_whitespace=False, context=3)
117 ignore_whitespace=False, context=3)
118
118
119 diff_processor = diffs.DiffProcessor(
119 diff_processor = diffs.DiffProcessor(
120 vcs_diff, format='newdiff', diff_limit=None,
120 vcs_diff, format='newdiff', diff_limit=None,
121 file_limit=None, show_full_diff=True)
121 file_limit=None, show_full_diff=True)
122
122
123 _parsed = diff_processor.prepare()
123 _parsed = diff_processor.prepare()
124
124
125 all_files = []
125 all_files = []
126 all_files_changes = []
126 all_files_changes = []
127 changed_lines = {}
127 changed_lines = {}
128 stats = [0, 0]
128 stats = [0, 0]
129 for f in _parsed:
129 for f in _parsed:
130 all_files.append(f['filename'])
130 all_files.append(f['filename'])
131 all_files_changes.append({
131 all_files_changes.append({
132 'filename': f['filename'],
132 'filename': f['filename'],
133 'stats': f['stats']
133 'stats': f['stats']
134 })
134 })
135 stats[0] += f['stats']['added']
135 stats[0] += f['stats']['added']
136 stats[1] += f['stats']['deleted']
136 stats[1] += f['stats']['deleted']
137
137
138 changed_lines[f['filename']] = []
138 changed_lines[f['filename']] = []
139 if len(f['chunks']) < 2:
139 if len(f['chunks']) < 2:
140 continue
140 continue
141 # first line is "context" information
141 # first line is "context" information
142 for chunks in f['chunks'][1:]:
142 for chunks in f['chunks'][1:]:
143 for chunk in chunks['lines']:
143 for chunk in chunks['lines']:
144 if chunk['action'] not in ('del', 'mod'):
144 if chunk['action'] not in ('del', 'mod'):
145 continue
145 continue
146 changed_lines[f['filename']].append(chunk['old_lineno'])
146 changed_lines[f['filename']].append(chunk['old_lineno'])
147
147
148 commit_authors = []
148 commit_authors = []
149 user_counts = {}
149 user_counts = {}
150 email_counts = {}
150 email_counts = {}
151 author_counts = {}
151 author_counts = {}
152 _commit_cache = {}
152 _commit_cache = {}
153
153
154 commits = []
154 commits = []
155 if get_commit_authors:
155 if get_commit_authors:
156 log.debug('Obtaining commit authors from set of commits')
156 log.debug('Obtaining commit authors from set of commits')
157 _compare_data = target_scm.compare(
157 _compare_data = target_scm.compare(
158 target_ref, source_ref, source_scm, merge=True,
158 target_ref, source_ref, source_scm, merge=True,
159 pre_load=["author", "date", "message"]
159 pre_load=["author", "date", "message"]
160 )
160 )
161
161
162 for commit in _compare_data:
162 for commit in _compare_data:
163 # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned
163 # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned
164 # from this function, which is later passed through JSON serialization
164 # from this function, which is later passed through JSON serialization
165 serialized_commit = dict(
165 serialized_commit = dict(
166 author=commit.author,
166 author=commit.author,
167 date=commit.date,
167 date=commit.date,
168 message=commit.message,
168 message=commit.message,
169 commit_id=commit.raw_id,
169 commit_id=commit.raw_id,
170 raw_id=commit.raw_id
170 raw_id=commit.raw_id
171 )
171 )
172 commits.append(serialized_commit)
172 commits.append(serialized_commit)
173 user = User.get_from_cs_author(serialized_commit['author'])
173 user = User.get_from_cs_author(serialized_commit['author'])
174 if user and user not in commit_authors:
174 if user and user not in commit_authors:
175 commit_authors.append(user)
175 commit_authors.append(user)
176
176
    # collect original authors of the changed lines
    if get_authors:
        log.debug('Calculating authors of changed files')
        target_commit = source_repo.get_commit(ancestor_id)

        for fname, lines in changed_lines.items():

            try:
                node = target_commit.get_node(fname, pre_load=["is_binary"])
            except Exception:
                log.exception("Failed to load node with path %s", fname)
                continue

            if not isinstance(node, FileNode):
                continue

            # NOTE(marcink): for a binary node we don't run annotation, just use the last author
            if node.is_binary:
                author = node.last_commit.author
                email = node.last_commit.author_email

                user = User.get_from_cs_author(author)
                if user:
                    user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
                author_counts[author] = author_counts.get(author, 0) + 1
                email_counts[email] = email_counts.get(email, 0) + 1

                continue

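            # walk the file annotation (blame) and attribute each changed line
            # to the author of the commit that last touched it; commits are
            # cached to avoid repeated lookups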
            for annotation in node.annotate:
                line_no, commit_id, get_commit_func, line_text = annotation
                if line_no in lines:
                    if commit_id not in _commit_cache:
                        _commit_cache[commit_id] = get_commit_func()
                    commit = _commit_cache[commit_id]
                    author = commit.author
                    email = commit.author_email
                    user = User.get_from_cs_author(author)
                    if user:
                        user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
                    author_counts[author] = author_counts.get(author, 0) + 1
                    email_counts[email] = email_counts.get(email, 0) + 1

    log.debug('Default reviewers processing finished')

    return {
        'commits': commits,
        'files': all_files_changes,
        'stats': stats,
        'ancestor': ancestor_id,
        # original authors of modified files
        'original_authors': {
            'users': user_counts,
            'authors': author_counts,
            'emails': email_counts,
        },
        'commit_authors': commit_authors
    }


class PullRequestModel(BaseModel):

    cls = PullRequest

    DIFF_CONTEXT = diffs.DEFAULT_CONTEXT

    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source and target have not changed.'),
        UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be updated because the target '
            'reference is missing.'),
        UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be updated because the source '
            'reference is missing.'),
    }
    REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
    UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']

    def __get_pull_request(self, pull_request):
        return self._get_instance((
            PullRequest, PullRequestVersion), pull_request)

    def _check_perms(self, perms, pull_request, user, api=False):
        if not api:
            return h.HasRepoPermissionAny(*perms)(
                user=user, repo_name=pull_request.target_repo.repo_name)
        else:
            return h.HasRepoPermissionAnyApi(*perms)(
                user=user, repo_name=pull_request.target_repo.repo_name)

    def check_user_read(self, pull_request, user, api=False):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        return self._check_perms(_perms, pull_request, user, api)

    def check_user_merge(self, pull_request, user, api=False):
        _perms = ('repository.admin', 'repository.write', 'hg.admin',)
        return self._check_perms(_perms, pull_request, user, api)

    def check_user_update(self, pull_request, user, api=False):
        owner = user.user_id == pull_request.user_id
        return self.check_user_merge(pull_request, user, api) or owner

    def check_user_delete(self, pull_request, user):
        owner = user.user_id == pull_request.user_id
        _perms = ('repository.admin',)
        return self._check_perms(_perms, pull_request, user) or owner

    def is_user_reviewer(self, pull_request, user):
        return user.user_id in [
            x.user_id for x in
            pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
            if x.user
        ]

    def check_user_change_status(self, pull_request, user, api=False):
        return self.check_user_update(pull_request, user, api) \
            or self.is_user_reviewer(pull_request, user)

    def check_user_comment(self, pull_request, user):
        owner = user.user_id == pull_request.user_id
        return self.check_user_read(pull_request, user) or owner

    def get(self, pull_request):
        return self.__get_pull_request(pull_request)

    def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
                               statuses=None, opened_by=None, order_by=None,
                               order_dir='desc', only_created=False):
        repo = None
        if repo_name:
            repo = self._get_repo(repo_name)

        q = PullRequest.query()

        if search_q:
            like_expression = u'%{}%'.format(safe_unicode(search_q))
            q = q.join(User, User.user_id == PullRequest.user_id)
            q = q.filter(or_(
                cast(PullRequest.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                PullRequest.title.ilike(like_expression),
                PullRequest.description.ilike(like_expression),
            ))

        # source or target
        if repo and source:
            q = q.filter(PullRequest.source_repo == repo)
        elif repo:
            q = q.filter(PullRequest.target_repo == repo)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        # opened by filter
        if opened_by:
            q = q.filter(PullRequest.user_id.in_(opened_by))

        # only get those that are in "created" state
        if only_created:
            q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)

        order_map = {
            'name_raw': PullRequest.pull_request_id,
            'id': PullRequest.pull_request_id,
            'title': PullRequest.title,
            'updated_on_raw': PullRequest.updated_on,
            'target_repo': PullRequest.target_repo_id
        }
        if order_by and order_by in order_map:
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q

    def count_all(self, repo_name, search_q=None, source=False, statuses=None,
                  opened_by=None):
        """
        Count the number of pull requests for a specific repository.

        :param repo_name: target or source repo
        :param search_q: filter by text
        :param source: boolean flag to specify if repo_name refers to source
        :param statuses: list of pull request statuses
        :param opened_by: author user of the pull request
        :returns: int number of pull requests
        """
        q = self._prepare_get_all_query(
            repo_name, search_q=search_q, source=source, statuses=statuses,
            opened_by=opened_by)

        return q.count()

    def get_all(self, repo_name, search_q=None, source=False, statuses=None,
                opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
        """
        Get all pull requests for a specific repository.

        :param repo_name: target or source repo
        :param search_q: filter by text
        :param source: boolean flag to specify if repo_name refers to source
        :param statuses: list of pull request statuses
        :param opened_by: author user of the pull request
        :param offset: pagination offset
        :param length: length of returned list
        :param order_by: order of the returned list
        :param order_dir: 'asc' or 'desc' ordering direction
        :returns: list of pull requests
        """
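        # Illustrative call (argument values are an example, not taken from a
        # specific caller): fetch the 25 most recently updated pull requests
        # targeting a repository:
        #   PullRequestModel().get_all(
        #       repo_name, length=25, order_by='updated_on_raw', order_dir='desc')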
        q = self._prepare_get_all_query(
            repo_name, search_q=search_q, source=source, statuses=statuses,
            opened_by=opened_by, order_by=order_by, order_dir=order_dir)

        if length:
            pull_requests = q.limit(length).offset(offset).all()
        else:
            pull_requests = q.all()

        return pull_requests

    def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
        """
        Count the number of pull requests for a specific repository that are
        awaiting review.

        :param repo_name: target or source repo
        :param search_q: filter by text
        :param statuses: list of pull request statuses
        :returns: int number of pull requests
        """
        pull_requests = self.get_awaiting_review(
            repo_name, search_q=search_q, statuses=statuses)

        return len(pull_requests)

    def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
                            offset=0, length=None, order_by=None, order_dir='desc'):
        """
        Get all pull requests for a specific repository that are awaiting
        review.

        :param repo_name: target or source repo
        :param search_q: filter by text
        :param statuses: list of pull request statuses
        :param offset: pagination offset
        :param length: length of returned list
        :param order_by: order of the returned list
        :param order_dir: 'asc' or 'desc' ordering direction
        :returns: list of pull requests
        """
        pull_requests = self.get_all(
            repo_name, search_q=search_q, statuses=statuses,
            order_by=order_by, order_dir=order_dir)

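        # the review status is computed per pull request in Python
        # (calculated_review_status), so the filtering and the offset/length
        # slicing below cannot be pushed down into the SQL query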
        _filtered_pull_requests = []
        for pr in pull_requests:
            status = pr.calculated_review_status()
            if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
                          ChangesetStatus.STATUS_UNDER_REVIEW]:
                _filtered_pull_requests.append(pr)
        if length:
            return _filtered_pull_requests[offset:offset+length]
        else:
            return _filtered_pull_requests

    def _prepare_awaiting_my_review_review_query(
            self, repo_name, user_id, search_q=None, statuses=None,
            order_by=None, order_dir='desc'):

        for_review_statuses = [
            ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
        ]

        pull_request_alias = aliased(PullRequest)
        status_alias = aliased(ChangesetStatus)
        reviewers_alias = aliased(PullRequestReviewers)
        repo_alias = aliased(Repository)

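        # correlated subquery: the minimum ChangesetStatus.version recorded for
        # this reviewer on this pull request; the outer join below is limited
        # to that single status row per reviewer (their most recent vote)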
        last_ver_subq = Session()\
            .query(func.min(ChangesetStatus.version)) \
            .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
            .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
            .subquery()

        q = Session().query(pull_request_alias) \
            .options(lazyload(pull_request_alias.author)) \
            .join(reviewers_alias,
                  reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
            .join(repo_alias,
                  repo_alias.repo_id == pull_request_alias.target_repo_id) \
            .outerjoin(status_alias,
                       and_(status_alias.user_id == reviewers_alias.user_id,
                            status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
            .filter(or_(status_alias.version == null(),
                        status_alias.version == last_ver_subq)) \
            .filter(reviewers_alias.user_id == user_id) \
            .filter(repo_alias.repo_name == repo_name) \
            .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
            .group_by(pull_request_alias)

        # closed,opened
        if statuses:
            q = q.filter(pull_request_alias.status.in_(statuses))

        if search_q:
            like_expression = u'%{}%'.format(safe_unicode(search_q))
            q = q.join(User, User.user_id == pull_request_alias.user_id)
            q = q.filter(or_(
                cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                pull_request_alias.title.ilike(like_expression),
                pull_request_alias.description.ilike(like_expression),
            ))

        order_map = {
            'name_raw': pull_request_alias.pull_request_id,
            'title': pull_request_alias.title,
            'updated_on_raw': pull_request_alias.updated_on,
            'target_repo': pull_request_alias.target_repo_id
        }
        if order_by and order_by in order_map:
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q

    def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
        """
        Count the number of pull requests for a specific repository that are
        awaiting review from a specific user.

        :param repo_name: target or source repo
        :param user_id: reviewer user of the pull request
        :param search_q: filter by text
        :param statuses: list of pull request statuses
        :returns: int number of pull requests
        """
        q = self._prepare_awaiting_my_review_review_query(
            repo_name, user_id, search_q=search_q, statuses=statuses)
        return q.count()

    def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
                               offset=0, length=None, order_by=None, order_dir='desc'):
        """
        Get all pull requests for a specific repository that are awaiting
        review from a specific user.

        :param repo_name: target or source repo
        :param user_id: reviewer user of the pull request
        :param search_q: filter by text
        :param statuses: list of pull request statuses
        :param offset: pagination offset
        :param length: length of returned list
        :param order_by: order of the returned list
        :param order_dir: 'asc' or 'desc' ordering direction
        :returns: list of pull requests
        """

        q = self._prepare_awaiting_my_review_review_query(
            repo_name, user_id, search_q=search_q, statuses=statuses,
            order_by=order_by, order_dir=order_dir)

        if length:
            pull_requests = q.limit(length).offset(offset).all()
        else:
            pull_requests = q.all()

        return pull_requests

    def _prepare_im_participating_query(self, user_id=None, statuses=None, query='',
                                        order_by=None, order_dir='desc'):
        """
        Return a query of pull requests for which the user is the creator, or
        which they have been added to as a reviewer.
        """
        q = PullRequest.query()
        if user_id:
            reviewers_subquery = Session().query(
                PullRequestReviewers.pull_request_id).filter(
                    PullRequestReviewers.user_id == user_id).subquery()
            user_filter = or_(
                PullRequest.user_id == user_id,
                PullRequest.pull_request_id.in_(reviewers_subquery)
            )
            q = PullRequest.query().filter(user_filter)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        if query:
            like_expression = u'%{}%'.format(safe_unicode(query))
            q = q.join(User, User.user_id == PullRequest.user_id)
            q = q.filter(or_(
                cast(PullRequest.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                PullRequest.title.ilike(like_expression),
                PullRequest.description.ilike(like_expression),
            ))

        order_map = {
            'name_raw': PullRequest.pull_request_id,
            'title': PullRequest.title,
            'updated_on_raw': PullRequest.updated_on,
            'target_repo': PullRequest.target_repo_id
        }
        if order_by and order_by in order_map:
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q

    def count_im_participating_in(self, user_id=None, statuses=None, query=''):
        q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query)
        return q.count()

    def get_im_participating_in(
            self, user_id=None, statuses=None, query='', offset=0,
            length=None, order_by=None, order_dir='desc'):
        """
        Get all pull requests that I am participating in as a reviewer, or that I have opened.
        """

        q = self._prepare_im_participating_query(
            user_id, statuses=statuses, query=query, order_by=order_by,
            order_dir=order_dir)

        if length:
            pull_requests = q.limit(length).offset(offset).all()
        else:
            pull_requests = q.all()

        return pull_requests

    def _prepare_participating_in_for_review_query(
            self, user_id, statuses=None, query='', order_by=None, order_dir='desc'):

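        # same query shape as _prepare_awaiting_my_review_review_query, but
        # across all repositories (no target repository filter)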
        for_review_statuses = [
            ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
        ]

        pull_request_alias = aliased(PullRequest)
        status_alias = aliased(ChangesetStatus)
        reviewers_alias = aliased(PullRequestReviewers)

        last_ver_subq = Session()\
            .query(func.min(ChangesetStatus.version)) \
            .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
            .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
            .subquery()

        q = Session().query(pull_request_alias) \
            .options(lazyload(pull_request_alias.author)) \
            .join(reviewers_alias,
                  reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
            .outerjoin(status_alias,
                       and_(status_alias.user_id == reviewers_alias.user_id,
                            status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
            .filter(or_(status_alias.version == null(),
                        status_alias.version == last_ver_subq)) \
            .filter(reviewers_alias.user_id == user_id) \
            .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
            .group_by(pull_request_alias)

        # closed,opened
        if statuses:
            q = q.filter(pull_request_alias.status.in_(statuses))

        if query:
            like_expression = u'%{}%'.format(safe_unicode(query))
            q = q.join(User, User.user_id == pull_request_alias.user_id)
            q = q.filter(or_(
                cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                pull_request_alias.title.ilike(like_expression),
                pull_request_alias.description.ilike(like_expression),
            ))

        order_map = {
            'name_raw': pull_request_alias.pull_request_id,
            'title': pull_request_alias.title,
            'updated_on_raw': pull_request_alias.updated_on,
            'target_repo': pull_request_alias.target_repo_id
        }
        if order_by and order_by in order_map:
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q

    def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
        q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query)
        return q.count()

    def get_im_participating_in_for_review(
            self, user_id, statuses=None, query='', offset=0,
            length=None, order_by=None, order_dir='desc'):
        """
        Get all pull requests that need the user's approval or rejection.
        """

        q = self._prepare_participating_in_for_review_query(
            user_id, statuses=statuses, query=query, order_by=order_by,
            order_dir=order_dir)

        if length:
            pull_requests = q.limit(length).offset(offset).all()
        else:
            pull_requests = q.all()

        return pull_requests

    def get_versions(self, pull_request):
        """
        Returns versions of the pull request, ordered by version ID (oldest first).
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()

    def get_pr_version(self, pull_request_id, version=None):
        at_version = None

        if version and version == 'latest':
            pull_request_ver = PullRequest.get(pull_request_id)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_obj
            at_version = 'latest'
        elif version:
            pull_request_ver = PullRequestVersion.get_or_404(version)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_ver.pull_request
            at_version = pull_request_ver.pull_request_version_id
        else:
            _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
                pull_request_id)

        pull_request_display_obj = PullRequest.get_pr_display_object(
            pull_request_obj, _org_pull_request_obj)

        return _org_pull_request_obj, pull_request_obj, \
            pull_request_display_obj, at_version

    def pr_commits_versions(self, versions):
        """
        Maps the pull-request commits onto all known PR versions, so that we
        can tell in which PR version(s) each commit was introduced.
        """
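        # result shape (illustrative): {'<commit_id>': [1, 2]} means the commit
        # was part of pull-request versions 1 and 2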
        commit_versions = collections.defaultdict(list)
        num_versions = [x.pull_request_version_id for x in versions]
        for ver in versions:
            for commit_id in ver.revisions:
                ver_idx = ChangesetComment.get_index_from_version(
                    ver.pull_request_version_id, num_versions=num_versions)
                commit_versions[commit_id].append(ver_idx)
        return commit_versions

    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, observers, title, description=None,
               common_ancestor_id=None,
               description_renderer=None,
               reviewer_data=None, translator=None, auth_user=None):
        translator = translator or get_current_request().translate

        created_by_user = self._get_user(created_by)
        auth_user = auth_user or created_by_user.AuthUser()
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.description_renderer = description_renderer
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data
        pull_request.pull_request_state = pull_request.STATE_CREATING
        pull_request.common_ancestor_id = common_ancestor_id

        Session().add(pull_request)
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, role, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory
            reviewer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # check if this particular reviewer is a member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # the user can be a member of more than one group, but we pick
                    # only the first one, same as the default-reviewers algorithm
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
            Session().flush()

        for observer_object in observers:
            user_id, reasons, mandatory, role, rules = observer_object
            user = self._get_user(user_id)

            # skip duplicates from reviewers
            if user.user_id in reviewer_ids:
                continue

            #reviewer_ids.add(user.user_id)

            observer = PullRequestReviewers()
            observer.user = user
            observer.pull_request = pull_request
            observer.reasons = reasons
            observer.mandatory = mandatory
            observer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                # TODO(marcink): do we need this for observers ??
                pass

            Session().add(observer)
            Session().flush()

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early here because the queries above take row-locks; the
        # transaction has to be finished before the validate call below, which
        # for large repositories can take long and would otherwise keep those
        # rows locked for the whole duration
        Session().commit()

        # prepare workspace, and run initial merge simulation. Set state during that
        # operation
        pull_request = PullRequest.get(pull_request.pull_request_id)

        # set the state to "merging" for the merge simulation; once it finishes,
        # the state moves to "created", marking that the simulation worked fine
        with pull_request.set_state(PullRequest.STATE_MERGING,
                                    final_state=PullRequest.STATE_CREATED) as state_obj:
            MergeCheck.validate(
                pull_request, auth_user=auth_user, translator=translator)

        self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
        self.trigger_pull_request_hook(pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request

    def trigger_pull_request_hook(self, pull_request, user, action, data=None):
        pull_request = self.__get_pull_request(pull_request)
        target_scm = pull_request.target_repo.scm_instance()
        if action == 'create':
            trigger_hook = hooks_utils.trigger_create_pull_request_hook
        elif action == 'merge':
            trigger_hook = hooks_utils.trigger_merge_pull_request_hook
        elif action == 'close':
            trigger_hook = hooks_utils.trigger_close_pull_request_hook
        elif action == 'review_status_change':
            trigger_hook = hooks_utils.trigger_review_pull_request_hook
        elif action == 'update':
            trigger_hook = hooks_utils.trigger_update_pull_request_hook
        elif action == 'comment':
            trigger_hook = hooks_utils.trigger_comment_pull_request_hook
        elif action == 'comment_edit':
            trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
        else:
            return

        log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
                  pull_request, action, trigger_hook)
        trigger_hook(
            username=user.username,
            repo_name=pull_request.target_repo.repo_name,
            repo_type=target_scm.alias,
            pull_request=pull_request,
            data=data)

    def _get_commit_ids(self, pull_request):
        """
        Return the commit ids of the merged pull request.

        This method does not yet deal correctly with the lack of autoupdates,
        nor with implicit target updates.
        For example: if a commit in the source repo is already in the target,
        it will be reported anyway.
        """
        merge_rev = pull_request.merge_rev
        if merge_rev is None:
            raise ValueError('This pull request was not merged yet')

        commit_ids = list(pull_request.revisions)
        if merge_rev not in commit_ids:
            commit_ids.append(merge_rev)

        return commit_ids

    def merge_repo(self, pull_request, user, extras):
        repo_type = pull_request.source_repo.repo_type
        log.debug("Merging pull request %s", pull_request.pull_request_id)
        extras['user_agent'] = '{}/internal-merge'.format(repo_type)
        merge_state = self._merge_pull_request(pull_request, user, extras)
        if merge_state.executed:
            log.debug("Merge was successful, updating the pull request comments.")
            self._comment_and_close_pr(pull_request, user, merge_state)

            self._log_audit_action(
                'repo.pull_request.merge',
                {'merge_state': merge_state.__dict__},
                user, pull_request)

        else:
            log.warning("Merge failed, not updating the pull request.")
        return merge_state

    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            pr_desc=pull_request.description,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        user_name = self._user_name_for_merging(pull_request, user)

        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

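        # start the hooks callback daemon (or use direct calls) so that hooks
        # fired by the vcs backend during the merge can call back into
        # RhodeCode; `extras` is updated with its connection details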
        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state

    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        CommentsModel().create(
            text=safe_unicode(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self.trigger_pull_request_hook(pull_request, user, 'merge')

    def has_valid_update_type(self, pull_request):
        source_ref_type = pull_request.source_ref_parts.type
        return source_ref_type in self.REF_TYPES

    def get_flow_commits(self, pull_request):

        # source repo
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_id = pull_request.source_ref_parts.commit_id
        source_repo = pull_request.source_repo.scm_instance()

        try:
            if source_ref_type in self.REF_TYPES:
1036 source_commit = source_repo.get_commit(
1036 source_commit = source_repo.get_commit(
1037 source_ref_name, reference_obj=pull_request.source_ref_parts)
1037 source_ref_name, reference_obj=pull_request.source_ref_parts)
1038 else:
1038 else:
1039 source_commit = source_repo.get_commit(source_ref_id)
1039 source_commit = source_repo.get_commit(source_ref_id)
1040 except CommitDoesNotExistError:
1040 except CommitDoesNotExistError:
1041 raise SourceRefMissing()
1041 raise SourceRefMissing()
1042
1042
1043 # target repo
1043 # target repo
1044 target_ref_name = pull_request.target_ref_parts.name
1044 target_ref_name = pull_request.target_ref_parts.name
1045 target_ref_type = pull_request.target_ref_parts.type
1045 target_ref_type = pull_request.target_ref_parts.type
1046 target_ref_id = pull_request.target_ref_parts.commit_id
1046 target_ref_id = pull_request.target_ref_parts.commit_id
1047 target_repo = pull_request.target_repo.scm_instance()
1047 target_repo = pull_request.target_repo.scm_instance()
1048
1048
1049 try:
1049 try:
1050 if target_ref_type in self.REF_TYPES:
1050 if target_ref_type in self.REF_TYPES:
1051 target_commit = target_repo.get_commit(
1051 target_commit = target_repo.get_commit(
1052 target_ref_name, reference_obj=pull_request.target_ref_parts)
1052 target_ref_name, reference_obj=pull_request.target_ref_parts)
1053 else:
1053 else:
1054 target_commit = target_repo.get_commit(target_ref_id)
1054 target_commit = target_repo.get_commit(target_ref_id)
1055 except CommitDoesNotExistError:
1055 except CommitDoesNotExistError:
1056 raise TargetRefMissing()
1056 raise TargetRefMissing()
1057
1057
1058 return source_commit, target_commit
1058 return source_commit, target_commit
1059
1059
1060 def update_commits(self, pull_request, updating_user):
1060 def update_commits(self, pull_request, updating_user):
1061 """
1061 """
1062 Get the updated list of commits for the pull request
1062 Get the updated list of commits for the pull request
1063 and return the new pull request version and the list
1063 and return the new pull request version and the list
1064 of commits processed by this update action
1064 of commits processed by this update action
1065
1065
1066 updating_user is the user_object who triggered the update
1066 updating_user is the user_object who triggered the update
1067 """
1067 """
1068 pull_request = self.__get_pull_request(pull_request)
1068 pull_request = self.__get_pull_request(pull_request)
1069 source_ref_type = pull_request.source_ref_parts.type
1069 source_ref_type = pull_request.source_ref_parts.type
1070 source_ref_name = pull_request.source_ref_parts.name
1070 source_ref_name = pull_request.source_ref_parts.name
1071 source_ref_id = pull_request.source_ref_parts.commit_id
1071 source_ref_id = pull_request.source_ref_parts.commit_id
1072
1072
1073 target_ref_type = pull_request.target_ref_parts.type
1073 target_ref_type = pull_request.target_ref_parts.type
1074 target_ref_name = pull_request.target_ref_parts.name
1074 target_ref_name = pull_request.target_ref_parts.name
1075 target_ref_id = pull_request.target_ref_parts.commit_id
1075 target_ref_id = pull_request.target_ref_parts.commit_id
1076
1076
1077 if not self.has_valid_update_type(pull_request):
1077 if not self.has_valid_update_type(pull_request):
1078 log.debug("Skipping update of pull request %s due to ref type: %s",
1078 log.debug("Skipping update of pull request %s due to ref type: %s",
1079 pull_request, source_ref_type)
1079 pull_request, source_ref_type)
1080 return UpdateResponse(
1080 return UpdateResponse(
1081 executed=False,
1081 executed=False,
1082 reason=UpdateFailureReason.WRONG_REF_TYPE,
1082 reason=UpdateFailureReason.WRONG_REF_TYPE,
1083 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1083 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1084 source_changed=False, target_changed=False)
1084 source_changed=False, target_changed=False)
1085
1085
1086 try:
1086 try:
1087 source_commit, target_commit = self.get_flow_commits(pull_request)
1087 source_commit, target_commit = self.get_flow_commits(pull_request)
1088 except SourceRefMissing:
1088 except SourceRefMissing:
1089 return UpdateResponse(
1089 return UpdateResponse(
1090 executed=False,
1090 executed=False,
1091 reason=UpdateFailureReason.MISSING_SOURCE_REF,
1091 reason=UpdateFailureReason.MISSING_SOURCE_REF,
1092 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1092 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1093 source_changed=False, target_changed=False)
1093 source_changed=False, target_changed=False)
1094 except TargetRefMissing:
1094 except TargetRefMissing:
1095 return UpdateResponse(
1095 return UpdateResponse(
1096 executed=False,
1096 executed=False,
1097 reason=UpdateFailureReason.MISSING_TARGET_REF,
1097 reason=UpdateFailureReason.MISSING_TARGET_REF,
1098 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1098 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1099 source_changed=False, target_changed=False)
1099 source_changed=False, target_changed=False)
1100
1100
1101 source_changed = source_ref_id != source_commit.raw_id
1101 source_changed = source_ref_id != source_commit.raw_id
1102 target_changed = target_ref_id != target_commit.raw_id
1102 target_changed = target_ref_id != target_commit.raw_id
1103
1103
1104 if not (source_changed or target_changed):
1104 if not (source_changed or target_changed):
1105 log.debug("Nothing changed in pull request %s", pull_request)
1105 log.debug("Nothing changed in pull request %s", pull_request)
1106 return UpdateResponse(
1106 return UpdateResponse(
1107 executed=False,
1107 executed=False,
1108 reason=UpdateFailureReason.NO_CHANGE,
1108 reason=UpdateFailureReason.NO_CHANGE,
1109 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1109 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1110 source_changed=source_changed, target_changed=target_changed)
1110 source_changed=source_changed, target_changed=target_changed)
1111
1111
1112 change_in_found = 'target repo' if target_changed else 'source repo'
1112 change_in_found = 'target repo' if target_changed else 'source repo'
1113 log.debug('Updating pull request because a change in %s was detected',
1113 log.debug('Updating pull request because a change in %s was detected',
1114 change_in_found)
1114 change_in_found)
1115
1115
1116 # Finally, an update is needed; in case of a source change
1116 # Finally, an update is needed; in case of a source change
1117 # we create a new version, otherwise just an in-place update
1117 # we create a new version, otherwise just an in-place update
1118 if source_changed:
1118 if source_changed:
1119 pull_request_version = self._create_version_from_snapshot(pull_request)
1119 pull_request_version = self._create_version_from_snapshot(pull_request)
1120 self._link_comments_to_version(pull_request_version)
1120 self._link_comments_to_version(pull_request_version)
1121 else:
1121 else:
1122 try:
1122 try:
1123 ver = pull_request.versions[-1]
1123 ver = pull_request.versions[-1]
1124 except IndexError:
1124 except IndexError:
1125 ver = None
1125 ver = None
1126
1126
1127 pull_request.pull_request_version_id = \
1127 pull_request.pull_request_version_id = \
1128 ver.pull_request_version_id if ver else None
1128 ver.pull_request_version_id if ver else None
1129 pull_request_version = pull_request
1129 pull_request_version = pull_request
1130
1130
1131 source_repo = pull_request.source_repo.scm_instance()
1131 source_repo = pull_request.source_repo.scm_instance()
1132 target_repo = pull_request.target_repo.scm_instance()
1132 target_repo = pull_request.target_repo.scm_instance()
1133
1133
1134 # re-compute commit ids
1134 # re-compute commit ids
1135 old_commit_ids = pull_request.revisions
1135 old_commit_ids = pull_request.revisions
1136 pre_load = ["author", "date", "message", "branch"]
1136 pre_load = ["author", "date", "message", "branch"]
1137 commit_ranges = target_repo.compare(
1137 commit_ranges = target_repo.compare(
1138 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
1138 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
1139 pre_load=pre_load)
1139 pre_load=pre_load)
1140
1140
1141 target_ref = target_commit.raw_id
1141 target_ref = target_commit.raw_id
1142 source_ref = source_commit.raw_id
1142 source_ref = source_commit.raw_id
1143 ancestor_commit_id = target_repo.get_common_ancestor(
1143 ancestor_commit_id = target_repo.get_common_ancestor(
1144 target_ref, source_ref, source_repo)
1144 target_ref, source_ref, source_repo)
1145
1145
1146 if not ancestor_commit_id:
1146 if not ancestor_commit_id:
1147 raise ValueError(
1147 raise ValueError(
1148 'cannot calculate diff info without a common ancestor. '
1148 'cannot calculate diff info without a common ancestor. '
1149 'Make sure both repositories are related, and have a common forking commit.')
1149 'Make sure both repositories are related, and have a common forking commit.')
1150
1150
1151 pull_request.common_ancestor_id = ancestor_commit_id
1151 pull_request.common_ancestor_id = ancestor_commit_id
1152
1152
1153 pull_request.source_ref = '%s:%s:%s' % (
1153 pull_request.source_ref = '%s:%s:%s' % (
1154 source_ref_type, source_ref_name, source_commit.raw_id)
1154 source_ref_type, source_ref_name, source_commit.raw_id)
1155 pull_request.target_ref = '%s:%s:%s' % (
1155 pull_request.target_ref = '%s:%s:%s' % (
1156 target_ref_type, target_ref_name, ancestor_commit_id)
1156 target_ref_type, target_ref_name, ancestor_commit_id)
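# Note: source_ref/target_ref are stored as '<type>:<name>:<commit_id>',
# e.g. 'branch:default:1a2b3c4d...' (illustrative values).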
1157
1157
1158 pull_request.revisions = [
1158 pull_request.revisions = [
1159 commit.raw_id for commit in reversed(commit_ranges)]
1159 commit.raw_id for commit in reversed(commit_ranges)]
1160 pull_request.updated_on = datetime.datetime.now()
1160 pull_request.updated_on = datetime.datetime.now()
1161 Session().add(pull_request)
1161 Session().add(pull_request)
1162 new_commit_ids = pull_request.revisions
1162 new_commit_ids = pull_request.revisions
1163
1163
1164 old_diff_data, new_diff_data = self._generate_update_diffs(
1164 old_diff_data, new_diff_data = self._generate_update_diffs(
1165 pull_request, pull_request_version)
1165 pull_request, pull_request_version)
1166
1166
1167 # calculate commit and file changes
1167 # calculate commit and file changes
1168 commit_changes = self._calculate_commit_id_changes(
1168 commit_changes = self._calculate_commit_id_changes(
1169 old_commit_ids, new_commit_ids)
1169 old_commit_ids, new_commit_ids)
1170 file_changes = self._calculate_file_changes(
1170 file_changes = self._calculate_file_changes(
1171 old_diff_data, new_diff_data)
1171 old_diff_data, new_diff_data)
1172
1172
1173 # set comments as outdated if DIFFS changed
1173 # set comments as outdated if DIFFS changed
1174 CommentsModel().outdate_comments(
1174 CommentsModel().outdate_comments(
1175 pull_request, old_diff_data=old_diff_data,
1175 pull_request, old_diff_data=old_diff_data,
1176 new_diff_data=new_diff_data)
1176 new_diff_data=new_diff_data)
1177
1177
1178 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1178 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1179 file_node_changes = (
1179 file_node_changes = (
1180 file_changes.added or file_changes.modified or file_changes.removed)
1180 file_changes.added or file_changes.modified or file_changes.removed)
1181 pr_has_changes = valid_commit_changes or file_node_changes
1181 pr_has_changes = valid_commit_changes or file_node_changes
1182
1182
1183 # Add an automatic comment to the pull request, in case
1183 # Add an automatic comment to the pull request, in case
1184 # anything has changed
1184 # anything has changed
1185 if pr_has_changes:
1185 if pr_has_changes:
1186 update_comment = CommentsModel().create(
1186 update_comment = CommentsModel().create(
1187 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1187 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1188 repo=pull_request.target_repo,
1188 repo=pull_request.target_repo,
1189 user=pull_request.author,
1189 user=pull_request.author,
1190 pull_request=pull_request,
1190 pull_request=pull_request,
1191 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1191 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1192
1192
1193 # Update status to "Under Review" for added commits
1193 # Update status to "Under Review" for added commits
1194 for commit_id in commit_changes.added:
1194 for commit_id in commit_changes.added:
1195 ChangesetStatusModel().set_status(
1195 ChangesetStatusModel().set_status(
1196 repo=pull_request.source_repo,
1196 repo=pull_request.source_repo,
1197 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1197 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1198 comment=update_comment,
1198 comment=update_comment,
1199 user=pull_request.author,
1199 user=pull_request.author,
1200 pull_request=pull_request,
1200 pull_request=pull_request,
1201 revision=commit_id)
1201 revision=commit_id)
1202
1202
1203 # initial commit; persist the update before notifications are sent
1203 # initial commit; persist the update before notifications are sent
1204 Session().commit()
1204 Session().commit()
1205
1205
1206 if pr_has_changes:
1206 if pr_has_changes:
1207 # send update email to users
1207 # send update email to users
1208 try:
1208 try:
1209 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1209 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1210 ancestor_commit_id=ancestor_commit_id,
1210 ancestor_commit_id=ancestor_commit_id,
1211 commit_changes=commit_changes,
1211 commit_changes=commit_changes,
1212 file_changes=file_changes)
1212 file_changes=file_changes)
1213 Session().commit()
1213 Session().commit()
1214 except Exception:
1214 except Exception:
1215 log.exception('Failed to send email notification to users')
1215 log.exception('Failed to send email notification to users')
1216 Session().rollback()
1216 Session().rollback()
1217
1217
1218 log.debug(
1218 log.debug(
1219 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1219 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1220 'removed_ids: %s', pull_request.pull_request_id,
1220 'removed_ids: %s', pull_request.pull_request_id,
1221 commit_changes.added, commit_changes.common, commit_changes.removed)
1221 commit_changes.added, commit_changes.common, commit_changes.removed)
1222 log.debug(
1222 log.debug(
1223 'Updated pull request with the following file changes: %s',
1223 'Updated pull request with the following file changes: %s',
1224 file_changes)
1224 file_changes)
1225
1225
1226 log.info(
1226 log.info(
1227 "Updated pull request %s from commit %s to commit %s, "
1227 "Updated pull request %s from commit %s to commit %s, "
1228 "stored new version %s of this pull request.",
1228 "stored new version %s of this pull request.",
1229 pull_request.pull_request_id, source_ref_id,
1229 pull_request.pull_request_id, source_ref_id,
1230 pull_request.source_ref_parts.commit_id,
1230 pull_request.source_ref_parts.commit_id,
1231 pull_request_version.pull_request_version_id)
1231 pull_request_version.pull_request_version_id)
1232
1232
1233 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1233 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1234
1234
1235 return UpdateResponse(
1235 return UpdateResponse(
1236 executed=True, reason=UpdateFailureReason.NONE,
1236 executed=True, reason=UpdateFailureReason.NONE,
1237 old=pull_request, new=pull_request_version,
1237 old=pull_request, new=pull_request_version,
1238 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1238 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1239 source_changed=source_changed, target_changed=target_changed)
1239 source_changed=source_changed, target_changed=target_changed)
1240
1240
1241 def _create_version_from_snapshot(self, pull_request):
1241 def _create_version_from_snapshot(self, pull_request):
1242 version = PullRequestVersion()
1242 version = PullRequestVersion()
1243 version.title = pull_request.title
1243 version.title = pull_request.title
1244 version.description = pull_request.description
1244 version.description = pull_request.description
1245 version.status = pull_request.status
1245 version.status = pull_request.status
1246 version.pull_request_state = pull_request.pull_request_state
1246 version.pull_request_state = pull_request.pull_request_state
1247 version.created_on = datetime.datetime.now()
1247 version.created_on = datetime.datetime.now()
1248 version.updated_on = pull_request.updated_on
1248 version.updated_on = pull_request.updated_on
1249 version.user_id = pull_request.user_id
1249 version.user_id = pull_request.user_id
1250 version.source_repo = pull_request.source_repo
1250 version.source_repo = pull_request.source_repo
1251 version.source_ref = pull_request.source_ref
1251 version.source_ref = pull_request.source_ref
1252 version.target_repo = pull_request.target_repo
1252 version.target_repo = pull_request.target_repo
1253 version.target_ref = pull_request.target_ref
1253 version.target_ref = pull_request.target_ref
1254
1254
1255 version._last_merge_source_rev = pull_request._last_merge_source_rev
1255 version._last_merge_source_rev = pull_request._last_merge_source_rev
1256 version._last_merge_target_rev = pull_request._last_merge_target_rev
1256 version._last_merge_target_rev = pull_request._last_merge_target_rev
1257 version.last_merge_status = pull_request.last_merge_status
1257 version.last_merge_status = pull_request.last_merge_status
1258 version.last_merge_metadata = pull_request.last_merge_metadata
1258 version.last_merge_metadata = pull_request.last_merge_metadata
1259 version.shadow_merge_ref = pull_request.shadow_merge_ref
1259 version.shadow_merge_ref = pull_request.shadow_merge_ref
1260 version.merge_rev = pull_request.merge_rev
1260 version.merge_rev = pull_request.merge_rev
1261 version.reviewer_data = pull_request.reviewer_data
1261 version.reviewer_data = pull_request.reviewer_data
1262
1262
1263 version.revisions = pull_request.revisions
1263 version.revisions = pull_request.revisions
1264 version.common_ancestor_id = pull_request.common_ancestor_id
1264 version.common_ancestor_id = pull_request.common_ancestor_id
1265 version.pull_request = pull_request
1265 version.pull_request = pull_request
1266 Session().add(version)
1266 Session().add(version)
1267 Session().flush()
1267 Session().flush()
1268
1268
1269 return version
1269 return version
1270
1270
1271 def _generate_update_diffs(self, pull_request, pull_request_version):
1271 def _generate_update_diffs(self, pull_request, pull_request_version):
1272
1272
1273 diff_context = (
1273 diff_context = (
1274 self.DIFF_CONTEXT +
1274 self.DIFF_CONTEXT +
1275 CommentsModel.needed_extra_diff_context())
1275 CommentsModel.needed_extra_diff_context())
1276 hide_whitespace_changes = False
1276 hide_whitespace_changes = False
1277 source_repo = pull_request_version.source_repo
1277 source_repo = pull_request_version.source_repo
1278 source_ref_id = pull_request_version.source_ref_parts.commit_id
1278 source_ref_id = pull_request_version.source_ref_parts.commit_id
1279 target_ref_id = pull_request_version.target_ref_parts.commit_id
1279 target_ref_id = pull_request_version.target_ref_parts.commit_id
1280 old_diff = self._get_diff_from_pr_or_version(
1280 old_diff = self._get_diff_from_pr_or_version(
1281 source_repo, source_ref_id, target_ref_id,
1281 source_repo, source_ref_id, target_ref_id,
1282 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1282 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1283
1283
1284 source_repo = pull_request.source_repo
1284 source_repo = pull_request.source_repo
1285 source_ref_id = pull_request.source_ref_parts.commit_id
1285 source_ref_id = pull_request.source_ref_parts.commit_id
1286 target_ref_id = pull_request.target_ref_parts.commit_id
1286 target_ref_id = pull_request.target_ref_parts.commit_id
1287
1287
1288 new_diff = self._get_diff_from_pr_or_version(
1288 new_diff = self._get_diff_from_pr_or_version(
1289 source_repo, source_ref_id, target_ref_id,
1289 source_repo, source_ref_id, target_ref_id,
1290 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1290 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1291
1291
1292 old_diff_data = diffs.DiffProcessor(old_diff)
1292 old_diff_data = diffs.DiffProcessor(old_diff)
1293 old_diff_data.prepare()
1293 old_diff_data.prepare()
1294 new_diff_data = diffs.DiffProcessor(new_diff)
1294 new_diff_data = diffs.DiffProcessor(new_diff)
1295 new_diff_data.prepare()
1295 new_diff_data.prepare()
1296
1296
1297 return old_diff_data, new_diff_data
1297 return old_diff_data, new_diff_data
1298
1298
1299 def _link_comments_to_version(self, pull_request_version):
1299 def _link_comments_to_version(self, pull_request_version):
1300 """
1300 """
1301 Link all unlinked comments of this pull request to the given version.
1301 Link all unlinked comments of this pull request to the given version.
1302
1302
1303 :param pull_request_version: The `PullRequestVersion` to which
1303 :param pull_request_version: The `PullRequestVersion` to which
1304 the comments shall be linked.
1304 the comments shall be linked.
1305
1305
1306 """
1306 """
1307 pull_request = pull_request_version.pull_request
1307 pull_request = pull_request_version.pull_request
1308 comments = ChangesetComment.query()\
1308 comments = ChangesetComment.query()\
1309 .filter(
1309 .filter(
1310 # TODO: johbo: Should we query for the repo at all here?
1310 # TODO: johbo: Should we query for the repo at all here?
1311 # Pending decision on how comments of PRs are to be related
1311 # Pending decision on how comments of PRs are to be related
1312 # to either the source repo, the target repo or no repo at all.
1312 # to either the source repo, the target repo or no repo at all.
1313 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1313 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1314 ChangesetComment.pull_request == pull_request,
1314 ChangesetComment.pull_request == pull_request,
1315 ChangesetComment.pull_request_version == None)\
1315 ChangesetComment.pull_request_version == None)\
1316 .order_by(ChangesetComment.comment_id.asc())
1316 .order_by(ChangesetComment.comment_id.asc())
1317
1317
1318 # TODO: johbo: Find out why this breaks if it is done in a bulk
1318 # TODO: johbo: Find out why this breaks if it is done in a bulk
1319 # operation.
1319 # operation.
1320 for comment in comments:
1320 for comment in comments:
1321 comment.pull_request_version_id = (
1321 comment.pull_request_version_id = (
1322 pull_request_version.pull_request_version_id)
1322 pull_request_version.pull_request_version_id)
1323 Session().add(comment)
1323 Session().add(comment)
1324
1324
1325 def _calculate_commit_id_changes(self, old_ids, new_ids):
1325 def _calculate_commit_id_changes(self, old_ids, new_ids):
1326 added = [x for x in new_ids if x not in old_ids]
1326 added = [x for x in new_ids if x not in old_ids]
1327 common = [x for x in new_ids if x in old_ids]
1327 common = [x for x in new_ids if x in old_ids]
1328 removed = [x for x in old_ids if x not in new_ids]
1328 removed = [x for x in old_ids if x not in new_ids]
1329 total = new_ids
1329 total = new_ids
1330 return ChangeTuple(added, common, removed, total)
1330 return ChangeTuple(added, common, removed, total)
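# Illustrative example (hypothetical ids): old_ids=['a', 'b', 'c'] and
# new_ids=['b', 'c', 'd'] give added=['d'], common=['b', 'c'],
# removed=['a'] and total=['b', 'c', 'd'].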
1331
1331
1332 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1332 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1333
1333
1334 old_files = OrderedDict()
1334 old_files = OrderedDict()
1335 for diff_data in old_diff_data.parsed_diff:
1335 for diff_data in old_diff_data.parsed_diff:
1336 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1336 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1337
1337
1338 added_files = []
1338 added_files = []
1339 modified_files = []
1339 modified_files = []
1340 removed_files = []
1340 removed_files = []
1341 for diff_data in new_diff_data.parsed_diff:
1341 for diff_data in new_diff_data.parsed_diff:
1342 new_filename = diff_data['filename']
1342 new_filename = diff_data['filename']
1343 new_hash = md5_safe(diff_data['raw_diff'])
1343 new_hash = md5_safe(diff_data['raw_diff'])
1344
1344
1345 old_hash = old_files.get(new_filename)
1345 old_hash = old_files.get(new_filename)
1346 if not old_hash:
1346 if not old_hash:
1347 # file is not present in the old diff; we have to figure out the
1347 # file is not present in the old diff; we have to figure out the
1348 # ADD/REMOVE operation from the parsed diff
1348 # ADD/REMOVE operation from the parsed diff
1349 operations_dict = diff_data['stats']['ops']
1349 operations_dict = diff_data['stats']['ops']
1350 if diffs.DEL_FILENODE in operations_dict:
1350 if diffs.DEL_FILENODE in operations_dict:
1351 removed_files.append(new_filename)
1351 removed_files.append(new_filename)
1352 else:
1352 else:
1353 added_files.append(new_filename)
1353 added_files.append(new_filename)
1354 else:
1354 else:
1355 if new_hash != old_hash:
1355 if new_hash != old_hash:
1356 modified_files.append(new_filename)
1356 modified_files.append(new_filename)
1357 # now remove the file from old_files, since we have already seen it
1357 # now remove the file from old_files, since we have already seen it
1358 del old_files[new_filename]
1358 del old_files[new_filename]
1359
1359
1360 # removed files are those present in old, but not in NEW;
1360 # removed files are those present in old, but not in NEW;
1361 # since we remove old files that are present in the new diff, any
1361 # since we remove old files that are present in the new diff, any
1362 # left-overs are the removed files
1362 # left-overs are the removed files
1363 removed_files.extend(old_files.keys())
1363 removed_files.extend(old_files.keys())
1364
1364
1365 return FileChangeTuple(added_files, modified_files, removed_files)
1365 return FileChangeTuple(added_files, modified_files, removed_files)
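# Summary of the classification above: a file present only in the new diff is
# added (or removed when its ops contain DEL_FILENODE), a file present in both
# diffs with a different raw_diff hash is modified, and whatever is left in
# old_files is reported as removed.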
1366
1366
1367 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1367 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1368 """
1368 """
1369 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1369 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1370 so it always looks the same regardless of which default
1370 so it always looks the same regardless of which default
1371 renderer the system is using.
1371 renderer the system is using.
1372
1372
1373 :param ancestor_commit_id: ancestor raw_id
1373 :param ancestor_commit_id: ancestor raw_id
1374 :param changes: changes named tuple
1374 :param changes: changes named tuple
1375 :param file_changes: file changes named tuple
1375 :param file_changes: file changes named tuple
1376
1376
1377 """
1377 """
1378 new_status = ChangesetStatus.get_status_lbl(
1378 new_status = ChangesetStatus.get_status_lbl(
1379 ChangesetStatus.STATUS_UNDER_REVIEW)
1379 ChangesetStatus.STATUS_UNDER_REVIEW)
1380
1380
1381 changed_files = (
1381 changed_files = (
1382 file_changes.added + file_changes.modified + file_changes.removed)
1382 file_changes.added + file_changes.modified + file_changes.removed)
1383
1383
1384 params = {
1384 params = {
1385 'under_review_label': new_status,
1385 'under_review_label': new_status,
1386 'added_commits': changes.added,
1386 'added_commits': changes.added,
1387 'removed_commits': changes.removed,
1387 'removed_commits': changes.removed,
1388 'changed_files': changed_files,
1388 'changed_files': changed_files,
1389 'added_files': file_changes.added,
1389 'added_files': file_changes.added,
1390 'modified_files': file_changes.modified,
1390 'modified_files': file_changes.modified,
1391 'removed_files': file_changes.removed,
1391 'removed_files': file_changes.removed,
1392 'ancestor_commit_id': ancestor_commit_id
1392 'ancestor_commit_id': ancestor_commit_id
1393 }
1393 }
1394 renderer = RstTemplateRenderer()
1394 renderer = RstTemplateRenderer()
1395 return renderer.render('pull_request_update.mako', **params)
1395 return renderer.render('pull_request_update.mako', **params)
1396
1396
1397 def edit(self, pull_request, title, description, description_renderer, user):
1397 def edit(self, pull_request, title, description, description_renderer, user):
1398 pull_request = self.__get_pull_request(pull_request)
1398 pull_request = self.__get_pull_request(pull_request)
1399 old_data = pull_request.get_api_data(with_merge_state=False)
1399 old_data = pull_request.get_api_data(with_merge_state=False)
1400 if pull_request.is_closed():
1400 if pull_request.is_closed():
1401 raise ValueError('This pull request is closed')
1401 raise ValueError('This pull request is closed')
1402 if title:
1402 if title:
1403 pull_request.title = title
1403 pull_request.title = title
1404 pull_request.description = description
1404 pull_request.description = description
1405 pull_request.updated_on = datetime.datetime.now()
1405 pull_request.updated_on = datetime.datetime.now()
1406 pull_request.description_renderer = description_renderer
1406 pull_request.description_renderer = description_renderer
1407 Session().add(pull_request)
1407 Session().add(pull_request)
1408 self._log_audit_action(
1408 self._log_audit_action(
1409 'repo.pull_request.edit', {'old_data': old_data},
1409 'repo.pull_request.edit', {'old_data': old_data},
1410 user, pull_request)
1410 user, pull_request)
1411
1411
1412 def update_reviewers(self, pull_request, reviewer_data, user):
1412 def update_reviewers(self, pull_request, reviewer_data, user):
1413 """
1413 """
1414 Update the reviewers in the pull request
1414 Update the reviewers in the pull request
1415
1415
1416 :param pull_request: the pr to update
1416 :param pull_request: the pr to update
1417 :param reviewer_data: list of tuples
1417 :param reviewer_data: list of tuples
1418 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1418 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1419 :param user: current user who triggers this action
1419 :param user: current user who triggers this action
1420 """
1420 """
1421
1421
1422 pull_request = self.__get_pull_request(pull_request)
1422 pull_request = self.__get_pull_request(pull_request)
1423 if pull_request.is_closed():
1423 if pull_request.is_closed():
1424 raise ValueError('This pull request is closed')
1424 raise ValueError('This pull request is closed')
1425
1425
1426 reviewers = {}
1426 reviewers = {}
1427 for user_id, reasons, mandatory, role, rules in reviewer_data:
1427 for user_id, reasons, mandatory, role, rules in reviewer_data:
1428 if isinstance(user_id, (int, str)):
1428 if isinstance(user_id, (int, str)):
1429 user_id = self._get_user(user_id).user_id
1429 user_id = self._get_user(user_id).user_id
1430 reviewers[user_id] = {
1430 reviewers[user_id] = {
1431 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1431 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1432
1432
1433 reviewers_ids = set(reviewers.keys())
1433 reviewers_ids = set(reviewers.keys())
1434 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1434 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1435 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1435 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1436
1436
1437 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1437 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1438
1438
1439 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1439 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1440 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1440 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1441
1441
1442 log.debug("Adding %s reviewers", ids_to_add)
1442 log.debug("Adding %s reviewers", ids_to_add)
1443 log.debug("Removing %s reviewers", ids_to_remove)
1443 log.debug("Removing %s reviewers", ids_to_remove)
1444 changed = False
1444 changed = False
1445 added_audit_reviewers = []
1445 added_audit_reviewers = []
1446 removed_audit_reviewers = []
1446 removed_audit_reviewers = []
1447
1447
1448 for uid in ids_to_add:
1448 for uid in ids_to_add:
1449 changed = True
1449 changed = True
1450 _usr = self._get_user(uid)
1450 _usr = self._get_user(uid)
1451 reviewer = PullRequestReviewers()
1451 reviewer = PullRequestReviewers()
1452 reviewer.user = _usr
1452 reviewer.user = _usr
1453 reviewer.pull_request = pull_request
1453 reviewer.pull_request = pull_request
1454 reviewer.reasons = reviewers[uid]['reasons']
1454 reviewer.reasons = reviewers[uid]['reasons']
1455 # NOTE(marcink): mandatory shouldn't be changed now
1455 # NOTE(marcink): mandatory shouldn't be changed now
1456 # reviewer.mandatory = reviewers[uid]['reasons']
1456 # reviewer.mandatory = reviewers[uid]['reasons']
1457 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1457 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1458 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1458 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1459 Session().add(reviewer)
1459 Session().add(reviewer)
1460 added_audit_reviewers.append(reviewer.get_dict())
1460 added_audit_reviewers.append(reviewer.get_dict())
1461
1461
1462 for uid in ids_to_remove:
1462 for uid in ids_to_remove:
1463 changed = True
1463 changed = True
1464 # NOTE(marcink): we fetch "ALL" reviewer objects using .all().
1464 # NOTE(marcink): we fetch "ALL" reviewer objects using .all().
1465 # This is an edge case that handles a previous state of having the same reviewer twice.
1465 # This is an edge case that handles a previous state of having the same reviewer twice.
1466 # This CAN happen due to the lack of DB checks
1466 # This CAN happen due to the lack of DB checks
1467 reviewers = PullRequestReviewers.query()\
1467 reviewers = PullRequestReviewers.query()\
1468 .filter(PullRequestReviewers.user_id == uid,
1468 .filter(PullRequestReviewers.user_id == uid,
1469 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1469 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1470 PullRequestReviewers.pull_request == pull_request)\
1470 PullRequestReviewers.pull_request == pull_request)\
1471 .all()
1471 .all()
1472
1472
1473 for obj in reviewers:
1473 for obj in reviewers:
1474 removed_audit_reviewers.append(obj.get_dict())
1474 removed_audit_reviewers.append(obj.get_dict())
1475 Session().delete(obj)
1475 Session().delete(obj)
1476
1476
1477 if changed:
1477 if changed:
1478 Session().expire_all()
1478 Session().expire_all()
1479 pull_request.updated_on = datetime.datetime.now()
1479 pull_request.updated_on = datetime.datetime.now()
1480 Session().add(pull_request)
1480 Session().add(pull_request)
1481
1481
1482 # finally store audit logs
1482 # finally store audit logs
1483 for user_data in added_audit_reviewers:
1483 for user_data in added_audit_reviewers:
1484 self._log_audit_action(
1484 self._log_audit_action(
1485 'repo.pull_request.reviewer.add', {'data': user_data},
1485 'repo.pull_request.reviewer.add', {'data': user_data},
1486 user, pull_request)
1486 user, pull_request)
1487 for user_data in removed_audit_reviewers:
1487 for user_data in removed_audit_reviewers:
1488 self._log_audit_action(
1488 self._log_audit_action(
1489 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1489 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1490 user, pull_request)
1490 user, pull_request)
1491
1491
1492 self.notify_reviewers(pull_request, ids_to_add, user)
1492 self.notify_reviewers(pull_request, ids_to_add, user)
1493 return ids_to_add, ids_to_remove
1493 return ids_to_add, ids_to_remove
1494
1494
1495 def update_observers(self, pull_request, observer_data, user):
1495 def update_observers(self, pull_request, observer_data, user):
1496 """
1496 """
1497 Update the observers in the pull request
1497 Update the observers in the pull request
1498
1498
1499 :param pull_request: the pr to update
1499 :param pull_request: the pr to update
1500 :param observer_data: list of tuples
1500 :param observer_data: list of tuples
1501 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1501 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1502 :param user: current user who triggers this action
1502 :param user: current user who triggers this action
1503 """
1503 """
1504 pull_request = self.__get_pull_request(pull_request)
1504 pull_request = self.__get_pull_request(pull_request)
1505 if pull_request.is_closed():
1505 if pull_request.is_closed():
1506 raise ValueError('This pull request is closed')
1506 raise ValueError('This pull request is closed')
1507
1507
1508 observers = {}
1508 observers = {}
1509 for user_id, reasons, mandatory, role, rules in observer_data:
1509 for user_id, reasons, mandatory, role, rules in observer_data:
1510 if isinstance(user_id, (int, str)):
1510 if isinstance(user_id, (int, str)):
1511 user_id = self._get_user(user_id).user_id
1511 user_id = self._get_user(user_id).user_id
1512 observers[user_id] = {
1512 observers[user_id] = {
1513 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1513 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1514
1514
1515 observers_ids = set(observers.keys())
1515 observers_ids = set(observers.keys())
1516 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1516 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1517 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1517 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1518
1518
1519 current_observers_ids = set([x.user.user_id for x in current_observers])
1519 current_observers_ids = set([x.user.user_id for x in current_observers])
1520
1520
1521 ids_to_add = observers_ids.difference(current_observers_ids)
1521 ids_to_add = observers_ids.difference(current_observers_ids)
1522 ids_to_remove = current_observers_ids.difference(observers_ids)
1522 ids_to_remove = current_observers_ids.difference(observers_ids)
1523
1523
1524 log.debug("Adding %s observer", ids_to_add)
1524 log.debug("Adding %s observer", ids_to_add)
1525 log.debug("Removing %s observer", ids_to_remove)
1525 log.debug("Removing %s observer", ids_to_remove)
1526 changed = False
1526 changed = False
1527 added_audit_observers = []
1527 added_audit_observers = []
1528 removed_audit_observers = []
1528 removed_audit_observers = []
1529
1529
1530 for uid in ids_to_add:
1530 for uid in ids_to_add:
1531 changed = True
1531 changed = True
1532 _usr = self._get_user(uid)
1532 _usr = self._get_user(uid)
1533 observer = PullRequestReviewers()
1533 observer = PullRequestReviewers()
1534 observer.user = _usr
1534 observer.user = _usr
1535 observer.pull_request = pull_request
1535 observer.pull_request = pull_request
1536 observer.reasons = observers[uid]['reasons']
1536 observer.reasons = observers[uid]['reasons']
1537 # NOTE(marcink): mandatory shouldn't be changed now
1537 # NOTE(marcink): mandatory shouldn't be changed now
1538 # observer.mandatory = observer[uid]['reasons']
1538 # observer.mandatory = observer[uid]['reasons']
1539
1539
1540 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1540 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1541 observer.role = PullRequestReviewers.ROLE_OBSERVER
1541 observer.role = PullRequestReviewers.ROLE_OBSERVER
1542 Session().add(observer)
1542 Session().add(observer)
1543 added_audit_observers.append(observer.get_dict())
1543 added_audit_observers.append(observer.get_dict())
1544
1544
1545 for uid in ids_to_remove:
1545 for uid in ids_to_remove:
1546 changed = True
1546 changed = True
1547 # NOTE(marcink): we fetch "ALL" observer objects using .all().
1547 # NOTE(marcink): we fetch "ALL" observer objects using .all().
1548 # This is an edge case that handles a previous state of having the same observer twice.
1548 # This is an edge case that handles a previous state of having the same observer twice.
1549 # This CAN happen due to the lack of DB checks
1549 # This CAN happen due to the lack of DB checks
1550 observers = PullRequestReviewers.query()\
1550 observers = PullRequestReviewers.query()\
1551 .filter(PullRequestReviewers.user_id == uid,
1551 .filter(PullRequestReviewers.user_id == uid,
1552 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1552 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1553 PullRequestReviewers.pull_request == pull_request)\
1553 PullRequestReviewers.pull_request == pull_request)\
1554 .all()
1554 .all()
1555
1555
1556 for obj in observers:
1556 for obj in observers:
1557 removed_audit_observers.append(obj.get_dict())
1557 removed_audit_observers.append(obj.get_dict())
1558 Session().delete(obj)
1558 Session().delete(obj)
1559
1559
1560 if changed:
1560 if changed:
1561 Session().expire_all()
1561 Session().expire_all()
1562 pull_request.updated_on = datetime.datetime.now()
1562 pull_request.updated_on = datetime.datetime.now()
1563 Session().add(pull_request)
1563 Session().add(pull_request)
1564
1564
1565 # finally store audit logs
1565 # finally store audit logs
1566 for user_data in added_audit_observers:
1566 for user_data in added_audit_observers:
1567 self._log_audit_action(
1567 self._log_audit_action(
1568 'repo.pull_request.observer.add', {'data': user_data},
1568 'repo.pull_request.observer.add', {'data': user_data},
1569 user, pull_request)
1569 user, pull_request)
1570 for user_data in removed_audit_observers:
1570 for user_data in removed_audit_observers:
1571 self._log_audit_action(
1571 self._log_audit_action(
1572 'repo.pull_request.observer.delete', {'old_data': user_data},
1572 'repo.pull_request.observer.delete', {'old_data': user_data},
1573 user, pull_request)
1573 user, pull_request)
1574
1574
1575 self.notify_observers(pull_request, ids_to_add, user)
1575 self.notify_observers(pull_request, ids_to_add, user)
1576 return ids_to_add, ids_to_remove
1576 return ids_to_add, ids_to_remove
1577
1577
1578 def get_url(self, pull_request, request=None, permalink=False):
1578 def get_url(self, pull_request, request=None, permalink=False):
1579 if not request:
1579 if not request:
1580 request = get_current_request()
1580 request = get_current_request()
1581
1581
1582 if permalink:
1582 if permalink:
1583 return request.route_url(
1583 return request.route_url(
1584 'pull_requests_global',
1584 'pull_requests_global',
1585 pull_request_id=pull_request.pull_request_id,)
1585 pull_request_id=pull_request.pull_request_id,)
1586 else:
1586 else:
1587 return request.route_url('pullrequest_show',
1587 return request.route_url('pullrequest_show',
1588 repo_name=safe_str(pull_request.target_repo.repo_name),
1588 repo_name=safe_str(pull_request.target_repo.repo_name),
1589 pull_request_id=pull_request.pull_request_id,)
1589 pull_request_id=pull_request.pull_request_id,)
1590
1590
1591 def get_shadow_clone_url(self, pull_request, request=None):
1591 def get_shadow_clone_url(self, pull_request, request=None):
1592 """
1592 """
1593 Returns a qualified URL pointing to the shadow repository. If this pull
1593 Returns a qualified URL pointing to the shadow repository. If this pull
1594 request is closed there is no shadow repository and ``None`` will be
1594 request is closed there is no shadow repository and ``None`` will be
1595 returned.
1595 returned.
1596 """
1596 """
1597 if pull_request.is_closed():
1597 if pull_request.is_closed():
1598 return None
1598 return None
1599 else:
1599 else:
1600 pr_url = urllib.parse.unquote(self.get_url(pull_request, request=request))
1600 pr_url = urllib.parse.unquote(self.get_url(pull_request, request=request))
1601 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1601 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
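# e.g. 'https://code.example.com/target-repo/pull-request/42/repository'
# (illustrative host, repository name and pull request id)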
1602
1602
1603 def _notify_reviewers(self, pull_request, user_ids, role, user):
1603 def _notify_reviewers(self, pull_request, user_ids, role, user):
1604 # notification to reviewers/observers
1604 # notification to reviewers/observers
1605 if not user_ids:
1605 if not user_ids:
1606 return
1606 return
1607
1607
1608 log.debug('Notify following %s users about pull-request %s', role, user_ids)
1608 log.debug('Notify following %s users about pull-request %s', role, user_ids)
1609
1609
1610 pull_request_obj = pull_request
1610 pull_request_obj = pull_request
1611 # get the current participants of this pull request
1611 # get the current participants of this pull request
1612 recipients = user_ids
1612 recipients = user_ids
1613 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1613 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1614
1614
1615 pr_source_repo = pull_request_obj.source_repo
1615 pr_source_repo = pull_request_obj.source_repo
1616 pr_target_repo = pull_request_obj.target_repo
1616 pr_target_repo = pull_request_obj.target_repo
1617
1617
1618 pr_url = h.route_url('pullrequest_show',
1618 pr_url = h.route_url('pullrequest_show',
1619 repo_name=pr_target_repo.repo_name,
1619 repo_name=pr_target_repo.repo_name,
1620 pull_request_id=pull_request_obj.pull_request_id,)
1620 pull_request_id=pull_request_obj.pull_request_id,)
1621
1621
1622 # set some variables for email notification
1622 # set some variables for email notification
1623 pr_target_repo_url = h.route_url(
1623 pr_target_repo_url = h.route_url(
1624 'repo_summary', repo_name=pr_target_repo.repo_name)
1624 'repo_summary', repo_name=pr_target_repo.repo_name)
1625
1625
1626 pr_source_repo_url = h.route_url(
1626 pr_source_repo_url = h.route_url(
1627 'repo_summary', repo_name=pr_source_repo.repo_name)
1627 'repo_summary', repo_name=pr_source_repo.repo_name)
1628
1628
1629 # pull request specifics
1629 # pull request specifics
1630 pull_request_commits = [
1630 pull_request_commits = [
1631 (x.raw_id, x.message)
1631 (x.raw_id, x.message)
1632 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1632 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1633
1633
1634 current_rhodecode_user = user
1634 current_rhodecode_user = user
1635 kwargs = {
1635 kwargs = {
1636 'user': current_rhodecode_user,
1636 'user': current_rhodecode_user,
1637 'pull_request_author': pull_request.author,
1637 'pull_request_author': pull_request.author,
1638 'pull_request': pull_request_obj,
1638 'pull_request': pull_request_obj,
1639 'pull_request_commits': pull_request_commits,
1639 'pull_request_commits': pull_request_commits,
1640
1640
1641 'pull_request_target_repo': pr_target_repo,
1641 'pull_request_target_repo': pr_target_repo,
1642 'pull_request_target_repo_url': pr_target_repo_url,
1642 'pull_request_target_repo_url': pr_target_repo_url,
1643
1643
1644 'pull_request_source_repo': pr_source_repo,
1644 'pull_request_source_repo': pr_source_repo,
1645 'pull_request_source_repo_url': pr_source_repo_url,
1645 'pull_request_source_repo_url': pr_source_repo_url,
1646
1646
1647 'pull_request_url': pr_url,
1647 'pull_request_url': pr_url,
1648 'thread_ids': [pr_url],
1648 'thread_ids': [pr_url],
1649 'user_role': role
1649 'user_role': role
1650 }
1650 }
1651
1651
1652 # create notification objects, and emails
1652 # create notification objects, and emails
1653 NotificationModel().create(
1653 NotificationModel().create(
1654 created_by=current_rhodecode_user,
1654 created_by=current_rhodecode_user,
1655 notification_subject='', # Filled in based on the notification_type
1655 notification_subject='', # Filled in based on the notification_type
1656 notification_body='', # Filled in based on the notification_type
1656 notification_body='', # Filled in based on the notification_type
1657 notification_type=notification_type,
1657 notification_type=notification_type,
1658 recipients=recipients,
1658 recipients=recipients,
1659 email_kwargs=kwargs,
1659 email_kwargs=kwargs,
1660 )
1660 )
1661
1661
1662 def notify_reviewers(self, pull_request, reviewers_ids, user):
1662 def notify_reviewers(self, pull_request, reviewers_ids, user):
1663 return self._notify_reviewers(pull_request, reviewers_ids,
1663 return self._notify_reviewers(pull_request, reviewers_ids,
1664 PullRequestReviewers.ROLE_REVIEWER, user)
1664 PullRequestReviewers.ROLE_REVIEWER, user)
1665
1665
1666 def notify_observers(self, pull_request, observers_ids, user):
1666 def notify_observers(self, pull_request, observers_ids, user):
1667 return self._notify_reviewers(pull_request, observers_ids,
1667 return self._notify_reviewers(pull_request, observers_ids,
1668 PullRequestReviewers.ROLE_OBSERVER, user)
1668 PullRequestReviewers.ROLE_OBSERVER, user)
1669
1669
1670 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1670 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1671 commit_changes, file_changes):
1671 commit_changes, file_changes):
1672
1672
1673 updating_user_id = updating_user.user_id
1673 updating_user_id = updating_user.user_id
1674 reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
1674 reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
1675 # NOTE(marcink): send notification to all other users except the
1675 # NOTE(marcink): send notification to all other users except the
1676 # person who updated the PR
1676 # person who updated the PR
1677 recipients = reviewers.difference(set([updating_user_id]))
1677 recipients = reviewers.difference(set([updating_user_id]))
1678
1678
1679 log.debug('Notify following recipients about pull-request update %s', recipients)
1679 log.debug('Notify following recipients about pull-request update %s', recipients)
1680
1680
1681 pull_request_obj = pull_request
1681 pull_request_obj = pull_request
1682
1682
1683 # send email about the update
1683 # send email about the update
1684 changed_files = (
1684 changed_files = (
1685 file_changes.added + file_changes.modified + file_changes.removed)
1685 file_changes.added + file_changes.modified + file_changes.removed)
1686
1686
1687 pr_source_repo = pull_request_obj.source_repo
1687 pr_source_repo = pull_request_obj.source_repo
1688 pr_target_repo = pull_request_obj.target_repo
1688 pr_target_repo = pull_request_obj.target_repo
1689
1689
1690 pr_url = h.route_url('pullrequest_show',
1690 pr_url = h.route_url('pullrequest_show',
1691 repo_name=pr_target_repo.repo_name,
1691 repo_name=pr_target_repo.repo_name,
1692 pull_request_id=pull_request_obj.pull_request_id,)
1692 pull_request_id=pull_request_obj.pull_request_id,)
1693
1693
1694 # set some variables for email notification
1694 # set some variables for email notification
1695 pr_target_repo_url = h.route_url(
1695 pr_target_repo_url = h.route_url(
1696 'repo_summary', repo_name=pr_target_repo.repo_name)
1696 'repo_summary', repo_name=pr_target_repo.repo_name)
1697
1697
1698 pr_source_repo_url = h.route_url(
1698 pr_source_repo_url = h.route_url(
1699 'repo_summary', repo_name=pr_source_repo.repo_name)
1699 'repo_summary', repo_name=pr_source_repo.repo_name)
1700
1700
1701 email_kwargs = {
1701 email_kwargs = {
1702 'date': datetime.datetime.now(),
1702 'date': datetime.datetime.now(),
1703 'updating_user': updating_user,
1703 'updating_user': updating_user,
1704
1704
1705 'pull_request': pull_request_obj,
1705 'pull_request': pull_request_obj,
1706
1706
1707 'pull_request_target_repo': pr_target_repo,
1707 'pull_request_target_repo': pr_target_repo,
1708 'pull_request_target_repo_url': pr_target_repo_url,
1708 'pull_request_target_repo_url': pr_target_repo_url,
1709
1709
1710 'pull_request_source_repo': pr_source_repo,
1710 'pull_request_source_repo': pr_source_repo,
1711 'pull_request_source_repo_url': pr_source_repo_url,
1711 'pull_request_source_repo_url': pr_source_repo_url,
1712
1712
1713 'pull_request_url': pr_url,
1713 'pull_request_url': pr_url,
1714
1714
1715 'ancestor_commit_id': ancestor_commit_id,
1715 'ancestor_commit_id': ancestor_commit_id,
1716 'added_commits': commit_changes.added,
1716 'added_commits': commit_changes.added,
1717 'removed_commits': commit_changes.removed,
1717 'removed_commits': commit_changes.removed,
1718 'changed_files': changed_files,
1718 'changed_files': changed_files,
1719 'added_files': file_changes.added,
1719 'added_files': file_changes.added,
1720 'modified_files': file_changes.modified,
1720 'modified_files': file_changes.modified,
1721 'removed_files': file_changes.removed,
1721 'removed_files': file_changes.removed,
1722 'thread_ids': [pr_url],
1722 'thread_ids': [pr_url],
1723 }
1723 }
1724
1724
1725 # create notification objects, and emails
1725 # create notification objects, and emails
1726 NotificationModel().create(
1726 NotificationModel().create(
1727 created_by=updating_user,
1727 created_by=updating_user,
1728 notification_subject='', # Filled in based on the notification_type
1728 notification_subject='', # Filled in based on the notification_type
1729 notification_body='', # Filled in based on the notification_type
1729 notification_body='', # Filled in based on the notification_type
1730 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1730 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1731 recipients=recipients,
1731 recipients=recipients,
1732 email_kwargs=email_kwargs,
1732 email_kwargs=email_kwargs,
1733 )
1733 )
1734
1734
1735 def delete(self, pull_request, user=None):
1735 def delete(self, pull_request, user=None):
1736 if not user:
1736 if not user:
1737 user = getattr(get_current_rhodecode_user(), 'username', None)
1737 user = getattr(get_current_rhodecode_user(), 'username', None)
1738
1738
1739 pull_request = self.__get_pull_request(pull_request)
1739 pull_request = self.__get_pull_request(pull_request)
1740 old_data = pull_request.get_api_data(with_merge_state=False)
1740 old_data = pull_request.get_api_data(with_merge_state=False)
1741 self._cleanup_merge_workspace(pull_request)
1741 self._cleanup_merge_workspace(pull_request)
1742 self._log_audit_action(
1742 self._log_audit_action(
1743 'repo.pull_request.delete', {'old_data': old_data},
1743 'repo.pull_request.delete', {'old_data': old_data},
1744 user, pull_request)
1744 user, pull_request)
1745 Session().delete(pull_request)
1745 Session().delete(pull_request)
1746
1746
1747 def close_pull_request(self, pull_request, user):
1747 def close_pull_request(self, pull_request, user):
1748 pull_request = self.__get_pull_request(pull_request)
1748 pull_request = self.__get_pull_request(pull_request)
1749 self._cleanup_merge_workspace(pull_request)
1749 self._cleanup_merge_workspace(pull_request)
1750 pull_request.status = PullRequest.STATUS_CLOSED
1750 pull_request.status = PullRequest.STATUS_CLOSED
1751 pull_request.updated_on = datetime.datetime.now()
1751 pull_request.updated_on = datetime.datetime.now()
1752 Session().add(pull_request)
1752 Session().add(pull_request)
1753 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1753 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1754
1754
1755 pr_data = pull_request.get_api_data(with_merge_state=False)
1755 pr_data = pull_request.get_api_data(with_merge_state=False)
1756 self._log_audit_action(
1756 self._log_audit_action(
1757 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1757 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1758
1758
1759 def close_pull_request_with_comment(
1759 def close_pull_request_with_comment(
1760 self, pull_request, user, repo, message=None, auth_user=None):
1760 self, pull_request, user, repo, message=None, auth_user=None):
1761
1761
1762 pull_request_review_status = pull_request.calculated_review_status()
1762 pull_request_review_status = pull_request.calculated_review_status()
1763
1763
1764 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1764 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1765 # approved only if we have voting consent
1765 # approved only if we have voting consent
1766 status = ChangesetStatus.STATUS_APPROVED
1766 status = ChangesetStatus.STATUS_APPROVED
1767 else:
1767 else:
1768 status = ChangesetStatus.STATUS_REJECTED
1768 status = ChangesetStatus.STATUS_REJECTED
1769 status_lbl = ChangesetStatus.get_status_lbl(status)
1769 status_lbl = ChangesetStatus.get_status_lbl(status)
1770
1770
1771 default_message = (
1771 default_message = (
1772 'Closing with status change {transition_icon} {status}.'
1772 'Closing with status change {transition_icon} {status}.'
1773 ).format(transition_icon='>', status=status_lbl)
1773 ).format(transition_icon='>', status=status_lbl)
1774 text = message or default_message
1774 text = message or default_message
1775
1775
1776 # create a comment, and link it to new status
1776 # create a comment, and link it to new status
1777 comment = CommentsModel().create(
1777 comment = CommentsModel().create(
1778 text=text,
1778 text=text,
1779 repo=repo.repo_id,
1779 repo=repo.repo_id,
1780 user=user.user_id,
1780 user=user.user_id,
1781 pull_request=pull_request.pull_request_id,
1781 pull_request=pull_request.pull_request_id,
1782 status_change=status_lbl,
1782 status_change=status_lbl,
1783 status_change_type=status,
1783 status_change_type=status,
1784 closing_pr=True,
1784 closing_pr=True,
1785 auth_user=auth_user,
1785 auth_user=auth_user,
1786 )
1786 )
1787
1787
1788 # calculate old status before we change it
1788 # calculate old status before we change it
1789 old_calculated_status = pull_request.calculated_review_status()
1789 old_calculated_status = pull_request.calculated_review_status()
1790 ChangesetStatusModel().set_status(
1790 ChangesetStatusModel().set_status(
1791 repo.repo_id,
1791 repo.repo_id,
1792 status,
1792 status,
1793 user.user_id,
1793 user.user_id,
1794 comment=comment,
1794 comment=comment,
1795 pull_request=pull_request.pull_request_id
1795 pull_request=pull_request.pull_request_id
1796 )
1796 )
1797
1797
1798 Session().flush()
1798 Session().flush()
1799
1799
1800 self.trigger_pull_request_hook(pull_request, user, 'comment',
1800 self.trigger_pull_request_hook(pull_request, user, 'comment',
1801 data={'comment': comment})
1801 data={'comment': comment})
1802
1802
1803 # we now calculate the status of the pull request again, and based on that
1803 # we now calculate the status of the pull request again, and based on that
1804 # calculation trigger a status change. This might happen in cases where
1804 # calculation trigger a status change. This might happen in cases where
1805 # a non-reviewer admin closes a pr, which means their vote doesn't
1805 # a non-reviewer admin closes a pr, which means their vote doesn't
1806 # change the status, while if they are a reviewer it might change it.
1806 # change the status, while if they are a reviewer it might change it.
1807 calculated_status = pull_request.calculated_review_status()
1807 calculated_status = pull_request.calculated_review_status()
1808 if old_calculated_status != calculated_status:
1808 if old_calculated_status != calculated_status:
1809 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1809 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1810 data={'status': calculated_status})
1810 data={'status': calculated_status})
1811
1811
1812 # finally close the PR
1812 # finally close the PR
1813 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1813 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1814
1814
1815 return comment, status
1815 return comment, status
1816
1816
1817 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1817 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1818 _ = translator or get_current_request().translate
1818 _ = translator or get_current_request().translate
1819
1819
1820 if not self._is_merge_enabled(pull_request):
1820 if not self._is_merge_enabled(pull_request):
1821 return None, False, _('Server-side pull request merging is disabled.')
1821 return None, False, _('Server-side pull request merging is disabled.')
1822
1822
1823 if pull_request.is_closed():
1823 if pull_request.is_closed():
1824 return None, False, _('This pull request is closed.')
1824 return None, False, _('This pull request is closed.')
1825
1825
1826 merge_possible, msg = self._check_repo_requirements(
1826 merge_possible, msg = self._check_repo_requirements(
1827 target=pull_request.target_repo, source=pull_request.source_repo,
1827 target=pull_request.target_repo, source=pull_request.source_repo,
1828 translator=_)
1828 translator=_)
1829 if not merge_possible:
1829 if not merge_possible:
1830 return None, merge_possible, msg
1830 return None, merge_possible, msg
1831
1831
1832 try:
1832 try:
1833 merge_response = self._try_merge(
1833 merge_response = self._try_merge(
1834 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1834 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1835 log.debug("Merge response: %s", merge_response)
1835 log.debug("Merge response: %s", merge_response)
1836 return merge_response, merge_response.possible, merge_response.merge_status_message
1836 return merge_response, merge_response.possible, merge_response.merge_status_message
1837 except NotImplementedError:
1837 except NotImplementedError:
1838 return None, False, _('Pull request merging is not supported.')
1838 return None, False, _('Pull request merging is not supported.')
1839
1839
1840 def _check_repo_requirements(self, target, source, translator):
1840 def _check_repo_requirements(self, target, source, translator):
1841 """
1841 """
1842 Check if `target` and `source` have compatible requirements.
1842 Check if `target` and `source` have compatible requirements.
1843
1843
1844 Currently this is just checking for largefiles.
1844 Currently this is just checking for largefiles.
1845 """
1845 """
1846 _ = translator
1846 _ = translator
1847 target_has_largefiles = self._has_largefiles(target)
1847 target_has_largefiles = self._has_largefiles(target)
1848 source_has_largefiles = self._has_largefiles(source)
1848 source_has_largefiles = self._has_largefiles(source)
1849 merge_possible = True
1849 merge_possible = True
1850 message = u''
1850 message = u''
1851
1851
1852 if target_has_largefiles != source_has_largefiles:
1852 if target_has_largefiles != source_has_largefiles:
1853 merge_possible = False
1853 merge_possible = False
1854 if source_has_largefiles:
1854 if source_has_largefiles:
1855 message = _(
1855 message = _(
1856 'Target repository large files support is disabled.')
1856 'Target repository large files support is disabled.')
1857 else:
1857 else:
1858 message = _(
1858 message = _(
1859 'Source repository large files support is disabled.')
1859 'Source repository large files support is disabled.')
1860
1860
1861 return merge_possible, message
1861 return merge_possible, message
1862
1862
1863 def _has_largefiles(self, repo):
1863 def _has_largefiles(self, repo):
1864 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1864 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1865 'extensions', 'largefiles')
1865 'extensions', 'largefiles')
1866 return largefiles_ui and largefiles_ui[0].active
1866 return largefiles_ui and largefiles_ui[0].active
1867
1867
1868 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1868 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1869 """
1869 """
1870 Try to merge the pull request and return the merge status.
1870 Try to merge the pull request and return the merge status.
1871 """
1871 """
1872 log.debug(
1872 log.debug(
1873 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1873 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1874 pull_request.pull_request_id, force_shadow_repo_refresh)
1874 pull_request.pull_request_id, force_shadow_repo_refresh)
1875 target_vcs = pull_request.target_repo.scm_instance()
1875 target_vcs = pull_request.target_repo.scm_instance()
1876 # Refresh the target reference.
1876 # Refresh the target reference.
1877 try:
1877 try:
1878 target_ref = self._refresh_reference(
1878 target_ref = self._refresh_reference(
1879 pull_request.target_ref_parts, target_vcs)
1879 pull_request.target_ref_parts, target_vcs)
1880 except CommitDoesNotExistError:
1880 except CommitDoesNotExistError:
1881 merge_state = MergeResponse(
1881 merge_state = MergeResponse(
1882 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1882 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1883 metadata={'target_ref': pull_request.target_ref_parts})
1883 metadata={'target_ref': pull_request.target_ref_parts})
1884 return merge_state
1884 return merge_state
1885
1885
1886 target_locked = pull_request.target_repo.locked
1886 target_locked = pull_request.target_repo.locked
1887 if target_locked and target_locked[0]:
1887 if target_locked and target_locked[0]:
1888 locked_by = 'user:{}'.format(target_locked[0])
1888 locked_by = 'user:{}'.format(target_locked[0])
1889 log.debug("The target repository is locked by %s.", locked_by)
1889 log.debug("The target repository is locked by %s.", locked_by)
1890 merge_state = MergeResponse(
1890 merge_state = MergeResponse(
1891 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1891 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1892 metadata={'locked_by': locked_by})
1892 metadata={'locked_by': locked_by})
1893 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1893 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1894 pull_request, target_ref):
1894 pull_request, target_ref):
1895 log.debug("Refreshing the merge status of the repository.")
1895 log.debug("Refreshing the merge status of the repository.")
1896 merge_state = self._refresh_merge_state(
1896 merge_state = self._refresh_merge_state(
1897 pull_request, target_vcs, target_ref)
1897 pull_request, target_vcs, target_ref)
1898 else:
1898 else:
1899 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1899 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1900 metadata = {
1900 metadata = {
1901 'unresolved_files': '',
1901 'unresolved_files': '',
1902 'target_ref': pull_request.target_ref_parts,
1902 'target_ref': pull_request.target_ref_parts,
1903 'source_ref': pull_request.source_ref_parts,
1903 'source_ref': pull_request.source_ref_parts,
1904 }
1904 }
1905 if pull_request.last_merge_metadata:
1905 if pull_request.last_merge_metadata:
1906 metadata.update(pull_request.last_merge_metadata_parsed)
1906 metadata.update(pull_request.last_merge_metadata_parsed)
1907
1907
1908 if not possible and target_ref.type == 'branch':
1908 if not possible and target_ref.type == 'branch':
1909 # NOTE(marcink): case for mercurial multiple heads on branch
1909 # NOTE(marcink): case for mercurial multiple heads on branch
1910 heads = target_vcs._heads(target_ref.name)
1910 heads = target_vcs._heads(target_ref.name)
1911 if len(heads) != 1:
1911 if len(heads) != 1:
1912 heads = ',\n'.join(target_vcs._heads(target_ref.name))
1912 heads = ',\n'.join(target_vcs._heads(target_ref.name))
1913 metadata.update({
1913 metadata.update({
1914 'heads': heads
1914 'heads': heads
1915 })
1915 })
1916
1916
1917 merge_state = MergeResponse(
1917 merge_state = MergeResponse(
1918 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1918 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1919
1919
1920 return merge_state
1920 return merge_state
1921
1921
1922 def _refresh_reference(self, reference, vcs_repository):
1922 def _refresh_reference(self, reference, vcs_repository):
1923 if reference.type in self.UPDATABLE_REF_TYPES:
1923 if reference.type in self.UPDATABLE_REF_TYPES:
1924 name_or_id = reference.name
1924 name_or_id = reference.name
1925 else:
1925 else:
1926 name_or_id = reference.commit_id
1926 name_or_id = reference.commit_id
1927
1927
1928 refreshed_commit = vcs_repository.get_commit(name_or_id)
1928 refreshed_commit = vcs_repository.get_commit(name_or_id)
1929 refreshed_reference = Reference(
1929 refreshed_reference = Reference(
1930 reference.type, reference.name, refreshed_commit.raw_id)
1930 reference.type, reference.name, refreshed_commit.raw_id)
1931 return refreshed_reference
1931 return refreshed_reference
1932
1932
1933 def _needs_merge_state_refresh(self, pull_request, target_reference):
1933 def _needs_merge_state_refresh(self, pull_request, target_reference):
1934 return not (
1934 return not (
1935 pull_request.revisions and
1935 pull_request.revisions and
1936 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1936 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1937 target_reference.commit_id == pull_request._last_merge_target_rev)
1937 target_reference.commit_id == pull_request._last_merge_target_rev)
1938
1938
1939 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1939 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1940 workspace_id = self._workspace_id(pull_request)
1940 workspace_id = self._workspace_id(pull_request)
1941 source_vcs = pull_request.source_repo.scm_instance()
1941 source_vcs = pull_request.source_repo.scm_instance()
1942 repo_id = pull_request.target_repo.repo_id
1942 repo_id = pull_request.target_repo.repo_id
1943 use_rebase = self._use_rebase_for_merging(pull_request)
1943 use_rebase = self._use_rebase_for_merging(pull_request)
1944 close_branch = self._close_branch_before_merging(pull_request)
1944 close_branch = self._close_branch_before_merging(pull_request)
1945 merge_state = target_vcs.merge(
1945 merge_state = target_vcs.merge(
1946 repo_id, workspace_id,
1946 repo_id, workspace_id,
1947 target_reference, source_vcs, pull_request.source_ref_parts,
1947 target_reference, source_vcs, pull_request.source_ref_parts,
1948 dry_run=True, use_rebase=use_rebase,
1948 dry_run=True, use_rebase=use_rebase,
1949 close_branch=close_branch)
1949 close_branch=close_branch)
1950
1950
1951 # Do not store the response if there was an unknown error.
1951 # Do not store the response if there was an unknown error.
1952 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1952 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1953 pull_request._last_merge_source_rev = \
1953 pull_request._last_merge_source_rev = \
1954 pull_request.source_ref_parts.commit_id
1954 pull_request.source_ref_parts.commit_id
1955 pull_request._last_merge_target_rev = target_reference.commit_id
1955 pull_request._last_merge_target_rev = target_reference.commit_id
1956 pull_request.last_merge_status = merge_state.failure_reason
1956 pull_request.last_merge_status = merge_state.failure_reason
1957 pull_request.last_merge_metadata = merge_state.metadata
1957 pull_request.last_merge_metadata = merge_state.metadata
1958
1958
1959 pull_request.shadow_merge_ref = merge_state.merge_ref
1959 pull_request.shadow_merge_ref = merge_state.merge_ref
1960 Session().add(pull_request)
1960 Session().add(pull_request)
1961 Session().commit()
1961 Session().commit()
1962
1962
1963 return merge_state
1963 return merge_state
1964
1964
1965 def _workspace_id(self, pull_request):
1965 def _workspace_id(self, pull_request):
1966 workspace_id = 'pr-%s' % pull_request.pull_request_id
1966 workspace_id = 'pr-%s' % pull_request.pull_request_id
1967 return workspace_id
1967 return workspace_id
1968
1968
1969 def generate_repo_data(self, repo, commit_id=None, branch=None,
1969 def generate_repo_data(self, repo, commit_id=None, branch=None,
1970 bookmark=None, translator=None):
1970 bookmark=None, translator=None):
1971 from rhodecode.model.repo import RepoModel
1971 from rhodecode.model.repo import RepoModel
1972
1972
1973 all_refs, selected_ref = \
1973 all_refs, selected_ref = \
1974 self._get_repo_pullrequest_sources(
1974 self._get_repo_pullrequest_sources(
1975 repo.scm_instance(), commit_id=commit_id,
1975 repo.scm_instance(), commit_id=commit_id,
1976 branch=branch, bookmark=bookmark, translator=translator)
1976 branch=branch, bookmark=bookmark, translator=translator)
1977
1977
1978 refs_select2 = []
1978 refs_select2 = []
1979 for element in all_refs:
1979 for element in all_refs:
1980 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1980 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1981 refs_select2.append({'text': element[1], 'children': children})
1981 refs_select2.append({'text': element[1], 'children': children})
1982
1982
1983 return {
1983 return {
1984 'user': {
1984 'user': {
1985 'user_id': repo.user.user_id,
1985 'user_id': repo.user.user_id,
1986 'username': repo.user.username,
1986 'username': repo.user.username,
1987 'firstname': repo.user.first_name,
1987 'firstname': repo.user.first_name,
1988 'lastname': repo.user.last_name,
1988 'lastname': repo.user.last_name,
1989 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1989 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1990 },
1990 },
1991 'name': repo.repo_name,
1991 'name': repo.repo_name,
1992 'link': RepoModel().get_url(repo),
1992 'link': RepoModel().get_url(repo),
1993 'description': h.chop_at_smart(repo.description_safe, '\n'),
1993 'description': h.chop_at_smart(repo.description_safe, '\n'),
1994 'refs': {
1994 'refs': {
1995 'all_refs': all_refs,
1995 'all_refs': all_refs,
1996 'selected_ref': selected_ref,
1996 'selected_ref': selected_ref,
1997 'select2_refs': refs_select2
1997 'select2_refs': refs_select2
1998 }
1998 }
1999 }
1999 }
2000
2000
2001 def generate_pullrequest_title(self, source, source_ref, target):
2001 def generate_pullrequest_title(self, source, source_ref, target):
2002 return u'{source}#{at_ref} to {target}'.format(
2002 return u'{source}#{at_ref} to {target}'.format(
2003 source=source,
2003 source=source,
2004 at_ref=source_ref,
2004 at_ref=source_ref,
2005 target=target,
2005 target=target,
2006 )
2006 )
2007
2007
2008 def _cleanup_merge_workspace(self, pull_request):
2008 def _cleanup_merge_workspace(self, pull_request):
2009 # Merging related cleanup
2009 # Merging related cleanup
2010 repo_id = pull_request.target_repo.repo_id
2010 repo_id = pull_request.target_repo.repo_id
2011 target_scm = pull_request.target_repo.scm_instance()
2011 target_scm = pull_request.target_repo.scm_instance()
2012 workspace_id = self._workspace_id(pull_request)
2012 workspace_id = self._workspace_id(pull_request)
2013
2013
2014 try:
2014 try:
2015 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2015 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2016 except NotImplementedError:
2016 except NotImplementedError:
2017 pass
2017 pass
2018
2018
2019 def _get_repo_pullrequest_sources(
2019 def _get_repo_pullrequest_sources(
2020 self, repo, commit_id=None, branch=None, bookmark=None,
2020 self, repo, commit_id=None, branch=None, bookmark=None,
2021 translator=None):
2021 translator=None):
2022 """
2022 """
2023 Return a structure with the repo's interesting commits, suitable for
2023 Return a structure with the repo's interesting commits, suitable for
2024 the selectors in the pull request controller
2024 the selectors in the pull request controller
2025
2025
2026 :param commit_id: a commit that must be in the list somehow
2026 :param commit_id: a commit that must be in the list somehow
2027 and selected by default
2027 and selected by default
2028 :param branch: a branch that must be in the list and selected
2028 :param branch: a branch that must be in the list and selected
2029 by default - even if closed
2029 by default - even if closed
2030 :param bookmark: a bookmark that must be in the list and selected
2030 :param bookmark: a bookmark that must be in the list and selected
2031 """
2031 """
2032 _ = translator or get_current_request().translate
2032 _ = translator or get_current_request().translate
2033
2033
2034 commit_id = safe_str(commit_id) if commit_id else None
2034 commit_id = safe_str(commit_id) if commit_id else None
2035 branch = safe_unicode(branch) if branch else None
2035 branch = safe_unicode(branch) if branch else None
2036 bookmark = safe_unicode(bookmark) if bookmark else None
2036 bookmark = safe_unicode(bookmark) if bookmark else None
2037
2037
2038 selected = None
2038 selected = None
2039
2039
2040 # order matters: first source that has commit_id in it will be selected
2040 # order matters: first source that has commit_id in it will be selected
2041 sources = []
2041 sources = []
2042 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
2042 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
2043 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
2043 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
2044
2044
2045 if commit_id:
2045 if commit_id:
2046 ref_commit = (h.short_id(commit_id), commit_id)
2046 ref_commit = (h.short_id(commit_id), commit_id)
2047 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
2047 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
2048
2048
2049 sources.append(
2049 sources.append(
2050 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
2050 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
2051 )
2051 )
2052
2052
2053 groups = []
2053 groups = []
2054
2054
2055 for group_key, ref_list, group_name, match in sources:
2055 for group_key, ref_list, group_name, match in sources:
2056 group_refs = []
2056 group_refs = []
2057 for ref_name, ref_id in ref_list:
2057 for ref_name, ref_id in ref_list:
2058 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
2058 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
2059 group_refs.append((ref_key, ref_name))
2059 group_refs.append((ref_key, ref_name))
2060
2060
2061 if not selected:
2061 if not selected:
2062 if set([commit_id, match]) & set([ref_id, ref_name]):
2062 if set([commit_id, match]) & set([ref_id, ref_name]):
2063 selected = ref_key
2063 selected = ref_key
2064
2064
2065 if group_refs:
2065 if group_refs:
2066 groups.append((group_refs, group_name))
2066 groups.append((group_refs, group_name))
2067
2067
2068 if not selected:
2068 if not selected:
2069 ref = commit_id or branch or bookmark
2069 ref = commit_id or branch or bookmark
2070 if ref:
2070 if ref:
2071 raise CommitDoesNotExistError(
2071 raise CommitDoesNotExistError(
2072 u'No commit refs could be found matching: {}'.format(ref))
2072 u'No commit refs could be found matching: {}'.format(ref))
2073 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
2073 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
2074 selected = u'branch:{}:{}'.format(
2074 selected = u'branch:{}:{}'.format(
2075 safe_unicode(repo.DEFAULT_BRANCH_NAME),
2075 safe_unicode(repo.DEFAULT_BRANCH_NAME),
2076 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
2076 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
2077 )
2077 )
2078 elif repo.commit_ids:
2078 elif repo.commit_ids:
2079 # make the user select in this case
2079 # make the user select in this case
2080 selected = None
2080 selected = None
2081 else:
2081 else:
2082 raise EmptyRepositoryError()
2082 raise EmptyRepositoryError()
2083 return groups, selected
2083 return groups, selected
2084
2084
2085 def get_diff(self, source_repo, source_ref_id, target_ref_id,
2085 def get_diff(self, source_repo, source_ref_id, target_ref_id,
2086 hide_whitespace_changes, diff_context):
2086 hide_whitespace_changes, diff_context):
2087
2087
2088 return self._get_diff_from_pr_or_version(
2088 return self._get_diff_from_pr_or_version(
2089 source_repo, source_ref_id, target_ref_id,
2089 source_repo, source_ref_id, target_ref_id,
2090 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
2090 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
2091
2091
2092 def _get_diff_from_pr_or_version(
2092 def _get_diff_from_pr_or_version(
2093 self, source_repo, source_ref_id, target_ref_id,
2093 self, source_repo, source_ref_id, target_ref_id,
2094 hide_whitespace_changes, diff_context):
2094 hide_whitespace_changes, diff_context):
2095
2095
2096 target_commit = source_repo.get_commit(
2096 target_commit = source_repo.get_commit(
2097 commit_id=safe_str(target_ref_id))
2097 commit_id=safe_str(target_ref_id))
2098 source_commit = source_repo.get_commit(
2098 source_commit = source_repo.get_commit(
2099 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
2099 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
2100 if isinstance(source_repo, Repository):
2100 if isinstance(source_repo, Repository):
2101 vcs_repo = source_repo.scm_instance()
2101 vcs_repo = source_repo.scm_instance()
2102 else:
2102 else:
2103 vcs_repo = source_repo
2103 vcs_repo = source_repo
2104
2104
2105 # TODO: johbo: In the context of an update, we cannot reach
2105 # TODO: johbo: In the context of an update, we cannot reach
2106 # the old commit anymore with our normal mechanisms. It needs
2106 # the old commit anymore with our normal mechanisms. It needs
2107 # some sort of special support in the vcs layer to avoid this
2107 # some sort of special support in the vcs layer to avoid this
2108 # workaround.
2108 # workaround.
2109 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
2109 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
2110 vcs_repo.alias == 'git'):
2110 vcs_repo.alias == 'git'):
2111 source_commit.raw_id = safe_str(source_ref_id)
2111 source_commit.raw_id = safe_str(source_ref_id)
2112
2112
2113 log.debug('calculating diff between '
2113 log.debug('calculating diff between '
2114 'source_ref:%s and target_ref:%s for repo `%s`',
2114 'source_ref:%s and target_ref:%s for repo `%s`',
2115 source_ref_id, target_ref_id,
2115 source_ref_id, target_ref_id,
2116 safe_unicode(vcs_repo.path))
2116 safe_unicode(vcs_repo.path))
2117
2117
2118 vcs_diff = vcs_repo.get_diff(
2118 vcs_diff = vcs_repo.get_diff(
2119 commit1=target_commit, commit2=source_commit,
2119 commit1=target_commit, commit2=source_commit,
2120 ignore_whitespace=hide_whitespace_changes, context=diff_context)
2120 ignore_whitespace=hide_whitespace_changes, context=diff_context)
2121 return vcs_diff
2121 return vcs_diff
2122
2122
2123 def _is_merge_enabled(self, pull_request):
2123 def _is_merge_enabled(self, pull_request):
2124 return self._get_general_setting(
2124 return self._get_general_setting(
2125 pull_request, 'rhodecode_pr_merge_enabled')
2125 pull_request, 'rhodecode_pr_merge_enabled')
2126
2126
2127 def _use_rebase_for_merging(self, pull_request):
2127 def _use_rebase_for_merging(self, pull_request):
2128 repo_type = pull_request.target_repo.repo_type
2128 repo_type = pull_request.target_repo.repo_type
2129 if repo_type == 'hg':
2129 if repo_type == 'hg':
2130 return self._get_general_setting(
2130 return self._get_general_setting(
2131 pull_request, 'rhodecode_hg_use_rebase_for_merging')
2131 pull_request, 'rhodecode_hg_use_rebase_for_merging')
2132 elif repo_type == 'git':
2132 elif repo_type == 'git':
2133 return self._get_general_setting(
2133 return self._get_general_setting(
2134 pull_request, 'rhodecode_git_use_rebase_for_merging')
2134 pull_request, 'rhodecode_git_use_rebase_for_merging')
2135
2135
2136 return False
2136 return False
2137
2137
2138 def _user_name_for_merging(self, pull_request, user):
2138 def _user_name_for_merging(self, pull_request, user):
2139 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2139 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2140 if env_user_name_attr and hasattr(user, env_user_name_attr):
2140 if env_user_name_attr and hasattr(user, env_user_name_attr):
2141 user_name_attr = env_user_name_attr
2141 user_name_attr = env_user_name_attr
2142 else:
2142 else:
2143 user_name_attr = 'short_contact'
2143 user_name_attr = 'short_contact'
2144
2144
2145 user_name = getattr(user, user_name_attr)
2145 user_name = getattr(user, user_name_attr)
2146 return user_name
2146 return user_name
2147
2147
2148 def _close_branch_before_merging(self, pull_request):
2148 def _close_branch_before_merging(self, pull_request):
2149 repo_type = pull_request.target_repo.repo_type
2149 repo_type = pull_request.target_repo.repo_type
2150 if repo_type == 'hg':
2150 if repo_type == 'hg':
2151 return self._get_general_setting(
2151 return self._get_general_setting(
2152 pull_request, 'rhodecode_hg_close_branch_before_merging')
2152 pull_request, 'rhodecode_hg_close_branch_before_merging')
2153 elif repo_type == 'git':
2153 elif repo_type == 'git':
2154 return self._get_general_setting(
2154 return self._get_general_setting(
2155 pull_request, 'rhodecode_git_close_branch_before_merging')
2155 pull_request, 'rhodecode_git_close_branch_before_merging')
2156
2156
2157 return False
2157 return False
2158
2158
2159 def _get_general_setting(self, pull_request, settings_key, default=False):
2159 def _get_general_setting(self, pull_request, settings_key, default=False):
2160 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2160 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2161 settings = settings_model.get_general_settings()
2161 settings = settings_model.get_general_settings()
2162 return settings.get(settings_key, default)
2162 return settings.get(settings_key, default)
2163
2163
2164 def _log_audit_action(self, action, action_data, user, pull_request):
2164 def _log_audit_action(self, action, action_data, user, pull_request):
2165 audit_logger.store(
2165 audit_logger.store(
2166 action=action,
2166 action=action,
2167 action_data=action_data,
2167 action_data=action_data,
2168 user=user,
2168 user=user,
2169 repo=pull_request.target_repo)
2169 repo=pull_request.target_repo)
2170
2170
2171 def get_reviewer_functions(self):
2171 def get_reviewer_functions(self):
2172 """
2172 """
2173 Fetches the functions used for validating and fetching default reviewers.
2173 Fetches the functions used for validating and fetching default reviewers.
2174 If available we use the EE package, otherwise we fall back to the CE
2174 If available we use the EE package, otherwise we fall back to the CE
2175 package functions.
2175 package functions.
2176 """
2176 """
2177 try:
2177 try:
2178 from rc_reviewers.utils import get_default_reviewers_data
2178 from rc_reviewers.utils import get_default_reviewers_data
2179 from rc_reviewers.utils import validate_default_reviewers
2179 from rc_reviewers.utils import validate_default_reviewers
2180 from rc_reviewers.utils import validate_observers
2180 from rc_reviewers.utils import validate_observers
2181 except ImportError:
2181 except ImportError:
2182 from rhodecode.apps.repository.utils import get_default_reviewers_data
2182 from rhodecode.apps.repository.utils import get_default_reviewers_data
2183 from rhodecode.apps.repository.utils import validate_default_reviewers
2183 from rhodecode.apps.repository.utils import validate_default_reviewers
2184 from rhodecode.apps.repository.utils import validate_observers
2184 from rhodecode.apps.repository.utils import validate_observers
2185
2185
2186 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2186 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2187
2187
2188
2188
2189 class MergeCheck(object):
2189 class MergeCheck(object):
2190 """
2190 """
2190 Performs merge checks and returns a check object which stores information
2190 Performs merge checks and returns a check object which stores information
2191 about merge errors and merge conditions
2191 about merge errors and merge conditions
2193 """
2193 """
2194 TODO_CHECK = 'todo'
2194 TODO_CHECK = 'todo'
2195 PERM_CHECK = 'perm'
2195 PERM_CHECK = 'perm'
2196 REVIEW_CHECK = 'review'
2196 REVIEW_CHECK = 'review'
2197 MERGE_CHECK = 'merge'
2197 MERGE_CHECK = 'merge'
2198 WIP_CHECK = 'wip'
2198 WIP_CHECK = 'wip'
2199
2199
2200 def __init__(self):
2200 def __init__(self):
2201 self.review_status = None
2201 self.review_status = None
2202 self.merge_possible = None
2202 self.merge_possible = None
2203 self.merge_msg = ''
2203 self.merge_msg = ''
2204 self.merge_response = None
2204 self.merge_response = None
2205 self.failed = None
2205 self.failed = None
2206 self.errors = []
2206 self.errors = []
2207 self.error_details = OrderedDict()
2207 self.error_details = OrderedDict()
2208 self.source_commit = AttributeDict()
2208 self.source_commit = AttributeDict()
2209 self.target_commit = AttributeDict()
2209 self.target_commit = AttributeDict()
2210 self.reviewers_count = 0
2210 self.reviewers_count = 0
2211 self.observers_count = 0
2211 self.observers_count = 0
2212
2212
2213 def __repr__(self):
2213 def __repr__(self):
2214 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2214 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2215 self.merge_possible, self.failed, self.errors)
2215 self.merge_possible, self.failed, self.errors)
2216
2216
2217 def push_error(self, error_type, message, error_key, details):
2217 def push_error(self, error_type, message, error_key, details):
2218 self.failed = True
2218 self.failed = True
2219 self.errors.append([error_type, message])
2219 self.errors.append([error_type, message])
2220 self.error_details[error_key] = dict(
2220 self.error_details[error_key] = dict(
2221 details=details,
2221 details=details,
2222 error_type=error_type,
2222 error_type=error_type,
2223 message=message
2223 message=message
2224 )
2224 )
2225
2225
2226 @classmethod
2226 @classmethod
2227 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2227 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2228 force_shadow_repo_refresh=False):
2228 force_shadow_repo_refresh=False):
2229 _ = translator
2229 _ = translator
2230 merge_check = cls()
2230 merge_check = cls()
2231
2231
2232 # title has WIP:
2232 # title has WIP:
2233 if pull_request.work_in_progress:
2233 if pull_request.work_in_progress:
2234 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2234 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2235
2235
2236 msg = _('WIP marker in title prevents an accidental merge.')
2236 msg = _('WIP marker in title prevents an accidental merge.')
2237 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2237 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2238 if fail_early:
2238 if fail_early:
2239 return merge_check
2239 return merge_check
2240
2240
2241 # permissions to merge
2241 # permissions to merge
2242 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2242 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2243 if not user_allowed_to_merge:
2243 if not user_allowed_to_merge:
2244 log.debug("MergeCheck: cannot merge, approval is pending.")
2244 log.debug("MergeCheck: cannot merge, approval is pending.")
2245
2245
2246 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2246 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2247 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2247 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2248 if fail_early:
2248 if fail_early:
2249 return merge_check
2249 return merge_check
2250
2250
2251 # permission to merge into the target branch
2251 # permission to merge into the target branch
2252 target_commit_id = pull_request.target_ref_parts.commit_id
2252 target_commit_id = pull_request.target_ref_parts.commit_id
2253 if pull_request.target_ref_parts.type == 'branch':
2253 if pull_request.target_ref_parts.type == 'branch':
2254 branch_name = pull_request.target_ref_parts.name
2254 branch_name = pull_request.target_ref_parts.name
2255 else:
2255 else:
2256 # for mercurial we can always figure out the branch from the commit
2256 # for mercurial we can always figure out the branch from the commit
2257 # in case of bookmark
2257 # in case of bookmark
2258 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2258 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2259 branch_name = target_commit.branch
2259 branch_name = target_commit.branch
2260
2260
2261 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2261 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2262 pull_request.target_repo.repo_name, branch_name)
2262 pull_request.target_repo.repo_name, branch_name)
2263 if branch_perm and branch_perm == 'branch.none':
2263 if branch_perm and branch_perm == 'branch.none':
2264 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2264 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2265 branch_name, rule)
2265 branch_name, rule)
2266 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2266 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2267 if fail_early:
2267 if fail_early:
2268 return merge_check
2268 return merge_check
2269
2269
2270 # review status, must be always present
2270 # review status, must be always present
2271 review_status = pull_request.calculated_review_status()
2271 review_status = pull_request.calculated_review_status()
2272 merge_check.review_status = review_status
2272 merge_check.review_status = review_status
2273 merge_check.reviewers_count = pull_request.reviewers_count
2273 merge_check.reviewers_count = pull_request.reviewers_count
2274 merge_check.observers_count = pull_request.observers_count
2274 merge_check.observers_count = pull_request.observers_count
2275
2275
2276 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2276 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2277 if not status_approved and merge_check.reviewers_count:
2277 if not status_approved and merge_check.reviewers_count:
2278 log.debug("MergeCheck: cannot merge, approval is pending.")
2278 log.debug("MergeCheck: cannot merge, approval is pending.")
2279 msg = _('Pull request reviewer approval is pending.')
2279 msg = _('Pull request reviewer approval is pending.')
2280
2280
2281 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2281 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2282
2282
2283 if fail_early:
2283 if fail_early:
2284 return merge_check
2284 return merge_check
2285
2285
2286 # left over TODOs
2286 # left over TODOs
2287 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2287 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2288 if todos:
2288 if todos:
2289 log.debug("MergeCheck: cannot merge, {} "
2289 log.debug("MergeCheck: cannot merge, {} "
2290 "unresolved TODOs left.".format(len(todos)))
2290 "unresolved TODOs left.".format(len(todos)))
2291
2291
2292 if len(todos) == 1:
2292 if len(todos) == 1:
2293 msg = _('Cannot merge, {} TODO still not resolved.').format(
2293 msg = _('Cannot merge, {} TODO still not resolved.').format(
2294 len(todos))
2294 len(todos))
2295 else:
2295 else:
2296 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2296 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2297 len(todos))
2297 len(todos))
2298
2298
2299 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2299 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2300
2300
2301 if fail_early:
2301 if fail_early:
2302 return merge_check
2302 return merge_check
2303
2303
2304 # merge possible, here is the filesystem simulation + shadow repo
2304 # merge possible, here is the filesystem simulation + shadow repo
2305 merge_response, merge_status, msg = PullRequestModel().merge_status(
2305 merge_response, merge_status, msg = PullRequestModel().merge_status(
2306 pull_request, translator=translator,
2306 pull_request, translator=translator,
2307 force_shadow_repo_refresh=force_shadow_repo_refresh)
2307 force_shadow_repo_refresh=force_shadow_repo_refresh)
2308
2308
2309 merge_check.merge_possible = merge_status
2309 merge_check.merge_possible = merge_status
2310 merge_check.merge_msg = msg
2310 merge_check.merge_msg = msg
2311 merge_check.merge_response = merge_response
2311 merge_check.merge_response = merge_response
2312
2312
2313 source_ref_id = pull_request.source_ref_parts.commit_id
2313 source_ref_id = pull_request.source_ref_parts.commit_id
2314 target_ref_id = pull_request.target_ref_parts.commit_id
2314 target_ref_id = pull_request.target_ref_parts.commit_id
2315
2315
2316 try:
2316 try:
2317 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2317 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2318 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2318 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2319 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2319 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2320 merge_check.source_commit.current_raw_id = source_commit.raw_id
2320 merge_check.source_commit.current_raw_id = source_commit.raw_id
2321 merge_check.source_commit.previous_raw_id = source_ref_id
2321 merge_check.source_commit.previous_raw_id = source_ref_id
2322
2322
2323 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2323 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2324 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2324 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2325 merge_check.target_commit.current_raw_id = target_commit.raw_id
2325 merge_check.target_commit.current_raw_id = target_commit.raw_id
2326 merge_check.target_commit.previous_raw_id = target_ref_id
2326 merge_check.target_commit.previous_raw_id = target_ref_id
2327 except (SourceRefMissing, TargetRefMissing):
2327 except (SourceRefMissing, TargetRefMissing):
2328 pass
2328 pass
2329
2329
2330 if not merge_status:
2330 if not merge_status:
2331 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2331 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2332 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2332 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2333
2333
2334 if fail_early:
2334 if fail_early:
2335 return merge_check
2335 return merge_check
2336
2336
2337 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2337 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2338 return merge_check
2338 return merge_check
2339
2339
2340 @classmethod
2340 @classmethod
2341 def get_merge_conditions(cls, pull_request, translator):
2341 def get_merge_conditions(cls, pull_request, translator):
2342 _ = translator
2342 _ = translator
2343 merge_details = {}
2343 merge_details = {}
2344
2344
2345 model = PullRequestModel()
2345 model = PullRequestModel()
2346 use_rebase = model._use_rebase_for_merging(pull_request)
2346 use_rebase = model._use_rebase_for_merging(pull_request)
2347
2347
2348 if use_rebase:
2348 if use_rebase:
2349 merge_details['merge_strategy'] = dict(
2349 merge_details['merge_strategy'] = dict(
2350 details={},
2350 details={},
2351 message=_('Merge strategy: rebase')
2351 message=_('Merge strategy: rebase')
2352 )
2352 )
2353 else:
2353 else:
2354 merge_details['merge_strategy'] = dict(
2354 merge_details['merge_strategy'] = dict(
2355 details={},
2355 details={},
2356 message=_('Merge strategy: explicit merge commit')
2356 message=_('Merge strategy: explicit merge commit')
2357 )
2357 )
2358
2358
2359 close_branch = model._close_branch_before_merging(pull_request)
2359 close_branch = model._close_branch_before_merging(pull_request)
2360 if close_branch:
2360 if close_branch:
2361 repo_type = pull_request.target_repo.repo_type
2361 repo_type = pull_request.target_repo.repo_type
2362 close_msg = ''
2362 close_msg = ''
2363 if repo_type == 'hg':
2363 if repo_type == 'hg':
2364 close_msg = _('Source branch will be closed before the merge.')
2364 close_msg = _('Source branch will be closed before the merge.')
2365 elif repo_type == 'git':
2365 elif repo_type == 'git':
2366 close_msg = _('Source branch will be deleted after the merge.')
2366 close_msg = _('Source branch will be deleted after the merge.')
2367
2367
2368 merge_details['close_branch'] = dict(
2368 merge_details['close_branch'] = dict(
2369 details={},
2369 details={},
2370 message=close_msg
2370 message=close_msg
2371 )
2371 )
2372
2372
2373 return merge_details
2373 return merge_details
2374
2374
2375
2375
2376 ChangeTuple = collections.namedtuple(
2376 ChangeTuple = collections.namedtuple(
2377 'ChangeTuple', ['added', 'common', 'removed', 'total'])
2377 'ChangeTuple', ['added', 'common', 'removed', 'total'])
2378
2378
2379 FileChangeTuple = collections.namedtuple(
2379 FileChangeTuple = collections.namedtuple(
2380 'FileChangeTuple', ['added', 'modified', 'removed'])
2380 'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,1197 +1,1196 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 import os
20 import os
22 import re
21 import re
23 import shutil
22 import shutil
24 import time
23 import time
25 import logging
24 import logging
26 import traceback
25 import traceback
27 import datetime
26 import datetime
28
27
29 from pyramid.threadlocal import get_current_request
28 from pyramid.threadlocal import get_current_request
30 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
31
30
32 from rhodecode import events
31 from rhodecode import events
33 from rhodecode.lib.auth import HasUserGroupPermissionAny
32 from rhodecode.lib.auth import HasUserGroupPermissionAny
34 from rhodecode.lib.caching_query import FromCache
33 from rhodecode.lib.caching_query import FromCache
35 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
34 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
36 from rhodecode.lib import hooks_base
35 from rhodecode.lib import hooks_base
37 from rhodecode.lib.user_log_filter import user_log_filter
36 from rhodecode.lib.user_log_filter import user_log_filter
38 from rhodecode.lib.utils import make_db_config
37 from rhodecode.lib.utils import make_db_config
39 from rhodecode.lib.utils2 import (
38 from rhodecode.lib.utils2 import (
40 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
39 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
41 get_current_rhodecode_user, safe_int, action_logger_generic)
40 get_current_rhodecode_user, safe_int, action_logger_generic)
42 from rhodecode.lib.vcs.backends import get_backend
41 from rhodecode.lib.vcs.backends import get_backend
43 from rhodecode.model import BaseModel
42 from rhodecode.model import BaseModel
44 from rhodecode.model.db import (
43 from rhodecode.model.db import (
45 _hash_key, func, case, joinedload, or_, in_filter_generator,
44 _hash_key, func, case, joinedload, or_, in_filter_generator,
46 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
45 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
46 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
47 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
49 from rhodecode.model.permission import PermissionModel
48 from rhodecode.model.permission import PermissionModel
50 from rhodecode.model.settings import VcsSettingsModel
49 from rhodecode.model.settings import VcsSettingsModel
51
50
52 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
53
52
54
53
55 class RepoModel(BaseModel):
54 class RepoModel(BaseModel):
56
55
57 cls = Repository
56 cls = Repository
58
57
59 def _get_user_group(self, users_group):
58 def _get_user_group(self, users_group):
60 return self._get_instance(UserGroup, users_group,
59 return self._get_instance(UserGroup, users_group,
61 callback=UserGroup.get_by_group_name)
60 callback=UserGroup.get_by_group_name)
62
61
63 def _get_repo_group(self, repo_group):
62 def _get_repo_group(self, repo_group):
64 return self._get_instance(RepoGroup, repo_group,
63 return self._get_instance(RepoGroup, repo_group,
65 callback=RepoGroup.get_by_group_name)
64 callback=RepoGroup.get_by_group_name)
66
65
67 def _create_default_perms(self, repository, private):
66 def _create_default_perms(self, repository, private):
68 # create default permission
67 # create default permission
69 default = 'repository.read'
68 default = 'repository.read'
70 def_user = User.get_default_user()
69 def_user = User.get_default_user()
71 for p in def_user.user_perms:
70 for p in def_user.user_perms:
72 if p.permission.permission_name.startswith('repository.'):
71 if p.permission.permission_name.startswith('repository.'):
73 default = p.permission.permission_name
72 default = p.permission.permission_name
74 break
73 break
75
74
76 default_perm = 'repository.none' if private else default
75 default_perm = 'repository.none' if private else default
77
76
78 repo_to_perm = UserRepoToPerm()
77 repo_to_perm = UserRepoToPerm()
79 repo_to_perm.permission = Permission.get_by_key(default_perm)
78 repo_to_perm.permission = Permission.get_by_key(default_perm)
80
79
81 repo_to_perm.repository = repository
80 repo_to_perm.repository = repository
82 repo_to_perm.user_id = def_user.user_id
81 repo_to_perm.user_id = def_user.user_id
83
82
84 return repo_to_perm
83 return repo_to_perm
85
84
86 @LazyProperty
85 @LazyProperty
87 def repos_path(self):
86 def repos_path(self):
88 """
87 """
89 Gets the repositories root path from the database
88 Gets the repositories root path from the database
90 """
89 """
91 settings_model = VcsSettingsModel(sa=self.sa)
90 settings_model = VcsSettingsModel(sa=self.sa)
92 return settings_model.get_repos_location()
91 return settings_model.get_repos_location()
93
92
94 def get(self, repo_id):
93 def get(self, repo_id):
95 repo = self.sa.query(Repository) \
94 repo = self.sa.query(Repository) \
96 .filter(Repository.repo_id == repo_id)
95 .filter(Repository.repo_id == repo_id)
97
96
98 return repo.scalar()
97 return repo.scalar()
99
98
100 def get_repo(self, repository):
99 def get_repo(self, repository):
101 return self._get_repo(repository)
100 return self._get_repo(repository)
102
101
103 def get_by_repo_name(self, repo_name, cache=False):
102 def get_by_repo_name(self, repo_name, cache=False):
104 repo = self.sa.query(Repository) \
103 repo = self.sa.query(Repository) \
105 .filter(Repository.repo_name == repo_name)
104 .filter(Repository.repo_name == repo_name)
106
105
107 if cache:
106 if cache:
108 name_key = _hash_key(repo_name)
107 name_key = _hash_key(repo_name)
109 repo = repo.options(
108 repo = repo.options(
110 FromCache("sql_cache_short", f"get_repo_{name_key}"))
109 FromCache("sql_cache_short", f"get_repo_{name_key}"))
111 return repo.scalar()
110 return repo.scalar()
112
111
113 def _extract_id_from_repo_name(self, repo_name):
112 def _extract_id_from_repo_name(self, repo_name):
114 if repo_name.startswith('/'):
113 if repo_name.startswith('/'):
115 repo_name = repo_name.lstrip('/')
114 repo_name = repo_name.lstrip('/')
116 by_id_match = re.match(r'^_(\d{1,})', repo_name)
115 by_id_match = re.match(r'^_(\d{1,})', repo_name)
117 if by_id_match:
116 if by_id_match:
118 return by_id_match.groups()[0]
117 return by_id_match.groups()[0]
119
118
120 def get_repo_by_id(self, repo_name):
119 def get_repo_by_id(self, repo_name):
121 """
120 """
122 Extracts the repo id from special urls and returns the matching repository.
121 Extracts the repo id from special urls and returns the matching repository.
123 An example url is _11/repo_name
122 An example url is _11/repo_name
124
123
125 :param repo_name:
124 :param repo_name:
126 :return: repo object if matched else None
125 :return: repo object if matched else None
127 """
126 """
128 _repo_id = None
127 _repo_id = None
129 try:
128 try:
130 _repo_id = self._extract_id_from_repo_name(repo_name)
129 _repo_id = self._extract_id_from_repo_name(repo_name)
131 if _repo_id:
130 if _repo_id:
132 return self.get(_repo_id)
131 return self.get(_repo_id)
133 except Exception:
132 except Exception:
134 log.exception('Failed to extract repo_name from URL')
133 log.exception('Failed to extract repo_name from URL')
135 if _repo_id:
134 if _repo_id:
136 Session().rollback()
135 Session().rollback()
137
136
138 return None
137 return None
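# Usage sketch (hypothetical id and name): for a permalink-style value such as
# '_11/some-repo', _extract_id_from_repo_name() strips a leading '/' and
# matches r'^_(\d{1,})', yielding '11', which get_repo_by_id() then resolves
# via self.get(); on any failure the error is logged and None is returned.
#
#   repo = RepoModel().get_repo_by_id('_11/some-repo')  # Repository with repo_id == 11, or None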
139
138
140 def get_repos_for_root(self, root, traverse=False):
139 def get_repos_for_root(self, root, traverse=False):
141 if traverse:
140 if traverse:
142 like_expression = u'{}%'.format(safe_unicode(root))
141 like_expression = u'{}%'.format(safe_unicode(root))
143 repos = Repository.query().filter(
142 repos = Repository.query().filter(
144 Repository.repo_name.like(like_expression)).all()
143 Repository.repo_name.like(like_expression)).all()
145 else:
144 else:
146 if root and not isinstance(root, RepoGroup):
145 if root and not isinstance(root, RepoGroup):
147 raise ValueError(
146 raise ValueError(
148 'Root must be an instance '
147 'Root must be an instance '
149 'of RepoGroup, got:{} instead'.format(type(root)))
148 'of RepoGroup, got:{} instead'.format(type(root)))
150 repos = Repository.query().filter(Repository.group == root).all()
149 repos = Repository.query().filter(Repository.group == root).all()
151 return repos
150 return repos
152
151
153 def get_url(self, repo, request=None, permalink=False):
152 def get_url(self, repo, request=None, permalink=False):
154 if not request:
153 if not request:
155 request = get_current_request()
154 request = get_current_request()
156
155
157 if not request:
156 if not request:
158 return
157 return
159
158
160 if permalink:
159 if permalink:
161 return request.route_url(
160 return request.route_url(
162 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
161 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
163 else:
162 else:
164 return request.route_url(
163 return request.route_url(
165 'repo_summary', repo_name=safe_str(repo.repo_name))
164 'repo_summary', repo_name=safe_str(repo.repo_name))
166
165
167 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
166 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
168 if not request:
167 if not request:
169 request = get_current_request()
168 request = get_current_request()
170
169
171 if not request:
170 if not request:
172 return
171 return
173
172
174 if permalink:
173 if permalink:
175 return request.route_url(
174 return request.route_url(
176 'repo_commit', repo_name=safe_str(repo.repo_id),
175 'repo_commit', repo_name=safe_str(repo.repo_id),
177 commit_id=commit_id)
176 commit_id=commit_id)
178
177
179 else:
178 else:
180 return request.route_url(
179 return request.route_url(
181 'repo_commit', repo_name=safe_str(repo.repo_name),
180 'repo_commit', repo_name=safe_str(repo.repo_name),
182 commit_id=commit_id)
181 commit_id=commit_id)
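# Usage sketch (hypothetical repo and commit id): with permalink=True the
# 'repo_summary' and 'repo_commit' routes are built from the numeric id in the
# '_<repo_id>' form, so the generated link keeps working after a rename.
#
#   model = RepoModel()
#   model.get_url(repo)                                  # .../<repo_name>
#   model.get_url(repo, permalink=True)                  # .../_<repo_id>
#   model.get_commit_url(repo, commit_id, permalink=True)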
183
182
184 def get_repo_log(self, repo, filter_term):
183 def get_repo_log(self, repo, filter_term):
185 repo_log = UserLog.query()\
184 repo_log = UserLog.query()\
186 .filter(or_(UserLog.repository_id == repo.repo_id,
185 .filter(or_(UserLog.repository_id == repo.repo_id,
187 UserLog.repository_name == repo.repo_name))\
186 UserLog.repository_name == repo.repo_name))\
188 .options(joinedload(UserLog.user))\
187 .options(joinedload(UserLog.user))\
189 .options(joinedload(UserLog.repository))\
188 .options(joinedload(UserLog.repository))\
190 .order_by(UserLog.action_date.desc())
189 .order_by(UserLog.action_date.desc())
191
190
192 repo_log = user_log_filter(repo_log, filter_term)
191 repo_log = user_log_filter(repo_log, filter_term)
193 return repo_log
192 return repo_log
194
193
195 @classmethod
194 @classmethod
196 def update_commit_cache(cls, repositories=None):
195 def update_commit_cache(cls, repositories=None):
197 if not repositories:
196 if not repositories:
198 repositories = Repository.getAll()
197 repositories = Repository.getAll()
199 for repo in repositories:
198 for repo in repositories:
200 repo.update_commit_cache()
199 repo.update_commit_cache()
201
200
202 def get_repos_as_dict(self, repo_list=None, admin=False,
201 def get_repos_as_dict(self, repo_list=None, admin=False,
203 super_user_actions=False, short_name=None):
202 super_user_actions=False, short_name=None):
204
203
205 _render = get_current_request().get_partial_renderer(
204 _render = get_current_request().get_partial_renderer(
206 'rhodecode:templates/data_table/_dt_elements.mako')
205 'rhodecode:templates/data_table/_dt_elements.mako')
207 c = _render.get_call_context()
206 c = _render.get_call_context()
208 h = _render.get_helpers()
207 h = _render.get_helpers()
209
208
210 def quick_menu(repo_name):
209 def quick_menu(repo_name):
211 return _render('quick_menu', repo_name)
210 return _render('quick_menu', repo_name)
212
211
213 def repo_lnk(name, rtype, rstate, private, archived, fork_of):
212 def repo_lnk(name, rtype, rstate, private, archived, fork_of):
214 if short_name is not None:
213 if short_name is not None:
215 short_name_var = short_name
214 short_name_var = short_name
216 else:
215 else:
217 short_name_var = not admin
216 short_name_var = not admin
218 return _render('repo_name', name, rtype, rstate, private, archived, fork_of,
217 return _render('repo_name', name, rtype, rstate, private, archived, fork_of,
219 short_name=short_name_var, admin=False)
218 short_name=short_name_var, admin=False)
220
219
221 def last_change(last_change):
220 def last_change(last_change):
222 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
221 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
223 ts = time.time()
222 ts = time.time()
224 utc_offset = (datetime.datetime.fromtimestamp(ts)
223 utc_offset = (datetime.datetime.fromtimestamp(ts)
225 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
224 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
226 last_change = last_change + datetime.timedelta(seconds=utc_offset)
225 last_change = last_change + datetime.timedelta(seconds=utc_offset)
227
226
228 return _render("last_change", last_change)
227 return _render("last_change", last_change)
229
228
230 def rss_lnk(repo_name):
229 def rss_lnk(repo_name):
231 return _render("rss", repo_name)
230 return _render("rss", repo_name)
232
231
233 def atom_lnk(repo_name):
232 def atom_lnk(repo_name):
234 return _render("atom", repo_name)
233 return _render("atom", repo_name)
235
234
236 def last_rev(repo_name, cs_cache):
235 def last_rev(repo_name, cs_cache):
237 return _render('revision', repo_name, cs_cache.get('revision'),
236 return _render('revision', repo_name, cs_cache.get('revision'),
238 cs_cache.get('raw_id'), cs_cache.get('author'),
237 cs_cache.get('raw_id'), cs_cache.get('author'),
239 cs_cache.get('message'), cs_cache.get('date'))
238 cs_cache.get('message'), cs_cache.get('date'))
240
239
241 def desc(desc):
240 def desc(desc):
242 return _render('repo_desc', desc, c.visual.stylify_metatags)
241 return _render('repo_desc', desc, c.visual.stylify_metatags)
243
242
244 def state(repo_state):
243 def state(repo_state):
245 return _render("repo_state", repo_state)
244 return _render("repo_state", repo_state)
246
245
247 def repo_actions(repo_name):
246 def repo_actions(repo_name):
248 return _render('repo_actions', repo_name, super_user_actions)
247 return _render('repo_actions', repo_name, super_user_actions)
249
248
250 def user_profile(username):
249 def user_profile(username):
251 return _render('user_profile', username)
250 return _render('user_profile', username)
252
251
253 repos_data = []
252 repos_data = []
254 for repo in repo_list:
253 for repo in repo_list:
255 # NOTE(marcink): because we use only the raw column, we need to load it like that
254 # NOTE(marcink): because we use only the raw column, we need to load it like that
256 changeset_cache = Repository._load_changeset_cache(
255 changeset_cache = Repository._load_changeset_cache(
257 repo.repo_id, repo._changeset_cache)
256 repo.repo_id, repo._changeset_cache)
258
257
259 row = {
258 row = {
260 "menu": quick_menu(repo.repo_name),
259 "menu": quick_menu(repo.repo_name),
261
260
262 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
261 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
263 repo.private, repo.archived, repo.fork),
262 repo.private, repo.archived, repo.fork),
264
263
265 "desc": desc(h.escape(repo.description)),
264 "desc": desc(h.escape(repo.description)),
266
265
267 "last_change": last_change(repo.updated_on),
266 "last_change": last_change(repo.updated_on),
268
267
269 "last_changeset": last_rev(repo.repo_name, changeset_cache),
268 "last_changeset": last_rev(repo.repo_name, changeset_cache),
270 "last_changeset_raw": changeset_cache.get('revision'),
269 "last_changeset_raw": changeset_cache.get('revision'),
271
270
272 "owner": user_profile(repo.User.username),
271 "owner": user_profile(repo.User.username),
273
272
274 "state": state(repo.repo_state),
273 "state": state(repo.repo_state),
275 "rss": rss_lnk(repo.repo_name),
274 "rss": rss_lnk(repo.repo_name),
276 "atom": atom_lnk(repo.repo_name),
275 "atom": atom_lnk(repo.repo_name),
277 }
276 }
278 if admin:
277 if admin:
279 row.update({
278 row.update({
280 "action": repo_actions(repo.repo_name),
279 "action": repo_actions(repo.repo_name),
281 })
280 })
282 repos_data.append(row)
281 repos_data.append(row)
283
282
284 return repos_data
283 return repos_data
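# Shape sketch of the rows built above: every value is a pre-rendered HTML
# fragment from the _dt_elements.mako partial, and each row carries the keys
# "menu", "name", "desc", "last_change", "last_changeset", "last_changeset_raw",
# "owner", "state", "rss" and "atom", plus "action" when admin=True.
# Hypothetical call:
#
#   rows = RepoModel().get_repos_as_dict(repo_list=my_repos, admin=False, short_name=True)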
285
284
286 def get_repos_data_table(
285 def get_repos_data_table(
287 self, draw, start, limit,
286 self, draw, start, limit,
288 search_q, order_by, order_dir,
287 search_q, order_by, order_dir,
289 auth_user, repo_group_id):
288 auth_user, repo_group_id):
290 from rhodecode.model.scm import RepoList
289 from rhodecode.model.scm import RepoList
291
290
292 _perms = ['repository.read', 'repository.write', 'repository.admin']
291 _perms = ['repository.read', 'repository.write', 'repository.admin']
293
292
294 repos = Repository.query() \
293 repos = Repository.query() \
295 .filter(Repository.group_id == repo_group_id) \
294 .filter(Repository.group_id == repo_group_id) \
296 .all()
295 .all()
297 auth_repo_list = RepoList(
296 auth_repo_list = RepoList(
298 repos, perm_set=_perms,
297 repos, perm_set=_perms,
299 extra_kwargs=dict(user=auth_user))
298 extra_kwargs=dict(user=auth_user))
300
299
301 allowed_ids = [-1]
300 allowed_ids = [-1]
302 for repo in auth_repo_list:
301 for repo in auth_repo_list:
303 allowed_ids.append(repo.repo_id)
302 allowed_ids.append(repo.repo_id)
304
303
305 repos_data_total_count = Repository.query() \
304 repos_data_total_count = Repository.query() \
306 .filter(Repository.group_id == repo_group_id) \
305 .filter(Repository.group_id == repo_group_id) \
307 .filter(or_(
306 .filter(or_(
308 # generate multiple IN clauses to work around parameter-count limitations
307 # generate multiple IN clauses to work around parameter-count limitations
309 *in_filter_generator(Repository.repo_id, allowed_ids))
308 *in_filter_generator(Repository.repo_id, allowed_ids))
310 ) \
309 ) \
311 .count()
310 .count()
312
311
313 base_q = Session.query(
312 base_q = Session.query(
314 Repository.repo_id,
313 Repository.repo_id,
315 Repository.repo_name,
314 Repository.repo_name,
316 Repository.description,
315 Repository.description,
317 Repository.repo_type,
316 Repository.repo_type,
318 Repository.repo_state,
317 Repository.repo_state,
319 Repository.private,
318 Repository.private,
320 Repository.archived,
319 Repository.archived,
321 Repository.fork,
320 Repository.fork,
322 Repository.updated_on,
321 Repository.updated_on,
323 Repository._changeset_cache,
322 Repository._changeset_cache,
324 User,
323 User,
325 ) \
324 ) \
326 .filter(Repository.group_id == repo_group_id) \
325 .filter(Repository.group_id == repo_group_id) \
327 .filter(or_(
326 .filter(or_(
328 # generate multiple IN clauses to work around parameter-count limitations
327 # generate multiple IN clauses to work around parameter-count limitations
329 *in_filter_generator(Repository.repo_id, allowed_ids))
328 *in_filter_generator(Repository.repo_id, allowed_ids))
330 ) \
329 ) \
331 .join(User, User.user_id == Repository.user_id) \
330 .join(User, User.user_id == Repository.user_id) \
332 .group_by(Repository, User)
331 .group_by(Repository, User)
333
332
334 repos_data_total_filtered_count = base_q.count()
333 repos_data_total_filtered_count = base_q.count()
335
334
336 sort_defined = False
335 sort_defined = False
337 if order_by == 'repo_name':
336 if order_by == 'repo_name':
338 sort_col = func.lower(Repository.repo_name)
337 sort_col = func.lower(Repository.repo_name)
339 sort_defined = True
338 sort_defined = True
340 elif order_by == 'user_username':
339 elif order_by == 'user_username':
341 sort_col = User.username
340 sort_col = User.username
342 else:
341 else:
343 sort_col = getattr(Repository, order_by, None)
342 sort_col = getattr(Repository, order_by, None)
344
343
345 if sort_defined or sort_col:
344 if sort_defined or sort_col:
346 if order_dir == 'asc':
345 if order_dir == 'asc':
347 sort_col = sort_col.asc()
346 sort_col = sort_col.asc()
348 else:
347 else:
349 sort_col = sort_col.desc()
348 sort_col = sort_col.desc()
350
349
351 base_q = base_q.order_by(sort_col)
350 base_q = base_q.order_by(sort_col)
352 base_q = base_q.offset(start).limit(limit)
351 base_q = base_q.offset(start).limit(limit)
353
352
354 repos_list = base_q.all()
353 repos_list = base_q.all()
355
354
356 repos_data = RepoModel().get_repos_as_dict(
355 repos_data = RepoModel().get_repos_as_dict(
357 repo_list=repos_list, admin=False)
356 repo_list=repos_list, admin=False)
358
357
359 data = ({
358 data = ({
360 'draw': draw,
359 'draw': draw,
361 'data': repos_data,
360 'data': repos_data,
362 'recordsTotal': repos_data_total_count,
361 'recordsTotal': repos_data_total_count,
363 'recordsFiltered': repos_data_total_filtered_count,
362 'recordsFiltered': repos_data_total_filtered_count,
364 })
363 })
365 return data
364 return data
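# Usage sketch (hypothetical arguments): the returned dict follows the
# DataTables server-side convention, where 'draw' echoes the request counter,
# 'data' holds the rendered rows, and 'recordsTotal'/'recordsFiltered' carry
# the total and filtered row counts.
#
#   payload = RepoModel().get_repos_data_table(
#       draw=1, start=0, limit=25,
#       search_q='', order_by='repo_name', order_dir='asc',
#       auth_user=auth_user, repo_group_id=5)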
366
365
367 def _get_defaults(self, repo_name):
366 def _get_defaults(self, repo_name):
368 """
367 """
369 Gets information about a repository and returns a dict for
368 Gets information about a repository and returns a dict for
370 use in forms
369 use in forms
371
370
372 :param repo_name:
371 :param repo_name:
373 """
372 """
374
373
375 repo_info = Repository.get_by_repo_name(repo_name)
374 repo_info = Repository.get_by_repo_name(repo_name)
376
375
377 if repo_info is None:
376 if repo_info is None:
378 return None
377 return None
379
378
380 defaults = repo_info.get_dict()
379 defaults = repo_info.get_dict()
381 defaults['repo_name'] = repo_info.just_name
380 defaults['repo_name'] = repo_info.just_name
382
381
383 groups = repo_info.groups_with_parents
382 groups = repo_info.groups_with_parents
384 parent_group = groups[-1] if groups else None
383 parent_group = groups[-1] if groups else None
385
384
386 # we use -1 as this is how we mark an empty group in the HTML form
385 # we use -1 as this is how we mark an empty group in the HTML form
387 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
386 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
388
387
389 keys_to_process = (
388 keys_to_process = (
390 {'k': 'repo_type', 'strip': False},
389 {'k': 'repo_type', 'strip': False},
391 {'k': 'repo_enable_downloads', 'strip': True},
390 {'k': 'repo_enable_downloads', 'strip': True},
392 {'k': 'repo_description', 'strip': True},
391 {'k': 'repo_description', 'strip': True},
393 {'k': 'repo_enable_locking', 'strip': True},
392 {'k': 'repo_enable_locking', 'strip': True},
394 {'k': 'repo_landing_rev', 'strip': True},
393 {'k': 'repo_landing_rev', 'strip': True},
395 {'k': 'clone_uri', 'strip': False},
394 {'k': 'clone_uri', 'strip': False},
396 {'k': 'push_uri', 'strip': False},
395 {'k': 'push_uri', 'strip': False},
397 {'k': 'repo_private', 'strip': True},
396 {'k': 'repo_private', 'strip': True},
398 {'k': 'repo_enable_statistics', 'strip': True}
397 {'k': 'repo_enable_statistics', 'strip': True}
399 )
398 )
400
399
401 for item in keys_to_process:
400 for item in keys_to_process:
402 attr = item['k']
401 attr = item['k']
403 if item['strip']:
402 if item['strip']:
404 attr = remove_prefix(item['k'], 'repo_')
403 attr = remove_prefix(item['k'], 'repo_')
405
404
406 val = defaults[attr]
405 val = defaults[attr]
407 if item['k'] == 'repo_landing_rev':
406 if item['k'] == 'repo_landing_rev':
408 val = ':'.join(defaults[attr])
407 val = ':'.join(defaults[attr])
409 defaults[item['k']] = val
408 defaults[item['k']] = val
410 if item['k'] == 'clone_uri':
409 if item['k'] == 'clone_uri':
411 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
410 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
412 if item['k'] == 'push_uri':
411 if item['k'] == 'push_uri':
413 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
412 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
414
413
415 # fill owner
414 # fill owner
416 if repo_info.user:
415 if repo_info.user:
417 defaults.update({'user': repo_info.user.username})
416 defaults.update({'user': repo_info.user.username})
418 else:
417 else:
419 replacement_user = User.get_first_super_admin().username
418 replacement_user = User.get_first_super_admin().username
420 defaults.update({'user': replacement_user})
419 defaults.update({'user': replacement_user})
421
420
422 return defaults
421 return defaults
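# Clarifying sketch (hypothetical repo): for entries flagged 'strip': True the
# value is read from the un-prefixed attribute of repo_info.get_dict() and
# copied back under the form-field name, e.g. defaults['repo_description']
# takes the value of defaults['description'], and 'repo_landing_rev' is
# re-joined into its 'type:name' form.
#
#   defaults = RepoModel()._get_defaults('my-group/my-repo')
#   # defaults['repo_name'] == 'my-repo'; defaults['repo_group'] is the parent group id or -1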
423
422
424 def update(self, repo, **kwargs):
423 def update(self, repo, **kwargs):
425 try:
424 try:
426 cur_repo = self._get_repo(repo)
425 cur_repo = self._get_repo(repo)
427 source_repo_name = cur_repo.repo_name
426 source_repo_name = cur_repo.repo_name
428
427
429 affected_user_ids = []
428 affected_user_ids = []
430 if 'user' in kwargs:
429 if 'user' in kwargs:
431 old_owner_id = cur_repo.user.user_id
430 old_owner_id = cur_repo.user.user_id
432 new_owner = User.get_by_username(kwargs['user'])
431 new_owner = User.get_by_username(kwargs['user'])
433 cur_repo.user = new_owner
432 cur_repo.user = new_owner
434
433
435 if old_owner_id != new_owner.user_id:
434 if old_owner_id != new_owner.user_id:
436 affected_user_ids = [new_owner.user_id, old_owner_id]
435 affected_user_ids = [new_owner.user_id, old_owner_id]
437
436
438 if 'repo_group' in kwargs:
437 if 'repo_group' in kwargs:
439 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
438 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
440 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
439 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
441
440
442 update_keys = [
441 update_keys = [
443 (1, 'repo_description'),
442 (1, 'repo_description'),
444 (1, 'repo_landing_rev'),
443 (1, 'repo_landing_rev'),
445 (1, 'repo_private'),
444 (1, 'repo_private'),
446 (1, 'repo_enable_downloads'),
445 (1, 'repo_enable_downloads'),
447 (1, 'repo_enable_locking'),
446 (1, 'repo_enable_locking'),
448 (1, 'repo_enable_statistics'),
447 (1, 'repo_enable_statistics'),
449 (0, 'clone_uri'),
448 (0, 'clone_uri'),
450 (0, 'push_uri'),
449 (0, 'push_uri'),
451 (0, 'fork_id')
450 (0, 'fork_id')
452 ]
451 ]
453 for strip, k in update_keys:
452 for strip, k in update_keys:
454 if k in kwargs:
453 if k in kwargs:
455 val = kwargs[k]
454 val = kwargs[k]
456 if strip:
455 if strip:
457 k = remove_prefix(k, 'repo_')
456 k = remove_prefix(k, 'repo_')
458
457
459 setattr(cur_repo, k, val)
458 setattr(cur_repo, k, val)
460
459
461 new_name = cur_repo.get_new_name(kwargs['repo_name'])
460 new_name = cur_repo.get_new_name(kwargs['repo_name'])
462 cur_repo.repo_name = new_name
461 cur_repo.repo_name = new_name
463
462
464 # if private flag is set, reset default permission to NONE
463 # if private flag is set, reset default permission to NONE
465 if kwargs.get('repo_private'):
464 if kwargs.get('repo_private'):
466 EMPTY_PERM = 'repository.none'
465 EMPTY_PERM = 'repository.none'
467 RepoModel().grant_user_permission(
466 RepoModel().grant_user_permission(
468 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
467 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
469 )
468 )
470 if kwargs.get('repo_landing_rev'):
469 if kwargs.get('repo_landing_rev'):
471 landing_rev_val = kwargs['repo_landing_rev']
470 landing_rev_val = kwargs['repo_landing_rev']
472 RepoModel().set_landing_rev(cur_repo, landing_rev_val)
471 RepoModel().set_landing_rev(cur_repo, landing_rev_val)
473
472
474 # handle extra fields
473 # handle extra fields
475 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
474 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
476 k = RepositoryField.un_prefix_key(field)
475 k = RepositoryField.un_prefix_key(field)
477 ex_field = RepositoryField.get_by_key_name(
476 ex_field = RepositoryField.get_by_key_name(
478 key=k, repo=cur_repo)
477 key=k, repo=cur_repo)
479 if ex_field:
478 if ex_field:
480 ex_field.field_value = kwargs[field]
479 ex_field.field_value = kwargs[field]
481 self.sa.add(ex_field)
480 self.sa.add(ex_field)
482
481
483 self.sa.add(cur_repo)
482 self.sa.add(cur_repo)
484
483
485 if source_repo_name != new_name:
484 if source_repo_name != new_name:
486 # rename repository
485 # rename repository
487 self._rename_filesystem_repo(
486 self._rename_filesystem_repo(
488 old=source_repo_name, new=new_name)
487 old=source_repo_name, new=new_name)
489
488
490 if affected_user_ids:
489 if affected_user_ids:
491 PermissionModel().trigger_permission_flush(affected_user_ids)
490 PermissionModel().trigger_permission_flush(affected_user_ids)
492
491
493 return cur_repo
492 return cur_repo
494 except Exception:
493 except Exception:
495 log.error(traceback.format_exc())
494 log.error(traceback.format_exc())
496 raise
495 raise
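# Usage sketch (hypothetical values): note that update() always reads
# kwargs['repo_name'] to compute the new name, so it has to be supplied even
# when the name stays the same; passing repo_private=True also resets the
# default user's permission to 'repository.none', and repo_landing_rev is
# forwarded to set_landing_rev().
#
#   RepoModel().update(
#       'my-group/my-repo',
#       repo_name='my-repo', repo_description='updated description',
#       repo_private=True, repo_landing_rev='branch:default')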
497
496
498 def _create_repo(self, repo_name, repo_type, description, owner,
497 def _create_repo(self, repo_name, repo_type, description, owner,
499 private=False, clone_uri=None, repo_group=None,
498 private=False, clone_uri=None, repo_group=None,
500 landing_rev=None, fork_of=None,
499 landing_rev=None, fork_of=None,
501 copy_fork_permissions=False, enable_statistics=False,
500 copy_fork_permissions=False, enable_statistics=False,
502 enable_locking=False, enable_downloads=False,
501 enable_locking=False, enable_downloads=False,
503 copy_group_permissions=False,
502 copy_group_permissions=False,
504 state=Repository.STATE_PENDING):
503 state=Repository.STATE_PENDING):
505 """
504 """
506 Create a repository inside the database with PENDING state. This should
505 Create a repository inside the database with PENDING state. This should
507 only be executed by create(), with the exception of importing existing
506 only be executed by create(), with the exception of importing existing
508 repositories
507 repositories
509 """
508 """
510 from rhodecode.model.scm import ScmModel
509 from rhodecode.model.scm import ScmModel
511
510
512 owner = self._get_user(owner)
511 owner = self._get_user(owner)
513 fork_of = self._get_repo(fork_of)
512 fork_of = self._get_repo(fork_of)
514 repo_group = self._get_repo_group(safe_int(repo_group))
513 repo_group = self._get_repo_group(safe_int(repo_group))
515 default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
514 default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
516 landing_rev = landing_rev or default_landing_ref
515 landing_rev = landing_rev or default_landing_ref
517
516
518 try:
517 try:
519 repo_name = safe_unicode(repo_name)
518 repo_name = safe_unicode(repo_name)
520 description = safe_unicode(description)
519 description = safe_unicode(description)
521 # repo_name is just the name of the repository, while repo_name_full
520 # repo_name is just the name of the repository, while repo_name_full
522 # is a fully qualified name combined from the repository name and the
521 # is a fully qualified name combined from the repository name and the
523 # path of its group
522 # path of its group
524 repo_name_full = repo_name
523 repo_name_full = repo_name
525 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
524 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
526
525
527 new_repo = Repository()
526 new_repo = Repository()
528 new_repo.repo_state = state
527 new_repo.repo_state = state
529 new_repo.enable_statistics = False
528 new_repo.enable_statistics = False
530 new_repo.repo_name = repo_name_full
529 new_repo.repo_name = repo_name_full
531 new_repo.repo_type = repo_type
530 new_repo.repo_type = repo_type
532 new_repo.user = owner
531 new_repo.user = owner
533 new_repo.group = repo_group
532 new_repo.group = repo_group
534 new_repo.description = description or repo_name
533 new_repo.description = description or repo_name
535 new_repo.private = private
534 new_repo.private = private
536 new_repo.archived = False
535 new_repo.archived = False
537 new_repo.clone_uri = clone_uri
536 new_repo.clone_uri = clone_uri
538 new_repo.landing_rev = landing_rev
537 new_repo.landing_rev = landing_rev
539
538
540 new_repo.enable_statistics = enable_statistics
539 new_repo.enable_statistics = enable_statistics
541 new_repo.enable_locking = enable_locking
540 new_repo.enable_locking = enable_locking
542 new_repo.enable_downloads = enable_downloads
541 new_repo.enable_downloads = enable_downloads
543
542
544 if repo_group:
543 if repo_group:
545 new_repo.enable_locking = repo_group.enable_locking
544 new_repo.enable_locking = repo_group.enable_locking
546
545
547 if fork_of:
546 if fork_of:
548 parent_repo = fork_of
547 parent_repo = fork_of
549 new_repo.fork = parent_repo
548 new_repo.fork = parent_repo
550
549
551 events.trigger(events.RepoPreCreateEvent(new_repo))
550 events.trigger(events.RepoPreCreateEvent(new_repo))
552
551
553 self.sa.add(new_repo)
552 self.sa.add(new_repo)
554
553
555 EMPTY_PERM = 'repository.none'
554 EMPTY_PERM = 'repository.none'
556 if fork_of and copy_fork_permissions:
555 if fork_of and copy_fork_permissions:
557 repo = fork_of
556 repo = fork_of
558 user_perms = UserRepoToPerm.query() \
557 user_perms = UserRepoToPerm.query() \
559 .filter(UserRepoToPerm.repository == repo).all()
558 .filter(UserRepoToPerm.repository == repo).all()
560 group_perms = UserGroupRepoToPerm.query() \
559 group_perms = UserGroupRepoToPerm.query() \
561 .filter(UserGroupRepoToPerm.repository == repo).all()
560 .filter(UserGroupRepoToPerm.repository == repo).all()
562
561
563 for perm in user_perms:
562 for perm in user_perms:
564 UserRepoToPerm.create(
563 UserRepoToPerm.create(
565 perm.user, new_repo, perm.permission)
564 perm.user, new_repo, perm.permission)
566
565
567 for perm in group_perms:
566 for perm in group_perms:
568 UserGroupRepoToPerm.create(
567 UserGroupRepoToPerm.create(
569 perm.users_group, new_repo, perm.permission)
568 perm.users_group, new_repo, perm.permission)
570 # in case we copy permissions and also set this repo to private
569 # in case we copy permissions and also set this repo to private
571 # override the default user permission to make it a private repo
570 # override the default user permission to make it a private repo
572 if private:
571 if private:
573 RepoModel(self.sa).grant_user_permission(
572 RepoModel(self.sa).grant_user_permission(
574 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
573 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
575
574
576 elif repo_group and copy_group_permissions:
575 elif repo_group and copy_group_permissions:
577 user_perms = UserRepoGroupToPerm.query() \
576 user_perms = UserRepoGroupToPerm.query() \
578 .filter(UserRepoGroupToPerm.group == repo_group).all()
577 .filter(UserRepoGroupToPerm.group == repo_group).all()
579
578
580 group_perms = UserGroupRepoGroupToPerm.query() \
579 group_perms = UserGroupRepoGroupToPerm.query() \
581 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
580 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
582
581
583 for perm in user_perms:
582 for perm in user_perms:
584 perm_name = perm.permission.permission_name.replace(
583 perm_name = perm.permission.permission_name.replace(
585 'group.', 'repository.')
584 'group.', 'repository.')
586 perm_obj = Permission.get_by_key(perm_name)
585 perm_obj = Permission.get_by_key(perm_name)
587 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
586 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
588
587
589 for perm in group_perms:
588 for perm in group_perms:
590 perm_name = perm.permission.permission_name.replace(
589 perm_name = perm.permission.permission_name.replace(
591 'group.', 'repository.')
590 'group.', 'repository.')
592 perm_obj = Permission.get_by_key(perm_name)
591 perm_obj = Permission.get_by_key(perm_name)
593 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
592 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
594
593
595 if private:
594 if private:
596 RepoModel(self.sa).grant_user_permission(
595 RepoModel(self.sa).grant_user_permission(
597 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
596 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
598
597
599 else:
598 else:
600 perm_obj = self._create_default_perms(new_repo, private)
599 perm_obj = self._create_default_perms(new_repo, private)
601 self.sa.add(perm_obj)
600 self.sa.add(perm_obj)
602
601
603 # now automatically start following this repository as owner
602 # now automatically start following this repository as owner
604 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
603 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
605
604
606 # we need to flush here in order to check whether the database throws
605 # we need to flush here in order to check whether the database throws
607 # any exceptions; filesystem dirs are created at the very end
606 # any exceptions; filesystem dirs are created at the very end
608 self.sa.flush()
607 self.sa.flush()
609 events.trigger(events.RepoCreateEvent(new_repo))
608 events.trigger(events.RepoCreateEvent(new_repo))
610 return new_repo
609 return new_repo
611
610
612 except Exception:
611 except Exception:
613 log.error(traceback.format_exc())
612 log.error(traceback.format_exc())
614 raise
613 raise
615
614
616 def create(self, form_data, cur_user):
615 def create(self, form_data, cur_user):
617 """
616 """
618 Create repository using celery tasks
617 Create repository using celery tasks
619
618
620 :param form_data:
619 :param form_data:
621 :param cur_user:
620 :param cur_user:
622 """
621 """
623 from rhodecode.lib.celerylib import tasks, run_task
622 from rhodecode.lib.celerylib import tasks, run_task
624 return run_task(tasks.create_repo, form_data, cur_user)
623 return run_task(tasks.create_repo, form_data, cur_user)
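# Brief sketch: create() only dispatches the 'create_repo' celery task with the
# validated form data; the database record and filesystem repository are made
# by that task, which is expected (per the _create_repo docstring above) to go
# through the _create_repo() and _create_filesystem_repo() helpers of this
# class. Hypothetical call:
#
#   RepoModel().create({'repo_name': 'my-repo', 'repo_type': 'git'}, cur_user)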
625
624
626 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
625 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
627 perm_deletions=None, check_perms=True,
626 perm_deletions=None, check_perms=True,
628 cur_user=None):
627 cur_user=None):
629 if not perm_additions:
628 if not perm_additions:
630 perm_additions = []
629 perm_additions = []
631 if not perm_updates:
630 if not perm_updates:
632 perm_updates = []
631 perm_updates = []
633 if not perm_deletions:
632 if not perm_deletions:
634 perm_deletions = []
633 perm_deletions = []
635
634
636 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
635 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
637
636
638 changes = {
637 changes = {
639 'added': [],
638 'added': [],
640 'updated': [],
639 'updated': [],
641 'deleted': [],
640 'deleted': [],
642 'default_user_changed': None
641 'default_user_changed': None
643 }
642 }
644
643
645 repo = self._get_repo(repo)
644 repo = self._get_repo(repo)
646
645
647 # update permissions
646 # update permissions
648 for member_id, perm, member_type in perm_updates:
647 for member_id, perm, member_type in perm_updates:
649 member_id = int(member_id)
648 member_id = int(member_id)
650 if member_type == 'user':
649 if member_type == 'user':
651 member_name = User.get(member_id).username
650 member_name = User.get(member_id).username
652 if member_name == User.DEFAULT_USER:
651 if member_name == User.DEFAULT_USER:
653 # NOTE(dan): detect if we changed permissions for default user
652 # NOTE(dan): detect if we changed permissions for default user
654 perm_obj = self.sa.query(UserRepoToPerm) \
653 perm_obj = self.sa.query(UserRepoToPerm) \
655 .filter(UserRepoToPerm.user_id == member_id) \
654 .filter(UserRepoToPerm.user_id == member_id) \
656 .filter(UserRepoToPerm.repository == repo) \
655 .filter(UserRepoToPerm.repository == repo) \
657 .scalar()
656 .scalar()
658 if perm_obj and perm_obj.permission.permission_name != perm:
657 if perm_obj and perm_obj.permission.permission_name != perm:
659 changes['default_user_changed'] = True
658 changes['default_user_changed'] = True
660
659
661 # this updates also current one if found
660 # this updates also current one if found
662 self.grant_user_permission(
661 self.grant_user_permission(
663 repo=repo, user=member_id, perm=perm)
662 repo=repo, user=member_id, perm=perm)
664 elif member_type == 'user_group':
663 elif member_type == 'user_group':
665 # check if we have permissions to alter this usergroup
664 # check if we have permissions to alter this usergroup
666 member_name = UserGroup.get(member_id).users_group_name
665 member_name = UserGroup.get(member_id).users_group_name
667 if not check_perms or HasUserGroupPermissionAny(
666 if not check_perms or HasUserGroupPermissionAny(
668 *req_perms)(member_name, user=cur_user):
667 *req_perms)(member_name, user=cur_user):
669 self.grant_user_group_permission(
668 self.grant_user_group_permission(
670 repo=repo, group_name=member_id, perm=perm)
669 repo=repo, group_name=member_id, perm=perm)
671 else:
670 else:
672 raise ValueError("member_type must be 'user' or 'user_group' "
671 raise ValueError("member_type must be 'user' or 'user_group' "
673 "got {} instead".format(member_type))
672 "got {} instead".format(member_type))
674 changes['updated'].append({'type': member_type, 'id': member_id,
673 changes['updated'].append({'type': member_type, 'id': member_id,
675 'name': member_name, 'new_perm': perm})
674 'name': member_name, 'new_perm': perm})
676
675
677 # set new permissions
676 # set new permissions
678 for member_id, perm, member_type in perm_additions:
677 for member_id, perm, member_type in perm_additions:
679 member_id = int(member_id)
678 member_id = int(member_id)
680 if member_type == 'user':
679 if member_type == 'user':
681 member_name = User.get(member_id).username
680 member_name = User.get(member_id).username
682 self.grant_user_permission(
681 self.grant_user_permission(
683 repo=repo, user=member_id, perm=perm)
682 repo=repo, user=member_id, perm=perm)
684 elif member_type == 'user_group':
683 elif member_type == 'user_group':
685 # check if we have permissions to alter this usergroup
684 # check if we have permissions to alter this usergroup
686 member_name = UserGroup.get(member_id).users_group_name
685 member_name = UserGroup.get(member_id).users_group_name
687 if not check_perms or HasUserGroupPermissionAny(
686 if not check_perms or HasUserGroupPermissionAny(
688 *req_perms)(member_name, user=cur_user):
687 *req_perms)(member_name, user=cur_user):
689 self.grant_user_group_permission(
688 self.grant_user_group_permission(
690 repo=repo, group_name=member_id, perm=perm)
689 repo=repo, group_name=member_id, perm=perm)
691 else:
690 else:
692 raise ValueError("member_type must be 'user' or 'user_group' "
691 raise ValueError("member_type must be 'user' or 'user_group' "
693 "got {} instead".format(member_type))
692 "got {} instead".format(member_type))
694
693
695 changes['added'].append({'type': member_type, 'id': member_id,
694 changes['added'].append({'type': member_type, 'id': member_id,
696 'name': member_name, 'new_perm': perm})
695 'name': member_name, 'new_perm': perm})
697 # delete permissions
696 # delete permissions
698 for member_id, perm, member_type in perm_deletions:
697 for member_id, perm, member_type in perm_deletions:
699 member_id = int(member_id)
698 member_id = int(member_id)
700 if member_type == 'user':
699 if member_type == 'user':
701 member_name = User.get(member_id).username
700 member_name = User.get(member_id).username
702 self.revoke_user_permission(repo=repo, user=member_id)
701 self.revoke_user_permission(repo=repo, user=member_id)
703 elif member_type == 'user_group':
702 elif member_type == 'user_group':
704 # check if we have permissions to alter this usergroup
703 # check if we have permissions to alter this usergroup
705 member_name = UserGroup.get(member_id).users_group_name
704 member_name = UserGroup.get(member_id).users_group_name
706 if not check_perms or HasUserGroupPermissionAny(
705 if not check_perms or HasUserGroupPermissionAny(
707 *req_perms)(member_name, user=cur_user):
706 *req_perms)(member_name, user=cur_user):
708 self.revoke_user_group_permission(
707 self.revoke_user_group_permission(
709 repo=repo, group_name=member_id)
708 repo=repo, group_name=member_id)
710 else:
709 else:
711 raise ValueError("member_type must be 'user' or 'user_group' "
710 raise ValueError("member_type must be 'user' or 'user_group' "
712 "got {} instead".format(member_type))
711 "got {} instead".format(member_type))
713
712
714 changes['deleted'].append({'type': member_type, 'id': member_id,
713 changes['deleted'].append({'type': member_type, 'id': member_id,
715 'name': member_name, 'new_perm': perm})
714 'name': member_name, 'new_perm': perm})
716 return changes
715 return changes
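# Shape sketch of the expected input (hypothetical ids): each entry is a
# (member_id, permission_name, member_type) triple with member_type being
# 'user' or 'user_group'; the returned dict records the work done under
# 'added', 'updated' and 'deleted' and sets 'default_user_changed' when the
# default user's permission was altered.
#
#   changes = RepoModel().update_permissions(
#       'my-group/my-repo',
#       perm_additions=[(42, 'repository.write', 'user')],
#       perm_updates=[(7, 'repository.read', 'user_group')],
#       perm_deletions=[(13, 'repository.none', 'user')],
#       cur_user=cur_user)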
717
716
718 def create_fork(self, form_data, cur_user):
717 def create_fork(self, form_data, cur_user):
719 """
718 """
720 Simple wrapper for executing the celery task that creates a fork
719 Simple wrapper for executing the celery task that creates a fork
721
720
722 :param form_data:
721 :param form_data:
723 :param cur_user:
722 :param cur_user:
724 """
723 """
725 from rhodecode.lib.celerylib import tasks, run_task
724 from rhodecode.lib.celerylib import tasks, run_task
726 return run_task(tasks.create_repo_fork, form_data, cur_user)
725 return run_task(tasks.create_repo_fork, form_data, cur_user)
727
726
728 def archive(self, repo):
727 def archive(self, repo):
729 """
728 """
730 Archive given repository. Set archive flag.
729 Archive given repository. Set archive flag.
731
730
732 :param repo:
731 :param repo:
733 """
732 """
734 repo = self._get_repo(repo)
733 repo = self._get_repo(repo)
735 if repo:
734 if repo:
736
735
737 try:
736 try:
738 repo.archived = True
737 repo.archived = True
739 self.sa.add(repo)
738 self.sa.add(repo)
740 self.sa.commit()
739 self.sa.commit()
741 except Exception:
740 except Exception:
742 log.error(traceback.format_exc())
741 log.error(traceback.format_exc())
743 raise
742 raise
744
743
745 def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
744 def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
746 """
745 """
747 Delete the given repository; the forks parameter defines what to do with
746 Delete the given repository; the forks parameter defines what to do with
748 attached forks. Throws AttachedForksError if the deleted repo has attached
747 attached forks. Throws AttachedForksError if the deleted repo has attached
749 forks
748 forks
750
749
751 :param repo:
750 :param repo:
752 :param forks: str 'delete' or 'detach'
751 :param forks: str 'delete' or 'detach'
753 :param pull_requests: str 'delete' or None
752 :param pull_requests: str 'delete' or None
754 :param fs_remove: remove(archive) repo from filesystem
753 :param fs_remove: remove(archive) repo from filesystem
755 """
754 """
756 if not cur_user:
755 if not cur_user:
757 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
756 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
758 repo = self._get_repo(repo)
757 repo = self._get_repo(repo)
759 if repo:
758 if repo:
760 if forks == 'detach':
759 if forks == 'detach':
761 for r in repo.forks:
760 for r in repo.forks:
762 r.fork = None
761 r.fork = None
763 self.sa.add(r)
762 self.sa.add(r)
764 elif forks == 'delete':
763 elif forks == 'delete':
765 for r in repo.forks:
764 for r in repo.forks:
766 self.delete(r, forks='delete')
765 self.delete(r, forks='delete')
767 elif [f for f in repo.forks]:
766 elif [f for f in repo.forks]:
768 raise AttachedForksError()
767 raise AttachedForksError()
769
768
770 # check for pull requests
769 # check for pull requests
771 pr_sources = repo.pull_requests_source
770 pr_sources = repo.pull_requests_source
772 pr_targets = repo.pull_requests_target
771 pr_targets = repo.pull_requests_target
773 if pull_requests != 'delete' and (pr_sources or pr_targets):
772 if pull_requests != 'delete' and (pr_sources or pr_targets):
774 raise AttachedPullRequestsError()
773 raise AttachedPullRequestsError()
775
774
776 old_repo_dict = repo.get_dict()
775 old_repo_dict = repo.get_dict()
777 events.trigger(events.RepoPreDeleteEvent(repo))
776 events.trigger(events.RepoPreDeleteEvent(repo))
778 try:
777 try:
779 self.sa.delete(repo)
778 self.sa.delete(repo)
780 if fs_remove:
779 if fs_remove:
781 self._delete_filesystem_repo(repo)
780 self._delete_filesystem_repo(repo)
782 else:
781 else:
783 log.debug('skipping removal from filesystem')
782 log.debug('skipping removal from filesystem')
784 old_repo_dict.update({
783 old_repo_dict.update({
785 'deleted_by': cur_user,
784 'deleted_by': cur_user,
786 'deleted_on': time.time(),
785 'deleted_on': time.time(),
787 })
786 })
788 hooks_base.delete_repository(**old_repo_dict)
787 hooks_base.delete_repository(**old_repo_dict)
789 events.trigger(events.RepoDeleteEvent(repo))
788 events.trigger(events.RepoDeleteEvent(repo))
790 except Exception:
789 except Exception:
791 log.error(traceback.format_exc())
790 log.error(traceback.format_exc())
792 raise
791 raise
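# Usage sketch (hypothetical repo): without forks='detach' or forks='delete'
# an AttachedForksError is raised when forks exist, and without
# pull_requests='delete' an AttachedPullRequestsError is raised when the repo
# is a source or target of pull requests; fs_remove=False keeps the directory
# on disk and only removes the database record.
#
#   RepoModel().delete('my-group/my-repo', forks='detach', pull_requests='delete')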
793
792
794 def grant_user_permission(self, repo, user, perm):
793 def grant_user_permission(self, repo, user, perm):
795 """
794 """
796 Grant permission for user on given repository, or update existing one
795 Grant permission for user on given repository, or update existing one
797 if found
796 if found
798
797
799 :param repo: Instance of Repository, repository_id, or repository name
798 :param repo: Instance of Repository, repository_id, or repository name
800 :param user: Instance of User, user_id or username
799 :param user: Instance of User, user_id or username
801 :param perm: Instance of Permission, or permission_name
800 :param perm: Instance of Permission, or permission_name
802 """
801 """
803 user = self._get_user(user)
802 user = self._get_user(user)
804 repo = self._get_repo(repo)
803 repo = self._get_repo(repo)
805 permission = self._get_perm(perm)
804 permission = self._get_perm(perm)
806
805
807 # check if we have that permission already
806 # check if we have that permission already
808 obj = self.sa.query(UserRepoToPerm) \
807 obj = self.sa.query(UserRepoToPerm) \
809 .filter(UserRepoToPerm.user == user) \
808 .filter(UserRepoToPerm.user == user) \
810 .filter(UserRepoToPerm.repository == repo) \
809 .filter(UserRepoToPerm.repository == repo) \
811 .scalar()
810 .scalar()
812 if obj is None:
811 if obj is None:
813 # create new !
812 # create new !
814 obj = UserRepoToPerm()
813 obj = UserRepoToPerm()
815 obj.repository = repo
814 obj.repository = repo
816 obj.user = user
815 obj.user = user
817 obj.permission = permission
816 obj.permission = permission
818 self.sa.add(obj)
817 self.sa.add(obj)
819 log.debug('Granted perm %s to %s on %s', perm, user, repo)
818 log.debug('Granted perm %s to %s on %s', perm, user, repo)
820 action_logger_generic(
819 action_logger_generic(
821 'granted permission: {} to user: {} on repo: {}'.format(
820 'granted permission: {} to user: {} on repo: {}'.format(
822 perm, user, repo), namespace='security.repo')
821 perm, user, repo), namespace='security.repo')
823 return obj
822 return obj
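# Usage sketch (hypothetical user id and repo name): repo, user and perm accept
# any of the identifier forms listed in the docstring, and an existing
# permission row is updated in place rather than duplicated.
#
#   RepoModel().grant_user_permission('my-group/my-repo', 42, 'repository.write')
#   RepoModel().grant_user_permission(repo=repo, user=User.DEFAULT_USER, perm='repository.none')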
824
823
825 def revoke_user_permission(self, repo, user):
824 def revoke_user_permission(self, repo, user):
826 """
825 """
827 Revoke permission for user on given repository
826 Revoke permission for user on given repository
828
827
829 :param repo: Instance of Repository, repository_id, or repository name
828 :param repo: Instance of Repository, repository_id, or repository name
830 :param user: Instance of User, user_id or username
829 :param user: Instance of User, user_id or username
831 """
830 """
832
831
833 user = self._get_user(user)
832 user = self._get_user(user)
834 repo = self._get_repo(repo)
833 repo = self._get_repo(repo)
835
834
836 obj = self.sa.query(UserRepoToPerm) \
835 obj = self.sa.query(UserRepoToPerm) \
837 .filter(UserRepoToPerm.repository == repo) \
836 .filter(UserRepoToPerm.repository == repo) \
838 .filter(UserRepoToPerm.user == user) \
837 .filter(UserRepoToPerm.user == user) \
839 .scalar()
838 .scalar()
840 if obj:
839 if obj:
841 self.sa.delete(obj)
840 self.sa.delete(obj)
842 log.debug('Revoked perm on %s on %s', repo, user)
841 log.debug('Revoked perm on %s on %s', repo, user)
843 action_logger_generic(
842 action_logger_generic(
844 'revoked permission from user: {} on repo: {}'.format(
843 'revoked permission from user: {} on repo: {}'.format(
845 user, repo), namespace='security.repo')
844 user, repo), namespace='security.repo')
846
845
847 def grant_user_group_permission(self, repo, group_name, perm):
846 def grant_user_group_permission(self, repo, group_name, perm):
848 """
847 """
849 Grant permission for user group on given repository, or update
848 Grant permission for user group on given repository, or update
850 existing one if found
849 existing one if found
851
850
852 :param repo: Instance of Repository, repository_id, or repository name
851 :param repo: Instance of Repository, repository_id, or repository name
853 :param group_name: Instance of UserGroup, users_group_id,
852 :param group_name: Instance of UserGroup, users_group_id,
854 or user group name
853 or user group name
855 :param perm: Instance of Permission, or permission_name
854 :param perm: Instance of Permission, or permission_name
856 """
855 """
857 repo = self._get_repo(repo)
856 repo = self._get_repo(repo)
858 group_name = self._get_user_group(group_name)
857 group_name = self._get_user_group(group_name)
859 permission = self._get_perm(perm)
858 permission = self._get_perm(perm)
860
859
861 # check if we have that permission already
860 # check if we have that permission already
862 obj = self.sa.query(UserGroupRepoToPerm) \
861 obj = self.sa.query(UserGroupRepoToPerm) \
863 .filter(UserGroupRepoToPerm.users_group == group_name) \
862 .filter(UserGroupRepoToPerm.users_group == group_name) \
864 .filter(UserGroupRepoToPerm.repository == repo) \
863 .filter(UserGroupRepoToPerm.repository == repo) \
865 .scalar()
864 .scalar()
866
865
867 if obj is None:
866 if obj is None:
868 # create new
867 # create new
869 obj = UserGroupRepoToPerm()
868 obj = UserGroupRepoToPerm()
870
869
871 obj.repository = repo
870 obj.repository = repo
872 obj.users_group = group_name
871 obj.users_group = group_name
873 obj.permission = permission
872 obj.permission = permission
874 self.sa.add(obj)
873 self.sa.add(obj)
875 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
874 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
876 action_logger_generic(
875 action_logger_generic(
877 'granted permission: {} to usergroup: {} on repo: {}'.format(
876 'granted permission: {} to usergroup: {} on repo: {}'.format(
878 perm, group_name, repo), namespace='security.repo')
877 perm, group_name, repo), namespace='security.repo')
879
878
880 return obj
879 return obj
881
880
882 def revoke_user_group_permission(self, repo, group_name):
881 def revoke_user_group_permission(self, repo, group_name):
883 """
882 """
884 Revoke permission for user group on given repository
883 Revoke permission for user group on given repository
885
884
886 :param repo: Instance of Repository, repository_id, or repository name
885 :param repo: Instance of Repository, repository_id, or repository name
887 :param group_name: Instance of UserGroup, users_group_id,
886 :param group_name: Instance of UserGroup, users_group_id,
888 or user group name
887 or user group name
889 """
888 """
890 repo = self._get_repo(repo)
889 repo = self._get_repo(repo)
891 group_name = self._get_user_group(group_name)
890 group_name = self._get_user_group(group_name)
892
891
893 obj = self.sa.query(UserGroupRepoToPerm) \
892 obj = self.sa.query(UserGroupRepoToPerm) \
894 .filter(UserGroupRepoToPerm.repository == repo) \
893 .filter(UserGroupRepoToPerm.repository == repo) \
895 .filter(UserGroupRepoToPerm.users_group == group_name) \
894 .filter(UserGroupRepoToPerm.users_group == group_name) \
896 .scalar()
895 .scalar()
897 if obj:
896 if obj:
898 self.sa.delete(obj)
897 self.sa.delete(obj)
899 log.debug('Revoked perm to %s on %s', repo, group_name)
898 log.debug('Revoked perm to %s on %s', repo, group_name)
900 action_logger_generic(
899 action_logger_generic(
901 'revoked permission from usergroup: {} on repo: {}'.format(
900 'revoked permission from usergroup: {} on repo: {}'.format(
902 group_name, repo), namespace='security.repo')
901 group_name, repo), namespace='security.repo')
903
902
904 def delete_stats(self, repo_name):
903 def delete_stats(self, repo_name):
905 """
904 """
906 removes stats for given repo
905 removes stats for given repo
907
906
908 :param repo_name:
907 :param repo_name:
909 """
908 """
910 repo = self._get_repo(repo_name)
909 repo = self._get_repo(repo_name)
911 try:
910 try:
912 obj = self.sa.query(Statistics) \
911 obj = self.sa.query(Statistics) \
913 .filter(Statistics.repository == repo).scalar()
912 .filter(Statistics.repository == repo).scalar()
914 if obj:
913 if obj:
915 self.sa.delete(obj)
914 self.sa.delete(obj)
916 except Exception:
915 except Exception:
917 log.error(traceback.format_exc())
916 log.error(traceback.format_exc())
918 raise
917 raise
919
918
920 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
919 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
921 field_type='str', field_desc=''):
920 field_type='str', field_desc=''):
922
921
923 repo = self._get_repo(repo_name)
922 repo = self._get_repo(repo_name)
924
923
925 new_field = RepositoryField()
924 new_field = RepositoryField()
926 new_field.repository = repo
925 new_field.repository = repo
927 new_field.field_key = field_key
926 new_field.field_key = field_key
928 new_field.field_type = field_type # python type
927 new_field.field_type = field_type # python type
929 new_field.field_value = field_value
928 new_field.field_value = field_value
930 new_field.field_desc = field_desc
929 new_field.field_desc = field_desc
931 new_field.field_label = field_label
930 new_field.field_label = field_label
932 self.sa.add(new_field)
931 self.sa.add(new_field)
933 return new_field
932 return new_field
934
933
935 def delete_repo_field(self, repo_name, field_key):
934 def delete_repo_field(self, repo_name, field_key):
936 repo = self._get_repo(repo_name)
935 repo = self._get_repo(repo_name)
937 field = RepositoryField.get_by_key_name(field_key, repo)
936 field = RepositoryField.get_by_key_name(field_key, repo)
938 if field:
937 if field:
939 self.sa.delete(field)
938 self.sa.delete(field)
940
939
941 def set_landing_rev(self, repo, landing_rev_name):
940 def set_landing_rev(self, repo, landing_rev_name):
942 if landing_rev_name.startswith('branch:'):
941 if landing_rev_name.startswith('branch:'):
943 landing_rev_name = landing_rev_name.split('branch:')[-1]
942 landing_rev_name = landing_rev_name.split('branch:')[-1]
944 scm_instance = repo.scm_instance()
943 scm_instance = repo.scm_instance()
945 if scm_instance:
944 if scm_instance:
946 return scm_instance._remote.set_head_ref(landing_rev_name)
945 return scm_instance._remote.set_head_ref(landing_rev_name)
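# Clarifying sketch (hypothetical branch name): a value in the 'branch:<name>'
# form is reduced to the bare branch name before being passed to the backend as
# the new HEAD reference; values without the 'branch:' prefix are forwarded
# unchanged.
#
#   RepoModel().set_landing_rev(repo, 'branch:stable')  # sets the HEAD ref to 'stable'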
947
946
948 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
947 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
949 clone_uri=None, repo_store_location=None,
948 clone_uri=None, repo_store_location=None,
950 use_global_config=False, install_hooks=True):
949 use_global_config=False, install_hooks=True):
951 """
950 """
952 makes a repository on the filesystem. It is group aware, meaning it will
951 makes a repository on the filesystem. It is group aware, meaning it will
953 create a repository within a group and alter the paths accordingly to
952 create a repository within a group and alter the paths accordingly to
954 the group location
953 the group location
955
954
956 :param repo_name:
955 :param repo_name:
957 :param alias:
956 :param alias:
958 :param parent:
957 :param parent:
959 :param clone_uri:
958 :param clone_uri:
960 :param repo_store_location:
959 :param repo_store_location:
961 """
960 """
962 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
961 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
963 from rhodecode.model.scm import ScmModel
962 from rhodecode.model.scm import ScmModel
964
963
965 if Repository.NAME_SEP in repo_name:
964 if Repository.NAME_SEP in repo_name:
966 raise ValueError(
965 raise ValueError(
967 'repo_name must not contain groups got `%s`' % repo_name)
966 'repo_name must not contain groups got `%s`' % repo_name)
968
967
969 if isinstance(repo_group, RepoGroup):
968 if isinstance(repo_group, RepoGroup):
970 new_parent_path = os.sep.join(repo_group.full_path_splitted)
969 new_parent_path = os.sep.join(repo_group.full_path_splitted)
971 else:
970 else:
972 new_parent_path = repo_group or ''
971 new_parent_path = repo_group or ''
973
972
974 if repo_store_location:
973 if repo_store_location:
975 _paths = [repo_store_location]
974 _paths = [repo_store_location]
976 else:
975 else:
977 _paths = [self.repos_path, new_parent_path, repo_name]
976 _paths = [self.repos_path, new_parent_path, repo_name]
978 # we need to make it str for mercurial
977 # we need to make it str for mercurial
979 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
978 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
980
979
981 # check that this path is not already a valid repository
980 # check that this path is not already a valid repository
982 if is_valid_repo(repo_path, self.repos_path):
981 if is_valid_repo(repo_path, self.repos_path):
983 raise Exception('This path %s is a valid repository' % repo_path)
982 raise Exception('This path %s is a valid repository' % repo_path)
984
983
985 # check that this path is not already a valid repository group
984 # check that this path is not already a valid repository group
986 if is_valid_repo_group(repo_path, self.repos_path):
985 if is_valid_repo_group(repo_path, self.repos_path):
987 raise Exception('This path %s is a valid group' % repo_path)
986 raise Exception('This path %s is a valid group' % repo_path)
988
987
989 log.info('creating repo %s in %s from url: `%s`',
988 log.info('creating repo %s in %s from url: `%s`',
990 repo_name, safe_unicode(repo_path),
989 repo_name, safe_unicode(repo_path),
991 obfuscate_url_pw(clone_uri))
990 obfuscate_url_pw(clone_uri))
992
991
993 backend = get_backend(repo_type)
992 backend = get_backend(repo_type)
994
993
995 config_repo = None if use_global_config else repo_name
994 config_repo = None if use_global_config else repo_name
996 if config_repo and new_parent_path:
995 if config_repo and new_parent_path:
997 config_repo = Repository.NAME_SEP.join(
996 config_repo = Repository.NAME_SEP.join(
998 (new_parent_path, config_repo))
997 (new_parent_path, config_repo))
999 config = make_db_config(clear_session=False, repo=config_repo)
998 config = make_db_config(clear_session=False, repo=config_repo)
1000 config.set('extensions', 'largefiles', '')
999 config.set('extensions', 'largefiles', '')
1001
1000
1002 # patch and reset hooks section of UI config to not run any
1001 # patch and reset hooks section of UI config to not run any
1003 # hooks on creating remote repo
1002 # hooks on creating remote repo
1004 config.clear_section('hooks')
1003 config.clear_section('hooks')
1005
1004
1006 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
1005 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
1007 if repo_type == 'git':
1006 if repo_type == 'git':
1008 repo = backend(
1007 repo = backend(
1009 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
1008 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
1010 with_wire={"cache": False})
1009 with_wire={"cache": False})
1011 else:
1010 else:
1012 repo = backend(
1011 repo = backend(
1013 repo_path, config=config, create=True, src_url=clone_uri,
1012 repo_path, config=config, create=True, src_url=clone_uri,
1014 with_wire={"cache": False})
1013 with_wire={"cache": False})
1015
1014
1016 if install_hooks:
1015 if install_hooks:
1017 repo.install_hooks()
1016 repo.install_hooks()
1018
1017
1019 log.debug('Created repo %s with %s backend',
1018 log.debug('Created repo %s with %s backend',
1020 safe_unicode(repo_name), safe_unicode(repo_type))
1019 safe_unicode(repo_name), safe_unicode(repo_type))
1021 return repo
1020 return repo
1022
1021
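Note on the block above: it assembles the on-disk path for the new repository and refuses to proceed if the target already holds a repository or a repository group. A minimal stdlib-only sketch of that guard, with illustrative paths and a simplified stand-in for is_valid_repo / is_valid_repo_group:

import os

def build_repo_path(repos_root, parent_path, repo_name):
    # mirror of the os.path.join(*map(safe_str, _paths)) call above:
    # skip empty components and force everything to str for the VCS backend
    parts = [p for p in (repos_root, parent_path, repo_name) if p]
    return os.path.join(*[str(p) for p in parts])

def looks_like_repo(path):
    # simplified stand-in for is_valid_repo(): a dir counts as taken
    # if it already contains VCS control data
    return any(os.path.isdir(os.path.join(path, d)) for d in ('.git', '.hg'))

repo_path = build_repo_path('/srv/repos', 'backend-team', 'billing-service')
if looks_like_repo(repo_path):
    raise Exception('This path %s is a valid repository' % repo_path)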
1023 def _rename_filesystem_repo(self, old, new):
1022 def _rename_filesystem_repo(self, old, new):
1024 """
1023 """
1025 renames repository on filesystem
1024 renames repository on filesystem
1026
1025
1027 :param old: old name
1026 :param old: old name
1028 :param new: new name
1027 :param new: new name
1029 """
1028 """
1030 log.info('renaming repo from %s to %s', old, new)
1029 log.info('renaming repo from %s to %s', old, new)
1031
1030
1032 old_path = os.path.join(self.repos_path, old)
1031 old_path = os.path.join(self.repos_path, old)
1033 new_path = os.path.join(self.repos_path, new)
1032 new_path = os.path.join(self.repos_path, new)
1034 if os.path.isdir(new_path):
1033 if os.path.isdir(new_path):
1035 raise Exception(
1034 raise Exception(
1036 'Was trying to rename to already existing dir %s' % new_path
1035 'Was trying to rename to already existing dir %s' % new_path
1037 )
1036 )
1038 shutil.move(old_path, new_path)
1037 shutil.move(old_path, new_path)
1039
1038
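The os.path.isdir(new_path) guard above is not cosmetic: shutil.move does not fail when the destination directory already exists, it moves the source inside it. A stdlib-only sketch of the same rename-with-guard pattern (paths are illustrative):

import os
import shutil

def rename_repo_dir(repos_root, old, new):
    old_path = os.path.join(repos_root, old)
    new_path = os.path.join(repos_root, new)
    if os.path.isdir(new_path):
        # without this check shutil.move would nest old_path inside new_path
        # instead of raising, silently corrupting the repo store layout
        raise Exception('Was trying to rename to already existing dir %s' % new_path)
    shutil.move(old_path, new_path)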
1040 def _delete_filesystem_repo(self, repo):
1039 def _delete_filesystem_repo(self, repo):
1041 """
1040 """
1042 removes repo from filesystem, the removal is actually made by
1041 removes repo from filesystem, the removal is actually made by
1043 adding an rm__ prefix to the dir and renaming the internal .hg/.git dirs so this
1042 adding an rm__ prefix to the dir and renaming the internal .hg/.git dirs so this
1044 repository is no longer valid for rhodecode; it can be undeleted later on
1043 repository is no longer valid for rhodecode; it can be undeleted later on
1045 by reverting the renames on this repository
1044 by reverting the renames on this repository
1046
1045
1047 :param repo: repo object
1046 :param repo: repo object
1048 """
1047 """
1049 rm_path = os.path.join(self.repos_path, repo.repo_name)
1048 rm_path = os.path.join(self.repos_path, repo.repo_name)
1050 repo_group = repo.group
1049 repo_group = repo.group
1051 log.info("Removing repository %s", rm_path)
1050 log.info("Removing repository %s", rm_path)
1052 # disable hg/git internals so it doesn't get detected as a repo
1051 # disable hg/git internals so it doesn't get detected as a repo
1053 alias = repo.repo_type
1052 alias = repo.repo_type
1054
1053
1055 config = make_db_config(clear_session=False)
1054 config = make_db_config(clear_session=False)
1056 config.set('extensions', 'largefiles', '')
1055 config.set('extensions', 'largefiles', '')
1057 bare = getattr(repo.scm_instance(config=config), 'bare', False)
1056 bare = getattr(repo.scm_instance(config=config), 'bare', False)
1058
1057
1059 # skip this for bare git repos
1058 # skip this for bare git repos
1060 if not bare:
1059 if not bare:
1061 # disable VCS repo
1060 # disable VCS repo
1062 vcs_path = os.path.join(rm_path, '.%s' % alias)
1061 vcs_path = os.path.join(rm_path, '.%s' % alias)
1063 if os.path.exists(vcs_path):
1062 if os.path.exists(vcs_path):
1064 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
1063 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
1065
1064
1066 _now = datetime.datetime.now()
1065 _now = datetime.datetime.now()
1067 _ms = str(_now.microsecond).rjust(6, '0')
1066 _ms = str(_now.microsecond).rjust(6, '0')
1068 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
1067 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
1069 repo.just_name)
1068 repo.just_name)
1070 if repo_group:
1069 if repo_group:
1071 # if repository is in group, prefix the removal path with the group
1070 # if repository is in group, prefix the removal path with the group
1072 args = repo_group.full_path_splitted + [_d]
1071 args = repo_group.full_path_splitted + [_d]
1073 _d = os.path.join(*args)
1072 _d = os.path.join(*args)
1074
1073
1075 if os.path.isdir(rm_path):
1074 if os.path.isdir(rm_path):
1076 shutil.move(rm_path, os.path.join(self.repos_path, _d))
1075 shutil.move(rm_path, os.path.join(self.repos_path, _d))
1077
1076
1078 # finally cleanup diff-cache if it exists
1077 # finally cleanup diff-cache if it exists
1079 cached_diffs_dir = repo.cached_diffs_dir
1078 cached_diffs_dir = repo.cached_diffs_dir
1080 if os.path.isdir(cached_diffs_dir):
1079 if os.path.isdir(cached_diffs_dir):
1081 shutil.rmtree(cached_diffs_dir)
1080 shutil.rmtree(cached_diffs_dir)
1082
1081
1083
1082
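The delete above is deliberately a "soft delete": the directory is renamed to a timestamped rm__ name (after its .hg/.git dir was renamed first), so the removal can be reverted later. A stdlib-only sketch of the archive-name construction, with an illustrative repos root:

import datetime
import os
import shutil

def archive_repo_dir(repos_root, repo_dir_name):
    now = datetime.datetime.now()
    micro = str(now.microsecond).rjust(6, '0')
    # e.g. rm__20200131_235959_000042__billing-service
    archived = 'rm__%s__%s' % (now.strftime('%Y%m%d_%H%M%S_' + micro), repo_dir_name)
    src = os.path.join(repos_root, repo_dir_name)
    if os.path.isdir(src):
        shutil.move(src, os.path.join(repos_root, archived))
    return archived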
1084 class ReadmeFinder:
1083 class ReadmeFinder:
1085 """
1084 """
1086 Utility which knows how to find a readme for a specific commit.
1085 Utility which knows how to find a readme for a specific commit.
1087
1086
1088 The main idea is that this is a configurable algorithm. When creating an
1087 The main idea is that this is a configurable algorithm. When creating an
1089 instance you can define parameters, currently only the `default_renderer`.
1088 instance you can define parameters, currently only the `default_renderer`.
1090 Based on this configuration the method :meth:`search` behaves slightly
1089 Based on this configuration the method :meth:`search` behaves slightly
1091 differently.
1090 differently.
1092 """
1091 """
1093
1092
1094 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
1093 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
1095 path_re = re.compile(r'^docs?', re.IGNORECASE)
1094 path_re = re.compile(r'^docs?', re.IGNORECASE)
1096
1095
1097 default_priorities = {
1096 default_priorities = {
1098 None: 0,
1097 None: 0,
1099 '.text': 2,
1098 '.text': 2,
1100 '.txt': 3,
1099 '.txt': 3,
1101 '.rst': 1,
1100 '.rst': 1,
1102 '.rest': 2,
1101 '.rest': 2,
1103 '.md': 1,
1102 '.md': 1,
1104 '.mkdn': 2,
1103 '.mkdn': 2,
1105 '.mdown': 3,
1104 '.mdown': 3,
1106 '.markdown': 4,
1105 '.markdown': 4,
1107 }
1106 }
1108
1107
1109 path_priority = {
1108 path_priority = {
1110 'doc': 0,
1109 'doc': 0,
1111 'docs': 1,
1110 'docs': 1,
1112 }
1111 }
1113
1112
1114 FALLBACK_PRIORITY = 99
1113 FALLBACK_PRIORITY = 99
1115
1114
1116 RENDERER_TO_EXTENSION = {
1115 RENDERER_TO_EXTENSION = {
1117 'rst': ['.rst', '.rest'],
1116 'rst': ['.rst', '.rest'],
1118 'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
1117 'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
1119 }
1118 }
1120
1119
1121 def __init__(self, default_renderer=None):
1120 def __init__(self, default_renderer=None):
1122 self._default_renderer = default_renderer
1121 self._default_renderer = default_renderer
1123 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
1122 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
1124 default_renderer, [])
1123 default_renderer, [])
1125
1124
1126 def search(self, commit, path=u'/'):
1125 def search(self, commit, path=u'/'):
1127 """
1126 """
1128 Find a readme in the given `commit`.
1127 Find a readme in the given `commit`.
1129 """
1128 """
1130 nodes = commit.get_nodes(path)
1129 nodes = commit.get_nodes(path)
1131 matches = self._match_readmes(nodes)
1130 matches = self._match_readmes(nodes)
1132 matches = self._sort_according_to_priority(matches)
1131 matches = self._sort_according_to_priority(matches)
1133 if matches:
1132 if matches:
1134 return matches[0].node
1133 return matches[0].node
1135
1134
1136 paths = self._match_paths(nodes)
1135 paths = self._match_paths(nodes)
1137 paths = self._sort_paths_according_to_priority(paths)
1136 paths = self._sort_paths_according_to_priority(paths)
1138 for path in paths:
1137 for path in paths:
1139 match = self.search(commit, path=path)
1138 match = self.search(commit, path=path)
1140 if match:
1139 if match:
1141 return match
1140 return match
1142
1141
1143 return None
1142 return None
1144
1143
1145 def _match_readmes(self, nodes):
1144 def _match_readmes(self, nodes):
1146 for node in nodes:
1145 for node in nodes:
1147 if not node.is_file():
1146 if not node.is_file():
1148 continue
1147 continue
1149 path = node.path.rsplit('/', 1)[-1]
1148 path = node.path.rsplit('/', 1)[-1]
1150 match = self.readme_re.match(path)
1149 match = self.readme_re.match(path)
1151 if match:
1150 if match:
1152 extension = match.group(1)
1151 extension = match.group(1)
1153 yield ReadmeMatch(node, match, self._priority(extension))
1152 yield ReadmeMatch(node, match, self._priority(extension))
1154
1153
1155 def _match_paths(self, nodes):
1154 def _match_paths(self, nodes):
1156 for node in nodes:
1155 for node in nodes:
1157 if not node.is_dir():
1156 if not node.is_dir():
1158 continue
1157 continue
1159 match = self.path_re.match(node.path)
1158 match = self.path_re.match(node.path)
1160 if match:
1159 if match:
1161 yield node.path
1160 yield node.path
1162
1161
1163 def _priority(self, extension):
1162 def _priority(self, extension):
1164 renderer_priority = (
1163 renderer_priority = (
1165 0 if extension in self._renderer_extensions else 1)
1164 0 if extension in self._renderer_extensions else 1)
1166 extension_priority = self.default_priorities.get(
1165 extension_priority = self.default_priorities.get(
1167 extension, self.FALLBACK_PRIORITY)
1166 extension, self.FALLBACK_PRIORITY)
1168 return (renderer_priority, extension_priority)
1167 return (renderer_priority, extension_priority)
1169
1168
1170 def _sort_according_to_priority(self, matches):
1169 def _sort_according_to_priority(self, matches):
1171
1170
1172 def priority_and_path(match):
1171 def priority_and_path(match):
1173 return (match.priority, match.path)
1172 return (match.priority, match.path)
1174
1173
1175 return sorted(matches, key=priority_and_path)
1174 return sorted(matches, key=priority_and_path)
1176
1175
1177 def _sort_paths_according_to_priority(self, paths):
1176 def _sort_paths_according_to_priority(self, paths):
1178
1177
1179 def priority_and_path(path):
1178 def priority_and_path(path):
1180 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1179 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1181
1180
1182 return sorted(paths, key=priority_and_path)
1181 return sorted(paths, key=priority_and_path)
1183
1182
1184
1183
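A condensed illustration of how the pieces above interact: the regex extracts the extension, the configured renderer's extensions get renderer priority 0, and candidates sort on the resulting tuple. The values below are a hand-picked subset of the class attributes, assuming a markdown default renderer:

import re

readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
renderer_extensions = ['.md', '.mdown', '.markdown']            # markdown renderer
default_priorities = {None: 0, '.txt': 3, '.rst': 1, '.md': 1}   # subset of the map above
FALLBACK_PRIORITY = 99

def priority(name):
    extension = readme_re.match(name).group(1)   # '.md', '.rst', ... or None for bare README
    renderer_priority = 0 if extension in renderer_extensions else 1
    return (renderer_priority, default_priorities.get(extension, FALLBACK_PRIORITY))

candidates = ['README.rst', 'README.md', 'README', 'readme.txt']
print(sorted(candidates, key=priority))
# ['README.md', 'README', 'README.rst', 'readme.txt'] -- .md wins under a markdown renderer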
1185 class ReadmeMatch:
1184 class ReadmeMatch:
1186
1185
1187 def __init__(self, node, match, priority):
1186 def __init__(self, node, match, priority):
1188 self.node = node
1187 self.node = node
1189 self._match = match
1188 self._match = match
1190 self.priority = priority
1189 self.priority = priority
1191
1190
1192 @property
1191 @property
1193 def path(self):
1192 def path(self):
1194 return self.node.path
1193 return self.node.path
1195
1194
1196 def __repr__(self):
1195 def __repr__(self):
1197 return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority)
1196 return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority)
@@ -1,897 +1,897 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 repo group model for RhodeCode
23 repo group model for RhodeCode
24 """
24 """
25
25
26 import os
26 import os
27 import datetime
27 import datetime
28 import itertools
28 import itertools
29 import logging
29 import logging
30 import shutil
30 import shutil
31 import time
31 import time
32 import traceback
32 import traceback
33 import string
33 import string
34
34
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36
36
37 from rhodecode import events
37 from rhodecode import events
38 from rhodecode.model import BaseModel
38 from rhodecode.model import BaseModel
39 from rhodecode.model.db import (_hash_key, func, or_, in_filter_generator,
39 from rhodecode.model.db import (_hash_key, func, or_, in_filter_generator,
40 Session, RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm,
40 Session, RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm,
41 UserGroup, Repository)
41 UserGroup, Repository)
42 from rhodecode.model.permission import PermissionModel
42 from rhodecode.model.permission import PermissionModel
43 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
43 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
44 from rhodecode.lib.caching_query import FromCache
44 from rhodecode.lib.caching_query import FromCache
45 from rhodecode.lib.utils2 import action_logger_generic
45 from rhodecode.lib.utils2 import action_logger_generic
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 class RepoGroupModel(BaseModel):
50 class RepoGroupModel(BaseModel):
51
51
52 cls = RepoGroup
52 cls = RepoGroup
53 PERSONAL_GROUP_DESC = 'personal repo group of user `%(username)s`'
53 PERSONAL_GROUP_DESC = 'personal repo group of user `%(username)s`'
54 PERSONAL_GROUP_PATTERN = '${username}' # default
54 PERSONAL_GROUP_PATTERN = '${username}' # default
55
55
56 def _get_user_group(self, users_group):
56 def _get_user_group(self, users_group):
57 return self._get_instance(UserGroup, users_group,
57 return self._get_instance(UserGroup, users_group,
58 callback=UserGroup.get_by_group_name)
58 callback=UserGroup.get_by_group_name)
59
59
60 def _get_repo_group(self, repo_group):
60 def _get_repo_group(self, repo_group):
61 return self._get_instance(RepoGroup, repo_group,
61 return self._get_instance(RepoGroup, repo_group,
62 callback=RepoGroup.get_by_group_name)
62 callback=RepoGroup.get_by_group_name)
63
63
64 def get_repo_group(self, repo_group):
64 def get_repo_group(self, repo_group):
65 return self._get_repo_group(repo_group)
65 return self._get_repo_group(repo_group)
66
66
67 @LazyProperty
67 @LazyProperty
68 def repos_path(self):
68 def repos_path(self):
69 """
69 """
70 Gets the repositories root path from database
70 Gets the repositories root path from database
71 """
71 """
72
72
73 settings_model = VcsSettingsModel(sa=self.sa)
73 settings_model = VcsSettingsModel(sa=self.sa)
74 return settings_model.get_repos_location()
74 return settings_model.get_repos_location()
75
75
76 def get_by_group_name(self, repo_group_name, cache=None):
76 def get_by_group_name(self, repo_group_name, cache=None):
77 repo = self.sa.query(RepoGroup) \
77 repo = self.sa.query(RepoGroup) \
78 .filter(RepoGroup.group_name == repo_group_name)
78 .filter(RepoGroup.group_name == repo_group_name)
79
79
80 if cache:
80 if cache:
81 name_key = _hash_key(repo_group_name)
81 name_key = _hash_key(repo_group_name)
82 repo = repo.options(
82 repo = repo.options(
83 FromCache("sql_cache_short", f"get_repo_group_{name_key}"))
83 FromCache("sql_cache_short", f"get_repo_group_{name_key}"))
84 return repo.scalar()
84 return repo.scalar()
85
85
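For readers unfamiliar with the FromCache option used above: it memoizes the query result in the 'sql_cache_short' region under a key derived from the group name. The exact hashing done by _hash_key lives elsewhere, so the sketch below only illustrates the idea with an assumed sha1-based key and a dict standing in for the cache region:

import hashlib

_region = {}  # stand-in for the 'sql_cache_short' cache region

def get_repo_group_cached(repo_group_name, fetch):
    # assumed key scheme; the real _hash_key implementation may differ
    name_key = hashlib.sha1(repo_group_name.encode('utf8')).hexdigest()
    cache_key = f"get_repo_group_{name_key}"
    if cache_key not in _region:
        _region[cache_key] = fetch(repo_group_name)   # the actual DB query
    return _region[cache_key]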
86 def get_default_create_personal_repo_group(self):
86 def get_default_create_personal_repo_group(self):
87 value = SettingsModel().get_setting_by_name(
87 value = SettingsModel().get_setting_by_name(
88 'create_personal_repo_group')
88 'create_personal_repo_group')
89 return value.app_settings_value if value else None or False
89 return value.app_settings_value if value else None or False
90
90
91 def get_personal_group_name_pattern(self):
91 def get_personal_group_name_pattern(self):
92 value = SettingsModel().get_setting_by_name(
92 value = SettingsModel().get_setting_by_name(
93 'personal_repo_group_pattern')
93 'personal_repo_group_pattern')
94 val = value.app_settings_value if value else None
94 val = value.app_settings_value if value else None
95 group_template = val or self.PERSONAL_GROUP_PATTERN
95 group_template = val or self.PERSONAL_GROUP_PATTERN
96
96
97 group_template = group_template.lstrip('/')
97 group_template = group_template.lstrip('/')
98 return group_template
98 return group_template
99
99
100 def get_personal_group_name(self, user):
100 def get_personal_group_name(self, user):
101 template = self.get_personal_group_name_pattern()
101 template = self.get_personal_group_name_pattern()
102 return string.Template(template).safe_substitute(
102 return string.Template(template).safe_substitute(
103 username=user.username,
103 username=user.username,
104 user_id=user.user_id,
104 user_id=user.user_id,
105 first_name=user.first_name,
105 first_name=user.first_name,
106 last_name=user.last_name,
106 last_name=user.last_name,
107 )
107 )
108
108
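The personal-group pattern is expanded with string.Template.safe_substitute, so unknown placeholders are left untouched rather than raising. A short stdlib sketch with an illustrative, non-default pattern:

import string

template = 'people/${username}'   # the default pattern is just '${username}'
name = string.Template(template).safe_substitute(
    username='jdoe', user_id=42, first_name='John', last_name='Doe')
print(name)                       # people/jdoe
print(string.Template('${team}/${username}').safe_substitute(username='jdoe'))
# ${team}/jdoe -- safe_substitute leaves unknown placeholders as-is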
109 def create_personal_repo_group(self, user, commit_early=True):
109 def create_personal_repo_group(self, user, commit_early=True):
110 desc = self.PERSONAL_GROUP_DESC % {'username': user.username}
110 desc = self.PERSONAL_GROUP_DESC % {'username': user.username}
111 personal_repo_group_name = self.get_personal_group_name(user)
111 personal_repo_group_name = self.get_personal_group_name(user)
112
112
113 # create a new one
113 # create a new one
114 RepoGroupModel().create(
114 RepoGroupModel().create(
115 group_name=personal_repo_group_name,
115 group_name=personal_repo_group_name,
116 group_description=desc,
116 group_description=desc,
117 owner=user.username,
117 owner=user.username,
118 personal=True,
118 personal=True,
119 commit_early=commit_early)
119 commit_early=commit_early)
120
120
121 def _create_default_perms(self, new_group):
121 def _create_default_perms(self, new_group):
122 # create default permission
122 # create default permission
123 default_perm = 'group.read'
123 default_perm = 'group.read'
124 def_user = User.get_default_user()
124 def_user = User.get_default_user()
125 for p in def_user.user_perms:
125 for p in def_user.user_perms:
126 if p.permission.permission_name.startswith('group.'):
126 if p.permission.permission_name.startswith('group.'):
127 default_perm = p.permission.permission_name
127 default_perm = p.permission.permission_name
128 break
128 break
129
129
130 repo_group_to_perm = UserRepoGroupToPerm()
130 repo_group_to_perm = UserRepoGroupToPerm()
131 repo_group_to_perm.permission = Permission.get_by_key(default_perm)
131 repo_group_to_perm.permission = Permission.get_by_key(default_perm)
132
132
133 repo_group_to_perm.group = new_group
133 repo_group_to_perm.group = new_group
134 repo_group_to_perm.user_id = def_user.user_id
134 repo_group_to_perm.user_id = def_user.user_id
135 return repo_group_to_perm
135 return repo_group_to_perm
136
136
137 def _get_group_name_and_parent(self, group_name_full, repo_in_path=False,
137 def _get_group_name_and_parent(self, group_name_full, repo_in_path=False,
138 get_object=False):
138 get_object=False):
139 """
139 """
140 Gets the group name and a parent group name from the given group name.
140 Gets the group name and a parent group name from the given group name.
141 If repo_in_path is set to true, we assume the full path also includes
141 If repo_in_path is set to true, we assume the full path also includes
142 the repo name; in that case we strip the last element.
142 the repo name; in that case we strip the last element.
143
143
144 :param group_name_full:
144 :param group_name_full:
145 """
145 """
146 split_paths = 1
146 split_paths = 1
147 if repo_in_path:
147 if repo_in_path:
148 split_paths = 2
148 split_paths = 2
149 _parts = group_name_full.rsplit(RepoGroup.url_sep(), split_paths)
149 _parts = group_name_full.rsplit(RepoGroup.url_sep(), split_paths)
150
150
151 if repo_in_path and len(_parts) > 1:
151 if repo_in_path and len(_parts) > 1:
152 # in such a case the last element is the repo_name
152 # in such a case the last element is the repo_name
153 _parts.pop(-1)
153 _parts.pop(-1)
154 group_name_cleaned = _parts[-1] # just the group name
154 group_name_cleaned = _parts[-1] # just the group name
155 parent_repo_group_name = None
155 parent_repo_group_name = None
156
156
157 if len(_parts) > 1:
157 if len(_parts) > 1:
158 parent_repo_group_name = _parts[0]
158 parent_repo_group_name = _parts[0]
159
159
160 parent_group = None
160 parent_group = None
161 if parent_repo_group_name:
161 if parent_repo_group_name:
162 parent_group = RepoGroup.get_by_group_name(parent_repo_group_name)
162 parent_group = RepoGroup.get_by_group_name(parent_repo_group_name)
163
163
164 if get_object:
164 if get_object:
165 return group_name_cleaned, parent_repo_group_name, parent_group
165 return group_name_cleaned, parent_repo_group_name, parent_group
166
166
167 return group_name_cleaned, parent_repo_group_name
167 return group_name_cleaned, parent_repo_group_name
168
168
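The splitting logic above boils down to a single rsplit from the right plus an optional pop of a trailing repo name. A stdlib-only sketch of the same behaviour, assuming '/' as the separator returned by RepoGroup.url_sep():

def split_group_name(group_name_full, repo_in_path=False):
    split_paths = 2 if repo_in_path else 1
    parts = group_name_full.rsplit('/', split_paths)
    if repo_in_path and len(parts) > 1:
        parts.pop(-1)                       # drop the trailing repo name
    group_name = parts[-1]                  # just the group name
    parent_name = parts[0] if len(parts) > 1 else None
    return group_name, parent_name

print(split_group_name('team/backend'))                             # ('backend', 'team')
print(split_group_name('team/backend/billing', repo_in_path=True))  # ('backend', 'team')
print(split_group_name('solo'))                                      # ('solo', None)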
169 def check_exist_filesystem(self, group_name, exc_on_failure=True):
169 def check_exist_filesystem(self, group_name, exc_on_failure=True):
170 create_path = os.path.join(self.repos_path, group_name)
170 create_path = os.path.join(self.repos_path, group_name)
171 log.debug('creating new group in %s', create_path)
171 log.debug('creating new group in %s', create_path)
172
172
173 if os.path.isdir(create_path):
173 if os.path.isdir(create_path):
174 if exc_on_failure:
174 if exc_on_failure:
175 abs_create_path = os.path.abspath(create_path)
175 abs_create_path = os.path.abspath(create_path)
176 raise Exception('Directory `{}` already exists !'.format(abs_create_path))
176 raise Exception('Directory `{}` already exists !'.format(abs_create_path))
177 return False
177 return False
178 return True
178 return True
179
179
180 def _create_group(self, group_name):
180 def _create_group(self, group_name):
181 """
181 """
182 makes repository group on filesystem
182 makes repository group on filesystem
183
183
184 :param group_name: full name (including parent path) of the group
184 :param group_name: full name (including parent path) of the group
185 to create on the filesystem
185 to create on the filesystem
186 """
186 """
187
187
188 self.check_exist_filesystem(group_name)
188 self.check_exist_filesystem(group_name)
189 create_path = os.path.join(self.repos_path, group_name)
189 create_path = os.path.join(self.repos_path, group_name)
190 log.debug('creating new group in %s', create_path)
190 log.debug('creating new group in %s', create_path)
191 os.makedirs(create_path, mode=0o755)
191 os.makedirs(create_path, mode=0o755)
192 log.debug('created group in %s', create_path)
192 log.debug('created group in %s', create_path)
193
193
194 def _rename_group(self, old, new):
194 def _rename_group(self, old, new):
195 """
195 """
196 Renames a group on filesystem
196 Renames a group on filesystem
197
197
198 :param group_name:
198 :param group_name:
199 """
199 """
200
200
201 if old == new:
201 if old == new:
202 log.debug('skipping group rename')
202 log.debug('skipping group rename')
203 return
203 return
204
204
205 log.debug('renaming repository group from %s to %s', old, new)
205 log.debug('renaming repository group from %s to %s', old, new)
206
206
207 old_path = os.path.join(self.repos_path, old)
207 old_path = os.path.join(self.repos_path, old)
208 new_path = os.path.join(self.repos_path, new)
208 new_path = os.path.join(self.repos_path, new)
209
209
210 log.debug('renaming repos paths from %s to %s', old_path, new_path)
210 log.debug('renaming repos paths from %s to %s', old_path, new_path)
211
211
212 if os.path.isdir(new_path):
212 if os.path.isdir(new_path):
213 raise Exception('Was trying to rename to already '
213 raise Exception('Was trying to rename to already '
214 'existing dir %s' % new_path)
214 'existing dir %s' % new_path)
215 shutil.move(old_path, new_path)
215 shutil.move(old_path, new_path)
216
216
217 def _delete_filesystem_group(self, group, force_delete=False):
217 def _delete_filesystem_group(self, group, force_delete=False):
218 """
218 """
219 Deletes a group from a filesystem
219 Deletes a group from a filesystem
220
220
221 :param group: instance of group from database
221 :param group: instance of group from database
222 :param force_delete: use shutil rmtree to remove all objects
222 :param force_delete: use shutil rmtree to remove all objects
223 """
223 """
224 paths = group.full_path.split(RepoGroup.url_sep())
224 paths = group.full_path.split(RepoGroup.url_sep())
225 paths = os.sep.join(paths)
225 paths = os.sep.join(paths)
226
226
227 rm_path = os.path.join(self.repos_path, paths)
227 rm_path = os.path.join(self.repos_path, paths)
228 log.info("Removing group %s", rm_path)
228 log.info("Removing group %s", rm_path)
229 # delete only if that path really exists
229 # delete only if that path really exists
230 if os.path.isdir(rm_path):
230 if os.path.isdir(rm_path):
231 if force_delete:
231 if force_delete:
232 shutil.rmtree(rm_path)
232 shutil.rmtree(rm_path)
233 else:
233 else:
234 # archive that group
234 # archive that group
235 _now = datetime.datetime.now()
235 _now = datetime.datetime.now()
236 _ms = str(_now.microsecond).rjust(6, '0')
236 _ms = str(_now.microsecond).rjust(6, '0')
237 _d = 'rm__%s_GROUP_%s' % (
237 _d = 'rm__%s_GROUP_%s' % (
238 _now.strftime('%Y%m%d_%H%M%S_' + _ms), group.name)
238 _now.strftime('%Y%m%d_%H%M%S_' + _ms), group.name)
239 shutil.move(rm_path, os.path.join(self.repos_path, _d))
239 shutil.move(rm_path, os.path.join(self.repos_path, _d))
240
240
241 def create(self, group_name, group_description, owner, just_db=False,
241 def create(self, group_name, group_description, owner, just_db=False,
242 copy_permissions=False, personal=None, commit_early=True):
242 copy_permissions=False, personal=None, commit_early=True):
243
243
244 (group_name_cleaned,
244 (group_name_cleaned,
245 parent_group_name) = RepoGroupModel()._get_group_name_and_parent(group_name)
245 parent_group_name) = RepoGroupModel()._get_group_name_and_parent(group_name)
246
246
247 parent_group = None
247 parent_group = None
248 if parent_group_name:
248 if parent_group_name:
249 parent_group = self._get_repo_group(parent_group_name)
249 parent_group = self._get_repo_group(parent_group_name)
250 if not parent_group:
250 if not parent_group:
251 # we tried to create a nested group, but the parent group
251 # we tried to create a nested group, but the parent group
252 # does not exist
252 # does not exist
253 raise ValueError(
253 raise ValueError(
254 'Parent group `%s` given in `%s` group name '
254 'Parent group `%s` given in `%s` group name '
255 'is not yet existing.' % (parent_group_name, group_name))
255 'is not yet existing.' % (parent_group_name, group_name))
256
256
257 # because we are doing a cleanup, we need to check if such directory
257 # because we are doing a cleanup, we need to check if such directory
258 # already exists. If we don't do that we can accidentally delete
258 # already exists. If we don't do that we can accidentally delete
259 # existing directory via cleanup that can cause data issues, since
259 # existing directory via cleanup that can cause data issues, since
260 # delete does a folder rename to special syntax later cleanup
260 # delete does a folder rename to special syntax later cleanup
261 # functions can delete this
261 # functions can delete this
262 cleanup_group = self.check_exist_filesystem(group_name,
262 cleanup_group = self.check_exist_filesystem(group_name,
263 exc_on_failure=False)
263 exc_on_failure=False)
264 user = self._get_user(owner)
264 user = self._get_user(owner)
265 if not user:
265 if not user:
266 raise ValueError('Owner %s not found as rhodecode user' % owner)
266 raise ValueError('Owner %s not found as rhodecode user' % owner)
267
267
268 try:
268 try:
269 new_repo_group = RepoGroup()
269 new_repo_group = RepoGroup()
270 new_repo_group.user = user
270 new_repo_group.user = user
271 new_repo_group.group_description = group_description or group_name
271 new_repo_group.group_description = group_description or group_name
272 new_repo_group.parent_group = parent_group
272 new_repo_group.parent_group = parent_group
273 new_repo_group.group_name = group_name
273 new_repo_group.group_name = group_name
274 new_repo_group.personal = personal
274 new_repo_group.personal = personal
275
275
276 self.sa.add(new_repo_group)
276 self.sa.add(new_repo_group)
277
277
278 # create an ADMIN permission for owner except if we're super admin,
278 # create an ADMIN permission for owner except if we're super admin,
279 # later owner should go into the owner field of groups
279 # later owner should go into the owner field of groups
280 if not user.is_admin:
280 if not user.is_admin:
281 self.grant_user_permission(repo_group=new_repo_group,
281 self.grant_user_permission(repo_group=new_repo_group,
282 user=owner, perm='group.admin')
282 user=owner, perm='group.admin')
283
283
284 if parent_group and copy_permissions:
284 if parent_group and copy_permissions:
285 # copy permissions from parent
285 # copy permissions from parent
286 user_perms = UserRepoGroupToPerm.query() \
286 user_perms = UserRepoGroupToPerm.query() \
287 .filter(UserRepoGroupToPerm.group == parent_group).all()
287 .filter(UserRepoGroupToPerm.group == parent_group).all()
288
288
289 group_perms = UserGroupRepoGroupToPerm.query() \
289 group_perms = UserGroupRepoGroupToPerm.query() \
290 .filter(UserGroupRepoGroupToPerm.group == parent_group).all()
290 .filter(UserGroupRepoGroupToPerm.group == parent_group).all()
291
291
292 for perm in user_perms:
292 for perm in user_perms:
293 # don't copy over the permission for the user who is creating
293 # don't copy over the permission for the user who is creating
294 # this group; if they are not a super admin they get the admin
294 # this group; if they are not a super admin they get the admin
295 # permission set above
295 # permission set above
296 if perm.user != user or user.is_admin:
296 if perm.user != user or user.is_admin:
297 UserRepoGroupToPerm.create(
297 UserRepoGroupToPerm.create(
298 perm.user, new_repo_group, perm.permission)
298 perm.user, new_repo_group, perm.permission)
299
299
300 for perm in group_perms:
300 for perm in group_perms:
301 UserGroupRepoGroupToPerm.create(
301 UserGroupRepoGroupToPerm.create(
302 perm.users_group, new_repo_group, perm.permission)
302 perm.users_group, new_repo_group, perm.permission)
303 else:
303 else:
304 perm_obj = self._create_default_perms(new_repo_group)
304 perm_obj = self._create_default_perms(new_repo_group)
305 self.sa.add(perm_obj)
305 self.sa.add(perm_obj)
306
306
307 # now commit the changes early, so we are sure everything is in
307 # now commit the changes early, so we are sure everything is in
308 # the database.
308 # the database.
309 if commit_early:
309 if commit_early:
310 self.sa.commit()
310 self.sa.commit()
311 if not just_db:
311 if not just_db:
312 self._create_group(new_repo_group.group_name)
312 self._create_group(new_repo_group.group_name)
313
313
314 # trigger the post hook
314 # trigger the post hook
315 from rhodecode.lib import hooks_base
315 from rhodecode.lib import hooks_base
316 repo_group = RepoGroup.get_by_group_name(group_name)
316 repo_group = RepoGroup.get_by_group_name(group_name)
317
317
318 # update repo group commit caches initially
318 # update repo group commit caches initially
319 repo_group.update_commit_cache()
319 repo_group.update_commit_cache()
320
320
321 hooks_base.create_repository_group(
321 hooks_base.create_repository_group(
322 created_by=user.username, **repo_group.get_dict())
322 created_by=user.username, **repo_group.get_dict())
323
323
324 # Trigger create event.
324 # Trigger create event.
325 events.trigger(events.RepoGroupCreateEvent(repo_group))
325 events.trigger(events.RepoGroupCreateEvent(repo_group))
326
326
327 return new_repo_group
327 return new_repo_group
328 except Exception:
328 except Exception:
329 self.sa.rollback()
329 self.sa.rollback()
330 log.exception('Exception occurred when creating repository group, '
330 log.exception('Exception occurred when creating repository group, '
331 'doing cleanup...')
331 'doing cleanup...')
332 # rollback things manually !
332 # rollback things manually !
333 repo_group = RepoGroup.get_by_group_name(group_name)
333 repo_group = RepoGroup.get_by_group_name(group_name)
334 if repo_group:
334 if repo_group:
335 RepoGroup.delete(repo_group.group_id)
335 RepoGroup.delete(repo_group.group_id)
336 self.sa.commit()
336 self.sa.commit()
337 if cleanup_group:
337 if cleanup_group:
338 RepoGroupModel()._delete_filesystem_group(repo_group)
338 RepoGroupModel()._delete_filesystem_group(repo_group)
339 raise
339 raise
340
340
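The try/except above follows a "commit early, create on disk, undo both on failure" shape; note that the directory is only removed during cleanup when check_exist_filesystem reported it did not exist beforehand. A condensed, stdlib-only sketch of that ordering, with a plain dict standing in for the DB session:

import os
import shutil

def create_group_record(db, repos_root, group_name):
    create_path = os.path.join(repos_root, group_name)
    # remember whether the dir was absent before we started, like
    # check_exist_filesystem(..., exc_on_failure=False)
    cleanup_allowed = not os.path.isdir(create_path)
    try:
        db['groups'].append(group_name)          # pretend insert + early commit
        os.makedirs(create_path, mode=0o755)     # then create on the filesystem
        return group_name
    except Exception:
        if group_name in db['groups']:           # manual rollback of the record
            db['groups'].remove(group_name)
        if cleanup_allowed and os.path.isdir(create_path):
            shutil.rmtree(create_path)           # only remove what we created
        raise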
341 def update_permissions(
341 def update_permissions(
342 self, repo_group, perm_additions=None, perm_updates=None,
342 self, repo_group, perm_additions=None, perm_updates=None,
343 perm_deletions=None, recursive=None, check_perms=True,
343 perm_deletions=None, recursive=None, check_perms=True,
344 cur_user=None):
344 cur_user=None):
345 from rhodecode.model.repo import RepoModel
345 from rhodecode.model.repo import RepoModel
346 from rhodecode.lib.auth import HasUserGroupPermissionAny
346 from rhodecode.lib.auth import HasUserGroupPermissionAny
347
347
348 if not perm_additions:
348 if not perm_additions:
349 perm_additions = []
349 perm_additions = []
350 if not perm_updates:
350 if not perm_updates:
351 perm_updates = []
351 perm_updates = []
352 if not perm_deletions:
352 if not perm_deletions:
353 perm_deletions = []
353 perm_deletions = []
354
354
355 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
355 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
356
356
357 changes = {
357 changes = {
358 'added': [],
358 'added': [],
359 'updated': [],
359 'updated': [],
360 'deleted': [],
360 'deleted': [],
361 'default_user_changed': None
361 'default_user_changed': None
362 }
362 }
363
363
364 def _set_perm_user(obj, user, perm):
364 def _set_perm_user(obj, user, perm):
365 if isinstance(obj, RepoGroup):
365 if isinstance(obj, RepoGroup):
366 self.grant_user_permission(
366 self.grant_user_permission(
367 repo_group=obj, user=user, perm=perm)
367 repo_group=obj, user=user, perm=perm)
368 elif isinstance(obj, Repository):
368 elif isinstance(obj, Repository):
369 # private repos will not allow to change the default
369 # private repos will not allow to change the default
370 # permissions using recursive mode
370 # permissions using recursive mode
371 if obj.private and user == User.DEFAULT_USER:
371 if obj.private and user == User.DEFAULT_USER:
372 return
372 return
373
373
374 # we set group permission but we have to switch to repo
374 # we set group permission but we have to switch to repo
375 # permission
375 # permission
376 perm = perm.replace('group.', 'repository.')
376 perm = perm.replace('group.', 'repository.')
377 RepoModel().grant_user_permission(
377 RepoModel().grant_user_permission(
378 repo=obj, user=user, perm=perm)
378 repo=obj, user=user, perm=perm)
379
379
380 def _set_perm_group(obj, users_group, perm):
380 def _set_perm_group(obj, users_group, perm):
381 if isinstance(obj, RepoGroup):
381 if isinstance(obj, RepoGroup):
382 self.grant_user_group_permission(
382 self.grant_user_group_permission(
383 repo_group=obj, group_name=users_group, perm=perm)
383 repo_group=obj, group_name=users_group, perm=perm)
384 elif isinstance(obj, Repository):
384 elif isinstance(obj, Repository):
385 # we set group permission but we have to switch to repo
385 # we set group permission but we have to switch to repo
386 # permission
386 # permission
387 perm = perm.replace('group.', 'repository.')
387 perm = perm.replace('group.', 'repository.')
388 RepoModel().grant_user_group_permission(
388 RepoModel().grant_user_group_permission(
389 repo=obj, group_name=users_group, perm=perm)
389 repo=obj, group_name=users_group, perm=perm)
390
390
391 def _revoke_perm_user(obj, user):
391 def _revoke_perm_user(obj, user):
392 if isinstance(obj, RepoGroup):
392 if isinstance(obj, RepoGroup):
393 self.revoke_user_permission(repo_group=obj, user=user)
393 self.revoke_user_permission(repo_group=obj, user=user)
394 elif isinstance(obj, Repository):
394 elif isinstance(obj, Repository):
395 RepoModel().revoke_user_permission(repo=obj, user=user)
395 RepoModel().revoke_user_permission(repo=obj, user=user)
396
396
397 def _revoke_perm_group(obj, user_group):
397 def _revoke_perm_group(obj, user_group):
398 if isinstance(obj, RepoGroup):
398 if isinstance(obj, RepoGroup):
399 self.revoke_user_group_permission(
399 self.revoke_user_group_permission(
400 repo_group=obj, group_name=user_group)
400 repo_group=obj, group_name=user_group)
401 elif isinstance(obj, Repository):
401 elif isinstance(obj, Repository):
402 RepoModel().revoke_user_group_permission(
402 RepoModel().revoke_user_group_permission(
403 repo=obj, group_name=user_group)
403 repo=obj, group_name=user_group)
404
404
405 # start updates
405 # start updates
406 log.debug('Now updating permissions for %s in recursive mode:%s',
406 log.debug('Now updating permissions for %s in recursive mode:%s',
407 repo_group, recursive)
407 repo_group, recursive)
408
408
409 # initialize check function, we'll call that multiple times
409 # initialize check function, we'll call that multiple times
410 has_group_perm = HasUserGroupPermissionAny(*req_perms)
410 has_group_perm = HasUserGroupPermissionAny(*req_perms)
411
411
412 for obj in repo_group.recursive_groups_and_repos():
412 for obj in repo_group.recursive_groups_and_repos():
413 # iterated obj is an instance of a repos group or repository in
413 # iterated obj is an instance of a repos group or repository in
414 # that group, recursive option can be: none, repos, groups, all
414 # that group, recursive option can be: none, repos, groups, all
415 if recursive == 'all':
415 if recursive == 'all':
416 obj = obj
416 obj = obj
417 elif recursive == 'repos':
417 elif recursive == 'repos':
418 # skip groups, other than this one
418 # skip groups, other than this one
419 if isinstance(obj, RepoGroup) and not obj == repo_group:
419 if isinstance(obj, RepoGroup) and not obj == repo_group:
420 continue
420 continue
421 elif recursive == 'groups':
421 elif recursive == 'groups':
422 # skip repos
422 # skip repos
423 if isinstance(obj, Repository):
423 if isinstance(obj, Repository):
424 continue
424 continue
425 else: # recursive == 'none':
425 else: # recursive == 'none':
426 # DEFAULT option - don't apply to iterated objects
426 # DEFAULT option - don't apply to iterated objects
427 # also we do a break at the end of this loop if we are not
427 # also we do a break at the end of this loop if we are not
428 # in recursive mode
428 # in recursive mode
429 obj = repo_group
429 obj = repo_group
430
430
431 change_obj = obj.get_api_data()
431 change_obj = obj.get_api_data()
432
432
433 # update permissions
433 # update permissions
434 for member_id, perm, member_type in perm_updates:
434 for member_id, perm, member_type in perm_updates:
435 member_id = int(member_id)
435 member_id = int(member_id)
436 if member_type == 'user':
436 if member_type == 'user':
437 member_name = User.get(member_id).username
437 member_name = User.get(member_id).username
438 if isinstance(obj, RepoGroup) and obj == repo_group and member_name == User.DEFAULT_USER:
438 if isinstance(obj, RepoGroup) and obj == repo_group and member_name == User.DEFAULT_USER:
439 # NOTE(dan): detect if we changed permissions for default user
439 # NOTE(dan): detect if we changed permissions for default user
440 perm_obj = self.sa.query(UserRepoGroupToPerm) \
440 perm_obj = self.sa.query(UserRepoGroupToPerm) \
441 .filter(UserRepoGroupToPerm.user_id == member_id) \
441 .filter(UserRepoGroupToPerm.user_id == member_id) \
442 .filter(UserRepoGroupToPerm.group == repo_group) \
442 .filter(UserRepoGroupToPerm.group == repo_group) \
443 .scalar()
443 .scalar()
444 if perm_obj and perm_obj.permission.permission_name != perm:
444 if perm_obj and perm_obj.permission.permission_name != perm:
445 changes['default_user_changed'] = True
445 changes['default_user_changed'] = True
446
446
447 # this updates also current one if found
447 # this updates also current one if found
448 _set_perm_user(obj, user=member_id, perm=perm)
448 _set_perm_user(obj, user=member_id, perm=perm)
449 elif member_type == 'user_group':
449 elif member_type == 'user_group':
450 member_name = UserGroup.get(member_id).users_group_name
450 member_name = UserGroup.get(member_id).users_group_name
451 if not check_perms or has_group_perm(member_name,
451 if not check_perms or has_group_perm(member_name,
452 user=cur_user):
452 user=cur_user):
453 _set_perm_group(obj, users_group=member_id, perm=perm)
453 _set_perm_group(obj, users_group=member_id, perm=perm)
454 else:
454 else:
455 raise ValueError("member_type must be 'user' or 'user_group' "
455 raise ValueError("member_type must be 'user' or 'user_group' "
456 "got {} instead".format(member_type))
456 "got {} instead".format(member_type))
457
457
458 changes['updated'].append(
458 changes['updated'].append(
459 {'change_obj': change_obj, 'type': member_type,
459 {'change_obj': change_obj, 'type': member_type,
460 'id': member_id, 'name': member_name, 'new_perm': perm})
460 'id': member_id, 'name': member_name, 'new_perm': perm})
461
461
462 # set new permissions
462 # set new permissions
463 for member_id, perm, member_type in perm_additions:
463 for member_id, perm, member_type in perm_additions:
464 member_id = int(member_id)
464 member_id = int(member_id)
465 if member_type == 'user':
465 if member_type == 'user':
466 member_name = User.get(member_id).username
466 member_name = User.get(member_id).username
467 _set_perm_user(obj, user=member_id, perm=perm)
467 _set_perm_user(obj, user=member_id, perm=perm)
468 elif member_type == 'user_group':
468 elif member_type == 'user_group':
469 # check if we have permissions to alter this usergroup
469 # check if we have permissions to alter this usergroup
470 member_name = UserGroup.get(member_id).users_group_name
470 member_name = UserGroup.get(member_id).users_group_name
471 if not check_perms or has_group_perm(member_name,
471 if not check_perms or has_group_perm(member_name,
472 user=cur_user):
472 user=cur_user):
473 _set_perm_group(obj, users_group=member_id, perm=perm)
473 _set_perm_group(obj, users_group=member_id, perm=perm)
474 else:
474 else:
475 raise ValueError("member_type must be 'user' or 'user_group' "
475 raise ValueError("member_type must be 'user' or 'user_group' "
476 "got {} instead".format(member_type))
476 "got {} instead".format(member_type))
477
477
478 changes['added'].append(
478 changes['added'].append(
479 {'change_obj': change_obj, 'type': member_type,
479 {'change_obj': change_obj, 'type': member_type,
480 'id': member_id, 'name': member_name, 'new_perm': perm})
480 'id': member_id, 'name': member_name, 'new_perm': perm})
481
481
482 # delete permissions
482 # delete permissions
483 for member_id, perm, member_type in perm_deletions:
483 for member_id, perm, member_type in perm_deletions:
484 member_id = int(member_id)
484 member_id = int(member_id)
485 if member_type == 'user':
485 if member_type == 'user':
486 member_name = User.get(member_id).username
486 member_name = User.get(member_id).username
487 _revoke_perm_user(obj, user=member_id)
487 _revoke_perm_user(obj, user=member_id)
488 elif member_type == 'user_group':
488 elif member_type == 'user_group':
489 # check if we have permissions to alter this usergroup
489 # check if we have permissions to alter this usergroup
490 member_name = UserGroup.get(member_id).users_group_name
490 member_name = UserGroup.get(member_id).users_group_name
491 if not check_perms or has_group_perm(member_name,
491 if not check_perms or has_group_perm(member_name,
492 user=cur_user):
492 user=cur_user):
493 _revoke_perm_group(obj, user_group=member_id)
493 _revoke_perm_group(obj, user_group=member_id)
494 else:
494 else:
495 raise ValueError("member_type must be 'user' or 'user_group' "
495 raise ValueError("member_type must be 'user' or 'user_group' "
496 "got {} instead".format(member_type))
496 "got {} instead".format(member_type))
497
497
498 changes['deleted'].append(
498 changes['deleted'].append(
499 {'change_obj': change_obj, 'type': member_type,
499 {'change_obj': change_obj, 'type': member_type,
500 'id': member_id, 'name': member_name, 'new_perm': perm})
500 'id': member_id, 'name': member_name, 'new_perm': perm})
501
501
502 # if it's not a recursive call for all, repos or groups
502 # if it's not a recursive call for all, repos or groups
503 # break the loop and don't proceed with other changes
503 # break the loop and don't proceed with other changes
504 if recursive not in ['all', 'repos', 'groups']:
504 if recursive not in ['all', 'repos', 'groups']:
505 break
505 break
506
506
507 return changes
507 return changes
508
508
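For orientation, the permission lists consumed above are triples of (member_id, permission_name, member_type), and recursive is one of 'none', 'repos', 'groups', 'all'. The call below only illustrates that shape; the member IDs and the current_user object are made up:

perm_updates = [
    ('4', 'group.admin', 'user'),        # user with id 4 -> group.admin
    ('7', 'group.read', 'user_group'),   # user group with id 7 -> group.read
]
changes = RepoGroupModel().update_permissions(
    repo_group='team/backend',
    perm_updates=perm_updates,
    recursive='none',                    # only touch the group itself
    cur_user=current_user)               # hypothetical acting user
# changes -> {'added': [...], 'updated': [...], 'deleted': [...],
#             'default_user_changed': None or True}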
509 def update(self, repo_group, form_data):
509 def update(self, repo_group, form_data):
510 try:
510 try:
511 repo_group = self._get_repo_group(repo_group)
511 repo_group = self._get_repo_group(repo_group)
512 old_path = repo_group.full_path
512 old_path = repo_group.full_path
513
513
514 # change properties
514 # change properties
515 if 'group_description' in form_data:
515 if 'group_description' in form_data:
516 repo_group.group_description = form_data['group_description']
516 repo_group.group_description = form_data['group_description']
517
517
518 if 'enable_locking' in form_data:
518 if 'enable_locking' in form_data:
519 repo_group.enable_locking = form_data['enable_locking']
519 repo_group.enable_locking = form_data['enable_locking']
520
520
521 if 'group_parent_id' in form_data:
521 if 'group_parent_id' in form_data:
522 parent_group = (
522 parent_group = (
523 self._get_repo_group(form_data['group_parent_id']))
523 self._get_repo_group(form_data['group_parent_id']))
524 repo_group.group_parent_id = (
524 repo_group.group_parent_id = (
525 parent_group.group_id if parent_group else None)
525 parent_group.group_id if parent_group else None)
526 repo_group.parent_group = parent_group
526 repo_group.parent_group = parent_group
527
527
528 # mikhail: to update the full_path, we have to explicitly
528 # mikhail: to update the full_path, we have to explicitly
529 # update group_name
529 # update group_name
530 group_name = form_data.get('group_name', repo_group.name)
530 group_name = form_data.get('group_name', repo_group.name)
531 repo_group.group_name = repo_group.get_new_name(group_name)
531 repo_group.group_name = repo_group.get_new_name(group_name)
532
532
533 new_path = repo_group.full_path
533 new_path = repo_group.full_path
534
534
535 affected_user_ids = []
535 affected_user_ids = []
536 if 'user' in form_data:
536 if 'user' in form_data:
537 old_owner_id = repo_group.user.user_id
537 old_owner_id = repo_group.user.user_id
538 new_owner = User.get_by_username(form_data['user'])
538 new_owner = User.get_by_username(form_data['user'])
539 repo_group.user = new_owner
539 repo_group.user = new_owner
540
540
541 if old_owner_id != new_owner.user_id:
541 if old_owner_id != new_owner.user_id:
542 affected_user_ids = [new_owner.user_id, old_owner_id]
542 affected_user_ids = [new_owner.user_id, old_owner_id]
543
543
544 self.sa.add(repo_group)
544 self.sa.add(repo_group)
545
545
546 # iterate over all members of this group and do fixes
546 # iterate over all members of this group and do fixes
547 # set locking if given
547 # set locking if given
548 # if obj is a RepoGroup also fix the name of the group according
548 # if obj is a RepoGroup also fix the name of the group according
549 # to the parent
549 # to the parent
550 # if obj is a Repo fix its name
550 # if obj is a Repo fix its name
551 # this can be a potentially heavy operation
551 # this can be a potentially heavy operation
552 for obj in repo_group.recursive_groups_and_repos():
552 for obj in repo_group.recursive_groups_and_repos():
553 # set the value from its parent
553 # set the value from its parent
554 obj.enable_locking = repo_group.enable_locking
554 obj.enable_locking = repo_group.enable_locking
555 if isinstance(obj, RepoGroup):
555 if isinstance(obj, RepoGroup):
556 new_name = obj.get_new_name(obj.name)
556 new_name = obj.get_new_name(obj.name)
557 log.debug('Fixing group %s to new name %s',
557 log.debug('Fixing group %s to new name %s',
558 obj.group_name, new_name)
558 obj.group_name, new_name)
559 obj.group_name = new_name
559 obj.group_name = new_name
560
560
561 elif isinstance(obj, Repository):
561 elif isinstance(obj, Repository):
562 # we need to get all repositories from this new group and
562 # we need to get all repositories from this new group and
563 # rename them accordingly to new group path
563 # rename them accordingly to new group path
564 new_name = obj.get_new_name(obj.just_name)
564 new_name = obj.get_new_name(obj.just_name)
565 log.debug('Fixing repo %s to new name %s',
565 log.debug('Fixing repo %s to new name %s',
566 obj.repo_name, new_name)
566 obj.repo_name, new_name)
567 obj.repo_name = new_name
567 obj.repo_name = new_name
568
568
569 self.sa.add(obj)
569 self.sa.add(obj)
570
570
571 self._rename_group(old_path, new_path)
571 self._rename_group(old_path, new_path)
572
572
573 # Trigger update event.
573 # Trigger update event.
574 events.trigger(events.RepoGroupUpdateEvent(repo_group))
574 events.trigger(events.RepoGroupUpdateEvent(repo_group))
575
575
576 if affected_user_ids:
576 if affected_user_ids:
577 PermissionModel().trigger_permission_flush(affected_user_ids)
577 PermissionModel().trigger_permission_flush(affected_user_ids)
578
578
579 return repo_group
579 return repo_group
580 except Exception:
580 except Exception:
581 log.error(traceback.format_exc())
581 log.error(traceback.format_exc())
582 raise
582 raise
583
583
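The recursive fix-up above effectively re-prefixes every nested group and repository name with the group's new path (that is what get_new_name does per object). A path-only sketch of that rewrite, independent of the DB objects:

def rebase_names(old_prefix, new_prefix, full_names):
    rebased = []
    for name in full_names:
        if name == old_prefix or name.startswith(old_prefix + '/'):
            rebased.append(new_prefix + name[len(old_prefix):])
        else:
            rebased.append(name)          # outside the renamed group: untouched
    return rebased

print(rebase_names('team', 'platform/team',
                   ['team', 'team/backend', 'team/backend/billing', 'other/repo']))
# ['platform/team', 'platform/team/backend', 'platform/team/backend/billing', 'other/repo']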
584 def delete(self, repo_group, force_delete=False, fs_remove=True):
584 def delete(self, repo_group, force_delete=False, fs_remove=True):
585 repo_group = self._get_repo_group(repo_group)
585 repo_group = self._get_repo_group(repo_group)
586 if not repo_group:
586 if not repo_group:
587 return False
587 return False
588 try:
588 try:
589 self.sa.delete(repo_group)
589 self.sa.delete(repo_group)
590 if fs_remove:
590 if fs_remove:
591 self._delete_filesystem_group(repo_group, force_delete)
591 self._delete_filesystem_group(repo_group, force_delete)
592 else:
592 else:
593 log.debug('skipping removal from filesystem')
593 log.debug('skipping removal from filesystem')
594
594
595 # Trigger delete event.
595 # Trigger delete event.
596 events.trigger(events.RepoGroupDeleteEvent(repo_group))
596 events.trigger(events.RepoGroupDeleteEvent(repo_group))
597 return True
597 return True
598
598
599 except Exception:
599 except Exception:
600 log.error('Error removing repo_group %s', repo_group)
600 log.error('Error removing repo_group %s', repo_group)
601 raise
601 raise
602
602
603 def grant_user_permission(self, repo_group, user, perm):
603 def grant_user_permission(self, repo_group, user, perm):
604 """
604 """
605 Grant permission for user on given repository group, or update
605 Grant permission for user on given repository group, or update
606 existing one if found
606 existing one if found
607
607
608 :param repo_group: Instance of RepoGroup, repositories_group_id,
608 :param repo_group: Instance of RepoGroup, repositories_group_id,
609 or repositories_group name
609 or repositories_group name
610 :param user: Instance of User, user_id or username
610 :param user: Instance of User, user_id or username
611 :param perm: Instance of Permission, or permission_name
611 :param perm: Instance of Permission, or permission_name
612 """
612 """
613
613
614 repo_group = self._get_repo_group(repo_group)
614 repo_group = self._get_repo_group(repo_group)
615 user = self._get_user(user)
615 user = self._get_user(user)
616 permission = self._get_perm(perm)
616 permission = self._get_perm(perm)
617
617
618 # check if we have that permission already
618 # check if we have that permission already
619 obj = self.sa.query(UserRepoGroupToPerm)\
619 obj = self.sa.query(UserRepoGroupToPerm)\
620 .filter(UserRepoGroupToPerm.user == user)\
620 .filter(UserRepoGroupToPerm.user == user)\
621 .filter(UserRepoGroupToPerm.group == repo_group)\
621 .filter(UserRepoGroupToPerm.group == repo_group)\
622 .scalar()
622 .scalar()
623 if obj is None:
623 if obj is None:
624 # create new !
624 # create new !
625 obj = UserRepoGroupToPerm()
625 obj = UserRepoGroupToPerm()
626 obj.group = repo_group
626 obj.group = repo_group
627 obj.user = user
627 obj.user = user
628 obj.permission = permission
628 obj.permission = permission
629 self.sa.add(obj)
629 self.sa.add(obj)
630 log.debug('Granted perm %s to %s on %s', perm, user, repo_group)
630 log.debug('Granted perm %s to %s on %s', perm, user, repo_group)
631 action_logger_generic(
631 action_logger_generic(
632 'granted permission: {} to user: {} on repogroup: {}'.format(
632 'granted permission: {} to user: {} on repogroup: {}'.format(
633 perm, user, repo_group), namespace='security.repogroup')
633 perm, user, repo_group), namespace='security.repogroup')
634 return obj
634 return obj
635
635
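grant_user_permission (and its user-group counterpart further down) is a small "get or create, then overwrite" upsert: look the row up, create it if missing, and always set the permission. A dictionary-based sketch of the same idea:

def grant(perm_table, user_id, group_id, perm_name):
    key = (user_id, group_id)
    row = perm_table.get(key)                  # like the .scalar() lookup above
    if row is None:
        row = {'user_id': user_id, 'group_id': group_id}
        perm_table[key] = row                  # create new
    row['permission'] = perm_name              # update existing or fresh row
    return row

perms = {}
grant(perms, user_id=4, group_id=9, perm_name='group.read')
grant(perms, user_id=4, group_id=9, perm_name='group.admin')   # upgrades in place
print(perms[(4, 9)]['permission'])             # group.admin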
636 def revoke_user_permission(self, repo_group, user):
636 def revoke_user_permission(self, repo_group, user):
637 """
637 """
638 Revoke permission for user on given repository group
638 Revoke permission for user on given repository group
639
639
640 :param repo_group: Instance of RepoGroup, repositories_group_id,
640 :param repo_group: Instance of RepoGroup, repositories_group_id,
641 or repositories_group name
641 or repositories_group name
642 :param user: Instance of User, user_id or username
642 :param user: Instance of User, user_id or username
643 """
643 """
644
644
645 repo_group = self._get_repo_group(repo_group)
645 repo_group = self._get_repo_group(repo_group)
646 user = self._get_user(user)
646 user = self._get_user(user)
647
647
648 obj = self.sa.query(UserRepoGroupToPerm)\
648 obj = self.sa.query(UserRepoGroupToPerm)\
649 .filter(UserRepoGroupToPerm.user == user)\
649 .filter(UserRepoGroupToPerm.user == user)\
650 .filter(UserRepoGroupToPerm.group == repo_group)\
650 .filter(UserRepoGroupToPerm.group == repo_group)\
651 .scalar()
651 .scalar()
652 if obj:
652 if obj:
653 self.sa.delete(obj)
653 self.sa.delete(obj)
654 log.debug('Revoked perm on %s on %s', repo_group, user)
654 log.debug('Revoked perm on %s on %s', repo_group, user)
655 action_logger_generic(
655 action_logger_generic(
656 'revoked permission from user: {} on repogroup: {}'.format(
656 'revoked permission from user: {} on repogroup: {}'.format(
657 user, repo_group), namespace='security.repogroup')
657 user, repo_group), namespace='security.repogroup')
658
658
659 def grant_user_group_permission(self, repo_group, group_name, perm):
659 def grant_user_group_permission(self, repo_group, group_name, perm):
660 """
660 """
661 Grant permission for user group on given repository group, or update
661 Grant permission for user group on given repository group, or update
662 existing one if found
662 existing one if found
663
663
664 :param repo_group: Instance of RepoGroup, repositories_group_id,
664 :param repo_group: Instance of RepoGroup, repositories_group_id,
665 or repositories_group name
665 or repositories_group name
666 :param group_name: Instance of UserGroup, users_group_id,
666 :param group_name: Instance of UserGroup, users_group_id,
667 or user group name
667 or user group name
668 :param perm: Instance of Permission, or permission_name
668 :param perm: Instance of Permission, or permission_name
669 """
669 """
670 repo_group = self._get_repo_group(repo_group)
670 repo_group = self._get_repo_group(repo_group)
671 group_name = self._get_user_group(group_name)
671 group_name = self._get_user_group(group_name)
672 permission = self._get_perm(perm)
672 permission = self._get_perm(perm)
673
673
674 # check if we have that permission already
674 # check if we have that permission already
675 obj = self.sa.query(UserGroupRepoGroupToPerm)\
675 obj = self.sa.query(UserGroupRepoGroupToPerm)\
676 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
676 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
677 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
677 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
678 .scalar()
678 .scalar()
679
679
680 if obj is None:
680 if obj is None:
681 # create new
681 # create new
682 obj = UserGroupRepoGroupToPerm()
682 obj = UserGroupRepoGroupToPerm()
683
683
684 obj.group = repo_group
684 obj.group = repo_group
685 obj.users_group = group_name
685 obj.users_group = group_name
686 obj.permission = permission
686 obj.permission = permission
687 self.sa.add(obj)
687 self.sa.add(obj)
688 log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group)
688 log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group)
689 action_logger_generic(
689 action_logger_generic(
690 'granted permission: {} to usergroup: {} on repogroup: {}'.format(
690 'granted permission: {} to usergroup: {} on repogroup: {}'.format(
691 perm, group_name, repo_group), namespace='security.repogroup')
691 perm, group_name, repo_group), namespace='security.repogroup')
692 return obj
692 return obj
693
693
694 def revoke_user_group_permission(self, repo_group, group_name):
694 def revoke_user_group_permission(self, repo_group, group_name):
695 """
695 """
696 Revoke permission for user group on given repository group
696 Revoke permission for user group on given repository group
697
697
698 :param repo_group: Instance of RepoGroup, repositories_group_id,
698 :param repo_group: Instance of RepoGroup, repositories_group_id,
699 or repositories_group name
699 or repositories_group name
700 :param group_name: Instance of UserGroup, users_group_id,
700 :param group_name: Instance of UserGroup, users_group_id,
701 or user group name
701 or user group name
702 """
702 """
703 repo_group = self._get_repo_group(repo_group)
703 repo_group = self._get_repo_group(repo_group)
704 group_name = self._get_user_group(group_name)
704 group_name = self._get_user_group(group_name)
705
705
706 obj = self.sa.query(UserGroupRepoGroupToPerm)\
706 obj = self.sa.query(UserGroupRepoGroupToPerm)\
707 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
707 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
708 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
708 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
709 .scalar()
709 .scalar()
710 if obj:
710 if obj:
711 self.sa.delete(obj)
711 self.sa.delete(obj)
712 log.debug('Revoked perm from %s on %s', group_name, repo_group)
712 log.debug('Revoked perm from %s on %s', group_name, repo_group)
713 action_logger_generic(
713 action_logger_generic(
714 'revoked permission from usergroup: {} on repogroup: {}'.format(
714 'revoked permission from usergroup: {} on repogroup: {}'.format(
715 group_name, repo_group), namespace='security.repogroup')
715 group_name, repo_group), namespace='security.repogroup')
716
716
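For orientation, a minimal usage sketch of the four grant/revoke helpers above (a sketch only, not part of this change; it assumes an initialized RhodeCode session, the usual rhodecode.model import paths, and purely illustrative group/user names):

    from rhodecode.model.meta import Session
    from rhodecode.model.repo_group import RepoGroupModel

    model = RepoGroupModel()

    # grant (or update) read access for a user and write access for a user group
    model.grant_user_permission(repo_group='docs', user='john', perm='group.read')
    model.grant_user_group_permission(repo_group='docs', group_name='devs', perm='group.write')
    Session().commit()

    # revoking deletes the matching *ToPerm row, if one exists
    model.revoke_user_permission(repo_group='docs', user='john')
    model.revoke_user_group_permission(repo_group='docs', group_name='devs')
    Session().commit()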
717 @classmethod
717 @classmethod
718 def update_commit_cache(cls, repo_groups=None):
718 def update_commit_cache(cls, repo_groups=None):
719 if not repo_groups:
719 if not repo_groups:
720 repo_groups = RepoGroup.getAll()
720 repo_groups = RepoGroup.getAll()
721 for repo_group in repo_groups:
721 for repo_group in repo_groups:
722 repo_group.update_commit_cache()
722 repo_group.update_commit_cache()
723
723
724 def get_repo_groups_as_dict(self, repo_group_list=None, admin=False,
724 def get_repo_groups_as_dict(self, repo_group_list=None, admin=False,
725 super_user_actions=False):
725 super_user_actions=False):
726
726
727 from pyramid.threadlocal import get_current_request
727 from pyramid.threadlocal import get_current_request
728 _render = get_current_request().get_partial_renderer(
728 _render = get_current_request().get_partial_renderer(
729 'rhodecode:templates/data_table/_dt_elements.mako')
729 'rhodecode:templates/data_table/_dt_elements.mako')
730 c = _render.get_call_context()
730 c = _render.get_call_context()
731 h = _render.get_helpers()
731 h = _render.get_helpers()
732
732
733 def quick_menu(repo_group_name):
733 def quick_menu(repo_group_name):
734 return _render('quick_repo_group_menu', repo_group_name)
734 return _render('quick_repo_group_menu', repo_group_name)
735
735
736 def repo_group_lnk(repo_group_name):
736 def repo_group_lnk(repo_group_name):
737 return _render('repo_group_name', repo_group_name)
737 return _render('repo_group_name', repo_group_name)
738
738
739 def last_change(last_change):
739 def last_change(last_change):
740 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
740 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
741 ts = time.time()
741 ts = time.time()
742 utc_offset = (datetime.datetime.fromtimestamp(ts)
742 utc_offset = (datetime.datetime.fromtimestamp(ts)
743 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
743 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
744 last_change = last_change + datetime.timedelta(seconds=utc_offset)
744 last_change = last_change + datetime.timedelta(seconds=utc_offset)
745 return _render("last_change", last_change)
745 return _render("last_change", last_change)
746
746
747 def desc(desc, personal):
747 def desc(desc, personal):
748 return _render(
748 return _render(
749 'repo_group_desc', desc, personal, c.visual.stylify_metatags)
749 'repo_group_desc', desc, personal, c.visual.stylify_metatags)
750
750
751 def repo_group_actions(repo_group_id, repo_group_name, gr_count):
751 def repo_group_actions(repo_group_id, repo_group_name, gr_count):
752 return _render(
752 return _render(
753 'repo_group_actions', repo_group_id, repo_group_name, gr_count)
753 'repo_group_actions', repo_group_id, repo_group_name, gr_count)
754
754
755 def repo_group_name(repo_group_name, children_groups):
755 def repo_group_name(repo_group_name, children_groups):
756 return _render("repo_group_name", repo_group_name, children_groups)
756 return _render("repo_group_name", repo_group_name, children_groups)
757
757
758 def user_profile(username):
758 def user_profile(username):
759 return _render('user_profile', username)
759 return _render('user_profile', username)
760
760
761 repo_group_data = []
761 repo_group_data = []
762 for group in repo_group_list:
762 for group in repo_group_list:
763 # NOTE(marcink): because we use only raw column we need to load it like that
763 # NOTE(marcink): because we use only raw column we need to load it like that
764 changeset_cache = RepoGroup._load_changeset_cache(
764 changeset_cache = RepoGroup._load_changeset_cache(
765 '', group._changeset_cache)
765 '', group._changeset_cache)
766 last_commit_change = RepoGroup._load_commit_change(changeset_cache)
766 last_commit_change = RepoGroup._load_commit_change(changeset_cache)
767 row = {
767 row = {
768 "menu": quick_menu(group.group_name),
768 "menu": quick_menu(group.group_name),
769 "name": repo_group_lnk(group.group_name),
769 "name": repo_group_lnk(group.group_name),
770 "name_raw": group.group_name,
770 "name_raw": group.group_name,
771
771
772 "last_change": last_change(last_commit_change),
772 "last_change": last_change(last_commit_change),
773
773
774 "last_changeset": "",
774 "last_changeset": "",
775 "last_changeset_raw": "",
775 "last_changeset_raw": "",
776
776
777 "desc": desc(h.escape(group.group_description), group.personal),
777 "desc": desc(h.escape(group.group_description), group.personal),
778 "top_level_repos": 0,
778 "top_level_repos": 0,
779 "owner": user_profile(group.User.username)
779 "owner": user_profile(group.User.username)
780 }
780 }
781 if admin:
781 if admin:
782 repo_count = group.repositories.count()
782 repo_count = group.repositories.count()
783 children_groups = map(
783 children_groups = map(
784 h.safe_unicode,
784 h.safe_unicode,
785 itertools.chain((g.name for g in group.parents),
785 itertools.chain((g.name for g in group.parents),
786 (x.name for x in [group])))
786 (x.name for x in [group])))
787 row.update({
787 row.update({
788 "action": repo_group_actions(
788 "action": repo_group_actions(
789 group.group_id, group.group_name, repo_count),
789 group.group_id, group.group_name, repo_count),
790 "top_level_repos": repo_count,
790 "top_level_repos": repo_count,
791 "name": repo_group_name(group.group_name, children_groups),
791 "name": repo_group_name(group.group_name, children_groups),
792
792
793 })
793 })
794 repo_group_data.append(row)
794 repo_group_data.append(row)
795
795
796 return repo_group_data
796 return repo_group_data
797
797
798 def get_repo_groups_data_table(
798 def get_repo_groups_data_table(
799 self, draw, start, limit,
799 self, draw, start, limit,
800 search_q, order_by, order_dir,
800 search_q, order_by, order_dir,
801 auth_user, repo_group_id):
801 auth_user, repo_group_id):
802 from rhodecode.model.scm import RepoGroupList
802 from rhodecode.model.scm import RepoGroupList
803
803
804 _perms = ['group.read', 'group.write', 'group.admin']
804 _perms = ['group.read', 'group.write', 'group.admin']
805 repo_groups = RepoGroup.query() \
805 repo_groups = RepoGroup.query() \
806 .filter(RepoGroup.group_parent_id == repo_group_id) \
806 .filter(RepoGroup.group_parent_id == repo_group_id) \
807 .all()
807 .all()
808 auth_repo_group_list = RepoGroupList(
808 auth_repo_group_list = RepoGroupList(
809 repo_groups, perm_set=_perms,
809 repo_groups, perm_set=_perms,
810 extra_kwargs=dict(user=auth_user))
810 extra_kwargs=dict(user=auth_user))
811
811
812 allowed_ids = [-1]
812 allowed_ids = [-1]
813 for repo_group in auth_repo_group_list:
813 for repo_group in auth_repo_group_list:
814 allowed_ids.append(repo_group.group_id)
814 allowed_ids.append(repo_group.group_id)
815
815
816 repo_groups_data_total_count = RepoGroup.query() \
816 repo_groups_data_total_count = RepoGroup.query() \
817 .filter(RepoGroup.group_parent_id == repo_group_id) \
817 .filter(RepoGroup.group_parent_id == repo_group_id) \
818 .filter(or_(
818 .filter(or_(
819 # generate multiple IN to fix limitation problems
819 # generate multiple IN to fix limitation problems
820 *in_filter_generator(RepoGroup.group_id, allowed_ids))
820 *in_filter_generator(RepoGroup.group_id, allowed_ids))
821 ) \
821 ) \
822 .count()
822 .count()
823
823
824 base_q = Session.query(
824 base_q = Session.query(
825 RepoGroup.group_name,
825 RepoGroup.group_name,
826 RepoGroup.group_name_hash,
826 RepoGroup.group_name_hash,
827 RepoGroup.group_description,
827 RepoGroup.group_description,
828 RepoGroup.group_id,
828 RepoGroup.group_id,
829 RepoGroup.personal,
829 RepoGroup.personal,
830 RepoGroup.updated_on,
830 RepoGroup.updated_on,
831 RepoGroup._changeset_cache,
831 RepoGroup._changeset_cache,
832 User,
832 User,
833 ) \
833 ) \
834 .filter(RepoGroup.group_parent_id == repo_group_id) \
834 .filter(RepoGroup.group_parent_id == repo_group_id) \
835 .filter(or_(
835 .filter(or_(
836 # generate multiple IN to fix limitation problems
836 # generate multiple IN to fix limitation problems
837 *in_filter_generator(RepoGroup.group_id, allowed_ids))
837 *in_filter_generator(RepoGroup.group_id, allowed_ids))
838 ) \
838 ) \
839 .join(User, User.user_id == RepoGroup.user_id) \
839 .join(User, User.user_id == RepoGroup.user_id) \
840 .group_by(RepoGroup, User)
840 .group_by(RepoGroup, User)
841
841
842 repo_groups_data_total_filtered_count = base_q.count()
842 repo_groups_data_total_filtered_count = base_q.count()
843
843
844 sort_defined = False
844 sort_defined = False
845
845
846 if order_by == 'group_name':
846 if order_by == 'group_name':
847 sort_col = func.lower(RepoGroup.group_name)
847 sort_col = func.lower(RepoGroup.group_name)
848 sort_defined = True
848 sort_defined = True
849 elif order_by == 'user_username':
849 elif order_by == 'user_username':
850 sort_col = User.username
850 sort_col = User.username
851 else:
851 else:
852 sort_col = getattr(RepoGroup, order_by, None)
852 sort_col = getattr(RepoGroup, order_by, None)
853
853
854 if sort_defined or sort_col:
854 if sort_defined or sort_col:
855 if order_dir == 'asc':
855 if order_dir == 'asc':
856 sort_col = sort_col.asc()
856 sort_col = sort_col.asc()
857 else:
857 else:
858 sort_col = sort_col.desc()
858 sort_col = sort_col.desc()
859
859
860 base_q = base_q.order_by(sort_col)
860 base_q = base_q.order_by(sort_col)
861 base_q = base_q.offset(start).limit(limit)
861 base_q = base_q.offset(start).limit(limit)
862
862
863 repo_group_list = base_q.all()
863 repo_group_list = base_q.all()
864
864
865 repo_groups_data = RepoGroupModel().get_repo_groups_as_dict(
865 repo_groups_data = RepoGroupModel().get_repo_groups_as_dict(
866 repo_group_list=repo_group_list, admin=False)
866 repo_group_list=repo_group_list, admin=False)
867
867
868 data = ({
868 data = ({
869 'draw': draw,
869 'draw': draw,
870 'data': repo_groups_data,
870 'data': repo_groups_data,
871 'recordsTotal': repo_groups_data_total_count,
871 'recordsTotal': repo_groups_data_total_count,
872 'recordsFiltered': repo_groups_data_total_filtered_count,
872 'recordsFiltered': repo_groups_data_total_filtered_count,
873 })
873 })
874 return data
874 return data
875
875
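Two notes on get_repo_groups_data_table above: the dict it returns follows the server-side DataTables protocol (draw, recordsTotal, recordsFiltered), and the in_filter_generator(...) calls are there because some database backends limit how many bound parameters a single IN clause may carry, so the allowed ids are split into several IN criteria joined with or_(). A rough sketch of that chunking idea (illustrative only; the actual rhodecode helper may differ in name and signature):

    # split a long id list into several column.in_(chunk) criteria that get OR-ed together
    def chunked_in_criteria(column, ids, chunk_size=500):
        ids = list(ids) or [-1]  # keep the query valid when the list is empty
        for i in range(0, len(ids), chunk_size):
            yield column.in_(ids[i:i + chunk_size])

    # usage sketch: .filter(or_(*chunked_in_criteria(RepoGroup.group_id, allowed_ids)))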
876 def _get_defaults(self, repo_group_name):
876 def _get_defaults(self, repo_group_name):
877 repo_group = RepoGroup.get_by_group_name(repo_group_name)
877 repo_group = RepoGroup.get_by_group_name(repo_group_name)
878
878
879 if repo_group is None:
879 if repo_group is None:
880 return None
880 return None
881
881
882 defaults = repo_group.get_dict()
882 defaults = repo_group.get_dict()
883 defaults['repo_group_name'] = repo_group.name
883 defaults['repo_group_name'] = repo_group.name
884 defaults['repo_group_description'] = repo_group.group_description
884 defaults['repo_group_description'] = repo_group.group_description
885 defaults['repo_group_enable_locking'] = repo_group.enable_locking
885 defaults['repo_group_enable_locking'] = repo_group.enable_locking
886
886
887 # we use -1 because that is how an empty group is marked in the HTML form
887 # we use -1 because that is how an empty group is marked in the HTML form
888 defaults['repo_group'] = defaults['group_parent_id'] or -1
888 defaults['repo_group'] = defaults['group_parent_id'] or -1
889
889
890 # fill owner
890 # fill owner
891 if repo_group.user:
891 if repo_group.user:
892 defaults.update({'user': repo_group.user.username})
892 defaults.update({'user': repo_group.user.username})
893 else:
893 else:
894 replacement_user = User.get_first_super_admin().username
894 replacement_user = User.get_first_super_admin().username
895 defaults.update({'user': replacement_user})
895 defaults.update({'user': replacement_user})
896
896
897 return defaults
897 return defaults
@@ -1,102 +1,102 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 repository permission model for RhodeCode
23 repository permission model for RhodeCode
24 """
24 """
25
25
26 import logging
26 import logging
27 from rhodecode.model import BaseModel
27 from rhodecode.model import BaseModel
28 from rhodecode.model.db import UserRepoToPerm, UserGroupRepoToPerm, \
28 from rhodecode.model.db import UserRepoToPerm, UserGroupRepoToPerm, \
29 Permission
29 Permission
30
30
31 log = logging.getLogger(__name__)
31 log = logging.getLogger(__name__)
32
32
33
33
34 class RepositoryPermissionModel(BaseModel):
34 class RepositoryPermissionModel(BaseModel):
35
35
36 cls = UserRepoToPerm
36 cls = UserRepoToPerm
37
37
38 def get_user_permission(self, repository, user):
38 def get_user_permission(self, repository, user):
39 repository = self._get_repo(repository)
39 repository = self._get_repo(repository)
40 user = self._get_user(user)
40 user = self._get_user(user)
41
41
42 return UserRepoToPerm.query() \
42 return UserRepoToPerm.query() \
43 .filter(UserRepoToPerm.user == user) \
43 .filter(UserRepoToPerm.user == user) \
44 .filter(UserRepoToPerm.repository == repository) \
44 .filter(UserRepoToPerm.repository == repository) \
45 .scalar()
45 .scalar()
46
46
47 def update_user_permission(self, repository, user, permission):
47 def update_user_permission(self, repository, user, permission):
48 permission = Permission.get_by_key(permission)
48 permission = Permission.get_by_key(permission)
49 current = self.get_user_permission(repository, user)
49 current = self.get_user_permission(repository, user)
50 if current:
50 if current:
51 if current.permission is not permission:
51 if current.permission is not permission:
52 current.permission = permission
52 current.permission = permission
53 else:
53 else:
54 p = UserRepoToPerm()
54 p = UserRepoToPerm()
55 p.user = user
55 p.user = user
56 p.repository = repository
56 p.repository = repository
57 p.permission = permission
57 p.permission = permission
58 self.sa.add(p)
58 self.sa.add(p)
59
59
60 def delete_user_permission(self, repository, user):
60 def delete_user_permission(self, repository, user):
61 current = self.get_user_permission(repository, user)
61 current = self.get_user_permission(repository, user)
62 if current:
62 if current:
63 self.sa.delete(current)
63 self.sa.delete(current)
64
64
65 def get_users_group_permission(self, repository, users_group):
65 def get_users_group_permission(self, repository, users_group):
66 return UserGroupRepoToPerm.query() \
66 return UserGroupRepoToPerm.query() \
67 .filter(UserGroupRepoToPerm.users_group == users_group) \
67 .filter(UserGroupRepoToPerm.users_group == users_group) \
68 .filter(UserGroupRepoToPerm.repository == repository) \
68 .filter(UserGroupRepoToPerm.repository == repository) \
69 .scalar()
69 .scalar()
70
70
71 def update_user_group_permission(self, repository, users_group,
71 def update_user_group_permission(self, repository, users_group,
72 permission):
72 permission):
73 permission = Permission.get_by_key(permission)
73 permission = Permission.get_by_key(permission)
74 current = self.get_users_group_permission(repository, users_group)
74 current = self.get_users_group_permission(repository, users_group)
75 if current:
75 if current:
76 if current.permission is not permission:
76 if current.permission is not permission:
77 current.permission = permission
77 current.permission = permission
78 else:
78 else:
79 p = UserGroupRepoToPerm()
79 p = UserGroupRepoToPerm()
80 p.users_group = users_group
80 p.users_group = users_group
81 p.repository = repository
81 p.repository = repository
82 p.permission = permission
82 p.permission = permission
83 self.sa.add(p)
83 self.sa.add(p)
84
84
85 def delete_users_group_permission(self, repository, users_group):
85 def delete_users_group_permission(self, repository, users_group):
86 current = self.get_users_group_permission(repository, users_group)
86 current = self.get_users_group_permission(repository, users_group)
87 if current:
87 if current:
88 self.sa.delete(current)
88 self.sa.delete(current)
89
89
90 def update_or_delete_user_permission(self, repository, user, permission):
90 def update_or_delete_user_permission(self, repository, user, permission):
91 if permission:
91 if permission:
92 self.update_user_permission(repository, user, permission)
92 self.update_user_permission(repository, user, permission)
93 else:
93 else:
94 self.delete_user_permission(repository, user)
94 self.delete_user_permission(repository, user)
95
95
96 def update_or_delete_users_group_permission(
96 def update_or_delete_users_group_permission(
97 self, repository, user_group, permission):
97 self, repository, user_group, permission):
98 if permission:
98 if permission:
99 self.update_user_group_permission(
99 self.update_user_group_permission(
100 repository, user_group, permission)
100 repository, user_group, permission)
101 else:
101 else:
102 self.delete_users_group_permission(repository, user_group)
102 self.delete_users_group_permission(repository, user_group)
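For reference, a small usage sketch of the update-or-delete helpers above (illustrative only; the module path in the import and the repository/user names are assumptions):

    from rhodecode.model.meta import Session
    from rhodecode.model.repo_permission import RepositoryPermissionModel  # path assumed

    model = RepositoryPermissionModel()
    # a truthy permission name creates or updates the UserRepoToPerm row ...
    model.update_or_delete_user_permission('some-repo', 'john', 'repository.write')
    # ... while a falsy value removes it again
    model.update_or_delete_user_permission('some-repo', 'john', None)
    Session().commit()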
@@ -1,1028 +1,1027 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 """
20 """
22 Scm model for RhodeCode
21 Scm model for RhodeCode
23 """
22 """
24
23
25 import os.path
24 import os.path
26 import traceback
25 import traceback
27 import logging
26 import logging
28 import io
27 import io
29
28
30 from sqlalchemy import func
29 from sqlalchemy import func
31 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
32
31
33 import rhodecode
32 import rhodecode
34 from rhodecode.lib.vcs import get_backend
33 from rhodecode.lib.vcs import get_backend
35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
34 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
36 from rhodecode.lib.vcs.nodes import FileNode
35 from rhodecode.lib.vcs.nodes import FileNode
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
36 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 from rhodecode.lib import helpers as h, rc_cache
37 from rhodecode.lib import helpers as h, rc_cache
39 from rhodecode.lib.auth import (
38 from rhodecode.lib.auth import (
40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
39 HasRepoPermissionAny, HasRepoGroupPermissionAny,
41 HasUserGroupPermissionAny)
40 HasUserGroupPermissionAny)
42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
41 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
43 from rhodecode.lib import hooks_utils
42 from rhodecode.lib import hooks_utils
44 from rhodecode.lib.utils import (
43 from rhodecode.lib.utils import (
45 get_filesystem_repos, make_db_config)
44 get_filesystem_repos, make_db_config)
46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
45 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
47 from rhodecode.lib.system_info import get_system_info
46 from rhodecode.lib.system_info import get_system_info
48 from rhodecode.model import BaseModel
47 from rhodecode.model import BaseModel
49 from rhodecode.model.db import (
48 from rhodecode.model.db import (
50 or_, false,
49 or_, false,
51 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
52 PullRequest, FileStore)
51 PullRequest, FileStore)
53 from rhodecode.model.settings import VcsSettingsModel
52 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
55
54
56 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
57
56
58
57
59 class UserTemp(object):
58 class UserTemp(object):
60 def __init__(self, user_id):
59 def __init__(self, user_id):
61 self.user_id = user_id
60 self.user_id = user_id
62
61
63 def __repr__(self):
62 def __repr__(self):
64 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
63 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
65
64
66
65
67 class RepoTemp(object):
66 class RepoTemp(object):
68 def __init__(self, repo_id):
67 def __init__(self, repo_id):
69 self.repo_id = repo_id
68 self.repo_id = repo_id
70
69
71 def __repr__(self):
70 def __repr__(self):
72 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
71 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
73
72
74
73
75 class SimpleCachedRepoList(object):
74 class SimpleCachedRepoList(object):
76 """
75 """
77 Lighter version of iteration of repos without the scm initialisation,
76 Lighter version of iteration of repos without the scm initialisation,
78 and with cache usage
77 and with cache usage
79 """
78 """
80 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
79 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
81 self.db_repo_list = db_repo_list
80 self.db_repo_list = db_repo_list
82 self.repos_path = repos_path
81 self.repos_path = repos_path
83 self.order_by = order_by
82 self.order_by = order_by
84 self.reversed = (order_by or '').startswith('-')
83 self.reversed = (order_by or '').startswith('-')
85 if not perm_set:
84 if not perm_set:
86 perm_set = ['repository.read', 'repository.write',
85 perm_set = ['repository.read', 'repository.write',
87 'repository.admin']
86 'repository.admin']
88 self.perm_set = perm_set
87 self.perm_set = perm_set
89
88
90 def __len__(self):
89 def __len__(self):
91 return len(self.db_repo_list)
90 return len(self.db_repo_list)
92
91
93 def __repr__(self):
92 def __repr__(self):
94 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
93 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
95
94
96 def __iter__(self):
95 def __iter__(self):
97 for dbr in self.db_repo_list:
96 for dbr in self.db_repo_list:
98 # check permission at this level
97 # check permission at this level
99 has_perm = HasRepoPermissionAny(*self.perm_set)(
98 has_perm = HasRepoPermissionAny(*self.perm_set)(
100 dbr.repo_name, 'SimpleCachedRepoList check')
99 dbr.repo_name, 'SimpleCachedRepoList check')
101 if not has_perm:
100 if not has_perm:
102 continue
101 continue
103
102
104 tmp_d = {
103 tmp_d = {
105 'name': dbr.repo_name,
104 'name': dbr.repo_name,
106 'dbrepo': dbr.get_dict(),
105 'dbrepo': dbr.get_dict(),
107 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
106 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
108 }
107 }
109 yield tmp_d
108 yield tmp_d
110
109
111
110
112 class _PermCheckIterator(object):
111 class _PermCheckIterator(object):
113
112
114 def __init__(
113 def __init__(
115 self, obj_list, obj_attr, perm_set, perm_checker,
114 self, obj_list, obj_attr, perm_set, perm_checker,
116 extra_kwargs=None):
115 extra_kwargs=None):
117 """
116 """
118 Creates iterator from given list of objects, additionally
117 Creates iterator from given list of objects, additionally
119 checking permission for them from perm_set var
118 checking permission for them from perm_set var
120
119
121 :param obj_list: list of db objects
120 :param obj_list: list of db objects
122 :param obj_attr: attribute of object to pass into perm_checker
121 :param obj_attr: attribute of object to pass into perm_checker
123 :param perm_set: list of permissions to check
122 :param perm_set: list of permissions to check
124 :param perm_checker: callable to check permissions against
123 :param perm_checker: callable to check permissions against
125 """
124 """
126 self.obj_list = obj_list
125 self.obj_list = obj_list
127 self.obj_attr = obj_attr
126 self.obj_attr = obj_attr
128 self.perm_set = perm_set
127 self.perm_set = perm_set
129 self.perm_checker = perm_checker(*self.perm_set)
128 self.perm_checker = perm_checker(*self.perm_set)
130 self.extra_kwargs = extra_kwargs or {}
129 self.extra_kwargs = extra_kwargs or {}
131
130
132 def __len__(self):
131 def __len__(self):
133 return len(self.obj_list)
132 return len(self.obj_list)
134
133
135 def __repr__(self):
134 def __repr__(self):
136 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
135 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
137
136
138 def __iter__(self):
137 def __iter__(self):
139 for db_obj in self.obj_list:
138 for db_obj in self.obj_list:
140 # check permission at this level
139 # check permission at this level
141 # NOTE(marcink): the __dict__.get() is ~4x faster than getattr()
140 # NOTE(marcink): the __dict__.get() is ~4x faster than getattr()
142 name = db_obj.__dict__.get(self.obj_attr, None)
141 name = db_obj.__dict__.get(self.obj_attr, None)
143 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
142 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
144 continue
143 continue
145
144
146 yield db_obj
145 yield db_obj
147
146
148
147
149 class RepoList(_PermCheckIterator):
148 class RepoList(_PermCheckIterator):
150
149
151 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
150 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
152 if not perm_set:
151 if not perm_set:
153 perm_set = ['repository.read', 'repository.write', 'repository.admin']
152 perm_set = ['repository.read', 'repository.write', 'repository.admin']
154
153
155 super(RepoList, self).__init__(
154 super(RepoList, self).__init__(
156 obj_list=db_repo_list,
155 obj_list=db_repo_list,
157 obj_attr='_repo_name', perm_set=perm_set,
156 obj_attr='_repo_name', perm_set=perm_set,
158 perm_checker=HasRepoPermissionAny,
157 perm_checker=HasRepoPermissionAny,
159 extra_kwargs=extra_kwargs)
158 extra_kwargs=extra_kwargs)
160
159
161
160
162 class RepoGroupList(_PermCheckIterator):
161 class RepoGroupList(_PermCheckIterator):
163
162
164 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
163 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
165 if not perm_set:
164 if not perm_set:
166 perm_set = ['group.read', 'group.write', 'group.admin']
165 perm_set = ['group.read', 'group.write', 'group.admin']
167
166
168 super(RepoGroupList, self).__init__(
167 super(RepoGroupList, self).__init__(
169 obj_list=db_repo_group_list,
168 obj_list=db_repo_group_list,
170 obj_attr='_group_name', perm_set=perm_set,
169 obj_attr='_group_name', perm_set=perm_set,
171 perm_checker=HasRepoGroupPermissionAny,
170 perm_checker=HasRepoGroupPermissionAny,
172 extra_kwargs=extra_kwargs)
171 extra_kwargs=extra_kwargs)
173
172
174
173
175 class UserGroupList(_PermCheckIterator):
174 class UserGroupList(_PermCheckIterator):
176
175
177 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
176 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
178 if not perm_set:
177 if not perm_set:
179 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
178 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
180
179
181 super(UserGroupList, self).__init__(
180 super(UserGroupList, self).__init__(
182 obj_list=db_user_group_list,
181 obj_list=db_user_group_list,
183 obj_attr='users_group_name', perm_set=perm_set,
182 obj_attr='users_group_name', perm_set=perm_set,
184 perm_checker=HasUserGroupPermissionAny,
183 perm_checker=HasUserGroupPermissionAny,
185 extra_kwargs=extra_kwargs)
184 extra_kwargs=extra_kwargs)
186
185
187
186
188 class ScmModel(BaseModel):
187 class ScmModel(BaseModel):
189 """
188 """
190 Generic Scm Model
189 Generic Scm Model
191 """
190 """
192
191
193 @LazyProperty
192 @LazyProperty
194 def repos_path(self):
193 def repos_path(self):
195 """
194 """
196 Gets the repositories root path from database
195 Gets the repositories root path from database
197 """
196 """
198
197
199 settings_model = VcsSettingsModel(sa=self.sa)
198 settings_model = VcsSettingsModel(sa=self.sa)
200 return settings_model.get_repos_location()
199 return settings_model.get_repos_location()
201
200
202 def repo_scan(self, repos_path=None):
201 def repo_scan(self, repos_path=None):
203 """
202 """
204 Listing of repositories in given path. This path should not be a
203 Listing of repositories in given path. This path should not be a
205 repository itself. Return a dictionary of repository objects
204 repository itself. Return a dictionary of repository objects
206
205
207 :param repos_path: path to directory containing repositories
206 :param repos_path: path to directory containing repositories
208 """
207 """
209
208
210 if repos_path is None:
209 if repos_path is None:
211 repos_path = self.repos_path
210 repos_path = self.repos_path
212
211
213 log.info('scanning for repositories in %s', repos_path)
212 log.info('scanning for repositories in %s', repos_path)
214
213
215 config = make_db_config()
214 config = make_db_config()
216 config.set('extensions', 'largefiles', '')
215 config.set('extensions', 'largefiles', '')
217 repos = {}
216 repos = {}
218
217
219 for name, path in get_filesystem_repos(repos_path, recursive=True):
218 for name, path in get_filesystem_repos(repos_path, recursive=True):
220 # the name needs to be decomposed and put back together using the /
219 # the name needs to be decomposed and put back together using the /
221 # since this is the internal storage separator for rhodecode
220 # since this is the internal storage separator for rhodecode
222 name = Repository.normalize_repo_name(name)
221 name = Repository.normalize_repo_name(name)
223
222
224 try:
223 try:
225 if name in repos:
224 if name in repos:
226 raise RepositoryError('Duplicate repository name %s '
225 raise RepositoryError('Duplicate repository name %s '
227 'found in %s' % (name, path))
226 'found in %s' % (name, path))
228 elif path[0] in rhodecode.BACKENDS:
227 elif path[0] in rhodecode.BACKENDS:
229 backend = get_backend(path[0])
228 backend = get_backend(path[0])
230 repos[name] = backend(path[1], config=config,
229 repos[name] = backend(path[1], config=config,
231 with_wire={"cache": False})
230 with_wire={"cache": False})
232 except OSError:
231 except OSError:
233 continue
232 continue
234 except RepositoryError:
233 except RepositoryError:
235 log.exception('Failed to create a repo')
234 log.exception('Failed to create a repo')
236 continue
235 continue
237
236
238 log.debug('found %s paths with repositories', len(repos))
237 log.debug('found %s paths with repositories', len(repos))
239 return repos
238 return repos
240
239
241 def get_repos(self, all_repos=None, sort_key=None):
240 def get_repos(self, all_repos=None, sort_key=None):
242 """
241 """
243 Get all repositories from db and for each repo create its
242 Get all repositories from db and for each repo create its
244 backend instance and fill that backend with information from the database
243 backend instance and fill that backend with information from the database
245
244
246 :param all_repos: list of repository names as strings
245 :param all_repos: list of repository names as strings
247 give a specific list of repositories, useful for filtering
246 give a specific list of repositories, useful for filtering
248
247
249 :param sort_key: initial sorting of repositories
248 :param sort_key: initial sorting of repositories
250 """
249 """
251 if all_repos is None:
250 if all_repos is None:
252 all_repos = self.sa.query(Repository)\
251 all_repos = self.sa.query(Repository)\
253 .filter(Repository.group_id == None)\
252 .filter(Repository.group_id == None)\
254 .order_by(func.lower(Repository.repo_name)).all()
253 .order_by(func.lower(Repository.repo_name)).all()
255 repo_iter = SimpleCachedRepoList(
254 repo_iter = SimpleCachedRepoList(
256 all_repos, repos_path=self.repos_path, order_by=sort_key)
255 all_repos, repos_path=self.repos_path, order_by=sort_key)
257 return repo_iter
256 return repo_iter
258
257
259 def get_repo_groups(self, all_groups=None):
258 def get_repo_groups(self, all_groups=None):
260 if all_groups is None:
259 if all_groups is None:
261 all_groups = RepoGroup.query()\
260 all_groups = RepoGroup.query()\
262 .filter(RepoGroup.group_parent_id == None).all()
261 .filter(RepoGroup.group_parent_id == None).all()
263 return [x for x in RepoGroupList(all_groups)]
262 return [x for x in RepoGroupList(all_groups)]
264
263
265 def mark_for_invalidation(self, repo_name, delete=False):
264 def mark_for_invalidation(self, repo_name, delete=False):
266 """
265 """
267 Mark caches of this repo invalid in the database. `delete` flag
266 Mark caches of this repo invalid in the database. `delete` flag
268 removes the cache entries
267 removes the cache entries
269
268
270 :param repo_name: the repo_name for which caches should be marked
269 :param repo_name: the repo_name for which caches should be marked
271 invalid, or deleted
270 invalid, or deleted
272 :param delete: delete the entry keys instead of setting bool
271 :param delete: delete the entry keys instead of setting bool
273 flag on them, and also purge caches used by the dogpile
272 flag on them, and also purge caches used by the dogpile
274 """
273 """
275 repo = Repository.get_by_repo_name(repo_name)
274 repo = Repository.get_by_repo_name(repo_name)
276
275
277 if repo:
276 if repo:
278 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
277 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
279 repo_id=repo.repo_id)
278 repo_id=repo.repo_id)
280 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
279 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
281
280
282 repo_id = repo.repo_id
281 repo_id = repo.repo_id
283 config = repo._config
282 config = repo._config
284 config.set('extensions', 'largefiles', '')
283 config.set('extensions', 'largefiles', '')
285 repo.update_commit_cache(config=config, cs_cache=None)
284 repo.update_commit_cache(config=config, cs_cache=None)
286 if delete:
285 if delete:
287 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
286 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
288 rc_cache.clear_cache_namespace(
287 rc_cache.clear_cache_namespace(
289 'cache_repo', cache_namespace_uid, invalidate=True)
288 'cache_repo', cache_namespace_uid, invalidate=True)
290
289
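A one-line usage sketch of mark_for_invalidation above (the repository name is illustrative):

    # invalidate the commit cache for a repo; delete=True also purges the dogpile 'cache_repo' namespace
    ScmModel().mark_for_invalidation('some-group/some-repo', delete=True)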
291 def toggle_following_repo(self, follow_repo_id, user_id):
290 def toggle_following_repo(self, follow_repo_id, user_id):
292
291
293 f = self.sa.query(UserFollowing)\
292 f = self.sa.query(UserFollowing)\
294 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
293 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
295 .filter(UserFollowing.user_id == user_id).scalar()
294 .filter(UserFollowing.user_id == user_id).scalar()
296
295
297 if f is not None:
296 if f is not None:
298 try:
297 try:
299 self.sa.delete(f)
298 self.sa.delete(f)
300 return
299 return
301 except Exception:
300 except Exception:
302 log.error(traceback.format_exc())
301 log.error(traceback.format_exc())
303 raise
302 raise
304
303
305 try:
304 try:
306 f = UserFollowing()
305 f = UserFollowing()
307 f.user_id = user_id
306 f.user_id = user_id
308 f.follows_repo_id = follow_repo_id
307 f.follows_repo_id = follow_repo_id
309 self.sa.add(f)
308 self.sa.add(f)
310 except Exception:
309 except Exception:
311 log.error(traceback.format_exc())
310 log.error(traceback.format_exc())
312 raise
311 raise
313
312
314 def toggle_following_user(self, follow_user_id, user_id):
313 def toggle_following_user(self, follow_user_id, user_id):
315 f = self.sa.query(UserFollowing)\
314 f = self.sa.query(UserFollowing)\
316 .filter(UserFollowing.follows_user_id == follow_user_id)\
315 .filter(UserFollowing.follows_user_id == follow_user_id)\
317 .filter(UserFollowing.user_id == user_id).scalar()
316 .filter(UserFollowing.user_id == user_id).scalar()
318
317
319 if f is not None:
318 if f is not None:
320 try:
319 try:
321 self.sa.delete(f)
320 self.sa.delete(f)
322 return
321 return
323 except Exception:
322 except Exception:
324 log.error(traceback.format_exc())
323 log.error(traceback.format_exc())
325 raise
324 raise
326
325
327 try:
326 try:
328 f = UserFollowing()
327 f = UserFollowing()
329 f.user_id = user_id
328 f.user_id = user_id
330 f.follows_user_id = follow_user_id
329 f.follows_user_id = follow_user_id
331 self.sa.add(f)
330 self.sa.add(f)
332 except Exception:
331 except Exception:
333 log.error(traceback.format_exc())
332 log.error(traceback.format_exc())
334 raise
333 raise
335
334
336 def is_following_repo(self, repo_name, user_id, cache=False):
335 def is_following_repo(self, repo_name, user_id, cache=False):
337 r = self.sa.query(Repository)\
336 r = self.sa.query(Repository)\
338 .filter(Repository.repo_name == repo_name).scalar()
337 .filter(Repository.repo_name == repo_name).scalar()
339
338
340 f = self.sa.query(UserFollowing)\
339 f = self.sa.query(UserFollowing)\
341 .filter(UserFollowing.follows_repository == r)\
340 .filter(UserFollowing.follows_repository == r)\
342 .filter(UserFollowing.user_id == user_id).scalar()
341 .filter(UserFollowing.user_id == user_id).scalar()
343
342
344 return f is not None
343 return f is not None
345
344
346 def is_following_user(self, username, user_id, cache=False):
345 def is_following_user(self, username, user_id, cache=False):
347 u = User.get_by_username(username)
346 u = User.get_by_username(username)
348
347
349 f = self.sa.query(UserFollowing)\
348 f = self.sa.query(UserFollowing)\
350 .filter(UserFollowing.follows_user == u)\
349 .filter(UserFollowing.follows_user == u)\
351 .filter(UserFollowing.user_id == user_id).scalar()
350 .filter(UserFollowing.user_id == user_id).scalar()
352
351
353 return f is not None
352 return f is not None
354
353
355 def get_followers(self, repo):
354 def get_followers(self, repo):
356 repo = self._get_repo(repo)
355 repo = self._get_repo(repo)
357
356
358 return self.sa.query(UserFollowing)\
357 return self.sa.query(UserFollowing)\
359 .filter(UserFollowing.follows_repository == repo).count()
358 .filter(UserFollowing.follows_repository == repo).count()
360
359
361 def get_forks(self, repo):
360 def get_forks(self, repo):
362 repo = self._get_repo(repo)
361 repo = self._get_repo(repo)
363 return self.sa.query(Repository)\
362 return self.sa.query(Repository)\
364 .filter(Repository.fork == repo).count()
363 .filter(Repository.fork == repo).count()
365
364
366 def get_pull_requests(self, repo):
365 def get_pull_requests(self, repo):
367 repo = self._get_repo(repo)
366 repo = self._get_repo(repo)
368 return self.sa.query(PullRequest)\
367 return self.sa.query(PullRequest)\
369 .filter(PullRequest.target_repo == repo)\
368 .filter(PullRequest.target_repo == repo)\
370 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
369 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
371
370
372 def get_artifacts(self, repo):
371 def get_artifacts(self, repo):
373 repo = self._get_repo(repo)
372 repo = self._get_repo(repo)
374 return self.sa.query(FileStore)\
373 return self.sa.query(FileStore)\
375 .filter(FileStore.repo == repo)\
374 .filter(FileStore.repo == repo)\
376 .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()
375 .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()
377
376
378 def mark_as_fork(self, repo, fork, user):
377 def mark_as_fork(self, repo, fork, user):
379 repo = self._get_repo(repo)
378 repo = self._get_repo(repo)
380 fork = self._get_repo(fork)
379 fork = self._get_repo(fork)
381 if fork and repo.repo_id == fork.repo_id:
380 if fork and repo.repo_id == fork.repo_id:
382 raise Exception("Cannot set repository as fork of itself")
381 raise Exception("Cannot set repository as fork of itself")
383
382
384 if fork and repo.repo_type != fork.repo_type:
383 if fork and repo.repo_type != fork.repo_type:
385 raise RepositoryError(
384 raise RepositoryError(
386 "Cannot set repository as fork of repository with other type")
385 "Cannot set repository as fork of repository with other type")
387
386
388 repo.fork = fork
387 repo.fork = fork
389 self.sa.add(repo)
388 self.sa.add(repo)
390 return repo
389 return repo
391
390
392 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
391 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
393 dbrepo = self._get_repo(repo)
392 dbrepo = self._get_repo(repo)
394 remote_uri = remote_uri or dbrepo.clone_uri
393 remote_uri = remote_uri or dbrepo.clone_uri
395 if not remote_uri:
394 if not remote_uri:
396 raise Exception("This repository doesn't have a clone uri")
395 raise Exception("This repository doesn't have a clone uri")
397
396
398 repo = dbrepo.scm_instance(cache=False)
397 repo = dbrepo.scm_instance(cache=False)
399 repo.config.clear_section('hooks')
398 repo.config.clear_section('hooks')
400
399
401 try:
400 try:
402 # NOTE(marcink): add extra validation so we skip invalid urls
401 # NOTE(marcink): add extra validation so we skip invalid urls
403 # this is because these tasks can be executed via the scheduler without
402 # this is because these tasks can be executed via the scheduler without
404 # proper validation of remote_uri
403 # proper validation of remote_uri
405 if validate_uri:
404 if validate_uri:
406 config = make_db_config(clear_session=False)
405 config = make_db_config(clear_session=False)
407 url_validator(remote_uri, dbrepo.repo_type, config)
406 url_validator(remote_uri, dbrepo.repo_type, config)
408 except InvalidCloneUrl:
407 except InvalidCloneUrl:
409 raise
408 raise
410
409
411 repo_name = dbrepo.repo_name
410 repo_name = dbrepo.repo_name
412 try:
411 try:
413 # TODO: we need to make sure those operations call proper hooks !
412 # TODO: we need to make sure those operations call proper hooks !
414 repo.fetch(remote_uri)
413 repo.fetch(remote_uri)
415
414
416 self.mark_for_invalidation(repo_name)
415 self.mark_for_invalidation(repo_name)
417 except Exception:
416 except Exception:
418 log.error(traceback.format_exc())
417 log.error(traceback.format_exc())
419 raise
418 raise
420
419
421 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
420 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
422 dbrepo = self._get_repo(repo)
421 dbrepo = self._get_repo(repo)
423 remote_uri = remote_uri or dbrepo.push_uri
422 remote_uri = remote_uri or dbrepo.push_uri
424 if not remote_uri:
423 if not remote_uri:
425 raise Exception("This repository doesn't have a push uri")
424 raise Exception("This repository doesn't have a push uri")
426
425
427 repo = dbrepo.scm_instance(cache=False)
426 repo = dbrepo.scm_instance(cache=False)
428 repo.config.clear_section('hooks')
427 repo.config.clear_section('hooks')
429
428
430 try:
429 try:
431 # NOTE(marcink): add extra validation so we skip invalid urls
430 # NOTE(marcink): add extra validation so we skip invalid urls
432 # this is because these tasks can be executed via the scheduler without
431 # this is because these tasks can be executed via the scheduler without
433 # proper validation of remote_uri
432 # proper validation of remote_uri
434 if validate_uri:
433 if validate_uri:
435 config = make_db_config(clear_session=False)
434 config = make_db_config(clear_session=False)
436 url_validator(remote_uri, dbrepo.repo_type, config)
435 url_validator(remote_uri, dbrepo.repo_type, config)
437 except InvalidCloneUrl:
436 except InvalidCloneUrl:
438 raise
437 raise
439
438
440 try:
439 try:
441 repo.push(remote_uri)
440 repo.push(remote_uri)
442 except Exception:
441 except Exception:
443 log.error(traceback.format_exc())
442 log.error(traceback.format_exc())
444 raise
443 raise
445
444
446 def commit_change(self, repo, repo_name, commit, user, author, message,
445 def commit_change(self, repo, repo_name, commit, user, author, message,
447 content, f_path):
446 content, f_path):
448 """
447 """
449 Commits changes
448 Commits changes
450
449
451 :param repo: SCM instance
450 :param repo: SCM instance
452
451
453 """
452 """
454 user = self._get_user(user)
453 user = self._get_user(user)
455
454
456 # decoding here ensures that we have properly encoded values
455 # decoding here ensures that we have properly encoded values
457 # in any other case this will throw exceptions and deny the commit
456 # in any other case this will throw exceptions and deny the commit
458 content = safe_str(content)
457 content = safe_str(content)
459 path = safe_str(f_path)
458 path = safe_str(f_path)
460 # message and author need to be unicode
459 # message and author need to be unicode
461 # the proper backend should then translate that into the required type
460 # the proper backend should then translate that into the required type
462 message = safe_unicode(message)
461 message = safe_unicode(message)
463 author = safe_unicode(author)
462 author = safe_unicode(author)
464 imc = repo.in_memory_commit
463 imc = repo.in_memory_commit
465 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
464 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
466 try:
465 try:
467 # TODO: handle pre-push action !
466 # TODO: handle pre-push action !
468 tip = imc.commit(
467 tip = imc.commit(
469 message=message, author=author, parents=[commit],
468 message=message, author=author, parents=[commit],
470 branch=commit.branch)
469 branch=commit.branch)
471 except Exception as e:
470 except Exception as e:
472 log.error(traceback.format_exc())
471 log.error(traceback.format_exc())
473 raise IMCCommitError(str(e))
472 raise IMCCommitError(str(e))
474 finally:
473 finally:
475 # always clear caches; if the commit fails we want a fresh object as well
474 # always clear caches; if the commit fails we want a fresh object as well
476 self.mark_for_invalidation(repo_name)
475 self.mark_for_invalidation(repo_name)
477
476
478 # We trigger the post-push action
477 # We trigger the post-push action
479 hooks_utils.trigger_post_push_hook(
478 hooks_utils.trigger_post_push_hook(
480 username=user.username, action='push_local', hook_type='post_push',
479 username=user.username, action='push_local', hook_type='post_push',
481 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
480 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
482 return tip
481 return tip
483
482
484 def _sanitize_path(self, f_path):
483 def _sanitize_path(self, f_path):
485 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
484 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
486 raise NonRelativePathError('%s is not a relative path' % f_path)
485 raise NonRelativePathError('%s is not a relative path' % f_path)
487 if f_path:
486 if f_path:
488 f_path = os.path.normpath(f_path)
487 f_path = os.path.normpath(f_path)
489 return f_path
488 return f_path
490
489
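_sanitize_path above rejects absolute paths and parent-directory traversal before normalizing; a behaviour sketch with illustrative inputs:

    # _sanitize_path('/etc/passwd')       -> raises NonRelativePathError (leading '/')
    # _sanitize_path('../outside')        -> raises NonRelativePathError ('../' segment)
    # _sanitize_path('docs//readme.rst')  -> 'docs/readme.rst' (via os.path.normpath)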
491 def get_dirnode_metadata(self, request, commit, dir_node):
490 def get_dirnode_metadata(self, request, commit, dir_node):
492 if not dir_node.is_dir():
491 if not dir_node.is_dir():
493 return []
492 return []
494
493
495 data = []
494 data = []
496 for node in dir_node:
495 for node in dir_node:
497 if not node.is_file():
496 if not node.is_file():
498 # skip anything that is not a file node
497 # skip anything that is not a file node
499 continue
498 continue
500
499
501 last_commit = node.last_commit
500 last_commit = node.last_commit
502 last_commit_date = last_commit.date
501 last_commit_date = last_commit.date
503 data.append({
502 data.append({
504 'name': node.name,
503 'name': node.name,
505 'size': h.format_byte_size_binary(node.size),
504 'size': h.format_byte_size_binary(node.size),
506 'modified_at': h.format_date(last_commit_date),
505 'modified_at': h.format_date(last_commit_date),
507 'modified_ts': last_commit_date.isoformat(),
506 'modified_ts': last_commit_date.isoformat(),
508 'revision': last_commit.revision,
507 'revision': last_commit.revision,
509 'short_id': last_commit.short_id,
508 'short_id': last_commit.short_id,
510 'message': h.escape(last_commit.message),
509 'message': h.escape(last_commit.message),
511 'author': h.escape(last_commit.author),
510 'author': h.escape(last_commit.author),
512 'user_profile': h.gravatar_with_user(
511 'user_profile': h.gravatar_with_user(
513 request, last_commit.author),
512 request, last_commit.author),
514 })
513 })
515
514
516 return data
515 return data
517
516
518 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
517 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
519 extended_info=False, content=False, max_file_bytes=None):
518 extended_info=False, content=False, max_file_bytes=None):
520 """
519 """
521 recursive walk in root dir and return all paths in that dir,
520 recursive walk in root dir and return all paths in that dir,
522 based on repository walk function
521 based on repository walk function
523
522
524 :param repo_name: name of repository
523 :param repo_name: name of repository
525 :param commit_id: commit id for which to list nodes
524 :param commit_id: commit id for which to list nodes
526 :param root_path: root path to list
525 :param root_path: root path to list
527 :param flat: return as a list, if False returns a dict with description
526 :param flat: return as a list, if False returns a dict with description
528 :param extended_info: show additional info such as md5, binary, size etc
527 :param extended_info: show additional info such as md5, binary, size etc
529 :param content: add nodes content to the return data
528 :param content: add nodes content to the return data
530 :param max_file_bytes: will not return file contents over this limit
529 :param max_file_bytes: will not return file contents over this limit
531
530
532 """
531 """
533 _files = list()
532 _files = list()
534 _dirs = list()
533 _dirs = list()
535 try:
534 try:
536 _repo = self._get_repo(repo_name)
535 _repo = self._get_repo(repo_name)
537 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
536 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
538 root_path = root_path.lstrip('/')
537 root_path = root_path.lstrip('/')
539 for __, dirs, files in commit.walk(root_path):
538 for __, dirs, files in commit.walk(root_path):
540
539
541 for f in files:
540 for f in files:
542 _content = None
541 _content = None
543 _data = f_name = f.unicode_path
542 _data = f_name = f.unicode_path
544
543
545 if not flat:
544 if not flat:
546 _data = {
545 _data = {
547 "name": h.escape(f_name),
546 "name": h.escape(f_name),
548 "type": "file",
547 "type": "file",
549 }
548 }
550 if extended_info:
549 if extended_info:
551 _data.update({
550 _data.update({
552 "md5": f.md5,
551 "md5": f.md5,
553 "binary": f.is_binary,
552 "binary": f.is_binary,
554 "size": f.size,
553 "size": f.size,
555 "extension": f.extension,
554 "extension": f.extension,
556 "mimetype": f.mimetype,
555 "mimetype": f.mimetype,
557 "lines": f.lines()[0]
556 "lines": f.lines()[0]
558 })
557 })
559
558
560 if content:
559 if content:
561 over_size_limit = (max_file_bytes is not None
560 over_size_limit = (max_file_bytes is not None
562 and f.size > max_file_bytes)
561 and f.size > max_file_bytes)
563 full_content = None
562 full_content = None
564 if not f.is_binary and not over_size_limit:
563 if not f.is_binary and not over_size_limit:
565 full_content = safe_str(f.content)
564 full_content = safe_str(f.content)
566
565
567 _data.update({
566 _data.update({
568 "content": full_content,
567 "content": full_content,
569 })
568 })
570 _files.append(_data)
569 _files.append(_data)
571
570
572 for d in dirs:
571 for d in dirs:
573 _data = d_name = d.unicode_path
572 _data = d_name = d.unicode_path
574 if not flat:
573 if not flat:
575 _data = {
574 _data = {
576 "name": h.escape(d_name),
575 "name": h.escape(d_name),
577 "type": "dir",
576 "type": "dir",
578 }
577 }
579 if extended_info:
578 if extended_info:
580 _data.update({
579 _data.update({
581 "md5": None,
580 "md5": None,
582 "binary": None,
581 "binary": None,
583 "size": None,
582 "size": None,
584 "extension": None,
583 "extension": None,
585 })
584 })
586 if content:
585 if content:
587 _data.update({
586 _data.update({
588 "content": None
587 "content": None
589 })
588 })
590 _dirs.append(_data)
589 _dirs.append(_data)
591 except RepositoryError:
590 except RepositoryError:
592 log.exception("Exception in get_nodes")
591 log.exception("Exception in get_nodes")
593 raise
592 raise
594
593
595 return _dirs, _files
594 return _dirs, _files
596
595
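A minimal usage sketch of get_nodes, assuming the surrounding class is RhodeCode's ScmModel (class name and import path assumed) and that the repository and commit id exist; with flat=True only plain paths come back, with flat=False the dicts built above are returned.

from rhodecode.model.scm import ScmModel   # import path assumed

model = ScmModel()
# flat listing: two lists of unicode paths (dirs, files)
dirs, files = model.get_nodes('some-repo', commit_id='tip')

# structured listing, skipping content of binary or oversized files
dirs, files = model.get_nodes(
    'some-repo', commit_id='tip', root_path='/', flat=False,
    extended_info=True, content=True, max_file_bytes=1024 * 1024)
for entry in files:
    print(entry['name'], entry.get('size'), entry.get('binary'))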
597 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
596 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
598 """
597 """
599 Generate files for quick filter in files view
598 Generate files for quick filter in files view
600 """
599 """
601
600
602 _files = list()
601 _files = list()
603 _dirs = list()
602 _dirs = list()
604 try:
603 try:
605 _repo = self._get_repo(repo_name)
604 _repo = self._get_repo(repo_name)
606 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
605 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
607 root_path = root_path.lstrip('/')
606 root_path = root_path.lstrip('/')
608 for __, dirs, files in commit.walk(root_path):
607 for __, dirs, files in commit.walk(root_path):
609
608
610 for f in files:
609 for f in files:
611
610
612 _data = {
611 _data = {
613 "name": h.escape(f.unicode_path),
612 "name": h.escape(f.unicode_path),
614 "type": "file",
613 "type": "file",
615 }
614 }
616
615
617 _files.append(_data)
616 _files.append(_data)
618
617
619 for d in dirs:
618 for d in dirs:
620
619
621 _data = {
620 _data = {
622 "name": h.escape(d.unicode_path),
621 "name": h.escape(d.unicode_path),
623 "type": "dir",
622 "type": "dir",
624 }
623 }
625
624
626 _dirs.append(_data)
625 _dirs.append(_data)
627 except RepositoryError:
626 except RepositoryError:
628 log.exception("Exception in get_quick_filter_nodes")
627 log.exception("Exception in get_quick_filter_nodes")
629 raise
628 raise
630
629
631 return _dirs, _files
630 return _dirs, _files
632
631
633 def get_node(self, repo_name, commit_id, file_path,
632 def get_node(self, repo_name, commit_id, file_path,
634 extended_info=False, content=False, max_file_bytes=None, cache=True):
633 extended_info=False, content=False, max_file_bytes=None, cache=True):
635 """
634 """
636 retrieve single node from commit
635 retrieve single node from commit
637 """
636 """
638 try:
637 try:
639
638
640 _repo = self._get_repo(repo_name)
639 _repo = self._get_repo(repo_name)
641 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
640 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
642
641
643 file_node = commit.get_node(file_path)
642 file_node = commit.get_node(file_path)
644 if file_node.is_dir():
643 if file_node.is_dir():
645 raise RepositoryError('The given path is a directory')
644 raise RepositoryError('The given path is a directory')
646
645
647 _content = None
646 _content = None
648 f_name = file_node.unicode_path
647 f_name = file_node.unicode_path
649
648
650 file_data = {
649 file_data = {
651 "name": h.escape(f_name),
650 "name": h.escape(f_name),
652 "type": "file",
651 "type": "file",
653 }
652 }
654
653
655 if extended_info:
654 if extended_info:
656 file_data.update({
655 file_data.update({
657 "extension": file_node.extension,
656 "extension": file_node.extension,
658 "mimetype": file_node.mimetype,
657 "mimetype": file_node.mimetype,
659 })
658 })
660
659
661 if cache:
660 if cache:
662 md5 = file_node.md5
661 md5 = file_node.md5
663 is_binary = file_node.is_binary
662 is_binary = file_node.is_binary
664 size = file_node.size
663 size = file_node.size
665 else:
664 else:
666 is_binary, md5, size, _content = file_node.metadata_uncached()
665 is_binary, md5, size, _content = file_node.metadata_uncached()
667
666
668 file_data.update({
667 file_data.update({
669 "md5": md5,
668 "md5": md5,
670 "binary": is_binary,
669 "binary": is_binary,
671 "size": size,
670 "size": size,
672 })
671 })
673
672
674 if content and cache:
673 if content and cache:
675 # get content + cache
674 # get content + cache
676 size = file_node.size
675 size = file_node.size
677 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
676 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
678 full_content = None
677 full_content = None
679 all_lines = 0
678 all_lines = 0
680 if not file_node.is_binary and not over_size_limit:
679 if not file_node.is_binary and not over_size_limit:
681 full_content = safe_unicode(file_node.content)
680 full_content = safe_unicode(file_node.content)
682 all_lines, empty_lines = file_node.count_lines(full_content)
681 all_lines, empty_lines = file_node.count_lines(full_content)
683
682
684 file_data.update({
683 file_data.update({
685 "content": full_content,
684 "content": full_content,
686 "lines": all_lines
685 "lines": all_lines
687 })
686 })
688 elif content:
687 elif content:
689 # get content *without* cache
688 # get content *without* cache
690 if _content is None:
689 if _content is None:
691 is_binary, md5, size, _content = file_node.metadata_uncached()
690 is_binary, md5, size, _content = file_node.metadata_uncached()
692
691
693 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
692 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
694 full_content = None
693 full_content = None
695 all_lines = 0
694 all_lines = 0
696 if not is_binary and not over_size_limit:
695 if not is_binary and not over_size_limit:
697 full_content = safe_unicode(_content)
696 full_content = safe_unicode(_content)
698 all_lines, empty_lines = file_node.count_lines(full_content)
697 all_lines, empty_lines = file_node.count_lines(full_content)
699
698
700 file_data.update({
699 file_data.update({
701 "content": full_content,
700 "content": full_content,
702 "lines": all_lines
701 "lines": all_lines
703 })
702 })
704
703
705 except RepositoryError:
704 except RepositoryError:
706 log.exception("Exception in get_node")
705 log.exception("Exception in get_node")
707 raise
706 raise
708
707
709 return file_data
708 return file_data
710
709
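A hedged sketch of get_node: with cache=True the md5/binary/size values come from the cached node properties, with cache=False everything is read via metadata_uncached(); the repo and path names here are placeholders.

file_data = model.get_node(
    'some-repo', commit_id='tip', file_path='README.rst',
    extended_info=True, content=True, max_file_bytes=512 * 1024, cache=False)
# file_data carries: name, type, extension, mimetype, md5, binary, size,
# and (because content=True) content plus the line count
print(file_data['mimetype'], file_data['lines'])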
711 def get_fts_data(self, repo_name, commit_id, root_path='/'):
710 def get_fts_data(self, repo_name, commit_id, root_path='/'):
712 """
711 """
713 Fetch node tree for usage in full text search
712 Fetch node tree for usage in full text search
714 """
713 """
715
714
716 tree_info = list()
715 tree_info = list()
717
716
718 try:
717 try:
719 _repo = self._get_repo(repo_name)
718 _repo = self._get_repo(repo_name)
720 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
719 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
721 root_path = root_path.lstrip('/')
720 root_path = root_path.lstrip('/')
722 for __, dirs, files in commit.walk(root_path):
721 for __, dirs, files in commit.walk(root_path):
723
722
724 for f in files:
723 for f in files:
725 is_binary, md5, size, _content = f.metadata_uncached()
724 is_binary, md5, size, _content = f.metadata_uncached()
726 _data = {
725 _data = {
727 "name": f.unicode_path,
726 "name": f.unicode_path,
728 "md5": md5,
727 "md5": md5,
729 "extension": f.extension,
728 "extension": f.extension,
730 "binary": is_binary,
729 "binary": is_binary,
731 "size": size
730 "size": size
732 }
731 }
733
732
734 tree_info.append(_data)
733 tree_info.append(_data)
735
734
736 except RepositoryError:
735 except RepositoryError:
737 log.exception("Exception in get_nodes")
736 log.exception("Exception in get_nodes")
738 raise
737 raise
739
738
740 return tree_info
739 return tree_info
741
740
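get_fts_data collects one small dict per file for full text search indexing, read uncached; an illustrative entry, values assumed:

tree_info = model.get_fts_data('some-repo', commit_id='tip')
# e.g. tree_info[0] ==
# {'name': 'docs/index.rst', 'md5': '1b2ee...', 'extension': '.rst',
#  'binary': False, 'size': 2048}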
742 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
741 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
743 author=None, trigger_push_hook=True):
742 author=None, trigger_push_hook=True):
744 """
743 """
745 Commits multiple given nodes into `repo` as a single commit
744 Commits multiple given nodes into `repo` as a single commit
746
745
747 :param user: RhodeCode User object or user_id, the committer
746 :param user: RhodeCode User object or user_id, the committer
748 :param repo: RhodeCode Repository object
747 :param repo: RhodeCode Repository object
749 :param message: commit message
748 :param message: commit message
750 :param nodes: mapping {filename:{'content':content},...}
749 :param nodes: mapping {filename:{'content':content},...}
751 :param parent_commit: parent commit, can be empty; then it is the
750 :param parent_commit: parent commit, can be empty; then it is the
752 initial commit
751 initial commit
753 :param author: author of the commit, can be different than the committer,
752 :param author: author of the commit, can be different than the committer,
754 only for git
753 only for git
755 :param trigger_push_hook: trigger push hooks
754 :param trigger_push_hook: trigger push hooks
756
755
757 :returns: new committed commit
756 :returns: new committed commit
758 """
757 """
759
758
760 user = self._get_user(user)
759 user = self._get_user(user)
761 scm_instance = repo.scm_instance(cache=False)
760 scm_instance = repo.scm_instance(cache=False)
762
761
763 processed_nodes = []
762 processed_nodes = []
764 for f_path in nodes:
763 for f_path in nodes:
765 f_path = self._sanitize_path(f_path)
764 f_path = self._sanitize_path(f_path)
766 content = nodes[f_path]['content']
765 content = nodes[f_path]['content']
767 f_path = safe_str(f_path)
766 f_path = safe_str(f_path)
768 # decoding here ensures we end up with properly encoded values;
767 # decoding here ensures we end up with properly encoded values;
769 # otherwise this raises an exception and the commit is denied
768 # otherwise this raises an exception and the commit is denied
770 if isinstance(content, (str,)):
769 if isinstance(content, (str,)):
771 content = safe_str(content)
770 content = safe_str(content)
772 elif isinstance(content, (file, cStringIO.OutputType,)):
771 elif isinstance(content, (file, cStringIO.OutputType,)):
773 content = content.read()
772 content = content.read()
774 else:
773 else:
775 raise Exception('Content is of unrecognized type %s' % (
774 raise Exception('Content is of unrecognized type %s' % (
776 type(content)
775 type(content)
777 ))
776 ))
778 processed_nodes.append((f_path, content))
777 processed_nodes.append((f_path, content))
779
778
780 message = safe_unicode(message)
779 message = safe_unicode(message)
781 commiter = user.full_contact
780 commiter = user.full_contact
782 author = safe_unicode(author) if author else commiter
781 author = safe_unicode(author) if author else commiter
783
782
784 imc = scm_instance.in_memory_commit
783 imc = scm_instance.in_memory_commit
785
784
786 if not parent_commit:
785 if not parent_commit:
787 parent_commit = EmptyCommit(alias=scm_instance.alias)
786 parent_commit = EmptyCommit(alias=scm_instance.alias)
788
787
789 if isinstance(parent_commit, EmptyCommit):
788 if isinstance(parent_commit, EmptyCommit):
790 # EmptyCommit means we're editing an empty repository
789 # EmptyCommit means we're editing an empty repository
791 parents = None
790 parents = None
792 else:
791 else:
793 parents = [parent_commit]
792 parents = [parent_commit]
794 # add multiple nodes
793 # add multiple nodes
795 for path, content in processed_nodes:
794 for path, content in processed_nodes:
796 imc.add(FileNode(path, content=content))
795 imc.add(FileNode(path, content=content))
797 # TODO: handle pre push scenario
796 # TODO: handle pre push scenario
798 tip = imc.commit(message=message,
797 tip = imc.commit(message=message,
799 author=author,
798 author=author,
800 parents=parents,
799 parents=parents,
801 branch=parent_commit.branch)
800 branch=parent_commit.branch)
802
801
803 self.mark_for_invalidation(repo.repo_name)
802 self.mark_for_invalidation(repo.repo_name)
804 if trigger_push_hook:
803 if trigger_push_hook:
805 hooks_utils.trigger_post_push_hook(
804 hooks_utils.trigger_post_push_hook(
806 username=user.username, action='push_local',
805 username=user.username, action='push_local',
807 repo_name=repo.repo_name, repo_type=scm_instance.alias,
806 repo_name=repo.repo_name, repo_type=scm_instance.alias,
808 hook_type='post_push',
807 hook_type='post_push',
809 commit_ids=[tip.raw_id])
808 commit_ids=[tip.raw_id])
810 return tip
809 return tip
811
810
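A minimal sketch of the nodes mapping create_nodes expects; the user and repo objects and file contents are placeholders, and parent_commit=None yields the initial commit via EmptyCommit.

nodes = {
    'docs/index.rst': {'content': 'Index\n=====\n'},
    'setup.cfg': {'content': '[metadata]\nname = example\n'},
}
tip = model.create_nodes(
    user=admin_user, repo=repo_db_obj, message='add initial docs',
    nodes=nodes, parent_commit=None,
    author='Jane Doe <jane@example.com>', trigger_push_hook=True)
print(tip.raw_id)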
812 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
811 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
813 author=None, trigger_push_hook=True):
812 author=None, trigger_push_hook=True):
814 user = self._get_user(user)
813 user = self._get_user(user)
815 scm_instance = repo.scm_instance(cache=False)
814 scm_instance = repo.scm_instance(cache=False)
816
815
817 message = safe_unicode(message)
816 message = safe_unicode(message)
818 commiter = user.full_contact
817 commiter = user.full_contact
819 author = safe_unicode(author) if author else commiter
818 author = safe_unicode(author) if author else commiter
820
819
821 imc = scm_instance.in_memory_commit
820 imc = scm_instance.in_memory_commit
822
821
823 if not parent_commit:
822 if not parent_commit:
824 parent_commit = EmptyCommit(alias=scm_instance.alias)
823 parent_commit = EmptyCommit(alias=scm_instance.alias)
825
824
826 if isinstance(parent_commit, EmptyCommit):
825 if isinstance(parent_commit, EmptyCommit):
827 # EmptyCommit means we're editing an empty repository
826 # EmptyCommit means we're editing an empty repository
828 parents = None
827 parents = None
829 else:
828 else:
830 parents = [parent_commit]
829 parents = [parent_commit]
831
830
832 # add multiple nodes
831 # add multiple nodes
833 for _filename, data in nodes.items():
832 for _filename, data in nodes.items():
834 # new filename, can be renamed from the old one; also sanitize
833 # new filename, can be renamed from the old one; also sanitize
835 # the path against tricks with relative paths like ../../ etc.
834 # the path against tricks with relative paths like ../../ etc.
836 filename = self._sanitize_path(data['filename'])
835 filename = self._sanitize_path(data['filename'])
837 old_filename = self._sanitize_path(_filename)
836 old_filename = self._sanitize_path(_filename)
838 content = data['content']
837 content = data['content']
839 file_mode = data.get('mode')
838 file_mode = data.get('mode')
840 filenode = FileNode(old_filename, content=content, mode=file_mode)
839 filenode = FileNode(old_filename, content=content, mode=file_mode)
841 op = data['op']
840 op = data['op']
842 if op == 'add':
841 if op == 'add':
843 imc.add(filenode)
842 imc.add(filenode)
844 elif op == 'del':
843 elif op == 'del':
845 imc.remove(filenode)
844 imc.remove(filenode)
846 elif op == 'mod':
845 elif op == 'mod':
847 if filename != old_filename:
846 if filename != old_filename:
848 # TODO: handle renames more efficiently, needs vcs lib changes
847 # TODO: handle renames more efficiently, needs vcs lib changes
849 imc.remove(filenode)
848 imc.remove(filenode)
850 imc.add(FileNode(filename, content=content, mode=file_mode))
849 imc.add(FileNode(filename, content=content, mode=file_mode))
851 else:
850 else:
852 imc.change(filenode)
851 imc.change(filenode)
853
852
854 try:
853 try:
855 # TODO: handle pre push scenario commit changes
854 # TODO: handle pre push scenario commit changes
856 tip = imc.commit(message=message,
855 tip = imc.commit(message=message,
857 author=author,
856 author=author,
858 parents=parents,
857 parents=parents,
859 branch=parent_commit.branch)
858 branch=parent_commit.branch)
860 except NodeNotChangedError:
859 except NodeNotChangedError:
861 raise
860 raise
862 except Exception as e:
861 except Exception as e:
863 log.exception("Unexpected exception during call to imc.commit")
862 log.exception("Unexpected exception during call to imc.commit")
864 raise IMCCommitError(str(e))
863 raise IMCCommitError(str(e))
865 finally:
864 finally:
866 # always clear caches, if commit fails we want fresh object also
865 # always clear caches, if commit fails we want fresh object also
867 self.mark_for_invalidation(repo.repo_name)
866 self.mark_for_invalidation(repo.repo_name)
868
867
869 if trigger_push_hook:
868 if trigger_push_hook:
870 hooks_utils.trigger_post_push_hook(
869 hooks_utils.trigger_post_push_hook(
871 username=user.username, action='push_local', hook_type='post_push',
870 username=user.username, action='push_local', hook_type='post_push',
872 repo_name=repo.repo_name, repo_type=scm_instance.alias,
871 repo_name=repo.repo_name, repo_type=scm_instance.alias,
873 commit_ids=[tip.raw_id])
872 commit_ids=[tip.raw_id])
874
873
875 return tip
874 return tip
876
875
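A hedged sketch of the nodes mapping for update_nodes: the dict key is the old filename, 'filename' is the (possibly renamed) new one, and 'op' selects add/del/mod exactly as handled above; a rename is performed as remove plus add.

nodes = {
    'old_name.txt': {
        'filename': 'new_name.txt',     # rename handled as remove + add
        'content': 'updated text\n',
        'op': 'mod',
        'mode': 0o100644,               # optional file mode
    },
    'obsolete.txt': {'filename': 'obsolete.txt', 'content': '', 'op': 'del'},
}
tip = model.update_nodes(
    user=admin_user, repo=repo_db_obj, message='rename and clean up', nodes=nodes)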
877 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
876 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
878 author=None, trigger_push_hook=True):
877 author=None, trigger_push_hook=True):
879 """
878 """
880 Deletes multiple given nodes from `repo` in a single commit
879 Deletes multiple given nodes from `repo` in a single commit
881
880
882 :param user: RhodeCode User object or user_id, the committer
881 :param user: RhodeCode User object or user_id, the committer
883 :param repo: RhodeCode Repository object
882 :param repo: RhodeCode Repository object
884 :param message: commit message
883 :param message: commit message
885 :param nodes: mapping {filename:{'content':content},...}
884 :param nodes: mapping {filename:{'content':content},...}
886 :param parent_commit: parent commit, can be empty; then it is the initial
885 :param parent_commit: parent commit, can be empty; then it is the initial
887 commit
886 commit
888 :param author: author of the commit, can be different than the committer,
887 :param author: author of the commit, can be different than the committer,
889 only for git
888 only for git
890 :param trigger_push_hook: trigger push hooks
889 :param trigger_push_hook: trigger push hooks
891
890
892 :returns: new commit after deletion
891 :returns: new commit after deletion
893 """
892 """
894
893
895 user = self._get_user(user)
894 user = self._get_user(user)
896 scm_instance = repo.scm_instance(cache=False)
895 scm_instance = repo.scm_instance(cache=False)
897
896
898 processed_nodes = []
897 processed_nodes = []
899 for f_path in nodes:
898 for f_path in nodes:
900 f_path = self._sanitize_path(f_path)
899 f_path = self._sanitize_path(f_path)
901 # content can be empty, but for compatibility the same dict
900 # content can be empty, but for compatibility the same dict
902 # structure as in add_nodes is allowed
901 # structure as in add_nodes is allowed
903 content = nodes[f_path].get('content')
902 content = nodes[f_path].get('content')
904 processed_nodes.append((f_path, content))
903 processed_nodes.append((f_path, content))
905
904
906 message = safe_unicode(message)
905 message = safe_unicode(message)
907 commiter = user.full_contact
906 commiter = user.full_contact
908 author = safe_unicode(author) if author else commiter
907 author = safe_unicode(author) if author else commiter
909
908
910 imc = scm_instance.in_memory_commit
909 imc = scm_instance.in_memory_commit
911
910
912 if not parent_commit:
911 if not parent_commit:
913 parent_commit = EmptyCommit(alias=scm_instance.alias)
912 parent_commit = EmptyCommit(alias=scm_instance.alias)
914
913
915 if isinstance(parent_commit, EmptyCommit):
914 if isinstance(parent_commit, EmptyCommit):
916 # EmptyCommit means we're editing an empty repository
915 # EmptyCommit means we're editing an empty repository
917 parents = None
916 parents = None
918 else:
917 else:
919 parents = [parent_commit]
918 parents = [parent_commit]
920 # add multiple nodes
919 # add multiple nodes
921 for path, content in processed_nodes:
920 for path, content in processed_nodes:
922 imc.remove(FileNode(path, content=content))
921 imc.remove(FileNode(path, content=content))
923
922
924 # TODO: handle pre push scenario
923 # TODO: handle pre push scenario
925 tip = imc.commit(message=message,
924 tip = imc.commit(message=message,
926 author=author,
925 author=author,
927 parents=parents,
926 parents=parents,
928 branch=parent_commit.branch)
927 branch=parent_commit.branch)
929
928
930 self.mark_for_invalidation(repo.repo_name)
929 self.mark_for_invalidation(repo.repo_name)
931 if trigger_push_hook:
930 if trigger_push_hook:
932 hooks_utils.trigger_post_push_hook(
931 hooks_utils.trigger_post_push_hook(
933 username=user.username, action='push_local', hook_type='post_push',
932 username=user.username, action='push_local', hook_type='post_push',
934 repo_name=repo.repo_name, repo_type=scm_instance.alias,
933 repo_name=repo.repo_name, repo_type=scm_instance.alias,
935 commit_ids=[tip.raw_id])
934 commit_ids=[tip.raw_id])
936 return tip
935 return tip
937
936
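delete_nodes accepts the same mapping shape as create_nodes for compatibility, though the content value may be omitted; a minimal sketch with placeholder names:

tip = model.delete_nodes(
    user=admin_user, repo=repo_db_obj, message='drop generated files',
    nodes={'build/output.bin': {}},     # content is optional here
    trigger_push_hook=False)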
938 def strip(self, repo, commit_id, branch):
937 def strip(self, repo, commit_id, branch):
939 scm_instance = repo.scm_instance(cache=False)
938 scm_instance = repo.scm_instance(cache=False)
940 scm_instance.config.clear_section('hooks')
939 scm_instance.config.clear_section('hooks')
941 scm_instance.strip(commit_id, branch)
940 scm_instance.strip(commit_id, branch)
942 self.mark_for_invalidation(repo.repo_name)
941 self.mark_for_invalidation(repo.repo_name)
943
942
944 def get_unread_journal(self):
943 def get_unread_journal(self):
945 return self.sa.query(UserLog).count()
944 return self.sa.query(UserLog).count()
946
945
947 @classmethod
946 @classmethod
948 def backend_landing_ref(cls, repo_type):
947 def backend_landing_ref(cls, repo_type):
949 """
948 """
950 Return a default landing ref based on a repository type.
949 Return a default landing ref based on a repository type.
951 """
950 """
952
951
953 landing_ref = {
952 landing_ref = {
954 'hg': ('branch:default', 'default'),
953 'hg': ('branch:default', 'default'),
955 'git': ('branch:master', 'master'),
954 'git': ('branch:master', 'master'),
956 'svn': ('rev:tip', 'latest tip'),
955 'svn': ('rev:tip', 'latest tip'),
957 'default': ('rev:tip', 'latest tip'),
956 'default': ('rev:tip', 'latest tip'),
958 }
957 }
959
958
960 return landing_ref.get(repo_type) or landing_ref['default']
959 return landing_ref.get(repo_type) or landing_ref['default']
961
960
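backend_landing_ref maps a repository type to its default landing ref and label, falling back to the generic 'rev:tip' entry for unknown types; a sketch assuming the containing class is ScmModel:

assert ScmModel.backend_landing_ref('hg') == ('branch:default', 'default')
assert ScmModel.backend_landing_ref('git') == ('branch:master', 'master')
assert ScmModel.backend_landing_ref('cvs') == ('rev:tip', 'latest tip')   # unknown type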
962 def get_repo_landing_revs(self, translator, repo=None):
961 def get_repo_landing_revs(self, translator, repo=None):
963 """
962 """
964 Generates select options with tags, branches and bookmarks (the latter
963 Generates select options with tags, branches and bookmarks (the latter
965 for hg only), grouped by type
964 for hg only), grouped by type
966
965
967 :param repo:
966 :param repo:
968 """
967 """
969 from rhodecode.lib.vcs.backends.git import GitRepository
968 from rhodecode.lib.vcs.backends.git import GitRepository
970
969
971 _ = translator
970 _ = translator
972 repo = self._get_repo(repo)
971 repo = self._get_repo(repo)
973
972
974 if repo:
973 if repo:
975 repo_type = repo.repo_type
974 repo_type = repo.repo_type
976 else:
975 else:
977 repo_type = 'default'
976 repo_type = 'default'
978
977
979 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
978 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
980
979
981 default_ref_options = [
980 default_ref_options = [
982 [default_landing_ref, landing_ref_lbl]
981 [default_landing_ref, landing_ref_lbl]
983 ]
982 ]
984 default_choices = [
983 default_choices = [
985 default_landing_ref
984 default_landing_ref
986 ]
985 ]
987
986
988 if not repo:
987 if not repo:
989 # presented at NEW repo creation
988 # presented at NEW repo creation
990 return default_choices, default_ref_options
989 return default_choices, default_ref_options
991
990
992 repo = repo.scm_instance()
991 repo = repo.scm_instance()
993
992
994 ref_options = [(default_landing_ref, landing_ref_lbl)]
993 ref_options = [(default_landing_ref, landing_ref_lbl)]
995 choices = [default_landing_ref]
994 choices = [default_landing_ref]
996
995
997 # branches
996 # branches
998 branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches]
997 branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches]
999 if not branch_group:
998 if not branch_group:
1000 # new repo, or a repo without any branch yet
999 # new repo, or a repo without any branch yet
1001 branch_group = default_ref_options
1000 branch_group = default_ref_options
1002
1001
1003 branches_group = (branch_group, _("Branches"))
1002 branches_group = (branch_group, _("Branches"))
1004 ref_options.append(branches_group)
1003 ref_options.append(branches_group)
1005 choices.extend([x[0] for x in branches_group[0]])
1004 choices.extend([x[0] for x in branches_group[0]])
1006
1005
1007 # bookmarks for HG
1006 # bookmarks for HG
1008 if repo.alias == 'hg':
1007 if repo.alias == 'hg':
1009 bookmarks_group = (
1008 bookmarks_group = (
1010 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
1009 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
1011 for b in repo.bookmarks],
1010 for b in repo.bookmarks],
1012 _("Bookmarks"))
1011 _("Bookmarks"))
1013 ref_options.append(bookmarks_group)
1012 ref_options.append(bookmarks_group)
1014 choices.extend([x[0] for x in bookmarks_group[0]])
1013 choices.extend([x[0] for x in bookmarks_group[0]])
1015
1014
1016 # tags
1015 # tags
1017 tags_group = (
1016 tags_group = (
1018 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
1017 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
1019 for t in repo.tags],
1018 for t in repo.tags],
1020 _("Tags"))
1019 _("Tags"))
1021 ref_options.append(tags_group)
1020 ref_options.append(tags_group)
1022 choices.extend([x[0] for x in tags_group[0]])
1021 choices.extend([x[0] for x in tags_group[0]])
1023
1022
1024 return choices, ref_options
1023 return choices, ref_options
1025
1024
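The choices/ref_options pair returned above feeds a grouped select widget; for a Mercurial repository with a single 'default' branch, no bookmarks and one tag the result would look roughly like this (values illustrative):

choices = ['branch:default', 'branch:default', 'tag:v1.0']
ref_options = [
    ('branch:default', 'default'),                      # default landing ref
    ([('branch:default', 'default')], 'Branches'),
    ([], 'Bookmarks'),
    ([('tag:v1.0', 'v1.0')], 'Tags'),
]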
1026 def get_server_info(self, environ=None):
1025 def get_server_info(self, environ=None):
1027 server_info = get_system_info(environ)
1026 server_info = get_system_info(environ)
1028 return server_info
1027 return server_info
@@ -1,919 +1,918 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 import os
20 import os
22 import re
21 import re
23 import hashlib
22 import hashlib
24 import logging
23 import logging
25 import time
24 import time
26 import functools
25 import functools
27 import bleach
26 import bleach
28 from collections import namedtuple
27 from collections import namedtuple
29
28
30 from pyramid.threadlocal import get_current_request, get_current_registry
29 from pyramid.threadlocal import get_current_request, get_current_registry
31
30
32 from rhodecode.lib import rc_cache
31 from rhodecode.lib import rc_cache
33 from rhodecode.lib.utils2 import (
32 from rhodecode.lib.utils2 import (
34 Optional, AttributeDict, safe_str, remove_prefix, str2bool)
33 Optional, AttributeDict, safe_str, remove_prefix, str2bool)
35 from rhodecode.lib.vcs.backends import base
34 from rhodecode.lib.vcs.backends import base
36 from rhodecode.lib.statsd_client import StatsdClient
35 from rhodecode.lib.statsd_client import StatsdClient
37 from rhodecode.model import BaseModel
36 from rhodecode.model import BaseModel
38 from rhodecode.model.db import (
37 from rhodecode.model.db import (
39 RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, RhodeCodeSetting)
38 RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, RhodeCodeSetting)
40 from rhodecode.model.meta import Session
39 from rhodecode.model.meta import Session
41
40
42
41
43 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
44
43
45
44
46 UiSetting = namedtuple(
45 UiSetting = namedtuple(
47 'UiSetting', ['section', 'key', 'value', 'active'])
46 'UiSetting', ['section', 'key', 'value', 'active'])
48
47
49 SOCIAL_PLUGINS_LIST = ['github', 'bitbucket', 'twitter', 'google']
48 SOCIAL_PLUGINS_LIST = ['github', 'bitbucket', 'twitter', 'google']
50
49
51
50
52 class SettingNotFound(Exception):
51 class SettingNotFound(Exception):
53 def __init__(self, setting_id):
52 def __init__(self, setting_id):
54 msg = 'Setting `{}` is not found'.format(setting_id)
53 msg = 'Setting `{}` is not found'.format(setting_id)
55 super(SettingNotFound, self).__init__(msg)
54 super(SettingNotFound, self).__init__(msg)
56
55
57
56
58 class SettingsModel(BaseModel):
57 class SettingsModel(BaseModel):
59 BUILTIN_HOOKS = (
58 BUILTIN_HOOKS = (
60 RhodeCodeUi.HOOK_REPO_SIZE, RhodeCodeUi.HOOK_PUSH,
59 RhodeCodeUi.HOOK_REPO_SIZE, RhodeCodeUi.HOOK_PUSH,
61 RhodeCodeUi.HOOK_PRE_PUSH, RhodeCodeUi.HOOK_PRETX_PUSH,
60 RhodeCodeUi.HOOK_PRE_PUSH, RhodeCodeUi.HOOK_PRETX_PUSH,
62 RhodeCodeUi.HOOK_PULL, RhodeCodeUi.HOOK_PRE_PULL,
61 RhodeCodeUi.HOOK_PULL, RhodeCodeUi.HOOK_PRE_PULL,
63 RhodeCodeUi.HOOK_PUSH_KEY,)
62 RhodeCodeUi.HOOK_PUSH_KEY,)
64 HOOKS_SECTION = 'hooks'
63 HOOKS_SECTION = 'hooks'
65
64
66 def __init__(self, sa=None, repo=None):
65 def __init__(self, sa=None, repo=None):
67 self.repo = repo
66 self.repo = repo
68 self.UiDbModel = RepoRhodeCodeUi if repo else RhodeCodeUi
67 self.UiDbModel = RepoRhodeCodeUi if repo else RhodeCodeUi
69 self.SettingsDbModel = (
68 self.SettingsDbModel = (
70 RepoRhodeCodeSetting if repo else RhodeCodeSetting)
69 RepoRhodeCodeSetting if repo else RhodeCodeSetting)
71 super(SettingsModel, self).__init__(sa)
70 super(SettingsModel, self).__init__(sa)
72
71
73 def get_ui_by_key(self, key):
72 def get_ui_by_key(self, key):
74 q = self.UiDbModel.query()
73 q = self.UiDbModel.query()
75 q = q.filter(self.UiDbModel.ui_key == key)
74 q = q.filter(self.UiDbModel.ui_key == key)
76 q = self._filter_by_repo(RepoRhodeCodeUi, q)
75 q = self._filter_by_repo(RepoRhodeCodeUi, q)
77 return q.scalar()
76 return q.scalar()
78
77
79 def get_ui_by_section(self, section):
78 def get_ui_by_section(self, section):
80 q = self.UiDbModel.query()
79 q = self.UiDbModel.query()
81 q = q.filter(self.UiDbModel.ui_section == section)
80 q = q.filter(self.UiDbModel.ui_section == section)
82 q = self._filter_by_repo(RepoRhodeCodeUi, q)
81 q = self._filter_by_repo(RepoRhodeCodeUi, q)
83 return q.all()
82 return q.all()
84
83
85 def get_ui_by_section_and_key(self, section, key):
84 def get_ui_by_section_and_key(self, section, key):
86 q = self.UiDbModel.query()
85 q = self.UiDbModel.query()
87 q = q.filter(self.UiDbModel.ui_section == section)
86 q = q.filter(self.UiDbModel.ui_section == section)
88 q = q.filter(self.UiDbModel.ui_key == key)
87 q = q.filter(self.UiDbModel.ui_key == key)
89 q = self._filter_by_repo(RepoRhodeCodeUi, q)
88 q = self._filter_by_repo(RepoRhodeCodeUi, q)
90 return q.scalar()
89 return q.scalar()
91
90
92 def get_ui(self, section=None, key=None):
91 def get_ui(self, section=None, key=None):
93 q = self.UiDbModel.query()
92 q = self.UiDbModel.query()
94 q = self._filter_by_repo(RepoRhodeCodeUi, q)
93 q = self._filter_by_repo(RepoRhodeCodeUi, q)
95
94
96 if section:
95 if section:
97 q = q.filter(self.UiDbModel.ui_section == section)
96 q = q.filter(self.UiDbModel.ui_section == section)
98 if key:
97 if key:
99 q = q.filter(self.UiDbModel.ui_key == key)
98 q = q.filter(self.UiDbModel.ui_key == key)
100
99
101 # TODO: mikhail: add caching
100 # TODO: mikhail: add caching
102 result = [
101 result = [
103 UiSetting(
102 UiSetting(
104 section=safe_str(r.ui_section), key=safe_str(r.ui_key),
103 section=safe_str(r.ui_section), key=safe_str(r.ui_key),
105 value=safe_str(r.ui_value), active=r.ui_active
104 value=safe_str(r.ui_value), active=r.ui_active
106 )
105 )
107 for r in q.all()
106 for r in q.all()
108 ]
107 ]
109 return result
108 return result
110
109
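get_ui returns plain UiSetting namedtuples instead of DB rows, so callers can inspect values without touching the session again; a hedged sketch (import path assumed):

from rhodecode.model.settings import SettingsModel

settings_model = SettingsModel()              # global scope; pass repo=... for per-repo
for ui in settings_model.get_ui(section='hooks'):
    # ui is UiSetting(section, key, value, active)
    print(ui.section, ui.key, ui.value, ui.active)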
111 def get_builtin_hooks(self):
110 def get_builtin_hooks(self):
112 q = self.UiDbModel.query()
111 q = self.UiDbModel.query()
113 q = q.filter(self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
112 q = q.filter(self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
114 return self._get_hooks(q)
113 return self._get_hooks(q)
115
114
116 def get_custom_hooks(self):
115 def get_custom_hooks(self):
117 q = self.UiDbModel.query()
116 q = self.UiDbModel.query()
118 q = q.filter(~self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
117 q = q.filter(~self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
119 return self._get_hooks(q)
118 return self._get_hooks(q)
120
119
121 def create_ui_section_value(self, section, val, key=None, active=True):
120 def create_ui_section_value(self, section, val, key=None, active=True):
122 new_ui = self.UiDbModel()
121 new_ui = self.UiDbModel()
123 new_ui.ui_section = section
122 new_ui.ui_section = section
124 new_ui.ui_value = val
123 new_ui.ui_value = val
125 new_ui.ui_active = active
124 new_ui.ui_active = active
126
125
127 repository_id = ''
126 repository_id = ''
128 if self.repo:
127 if self.repo:
129 repo = self._get_repo(self.repo)
128 repo = self._get_repo(self.repo)
130 repository_id = repo.repo_id
129 repository_id = repo.repo_id
131 new_ui.repository_id = repository_id
130 new_ui.repository_id = repository_id
132
131
133 if not key:
132 if not key:
134 # keys are unique, so extra info is appended to keep them unique
133 # keys are unique, so extra info is appended to keep them unique
135 if self.repo:
134 if self.repo:
136 key = hashlib.sha1(
135 key = hashlib.sha1(
137 '{}{}{}'.format(section, val, repository_id)).hexdigest()
136 '{}{}{}'.format(section, val, repository_id)).hexdigest()
138 else:
137 else:
139 key = hashlib.sha1('{}{}'.format(section, val)).hexdigest()
138 key = hashlib.sha1('{}{}'.format(section, val)).hexdigest()
140
139
141 new_ui.ui_key = key
140 new_ui.ui_key = key
142
141
143 Session().add(new_ui)
142 Session().add(new_ui)
144 return new_ui
143 return new_ui
145
144
146 def create_or_update_hook(self, key, value):
145 def create_or_update_hook(self, key, value):
147 ui = (
146 ui = (
148 self.get_ui_by_section_and_key(self.HOOKS_SECTION, key) or
147 self.get_ui_by_section_and_key(self.HOOKS_SECTION, key) or
149 self.UiDbModel())
148 self.UiDbModel())
150 ui.ui_section = self.HOOKS_SECTION
149 ui.ui_section = self.HOOKS_SECTION
151 ui.ui_active = True
150 ui.ui_active = True
152 ui.ui_key = key
151 ui.ui_key = key
153 ui.ui_value = value
152 ui.ui_value = value
154
153
155 if self.repo:
154 if self.repo:
156 repo = self._get_repo(self.repo)
155 repo = self._get_repo(self.repo)
157 repository_id = repo.repo_id
156 repository_id = repo.repo_id
158 ui.repository_id = repository_id
157 ui.repository_id = repository_id
159
158
160 Session().add(ui)
159 Session().add(ui)
161 return ui
160 return ui
162
161
163 def delete_ui(self, id_):
162 def delete_ui(self, id_):
164 ui = self.UiDbModel.get(id_)
163 ui = self.UiDbModel.get(id_)
165 if not ui:
164 if not ui:
166 raise SettingNotFound(id_)
165 raise SettingNotFound(id_)
167 Session().delete(ui)
166 Session().delete(ui)
168
167
169 def get_setting_by_name(self, name):
168 def get_setting_by_name(self, name):
170 q = self._get_settings_query()
169 q = self._get_settings_query()
171 q = q.filter(self.SettingsDbModel.app_settings_name == name)
170 q = q.filter(self.SettingsDbModel.app_settings_name == name)
172 return q.scalar()
171 return q.scalar()
173
172
174 def create_or_update_setting(
173 def create_or_update_setting(
175 self, name, val=Optional(''), type_=Optional('unicode')):
174 self, name, val=Optional(''), type_=Optional('unicode')):
176 """
175 """
177 Creates or updates a RhodeCode setting. If an update is triggered, only
176 Creates or updates a RhodeCode setting. If an update is triggered, only
178 the parameters that are explicitly set are updated; Optional instances
177 the parameters that are explicitly set are updated; Optional instances
179 are skipped
178 are skipped
180
179
181 :param name:
180 :param name:
182 :param val:
181 :param val:
183 :param type_:
182 :param type_:
184 :return:
183 :return:
185 """
184 """
186
185
187 res = self.get_setting_by_name(name)
186 res = self.get_setting_by_name(name)
188 repo = self._get_repo(self.repo) if self.repo else None
187 repo = self._get_repo(self.repo) if self.repo else None
189
188
190 if not res:
189 if not res:
191 val = Optional.extract(val)
190 val = Optional.extract(val)
192 type_ = Optional.extract(type_)
191 type_ = Optional.extract(type_)
193
192
194 args = (
193 args = (
195 (repo.repo_id, name, val, type_)
194 (repo.repo_id, name, val, type_)
196 if repo else (name, val, type_))
195 if repo else (name, val, type_))
197 res = self.SettingsDbModel(*args)
196 res = self.SettingsDbModel(*args)
198
197
199 else:
198 else:
200 if self.repo:
199 if self.repo:
201 res.repository_id = repo.repo_id
200 res.repository_id = repo.repo_id
202
201
203 res.app_settings_name = name
202 res.app_settings_name = name
204 if not isinstance(type_, Optional):
203 if not isinstance(type_, Optional):
205 # update if set
204 # update if set
206 res.app_settings_type = type_
205 res.app_settings_type = type_
207 if not isinstance(val, Optional):
206 if not isinstance(val, Optional):
208 # update if set
207 # update if set
209 res.app_settings_value = val
208 res.app_settings_value = val
210
209
211 Session().add(res)
210 Session().add(res)
212 return res
211 return res
213
212
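The Optional defaults above mean an update only touches what the caller passes explicitly; a sketch of the intended semantics, with a made-up setting name:

from rhodecode.model.meta import Session

sm = SettingsModel()
# create the setting (or overwrite both value and type)
sm.create_or_update_setting('example_flag', val='True', type_='bool')
# later: change only the value; the stored type stays 'bool' because
# type_ is left as an Optional and therefore skipped
sm.create_or_update_setting('example_flag', val='False')
Session().commit()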
214 def get_cache_region(self):
213 def get_cache_region(self):
215 repo = self._get_repo(self.repo) if self.repo else None
214 repo = self._get_repo(self.repo) if self.repo else None
216 cache_key = "repo.{}".format(repo.repo_id) if repo else "general_settings"
215 cache_key = "repo.{}".format(repo.repo_id) if repo else "general_settings"
217 cache_namespace_uid = 'cache_settings.{}'.format(cache_key)
216 cache_namespace_uid = 'cache_settings.{}'.format(cache_key)
218 region = rc_cache.get_or_create_region('cache_general', cache_namespace_uid)
217 region = rc_cache.get_or_create_region('cache_general', cache_namespace_uid)
219 return region, cache_key
218 return region, cache_key
220
219
221 def invalidate_settings_cache(self):
220 def invalidate_settings_cache(self):
222 region, cache_key = self.get_cache_region()
221 region, cache_key = self.get_cache_region()
223 log.debug('Invalidation cache region %s for cache_key: %s', region, cache_key)
222 log.debug('Invalidation cache region %s for cache_key: %s', region, cache_key)
224 region.invalidate()
223 region.invalidate()
225
224
226 def get_all_settings(self, cache=False, from_request=True):
225 def get_all_settings(self, cache=False, from_request=True):
227 # defines if we use GLOBAL, or PER_REPO
226 # defines if we use GLOBAL, or PER_REPO
228 repo = self._get_repo(self.repo) if self.repo else None
227 repo = self._get_repo(self.repo) if self.repo else None
229
228
230 # initially try the request context, this is the fastest path;
229 # initially try the request context, this is the fastest path;
231 # we only fetch the global config this way
230 # we only fetch the global config this way
232 if from_request:
231 if from_request:
233 request = get_current_request()
232 request = get_current_request()
234
233
235 if request and not repo and hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
234 if request and not repo and hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
236 rc_config = request.call_context.rc_config
235 rc_config = request.call_context.rc_config
237 if rc_config:
236 if rc_config:
238 return rc_config
237 return rc_config
239
238
240 region, cache_key = self.get_cache_region()
239 region, cache_key = self.get_cache_region()
241
240
242 @region.conditional_cache_on_arguments(condition=cache)
241 @region.conditional_cache_on_arguments(condition=cache)
243 def _get_all_settings(name, key):
242 def _get_all_settings(name, key):
244 q = self._get_settings_query()
243 q = self._get_settings_query()
245 if not q:
244 if not q:
246 raise Exception('Could not get application settings !')
245 raise Exception('Could not get application settings !')
247
246
248 settings = {
247 settings = {
249 'rhodecode_' + res.app_settings_name: res.app_settings_value
248 'rhodecode_' + res.app_settings_name: res.app_settings_value
250 for res in q
249 for res in q
251 }
250 }
252 return settings
251 return settings
253
252
254 start = time.time()
253 start = time.time()
255 result = _get_all_settings('rhodecode_settings', cache_key)
254 result = _get_all_settings('rhodecode_settings', cache_key)
256 compute_time = time.time() - start
255 compute_time = time.time() - start
257 log.debug('cached method:%s took %.4fs', _get_all_settings.__name__, compute_time)
256 log.debug('cached method:%s took %.4fs', _get_all_settings.__name__, compute_time)
258
257
259 statsd = StatsdClient.statsd
258 statsd = StatsdClient.statsd
260 if statsd:
259 if statsd:
261 elapsed_time_ms = round(1000.0 * compute_time) # use ms only
260 elapsed_time_ms = round(1000.0 * compute_time) # use ms only
262 statsd.timing("rhodecode_settings_timing.histogram", elapsed_time_ms,
261 statsd.timing("rhodecode_settings_timing.histogram", elapsed_time_ms,
263 use_decimals=False)
262 use_decimals=False)
264
263
265 log.debug('Fetching app settings for key: %s took: %.4fs: cache: %s', cache_key, compute_time, cache)
264 log.debug('Fetching app settings for key: %s took: %.4fs: cache: %s', cache_key, compute_time, cache)
266
265
267 return result
266 return result
268
267
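get_all_settings keys every row with a 'rhodecode_' prefix and may serve the result from the request context or from the dogpile cache region; the setting names below are examples only:

all_settings = SettingsModel().get_all_settings(cache=True)
title = all_settings.get('rhodecode_title')        # row with app_settings_name == 'title'
realm = all_settings.get('rhodecode_realm')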
269 def get_auth_settings(self):
268 def get_auth_settings(self):
270 q = self._get_settings_query()
269 q = self._get_settings_query()
271 q = q.filter(
270 q = q.filter(
272 self.SettingsDbModel.app_settings_name.startswith('auth_'))
271 self.SettingsDbModel.app_settings_name.startswith('auth_'))
273 rows = q.all()
272 rows = q.all()
274 auth_settings = {
273 auth_settings = {
275 row.app_settings_name: row.app_settings_value for row in rows}
274 row.app_settings_name: row.app_settings_value for row in rows}
276 return auth_settings
275 return auth_settings
277
276
278 def get_auth_plugins(self):
277 def get_auth_plugins(self):
279 auth_plugins = self.get_setting_by_name("auth_plugins")
278 auth_plugins = self.get_setting_by_name("auth_plugins")
280 return auth_plugins.app_settings_value
279 return auth_plugins.app_settings_value
281
280
282 def get_default_repo_settings(self, strip_prefix=False):
281 def get_default_repo_settings(self, strip_prefix=False):
283 q = self._get_settings_query()
282 q = self._get_settings_query()
284 q = q.filter(
283 q = q.filter(
285 self.SettingsDbModel.app_settings_name.startswith('default_'))
284 self.SettingsDbModel.app_settings_name.startswith('default_'))
286 rows = q.all()
285 rows = q.all()
287
286
288 result = {}
287 result = {}
289 for row in rows:
288 for row in rows:
290 key = row.app_settings_name
289 key = row.app_settings_name
291 if strip_prefix:
290 if strip_prefix:
292 key = remove_prefix(key, prefix='default_')
291 key = remove_prefix(key, prefix='default_')
293 result.update({key: row.app_settings_value})
292 result.update({key: row.app_settings_value})
294 return result
293 return result
295
294
296 def get_repo(self):
295 def get_repo(self):
297 repo = self._get_repo(self.repo)
296 repo = self._get_repo(self.repo)
298 if not repo:
297 if not repo:
299 raise Exception(
298 raise Exception(
300 'Repository `{}` cannot be found inside the database'.format(
299 'Repository `{}` cannot be found inside the database'.format(
301 self.repo))
300 self.repo))
302 return repo
301 return repo
303
302
304 def _filter_by_repo(self, model, query):
303 def _filter_by_repo(self, model, query):
305 if self.repo:
304 if self.repo:
306 repo = self.get_repo()
305 repo = self.get_repo()
307 query = query.filter(model.repository_id == repo.repo_id)
306 query = query.filter(model.repository_id == repo.repo_id)
308 return query
307 return query
309
308
310 def _get_hooks(self, query):
309 def _get_hooks(self, query):
311 query = query.filter(self.UiDbModel.ui_section == self.HOOKS_SECTION)
310 query = query.filter(self.UiDbModel.ui_section == self.HOOKS_SECTION)
312 query = self._filter_by_repo(RepoRhodeCodeUi, query)
311 query = self._filter_by_repo(RepoRhodeCodeUi, query)
313 return query.all()
312 return query.all()
314
313
315 def _get_settings_query(self):
314 def _get_settings_query(self):
316 q = self.SettingsDbModel.query()
315 q = self.SettingsDbModel.query()
317 return self._filter_by_repo(RepoRhodeCodeSetting, q)
316 return self._filter_by_repo(RepoRhodeCodeSetting, q)
318
317
319 def list_enabled_social_plugins(self, settings):
318 def list_enabled_social_plugins(self, settings):
320 enabled = []
319 enabled = []
321 for plug in SOCIAL_PLUGINS_LIST:
320 for plug in SOCIAL_PLUGINS_LIST:
322 if str2bool(settings.get('rhodecode_auth_{}_enabled'.format(plug)
321 if str2bool(settings.get('rhodecode_auth_{}_enabled'.format(plug)
323 )):
322 )):
324 enabled.append(plug)
323 enabled.append(plug)
325 return enabled
324 return enabled
326
325
327
326
328 def assert_repo_settings(func):
327 def assert_repo_settings(func):
329 @functools.wraps(func)
328 @functools.wraps(func)
330 def _wrapper(self, *args, **kwargs):
329 def _wrapper(self, *args, **kwargs):
331 if not self.repo_settings:
330 if not self.repo_settings:
332 raise Exception('Repository is not specified')
331 raise Exception('Repository is not specified')
333 return func(self, *args, **kwargs)
332 return func(self, *args, **kwargs)
334 return _wrapper
333 return _wrapper
335
334
336
335
337 class IssueTrackerSettingsModel(object):
336 class IssueTrackerSettingsModel(object):
338 INHERIT_SETTINGS = 'inherit_issue_tracker_settings'
337 INHERIT_SETTINGS = 'inherit_issue_tracker_settings'
339 SETTINGS_PREFIX = 'issuetracker_'
338 SETTINGS_PREFIX = 'issuetracker_'
340
339
341 def __init__(self, sa=None, repo=None):
340 def __init__(self, sa=None, repo=None):
342 self.global_settings = SettingsModel(sa=sa)
341 self.global_settings = SettingsModel(sa=sa)
343 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
342 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
344
343
345 @property
344 @property
346 def inherit_global_settings(self):
345 def inherit_global_settings(self):
347 if not self.repo_settings:
346 if not self.repo_settings:
348 return True
347 return True
349 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
348 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
350 return setting.app_settings_value if setting else True
349 return setting.app_settings_value if setting else True
351
350
352 @inherit_global_settings.setter
351 @inherit_global_settings.setter
353 def inherit_global_settings(self, value):
352 def inherit_global_settings(self, value):
354 if self.repo_settings:
353 if self.repo_settings:
355 settings = self.repo_settings.create_or_update_setting(
354 settings = self.repo_settings.create_or_update_setting(
356 self.INHERIT_SETTINGS, value, type_='bool')
355 self.INHERIT_SETTINGS, value, type_='bool')
357 Session().add(settings)
356 Session().add(settings)
358
357
359 def _get_keyname(self, key, uid, prefix=''):
358 def _get_keyname(self, key, uid, prefix=''):
360 return '{0}{1}{2}_{3}'.format(
359 return '{0}{1}{2}_{3}'.format(
361 prefix, self.SETTINGS_PREFIX, key, uid)
360 prefix, self.SETTINGS_PREFIX, key, uid)
362
361
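The key naming helper above produces the flat setting names the issue tracker entries are stored under; for an entry uid of 'abc123' (uid value assumed) it yields:

its = IssueTrackerSettingsModel()
assert its._get_keyname('pat', 'abc123') == 'issuetracker_pat_abc123'
assert its._get_keyname('url', 'abc123', 'rhodecode_') == 'rhodecode_issuetracker_url_abc123'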
363 def _make_dict_for_settings(self, qs):
362 def _make_dict_for_settings(self, qs):
364 prefix_match = self._get_keyname('pat', '', 'rhodecode_')
363 prefix_match = self._get_keyname('pat', '', 'rhodecode_')
365
364
366 issuetracker_entries = {}
365 issuetracker_entries = {}
367 # create keys
366 # create keys
368 for k, v in qs.items():
367 for k, v in qs.items():
369 if k.startswith(prefix_match):
368 if k.startswith(prefix_match):
370 uid = k[len(prefix_match):]
369 uid = k[len(prefix_match):]
371 issuetracker_entries[uid] = None
370 issuetracker_entries[uid] = None
372
371
373 def url_cleaner(input_str):
372 def url_cleaner(input_str):
374 input_str = input_str.replace('"', '').replace("'", '')
373 input_str = input_str.replace('"', '').replace("'", '')
375 input_str = bleach.clean(input_str, strip=True)
374 input_str = bleach.clean(input_str, strip=True)
376 return input_str
375 return input_str
377
376
378 # populate
377 # populate
379 for uid in issuetracker_entries:
378 for uid in issuetracker_entries:
380 url_data = qs.get(self._get_keyname('url', uid, 'rhodecode_'))
379 url_data = qs.get(self._get_keyname('url', uid, 'rhodecode_'))
381
380
382 pat = qs.get(self._get_keyname('pat', uid, 'rhodecode_'))
381 pat = qs.get(self._get_keyname('pat', uid, 'rhodecode_'))
383 try:
382 try:
384 pat_compiled = re.compile(r'%s' % pat)
383 pat_compiled = re.compile(r'%s' % pat)
385 except re.error:
384 except re.error:
386 pat_compiled = None
385 pat_compiled = None
387
386
388 issuetracker_entries[uid] = AttributeDict({
387 issuetracker_entries[uid] = AttributeDict({
389 'pat': pat,
388 'pat': pat,
390 'pat_compiled': pat_compiled,
389 'pat_compiled': pat_compiled,
391 'url': url_cleaner(
390 'url': url_cleaner(
392 qs.get(self._get_keyname('url', uid, 'rhodecode_')) or ''),
391 qs.get(self._get_keyname('url', uid, 'rhodecode_')) or ''),
393 'pref': bleach.clean(
392 'pref': bleach.clean(
394 qs.get(self._get_keyname('pref', uid, 'rhodecode_')) or ''),
393 qs.get(self._get_keyname('pref', uid, 'rhodecode_')) or ''),
395 'desc': qs.get(
394 'desc': qs.get(
396 self._get_keyname('desc', uid, 'rhodecode_')),
395 self._get_keyname('desc', uid, 'rhodecode_')),
397 })
396 })
398
397
399 return issuetracker_entries
398 return issuetracker_entries
400
399
401 def get_global_settings(self, cache=False):
400 def get_global_settings(self, cache=False):
402 """
401 """
403 Returns list of global issue tracker settings
402 Returns list of global issue tracker settings
404 """
403 """
405 defaults = self.global_settings.get_all_settings(cache=cache)
404 defaults = self.global_settings.get_all_settings(cache=cache)
406 settings = self._make_dict_for_settings(defaults)
405 settings = self._make_dict_for_settings(defaults)
407 return settings
406 return settings
408
407
409 def get_repo_settings(self, cache=False):
408 def get_repo_settings(self, cache=False):
410 """
409 """
411 Returns list of issue tracker settings per repository
410 Returns list of issue tracker settings per repository
412 """
411 """
413 if not self.repo_settings:
412 if not self.repo_settings:
414 raise Exception('Repository is not specified')
413 raise Exception('Repository is not specified')
415 all_settings = self.repo_settings.get_all_settings(cache=cache)
414 all_settings = self.repo_settings.get_all_settings(cache=cache)
416 settings = self._make_dict_for_settings(all_settings)
415 settings = self._make_dict_for_settings(all_settings)
417 return settings
416 return settings
418
417
419 def get_settings(self, cache=False):
418 def get_settings(self, cache=False):
420 if self.inherit_global_settings:
419 if self.inherit_global_settings:
421 return self.get_global_settings(cache=cache)
420 return self.get_global_settings(cache=cache)
422 else:
421 else:
423 return self.get_repo_settings(cache=cache)
422 return self.get_repo_settings(cache=cache)
424
423
425 def delete_entries(self, uid):
424 def delete_entries(self, uid):
426 if self.repo_settings:
425 if self.repo_settings:
427 all_patterns = self.get_repo_settings()
426 all_patterns = self.get_repo_settings()
428 settings_model = self.repo_settings
427 settings_model = self.repo_settings
429 else:
428 else:
430 all_patterns = self.get_global_settings()
429 all_patterns = self.get_global_settings()
431 settings_model = self.global_settings
430 settings_model = self.global_settings
432 entries = all_patterns.get(uid, [])
431 entries = all_patterns.get(uid, [])
433
432
434 for del_key in entries:
433 for del_key in entries:
435 setting_name = self._get_keyname(del_key, uid)
434 setting_name = self._get_keyname(del_key, uid)
436 entry = settings_model.get_setting_by_name(setting_name)
435 entry = settings_model.get_setting_by_name(setting_name)
437 if entry:
436 if entry:
438 Session().delete(entry)
437 Session().delete(entry)
439
438
440 Session().commit()
439 Session().commit()
441
440
442 def create_or_update_setting(
441 def create_or_update_setting(
443 self, name, val=Optional(''), type_=Optional('unicode')):
442 self, name, val=Optional(''), type_=Optional('unicode')):
444 if self.repo_settings:
443 if self.repo_settings:
445 setting = self.repo_settings.create_or_update_setting(
444 setting = self.repo_settings.create_or_update_setting(
446 name, val, type_)
445 name, val, type_)
447 else:
446 else:
448 setting = self.global_settings.create_or_update_setting(
447 setting = self.global_settings.create_or_update_setting(
449 name, val, type_)
448 name, val, type_)
450 return setting
449 return setting
451
450
452
451
453 class VcsSettingsModel(object):
452 class VcsSettingsModel(object):
454
453
455 INHERIT_SETTINGS = 'inherit_vcs_settings'
454 INHERIT_SETTINGS = 'inherit_vcs_settings'
456 GENERAL_SETTINGS = (
455 GENERAL_SETTINGS = (
457 'use_outdated_comments',
456 'use_outdated_comments',
458 'pr_merge_enabled',
457 'pr_merge_enabled',
459 'hg_use_rebase_for_merging',
458 'hg_use_rebase_for_merging',
460 'hg_close_branch_before_merging',
459 'hg_close_branch_before_merging',
461 'git_use_rebase_for_merging',
460 'git_use_rebase_for_merging',
462 'git_close_branch_before_merging',
461 'git_close_branch_before_merging',
463 'diff_cache',
462 'diff_cache',
464 )
463 )
465
464
466 HOOKS_SETTINGS = (
465 HOOKS_SETTINGS = (
467 ('hooks', 'changegroup.repo_size'),
466 ('hooks', 'changegroup.repo_size'),
468 ('hooks', 'changegroup.push_logger'),
467 ('hooks', 'changegroup.push_logger'),
469 ('hooks', 'outgoing.pull_logger'),
468 ('hooks', 'outgoing.pull_logger'),
470 )
469 )
471 HG_SETTINGS = (
470 HG_SETTINGS = (
472 ('extensions', 'largefiles'),
471 ('extensions', 'largefiles'),
473 ('phases', 'publish'),
472 ('phases', 'publish'),
474 ('extensions', 'evolve'),
473 ('extensions', 'evolve'),
475 ('extensions', 'topic'),
474 ('extensions', 'topic'),
476 ('experimental', 'evolution'),
475 ('experimental', 'evolution'),
477 ('experimental', 'evolution.exchange'),
476 ('experimental', 'evolution.exchange'),
478 )
477 )
479 GIT_SETTINGS = (
478 GIT_SETTINGS = (
480 ('vcs_git_lfs', 'enabled'),
479 ('vcs_git_lfs', 'enabled'),
481 )
480 )
482 GLOBAL_HG_SETTINGS = (
481 GLOBAL_HG_SETTINGS = (
483 ('extensions', 'largefiles'),
482 ('extensions', 'largefiles'),
484 ('largefiles', 'usercache'),
483 ('largefiles', 'usercache'),
485 ('phases', 'publish'),
484 ('phases', 'publish'),
486 ('extensions', 'hgsubversion'),
485 ('extensions', 'hgsubversion'),
487 ('extensions', 'evolve'),
486 ('extensions', 'evolve'),
488 ('extensions', 'topic'),
487 ('extensions', 'topic'),
489 ('experimental', 'evolution'),
488 ('experimental', 'evolution'),
490 ('experimental', 'evolution.exchange'),
489 ('experimental', 'evolution.exchange'),
491 )
490 )
492
491
493 GLOBAL_GIT_SETTINGS = (
492 GLOBAL_GIT_SETTINGS = (
494 ('vcs_git_lfs', 'enabled'),
493 ('vcs_git_lfs', 'enabled'),
495 ('vcs_git_lfs', 'store_location')
494 ('vcs_git_lfs', 'store_location')
496 )
495 )
497
496
498 GLOBAL_SVN_SETTINGS = (
497 GLOBAL_SVN_SETTINGS = (
499 ('vcs_svn_proxy', 'http_requests_enabled'),
498 ('vcs_svn_proxy', 'http_requests_enabled'),
500 ('vcs_svn_proxy', 'http_server_url')
499 ('vcs_svn_proxy', 'http_server_url')
501 )
500 )
502
501
503 SVN_BRANCH_SECTION = 'vcs_svn_branch'
502 SVN_BRANCH_SECTION = 'vcs_svn_branch'
504 SVN_TAG_SECTION = 'vcs_svn_tag'
503 SVN_TAG_SECTION = 'vcs_svn_tag'
505 SSL_SETTING = ('web', 'push_ssl')
504 SSL_SETTING = ('web', 'push_ssl')
506 PATH_SETTING = ('paths', '/')
505 PATH_SETTING = ('paths', '/')
507
506
508 def __init__(self, sa=None, repo=None):
507 def __init__(self, sa=None, repo=None):
509 self.global_settings = SettingsModel(sa=sa)
508 self.global_settings = SettingsModel(sa=sa)
510 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
509 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
511 self._ui_settings = (
510 self._ui_settings = (
512 self.HG_SETTINGS + self.GIT_SETTINGS + self.HOOKS_SETTINGS)
511 self.HG_SETTINGS + self.GIT_SETTINGS + self.HOOKS_SETTINGS)
513 self._svn_sections = (self.SVN_BRANCH_SECTION, self.SVN_TAG_SECTION)
512 self._svn_sections = (self.SVN_BRANCH_SECTION, self.SVN_TAG_SECTION)
514
513
515 @property
514 @property
516 @assert_repo_settings
515 @assert_repo_settings
517 def inherit_global_settings(self):
516 def inherit_global_settings(self):
518 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
517 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
519 return setting.app_settings_value if setting else True
518 return setting.app_settings_value if setting else True
520
519
521 @inherit_global_settings.setter
520 @inherit_global_settings.setter
522 @assert_repo_settings
521 @assert_repo_settings
523 def inherit_global_settings(self, value):
522 def inherit_global_settings(self, value):
524 self.repo_settings.create_or_update_setting(
523 self.repo_settings.create_or_update_setting(
525 self.INHERIT_SETTINGS, value, type_='bool')
524 self.INHERIT_SETTINGS, value, type_='bool')
526
525
527 def get_global_svn_branch_patterns(self):
526 def get_global_svn_branch_patterns(self):
528 return self.global_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
527 return self.global_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
529
528
530 @assert_repo_settings
529 @assert_repo_settings
531 def get_repo_svn_branch_patterns(self):
530 def get_repo_svn_branch_patterns(self):
532 return self.repo_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
531 return self.repo_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
533
532
534 def get_global_svn_tag_patterns(self):
533 def get_global_svn_tag_patterns(self):
535 return self.global_settings.get_ui_by_section(self.SVN_TAG_SECTION)
534 return self.global_settings.get_ui_by_section(self.SVN_TAG_SECTION)
536
535
537 @assert_repo_settings
536 @assert_repo_settings
538 def get_repo_svn_tag_patterns(self):
537 def get_repo_svn_tag_patterns(self):
539 return self.repo_settings.get_ui_by_section(self.SVN_TAG_SECTION)
538 return self.repo_settings.get_ui_by_section(self.SVN_TAG_SECTION)
540
539
541 def get_global_settings(self):
540 def get_global_settings(self):
542 return self._collect_all_settings(global_=True)
541 return self._collect_all_settings(global_=True)
543
542
544 @assert_repo_settings
543 @assert_repo_settings
545 def get_repo_settings(self):
544 def get_repo_settings(self):
546 return self._collect_all_settings(global_=False)
545 return self._collect_all_settings(global_=False)
547
546
548 @assert_repo_settings
547 @assert_repo_settings
549 def get_repo_settings_inherited(self):
548 def get_repo_settings_inherited(self):
550 global_settings = self.get_global_settings()
549 global_settings = self.get_global_settings()
551 global_settings.update(self.get_repo_settings())
550 global_settings.update(self.get_repo_settings())
552 return global_settings
551 return global_settings
553
552
554 @assert_repo_settings
553 @assert_repo_settings
555 def create_or_update_repo_settings(
554 def create_or_update_repo_settings(
556 self, data, inherit_global_settings=False):
555 self, data, inherit_global_settings=False):
557 from rhodecode.model.scm import ScmModel
556 from rhodecode.model.scm import ScmModel
558
557
559 self.inherit_global_settings = inherit_global_settings
558 self.inherit_global_settings = inherit_global_settings
560
559
561 repo = self.repo_settings.get_repo()
560 repo = self.repo_settings.get_repo()
562 if not inherit_global_settings:
561 if not inherit_global_settings:
563 if repo.repo_type == 'svn':
562 if repo.repo_type == 'svn':
564 self.create_repo_svn_settings(data)
563 self.create_repo_svn_settings(data)
565 else:
564 else:
566 self.create_or_update_repo_hook_settings(data)
565 self.create_or_update_repo_hook_settings(data)
567 self.create_or_update_repo_pr_settings(data)
566 self.create_or_update_repo_pr_settings(data)
568
567
569 if repo.repo_type == 'hg':
568 if repo.repo_type == 'hg':
570 self.create_or_update_repo_hg_settings(data)
569 self.create_or_update_repo_hg_settings(data)
571
570
572 if repo.repo_type == 'git':
571 if repo.repo_type == 'git':
573 self.create_or_update_repo_git_settings(data)
572 self.create_or_update_repo_git_settings(data)
574
573
575 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
574 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
576
575
577 @assert_repo_settings
576 @assert_repo_settings
578 def create_or_update_repo_hook_settings(self, data):
577 def create_or_update_repo_hook_settings(self, data):
579 for section, key in self.HOOKS_SETTINGS:
578 for section, key in self.HOOKS_SETTINGS:
580 data_key = self._get_form_ui_key(section, key)
579 data_key = self._get_form_ui_key(section, key)
581 if data_key not in data:
580 if data_key not in data:
582 raise ValueError(
581 raise ValueError(
583 'The given data does not contain the {} key'.format(data_key))
582 'The given data does not contain the {} key'.format(data_key))
584
583
585 active = data.get(data_key)
584 active = data.get(data_key)
586 repo_setting = self.repo_settings.get_ui_by_section_and_key(
585 repo_setting = self.repo_settings.get_ui_by_section_and_key(
587 section, key)
586 section, key)
588 if not repo_setting:
587 if not repo_setting:
589 global_setting = self.global_settings.\
588 global_setting = self.global_settings.\
590 get_ui_by_section_and_key(section, key)
589 get_ui_by_section_and_key(section, key)
591 self.repo_settings.create_ui_section_value(
590 self.repo_settings.create_ui_section_value(
592 section, global_setting.ui_value, key=key, active=active)
591 section, global_setting.ui_value, key=key, active=active)
593 else:
592 else:
594 repo_setting.ui_active = active
593 repo_setting.ui_active = active
595 Session().add(repo_setting)
594 Session().add(repo_setting)
596
595
597 def update_global_hook_settings(self, data):
596 def update_global_hook_settings(self, data):
598 for section, key in self.HOOKS_SETTINGS:
597 for section, key in self.HOOKS_SETTINGS:
599 data_key = self._get_form_ui_key(section, key)
598 data_key = self._get_form_ui_key(section, key)
600 if data_key not in data:
599 if data_key not in data:
601 raise ValueError(
600 raise ValueError(
602 'The given data does not contain the {} key'.format(data_key))
601 'The given data does not contain the {} key'.format(data_key))
603 active = data.get(data_key)
602 active = data.get(data_key)
604 repo_setting = self.global_settings.get_ui_by_section_and_key(
603 repo_setting = self.global_settings.get_ui_by_section_and_key(
605 section, key)
604 section, key)
606 repo_setting.ui_active = active
605 repo_setting.ui_active = active
607 Session().add(repo_setting)
606 Session().add(repo_setting)
608
607
609 @assert_repo_settings
608 @assert_repo_settings
610 def create_or_update_repo_pr_settings(self, data):
609 def create_or_update_repo_pr_settings(self, data):
611 return self._create_or_update_general_settings(
610 return self._create_or_update_general_settings(
612 self.repo_settings, data)
611 self.repo_settings, data)
613
612
614 def create_or_update_global_pr_settings(self, data):
613 def create_or_update_global_pr_settings(self, data):
615 return self._create_or_update_general_settings(
614 return self._create_or_update_general_settings(
616 self.global_settings, data)
615 self.global_settings, data)
617
616
618 @assert_repo_settings
617 @assert_repo_settings
619 def create_repo_svn_settings(self, data):
618 def create_repo_svn_settings(self, data):
620 return self._create_svn_settings(self.repo_settings, data)
619 return self._create_svn_settings(self.repo_settings, data)
621
620
622 def _set_evolution(self, settings, is_enabled):
621 def _set_evolution(self, settings, is_enabled):
623 if is_enabled:
622 if is_enabled:
624 # if evolve is active, set evolution=all
623 # if evolve is active, set evolution=all
625
624
626 self._create_or_update_ui(
625 self._create_or_update_ui(
627 settings, *('experimental', 'evolution'), value='all',
626 settings, *('experimental', 'evolution'), value='all',
628 active=True)
627 active=True)
629 self._create_or_update_ui(
628 self._create_or_update_ui(
630 settings, *('experimental', 'evolution.exchange'), value='yes',
629 settings, *('experimental', 'evolution.exchange'), value='yes',
631 active=True)
630 active=True)
632 # if evolve is active, enable server-side topics support as well
631 # if evolve is active, enable server-side topics support as well
633 self._create_or_update_ui(
632 self._create_or_update_ui(
634 settings, *('extensions', 'topic'), value='',
633 settings, *('extensions', 'topic'), value='',
635 active=True)
634 active=True)
636
635
637 else:
636 else:
638 self._create_or_update_ui(
637 self._create_or_update_ui(
639 settings, *('experimental', 'evolution'), value='',
638 settings, *('experimental', 'evolution'), value='',
640 active=False)
639 active=False)
641 self._create_or_update_ui(
640 self._create_or_update_ui(
642 settings, *('experimental', 'evolution.exchange'), value='no',
641 settings, *('experimental', 'evolution.exchange'), value='no',
643 active=False)
642 active=False)
644 self._create_or_update_ui(
643 self._create_or_update_ui(
645 settings, *('extensions', 'topic'), value='',
644 settings, *('extensions', 'topic'), value='',
646 active=False)
645 active=False)
647
646
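To make the effect of this helper concrete, here is a hedged sketch of the rows it ends up writing (names are taken from this file; the repository name is hypothetical and the rows live in the ui settings storage, not in an hgrc file):

# hedged sketch: what _set_evolution(settings, is_enabled=True) writes
model = VcsSettingsModel(repo='some-repo')             # hypothetical repository name
model._set_evolution(model.repo_settings, is_enabled=True)
# resulting ui rows (value, active):
#   ('experimental', 'evolution')          -> 'all', True
#   ('experimental', 'evolution.exchange') -> 'yes', True
#   ('extensions',   'topic')              -> '',    True
Session().commit()                                      # persist via the shared session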
648 @assert_repo_settings
647 @assert_repo_settings
649 def create_or_update_repo_hg_settings(self, data):
648 def create_or_update_repo_hg_settings(self, data):
650 largefiles, phases, evolve = \
649 largefiles, phases, evolve = \
651 self.HG_SETTINGS[:3]
650 self.HG_SETTINGS[:3]
652 largefiles_key, phases_key, evolve_key = \
651 largefiles_key, phases_key, evolve_key = \
653 self._get_settings_keys(self.HG_SETTINGS[:3], data)
652 self._get_settings_keys(self.HG_SETTINGS[:3], data)
654
653
655 self._create_or_update_ui(
654 self._create_or_update_ui(
656 self.repo_settings, *largefiles, value='',
655 self.repo_settings, *largefiles, value='',
657 active=data[largefiles_key])
656 active=data[largefiles_key])
658 self._create_or_update_ui(
657 self._create_or_update_ui(
659 self.repo_settings, *evolve, value='',
658 self.repo_settings, *evolve, value='',
660 active=data[evolve_key])
659 active=data[evolve_key])
661 self._set_evolution(self.repo_settings, is_enabled=data[evolve_key])
660 self._set_evolution(self.repo_settings, is_enabled=data[evolve_key])
662
661
663 self._create_or_update_ui(
662 self._create_or_update_ui(
664 self.repo_settings, *phases, value=safe_str(data[phases_key]))
663 self.repo_settings, *phases, value=safe_str(data[phases_key]))
665
664
666 def create_or_update_global_hg_settings(self, data):
665 def create_or_update_global_hg_settings(self, data):
667 largefiles, largefiles_store, phases, hgsubversion, evolve \
666 largefiles, largefiles_store, phases, hgsubversion, evolve \
668 = self.GLOBAL_HG_SETTINGS[:5]
667 = self.GLOBAL_HG_SETTINGS[:5]
669 largefiles_key, largefiles_store_key, phases_key, subversion_key, evolve_key \
668 largefiles_key, largefiles_store_key, phases_key, subversion_key, evolve_key \
670 = self._get_settings_keys(self.GLOBAL_HG_SETTINGS[:5], data)
669 = self._get_settings_keys(self.GLOBAL_HG_SETTINGS[:5], data)
671
670
672 self._create_or_update_ui(
671 self._create_or_update_ui(
673 self.global_settings, *largefiles, value='',
672 self.global_settings, *largefiles, value='',
674 active=data[largefiles_key])
673 active=data[largefiles_key])
675 self._create_or_update_ui(
674 self._create_or_update_ui(
676 self.global_settings, *largefiles_store, value=data[largefiles_store_key])
675 self.global_settings, *largefiles_store, value=data[largefiles_store_key])
677 self._create_or_update_ui(
676 self._create_or_update_ui(
678 self.global_settings, *phases, value=safe_str(data[phases_key]))
677 self.global_settings, *phases, value=safe_str(data[phases_key]))
679 self._create_or_update_ui(
678 self._create_or_update_ui(
680 self.global_settings, *hgsubversion, active=data[subversion_key])
679 self.global_settings, *hgsubversion, active=data[subversion_key])
681 self._create_or_update_ui(
680 self._create_or_update_ui(
682 self.global_settings, *evolve, value='',
681 self.global_settings, *evolve, value='',
683 active=data[evolve_key])
682 active=data[evolve_key])
684 self._set_evolution(self.global_settings, is_enabled=data[evolve_key])
683 self._set_evolution(self.global_settings, is_enabled=data[evolve_key])
685
684
686 def create_or_update_repo_git_settings(self, data):
685 def create_or_update_repo_git_settings(self, data):
687 # NOTE(marcink): the trailing comma makes single-element tuple unpacking work properly
686 # NOTE(marcink): the trailing comma makes single-element tuple unpacking work properly
688 lfs_enabled, \
687 lfs_enabled, \
689 = self.GIT_SETTINGS
688 = self.GIT_SETTINGS
690
689
691 lfs_enabled_key, \
690 lfs_enabled_key, \
692 = self._get_settings_keys(self.GIT_SETTINGS, data)
691 = self._get_settings_keys(self.GIT_SETTINGS, data)
693
692
694 self._create_or_update_ui(
693 self._create_or_update_ui(
695 self.repo_settings, *lfs_enabled, value=data[lfs_enabled_key],
694 self.repo_settings, *lfs_enabled, value=data[lfs_enabled_key],
696 active=data[lfs_enabled_key])
695 active=data[lfs_enabled_key])
697
696
698 def create_or_update_global_git_settings(self, data):
697 def create_or_update_global_git_settings(self, data):
699 lfs_enabled, lfs_store_location \
698 lfs_enabled, lfs_store_location \
700 = self.GLOBAL_GIT_SETTINGS
699 = self.GLOBAL_GIT_SETTINGS
701 lfs_enabled_key, lfs_store_location_key \
700 lfs_enabled_key, lfs_store_location_key \
702 = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data)
701 = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data)
703
702
704 self._create_or_update_ui(
703 self._create_or_update_ui(
705 self.global_settings, *lfs_enabled, value=data[lfs_enabled_key],
704 self.global_settings, *lfs_enabled, value=data[lfs_enabled_key],
706 active=data[lfs_enabled_key])
705 active=data[lfs_enabled_key])
707 self._create_or_update_ui(
706 self._create_or_update_ui(
708 self.global_settings, *lfs_store_location,
707 self.global_settings, *lfs_store_location,
709 value=data[lfs_store_location_key])
708 value=data[lfs_store_location_key])
710
709
711 def create_or_update_global_svn_settings(self, data):
710 def create_or_update_global_svn_settings(self, data):
712 # branch/tags patterns
711 # branch/tags patterns
713 self._create_svn_settings(self.global_settings, data)
712 self._create_svn_settings(self.global_settings, data)
714
713
715 http_requests_enabled, http_server_url = self.GLOBAL_SVN_SETTINGS
714 http_requests_enabled, http_server_url = self.GLOBAL_SVN_SETTINGS
716 http_requests_enabled_key, http_server_url_key = self._get_settings_keys(
715 http_requests_enabled_key, http_server_url_key = self._get_settings_keys(
717 self.GLOBAL_SVN_SETTINGS, data)
716 self.GLOBAL_SVN_SETTINGS, data)
718
717
719 self._create_or_update_ui(
718 self._create_or_update_ui(
720 self.global_settings, *http_requests_enabled,
719 self.global_settings, *http_requests_enabled,
721 value=safe_str(data[http_requests_enabled_key]))
720 value=safe_str(data[http_requests_enabled_key]))
722 self._create_or_update_ui(
721 self._create_or_update_ui(
723 self.global_settings, *http_server_url,
722 self.global_settings, *http_server_url,
724 value=data[http_server_url_key])
723 value=data[http_server_url_key])
725
724
726 def update_global_ssl_setting(self, value):
725 def update_global_ssl_setting(self, value):
727 self._create_or_update_ui(
726 self._create_or_update_ui(
728 self.global_settings, *self.SSL_SETTING, value=value)
727 self.global_settings, *self.SSL_SETTING, value=value)
729
728
730 def update_global_path_setting(self, value):
729 def update_global_path_setting(self, value):
731 self._create_or_update_ui(
730 self._create_or_update_ui(
732 self.global_settings, *self.PATH_SETTING, value=value)
731 self.global_settings, *self.PATH_SETTING, value=value)
733
732
734 @assert_repo_settings
733 @assert_repo_settings
735 def delete_repo_svn_pattern(self, id_):
734 def delete_repo_svn_pattern(self, id_):
736 ui = self.repo_settings.UiDbModel.get(id_)
735 ui = self.repo_settings.UiDbModel.get(id_)
737 if ui and ui.repository.repo_name == self.repo_settings.repo:
736 if ui and ui.repository.repo_name == self.repo_settings.repo:
738 # only delete if it's the same repo as initialized settings
737 # only delete if it's the same repo as initialized settings
739 self.repo_settings.delete_ui(id_)
738 self.repo_settings.delete_ui(id_)
740 else:
739 else:
741 # raise an error as if this option could not be found
740 # raise an error as if this option could not be found
742 self.repo_settings.delete_ui(-1)
741 self.repo_settings.delete_ui(-1)
743
742
744 def delete_global_svn_pattern(self, id_):
743 def delete_global_svn_pattern(self, id_):
745 self.global_settings.delete_ui(id_)
744 self.global_settings.delete_ui(id_)
746
745
747 @assert_repo_settings
746 @assert_repo_settings
748 def get_repo_ui_settings(self, section=None, key=None):
747 def get_repo_ui_settings(self, section=None, key=None):
749 global_uis = self.global_settings.get_ui(section, key)
748 global_uis = self.global_settings.get_ui(section, key)
750 repo_uis = self.repo_settings.get_ui(section, key)
749 repo_uis = self.repo_settings.get_ui(section, key)
751
750
752 filtered_repo_uis = self._filter_ui_settings(repo_uis)
751 filtered_repo_uis = self._filter_ui_settings(repo_uis)
753 filtered_repo_uis_keys = [
752 filtered_repo_uis_keys = [
754 (s.section, s.key) for s in filtered_repo_uis]
753 (s.section, s.key) for s in filtered_repo_uis]
755
754
756 def _is_global_ui_filtered(ui):
755 def _is_global_ui_filtered(ui):
757 return (
756 return (
758 (ui.section, ui.key) in filtered_repo_uis_keys
757 (ui.section, ui.key) in filtered_repo_uis_keys
759 or ui.section in self._svn_sections)
758 or ui.section in self._svn_sections)
760
759
761 filtered_global_uis = [
760 filtered_global_uis = [
762 ui for ui in global_uis if not _is_global_ui_filtered(ui)]
761 ui for ui in global_uis if not _is_global_ui_filtered(ui)]
763
762
764 return filtered_global_uis + filtered_repo_uis
763 return filtered_global_uis + filtered_repo_uis
765
764
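In short: repo-level rows shadow their global counterparts, svn pattern sections never come from the global side, and everything else falls through from global. A small self-contained sketch of that rule (plain tuples instead of the real ui objects, purely illustrative):

svn_sections = ('vcs_svn_branch', 'vcs_svn_tag')
global_rows = [('extensions', 'largefiles'), ('web', 'push_ssl'), ('vcs_svn_branch', 'uid-1')]
repo_rows = [('extensions', 'largefiles')]

merged = [row for row in global_rows
          if row not in repo_rows and row[0] not in svn_sections] + repo_rows
# merged == [('web', 'push_ssl'), ('extensions', 'largefiles')]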
766 def get_global_ui_settings(self, section=None, key=None):
765 def get_global_ui_settings(self, section=None, key=None):
767 return self.global_settings.get_ui(section, key)
766 return self.global_settings.get_ui(section, key)
768
767
769 def get_ui_settings_as_config_obj(self, section=None, key=None):
768 def get_ui_settings_as_config_obj(self, section=None, key=None):
770 config = base.Config()
769 config = base.Config()
771
770
772 ui_settings = self.get_ui_settings(section=section, key=key)
771 ui_settings = self.get_ui_settings(section=section, key=key)
773
772
774 for entry in ui_settings:
773 for entry in ui_settings:
775 config.set(entry.section, entry.key, entry.value)
774 config.set(entry.section, entry.key, entry.value)
776
775
777 return config
776 return config
778
777
779 def get_ui_settings(self, section=None, key=None):
778 def get_ui_settings(self, section=None, key=None):
780 if not self.repo_settings or self.inherit_global_settings:
779 if not self.repo_settings or self.inherit_global_settings:
781 return self.get_global_ui_settings(section, key)
780 return self.get_global_ui_settings(section, key)
782 else:
781 else:
783 return self.get_repo_ui_settings(section, key)
782 return self.get_repo_ui_settings(section, key)
784
783
785 def get_svn_patterns(self, section=None):
784 def get_svn_patterns(self, section=None):
786 if not self.repo_settings:
785 if not self.repo_settings:
787 return self.get_global_ui_settings(section)
786 return self.get_global_ui_settings(section)
788 else:
787 else:
789 return self.get_repo_ui_settings(section)
788 return self.get_repo_ui_settings(section)
790
789
791 @assert_repo_settings
790 @assert_repo_settings
792 def get_repo_general_settings(self):
791 def get_repo_general_settings(self):
793 global_settings = self.global_settings.get_all_settings()
792 global_settings = self.global_settings.get_all_settings()
794 repo_settings = self.repo_settings.get_all_settings()
793 repo_settings = self.repo_settings.get_all_settings()
795 filtered_repo_settings = self._filter_general_settings(repo_settings)
794 filtered_repo_settings = self._filter_general_settings(repo_settings)
796 global_settings.update(filtered_repo_settings)
795 global_settings.update(filtered_repo_settings)
797 return global_settings
796 return global_settings
798
797
799 def get_global_general_settings(self):
798 def get_global_general_settings(self):
800 return self.global_settings.get_all_settings()
799 return self.global_settings.get_all_settings()
801
800
802 def get_general_settings(self):
801 def get_general_settings(self):
803 if not self.repo_settings or self.inherit_global_settings:
802 if not self.repo_settings or self.inherit_global_settings:
804 return self.get_global_general_settings()
803 return self.get_global_general_settings()
805 else:
804 else:
806 return self.get_repo_general_settings()
805 return self.get_repo_general_settings()
807
806
808 def get_repos_location(self):
807 def get_repos_location(self):
809 return self.global_settings.get_ui_by_key('/').ui_value
808 return self.global_settings.get_ui_by_key('/').ui_value
810
809
811 def _filter_ui_settings(self, settings):
810 def _filter_ui_settings(self, settings):
812 filtered_settings = [
811 filtered_settings = [
813 s for s in settings if self._should_keep_setting(s)]
812 s for s in settings if self._should_keep_setting(s)]
814 return filtered_settings
813 return filtered_settings
815
814
816 def _should_keep_setting(self, setting):
815 def _should_keep_setting(self, setting):
817 keep = (
816 keep = (
818 (setting.section, setting.key) in self._ui_settings or
817 (setting.section, setting.key) in self._ui_settings or
819 setting.section in self._svn_sections)
818 setting.section in self._svn_sections)
820 return keep
819 return keep
821
820
822 def _filter_general_settings(self, settings):
821 def _filter_general_settings(self, settings):
823 keys = ['rhodecode_{}'.format(key) for key in self.GENERAL_SETTINGS]
822 keys = ['rhodecode_{}'.format(key) for key in self.GENERAL_SETTINGS]
824 return {
823 return {
825 k: settings[k]
824 k: settings[k]
826 for k in settings if k in keys}
825 for k in settings if k in keys}
827
826
828 def _collect_all_settings(self, global_=False):
827 def _collect_all_settings(self, global_=False):
829 settings = self.global_settings if global_ else self.repo_settings
828 settings = self.global_settings if global_ else self.repo_settings
830 result = {}
829 result = {}
831
830
832 for section, key in self._ui_settings:
831 for section, key in self._ui_settings:
833 ui = settings.get_ui_by_section_and_key(section, key)
832 ui = settings.get_ui_by_section_and_key(section, key)
834 result_key = self._get_form_ui_key(section, key)
833 result_key = self._get_form_ui_key(section, key)
835
834
836 if ui:
835 if ui:
837 if section in ('hooks', 'extensions'):
836 if section in ('hooks', 'extensions'):
838 result[result_key] = ui.ui_active
837 result[result_key] = ui.ui_active
839 elif result_key in ['vcs_git_lfs_enabled']:
838 elif result_key in ['vcs_git_lfs_enabled']:
840 result[result_key] = ui.ui_active
839 result[result_key] = ui.ui_active
841 else:
840 else:
842 result[result_key] = ui.ui_value
841 result[result_key] = ui.ui_value
843
842
844 for name in self.GENERAL_SETTINGS:
843 for name in self.GENERAL_SETTINGS:
845 setting = settings.get_setting_by_name(name)
844 setting = settings.get_setting_by_name(name)
846 if setting:
845 if setting:
847 result_key = 'rhodecode_{}'.format(name)
846 result_key = 'rhodecode_{}'.format(name)
848 result[result_key] = setting.app_settings_value
847 result[result_key] = setting.app_settings_value
849
848
850 return result
849 return result
851
850
852 def _get_form_ui_key(self, section, key):
851 def _get_form_ui_key(self, section, key):
853 return '{section}_{key}'.format(
852 return '{section}_{key}'.format(
854 section=section, key=key.replace('.', '_'))
853 section=section, key=key.replace('.', '_'))
855
854
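This tiny mapping is what ties the ui (section, key) pairs to the flat form field names used throughout the class; a couple of concrete examples derived directly from the method above:

model = VcsSettingsModel()
model._get_form_ui_key('hooks', 'changegroup.repo_size')       # -> 'hooks_changegroup_repo_size'
model._get_form_ui_key('vcs_git_lfs', 'enabled')                # -> 'vcs_git_lfs_enabled'
model._get_form_ui_key('experimental', 'evolution.exchange')    # -> 'experimental_evolution_exchange'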
856 def _create_or_update_ui(
855 def _create_or_update_ui(
857 self, settings, section, key, value=None, active=None):
856 self, settings, section, key, value=None, active=None):
858 ui = settings.get_ui_by_section_and_key(section, key)
857 ui = settings.get_ui_by_section_and_key(section, key)
859 if not ui:
858 if not ui:
860 active = True if active is None else active
859 active = True if active is None else active
861 settings.create_ui_section_value(
860 settings.create_ui_section_value(
862 section, value, key=key, active=active)
861 section, value, key=key, active=active)
863 else:
862 else:
864 if active is not None:
863 if active is not None:
865 ui.ui_active = active
864 ui.ui_active = active
866 if value is not None:
865 if value is not None:
867 ui.ui_value = value
866 ui.ui_value = value
868 Session().add(ui)
867 Session().add(ui)
869
868
870 def _create_svn_settings(self, settings, data):
869 def _create_svn_settings(self, settings, data):
871 svn_settings = {
870 svn_settings = {
872 'new_svn_branch': self.SVN_BRANCH_SECTION,
871 'new_svn_branch': self.SVN_BRANCH_SECTION,
873 'new_svn_tag': self.SVN_TAG_SECTION
872 'new_svn_tag': self.SVN_TAG_SECTION
874 }
873 }
875 for key in svn_settings:
874 for key in svn_settings:
876 if data.get(key):
875 if data.get(key):
877 settings.create_ui_section_value(svn_settings[key], data[key])
876 settings.create_ui_section_value(svn_settings[key], data[key])
878
877
879 def _create_or_update_general_settings(self, settings, data):
878 def _create_or_update_general_settings(self, settings, data):
880 for name in self.GENERAL_SETTINGS:
879 for name in self.GENERAL_SETTINGS:
881 data_key = 'rhodecode_{}'.format(name)
880 data_key = 'rhodecode_{}'.format(name)
882 if data_key not in data:
881 if data_key not in data:
883 raise ValueError(
882 raise ValueError(
884 'The given data does not contain the {} key'.format(data_key))
883 'The given data does not contain the {} key'.format(data_key))
885 setting = settings.create_or_update_setting(
884 setting = settings.create_or_update_setting(
886 name, data[data_key], 'bool')
885 name, data[data_key], 'bool')
887 Session().add(setting)
886 Session().add(setting)
888
887
889 def _get_settings_keys(self, settings, data):
888 def _get_settings_keys(self, settings, data):
890 data_keys = [self._get_form_ui_key(*s) for s in settings]
889 data_keys = [self._get_form_ui_key(*s) for s in settings]
891 for data_key in data_keys:
890 for data_key in data_keys:
892 if data_key not in data:
891 if data_key not in data:
893 raise ValueError(
892 raise ValueError(
894 'The given data does not contain the {} key'.format(data_key))
893 'The given data does not contain the {} key'.format(data_key))
895 return data_keys
894 return data_keys
896
895
897 def create_largeobjects_dirs_if_needed(self, repo_store_path):
896 def create_largeobjects_dirs_if_needed(self, repo_store_path):
898 """
897 """
899 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
898 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
900 creates the largefiles and git-lfs store directories if they do not exist yet.
899 creates the largefiles and git-lfs store directories if they do not exist yet.
901 """
900 """
902
901
903 from rhodecode.lib.vcs.backends.hg import largefiles_store
902 from rhodecode.lib.vcs.backends.hg import largefiles_store
904 from rhodecode.lib.vcs.backends.git import lfs_store
903 from rhodecode.lib.vcs.backends.git import lfs_store
905
904
906 paths = [
905 paths = [
907 largefiles_store(repo_store_path),
906 largefiles_store(repo_store_path),
908 lfs_store(repo_store_path)]
907 lfs_store(repo_store_path)]
909
908
910 for path in paths:
909 for path in paths:
911 if os.path.isdir(path):
910 if os.path.isdir(path):
912 continue
911 continue
913 if os.path.isfile(path):
912 if os.path.isfile(path):
914 continue
913 continue
915 # neither a file nor a dir; try to create it
914 # neither a file nor a dir; try to create it
916 try:
915 try:
917 os.makedirs(path)
916 os.makedirs(path)
918 except Exception:
917 except Exception:
919 log.warning('Failed to create largefiles dir: %s', path)
918 log.warning('Failed to create largefiles dir: %s', path)
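Taken together, a typical read path through VcsSettingsModel might look like the hedged sketch below (the repository name is hypothetical):

model = VcsSettingsModel(repo='group/my-repo')        # hypothetical repo name

ui_rows = model.get_ui_settings()          # merged hooks/hg/git/svn ui rows, honouring
                                           # the per-repo inherit_vcs_settings flag
general = model.get_general_settings()     # rhodecode_* application settings
pr_merge_enabled = general.get('rhodecode_pr_merge_enabled')

config = model.get_ui_settings_as_config_obj()   # the same rows wrapped in a Config object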
@@ -1,144 +1,144 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2013-2020 RhodeCode GmbH
3 # Copyright (C) 2013-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import traceback
22 import traceback
23
23
24 import sshpubkeys
24 import sshpubkeys
25 import sshpubkeys.exceptions
25 import sshpubkeys.exceptions
26
26
27 from cryptography.hazmat.primitives.asymmetric import rsa
27 from cryptography.hazmat.primitives.asymmetric import rsa
28 from cryptography.hazmat.primitives import serialization as crypto_serialization
28 from cryptography.hazmat.primitives import serialization as crypto_serialization
29 from cryptography.hazmat.backends import default_backend as crypto_default_backend
29 from cryptography.hazmat.backends import default_backend as crypto_default_backend
30
30
31 from rhodecode.model import BaseModel
31 from rhodecode.model import BaseModel
32 from rhodecode.model.db import UserSshKeys
32 from rhodecode.model.db import UserSshKeys
33 from rhodecode.model.meta import Session
33 from rhodecode.model.meta import Session
34
34
35
35
36 log = logging.getLogger(__name__)
36 log = logging.getLogger(__name__)
37
37
38
38
39 class SshKeyModel(BaseModel):
39 class SshKeyModel(BaseModel):
40 cls = UserSshKeys
40 cls = UserSshKeys
41 DEFAULT_PRIVATE_KEY_FORMAT = 'pkcs8'
41 DEFAULT_PRIVATE_KEY_FORMAT = 'pkcs8'
42
42
43 def parse_key(self, key_data):
43 def parse_key(self, key_data):
44 """
44 """
45 print(ssh.bits) # 768
45 print(ssh.bits) # 768
46 print(ssh.hash_md5()) # 56:84:1e:90:08:3b:60:c7:29:70:5f:5e:25:a6:3b:86
46 print(ssh.hash_md5()) # 56:84:1e:90:08:3b:60:c7:29:70:5f:5e:25:a6:3b:86
47 print(ssh.hash_sha256()) # SHA256:xk3IEJIdIoR9MmSRXTP98rjDdZocmXJje/28ohMQEwM
47 print(ssh.hash_sha256()) # SHA256:xk3IEJIdIoR9MmSRXTP98rjDdZocmXJje/28ohMQEwM
48 print(ssh.hash_sha512()) # SHA512:1C3lNBhjpDVQe39hnyy+xvlZYU3IPwzqK1rVneGavy6O3/ebjEQSFvmeWoyMTplIanmUK1hmr9nA8Skmj516HA
48 print(ssh.hash_sha512()) # SHA512:1C3lNBhjpDVQe39hnyy+xvlZYU3IPwzqK1rVneGavy6O3/ebjEQSFvmeWoyMTplIanmUK1hmr9nA8Skmj516HA
49 print(ssh.comment) # ojar@ojar-laptop
49 print(ssh.comment) # ojar@ojar-laptop
50 print(ssh.options_raw) # None (string of optional options at the beginning of public key)
50 print(ssh.options_raw) # None (string of optional options at the beginning of public key)
51 print(ssh.options) # None (options as a dictionary, parsed and validated)
51 print(ssh.options) # None (options as a dictionary, parsed and validated)
52
52
53 :param key_data: public key text, e.g. the content of an ``id_rsa.pub`` file
53 :param key_data: public key text, e.g. the content of an ``id_rsa.pub`` file
54 :return: the parsed ``sshpubkeys.SSHKey`` object
54 :return: the parsed ``sshpubkeys.SSHKey`` object
55 """
55 """
56 ssh = sshpubkeys.SSHKey(strict_mode=True)
56 ssh = sshpubkeys.SSHKey(strict_mode=True)
57 try:
57 try:
58 ssh.parse(key_data)
58 ssh.parse(key_data)
59 return ssh
59 return ssh
60 except sshpubkeys.exceptions.InvalidKeyException as err:
60 except sshpubkeys.exceptions.InvalidKeyException as err:
61 log.error("Invalid key: %s", err)
61 log.error("Invalid key: %s", err)
62 raise
62 raise
63 except NotImplementedError as err:
63 except NotImplementedError as err:
64 log.error("Invalid key type: %s", err)
64 log.error("Invalid key type: %s", err)
65 raise
65 raise
66 except Exception as err:
66 except Exception as err:
67 log.error("Key Parse error: %s", err)
67 log.error("Key Parse error: %s", err)
68 raise
68 raise
69
69
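A hedged usage sketch for the parser above (the key text and user below are placeholders, not real values):

model = SshKeyModel()
key_text = 'ssh-rsa AAAA... user@host'                  # placeholder public key
ssh = model.parse_key(key_text)                         # raises on invalid keys
new_key = model.create(user='admin', fingerprint=ssh.hash_sha256(),
                       key_data=key_text, description='laptop key')
Session().commit()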
70 def generate_keypair(self, comment=None, private_format=DEFAULT_PRIVATE_KEY_FORMAT):
70 def generate_keypair(self, comment=None, private_format=DEFAULT_PRIVATE_KEY_FORMAT):
71
71
72 key = rsa.generate_private_key(
72 key = rsa.generate_private_key(
73 backend=crypto_default_backend(),
73 backend=crypto_default_backend(),
74 public_exponent=65537,
74 public_exponent=65537,
75 key_size=2048
75 key_size=2048
76 )
76 )
77 if private_format == self.DEFAULT_PRIVATE_KEY_FORMAT:
77 if private_format == self.DEFAULT_PRIVATE_KEY_FORMAT:
78 private_format = crypto_serialization.PrivateFormat.PKCS8
78 private_format = crypto_serialization.PrivateFormat.PKCS8
79 else:
79 else:
80 # legacy format usable by older systems; use it if PKCS8 causes
80 # legacy format usable by older systems; use it if PKCS8 causes
81 # problems
81 # problems
82 private_format = crypto_serialization.PrivateFormat.TraditionalOpenSSL
82 private_format = crypto_serialization.PrivateFormat.TraditionalOpenSSL
83
83
84 private_key = key.private_bytes(
84 private_key = key.private_bytes(
85 crypto_serialization.Encoding.PEM,
85 crypto_serialization.Encoding.PEM,
86 private_format,
86 private_format,
87 crypto_serialization.NoEncryption())
87 crypto_serialization.NoEncryption())
88 public_key = key.public_key().public_bytes(
88 public_key = key.public_key().public_bytes(
89 crypto_serialization.Encoding.OpenSSH,
89 crypto_serialization.Encoding.OpenSSH,
90 crypto_serialization.PublicFormat.OpenSSH
90 crypto_serialization.PublicFormat.OpenSSH
91 )
91 )
92
92
93 if comment:
93 if comment:
94 public_key = public_key + b' ' + comment.encode('utf-8')  # public_bytes() returns bytes
94 public_key = public_key + b' ' + comment.encode('utf-8')  # public_bytes() returns bytes
95 return private_key, public_key
95 return private_key, public_key
96
96
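And a minimal sketch of generating a key pair with the method above (both values come back as bytes; the output path is hypothetical):

private_key, public_key = SshKeyModel().generate_keypair(comment='user@host')
with open('/tmp/generated_id_rsa', 'wb') as f:          # hypothetical output path
    f.write(private_key)
print(public_key.decode('utf-8'))                       # 'ssh-rsa AAAA... user@host'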
97 def create(self, user, fingerprint, key_data, description):
97 def create(self, user, fingerprint, key_data, description):
98 """
98 """
99 """
99 """
100 user = self._get_user(user)
100 user = self._get_user(user)
101
101
102 new_ssh_key = UserSshKeys()
102 new_ssh_key = UserSshKeys()
103 new_ssh_key.ssh_key_fingerprint = fingerprint
103 new_ssh_key.ssh_key_fingerprint = fingerprint
104 new_ssh_key.ssh_key_data = key_data
104 new_ssh_key.ssh_key_data = key_data
105 new_ssh_key.user_id = user.user_id
105 new_ssh_key.user_id = user.user_id
106 new_ssh_key.description = description
106 new_ssh_key.description = description
107
107
108 Session().add(new_ssh_key)
108 Session().add(new_ssh_key)
109
109
110 return new_ssh_key
110 return new_ssh_key
111
111
112 def delete(self, ssh_key_id, user=None):
112 def delete(self, ssh_key_id, user=None):
113 """
113 """
114 Deletes the given ssh key. If ``user`` is set, the lookup is additionally
114 Deletes the given ssh key. If ``user`` is set, the lookup is additionally
115 filtered by that user.
115 filtered by that user.
116 """
116 """
117 ssh_key = UserSshKeys.query().filter(
117 ssh_key = UserSshKeys.query().filter(
118 UserSshKeys.ssh_key_id == ssh_key_id)
118 UserSshKeys.ssh_key_id == ssh_key_id)
119
119
120 if user:
120 if user:
121 user = self._get_user(user)
121 user = self._get_user(user)
122 ssh_key = ssh_key.filter(UserSshKeys.user_id == user.user_id)
122 ssh_key = ssh_key.filter(UserSshKeys.user_id == user.user_id)
123 ssh_key = ssh_key.scalar()
123 ssh_key = ssh_key.scalar()
124
124
125 if ssh_key:
125 if ssh_key:
126 try:
126 try:
127 Session().delete(ssh_key)
127 Session().delete(ssh_key)
128 except Exception:
128 except Exception:
129 log.error(traceback.format_exc())
129 log.error(traceback.format_exc())
130 raise
130 raise
131
131
132 def get_ssh_keys(self, user):
132 def get_ssh_keys(self, user):
133 user = self._get_user(user)
133 user = self._get_user(user)
134 user_ssh_keys = UserSshKeys.query()\
134 user_ssh_keys = UserSshKeys.query()\
135 .filter(UserSshKeys.user_id == user.user_id)
135 .filter(UserSshKeys.user_id == user.user_id)
136 user_ssh_keys = user_ssh_keys.order_by(UserSshKeys.ssh_key_id)
136 user_ssh_keys = user_ssh_keys.order_by(UserSshKeys.ssh_key_id)
137 return user_ssh_keys
137 return user_ssh_keys
138
138
139 def get_ssh_key_by_fingerprint(self, ssh_key_fingerprint):
139 def get_ssh_key_by_fingerprint(self, ssh_key_fingerprint):
140 user_ssh_key = UserSshKeys.query()\
140 user_ssh_key = UserSshKeys.query()\
141 .filter(UserSshKeys.ssh_key_fingerprint == ssh_key_fingerprint)\
141 .filter(UserSshKeys.ssh_key_fingerprint == ssh_key_fingerprint)\
142 .first()
142 .first()
143
143
144 return user_ssh_key
144 return user_ssh_key
@@ -1,83 +1,83 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2013-2020 RhodeCode GmbH
3 # Copyright (C) 2013-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import urllib.request, urllib.error, urllib.parse
22 import urllib.request, urllib.error, urllib.parse
23 from packaging.version import Version
23 from packaging.version import Version
24
24
25 import rhodecode
25 import rhodecode
26 from rhodecode.lib.ext_json import json
26 from rhodecode.lib.ext_json import json
27 from rhodecode.model import BaseModel
27 from rhodecode.model import BaseModel
28 from rhodecode.model.meta import Session
28 from rhodecode.model.meta import Session
29 from rhodecode.model.settings import SettingsModel
29 from rhodecode.model.settings import SettingsModel
30
30
31
31
32 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
33
33
34
34
35 class UpdateModel(BaseModel):
35 class UpdateModel(BaseModel):
36 UPDATE_SETTINGS_KEY = 'update_version'
36 UPDATE_SETTINGS_KEY = 'update_version'
37 UPDATE_URL_SETTINGS_KEY = 'rhodecode_update_url'
37 UPDATE_URL_SETTINGS_KEY = 'rhodecode_update_url'
38
38
39 @staticmethod
39 @staticmethod
40 def get_update_data(update_url):
40 def get_update_data(update_url):
41 """Return the JSON update data."""
41 """Return the JSON update data."""
42 ver = rhodecode.__version__
42 ver = rhodecode.__version__
43 log.debug('Checking for upgrade on `%s` server', update_url)
43 log.debug('Checking for upgrade on `%s` server', update_url)
44 opener = urllib.request.build_opener()
44 opener = urllib.request.build_opener()
45 opener.addheaders = [('User-agent', 'RhodeCode-SCM/%s' % ver)]
45 opener.addheaders = [('User-agent', 'RhodeCode-SCM/%s' % ver)]
46 response = opener.open(update_url)
46 response = opener.open(update_url)
47 response_data = response.read()
47 response_data = response.read()
48 data = json.loads(response_data)
48 data = json.loads(response_data)
49 log.debug('update server returned data')
49 log.debug('update server returned data')
50 return data
50 return data
51
51
52 def get_update_url(self):
52 def get_update_url(self):
53 settings = SettingsModel().get_all_settings()
53 settings = SettingsModel().get_all_settings()
54 return settings.get(self.UPDATE_URL_SETTINGS_KEY)
54 return settings.get(self.UPDATE_URL_SETTINGS_KEY)
55
55
56 def store_version(self, version):
56 def store_version(self, version):
57 log.debug('Storing version %s into settings', version)
57 log.debug('Storing version %s into settings', version)
58 setting = SettingsModel().create_or_update_setting(
58 setting = SettingsModel().create_or_update_setting(
59 self.UPDATE_SETTINGS_KEY, version)
59 self.UPDATE_SETTINGS_KEY, version)
60 Session().add(setting)
60 Session().add(setting)
61 Session().commit()
61 Session().commit()
62
62
63 def get_stored_version(self, fallback=None):
63 def get_stored_version(self, fallback=None):
64 obj = SettingsModel().get_setting_by_name(self.UPDATE_SETTINGS_KEY)
64 obj = SettingsModel().get_setting_by_name(self.UPDATE_SETTINGS_KEY)
65 if obj:
65 if obj:
66 return obj.app_settings_value
66 return obj.app_settings_value
67 return fallback or '0.0.0'
67 return fallback or '0.0.0'
68
68
69 def _sanitize_version(self, version):
69 def _sanitize_version(self, version):
70 """
70 """
71 Cleanup our custom version string,
71 Cleanup our custom version string,
72 e.g. 4.11.0_20171204_204825_CE_default_EE_default becomes 4.11.0
72 e.g. 4.11.0_20171204_204825_CE_default_EE_default becomes 4.11.0
73 """
73 """
74 return version.split('_')[0]
74 return version.split('_')[0]
75
75
76 def is_outdated(self, cur_version, latest_version=None):
76 def is_outdated(self, cur_version, latest_version=None):
77 latest_version = latest_version or self.get_stored_version()
77 latest_version = latest_version or self.get_stored_version()
78 try:
78 try:
79 cur_version = self._sanitize_version(cur_version)
79 cur_version = self._sanitize_version(cur_version)
80 return Version(latest_version) > Version(cur_version)
80 return Version(latest_version) > Version(cur_version)
81 except Exception:
81 except Exception:
82 # could be invalid version, etc
82 # could be invalid version, etc
83 return False
83 return False
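A hedged sketch of how the pieces of UpdateModel fit together; the shape of the server response is not defined here, so the 'version' key below is only an assumption:

model = UpdateModel()
data = UpdateModel.get_update_data(model.get_update_url())    # JSON payload as a dict
latest = data.get('version')             # hypothetical key, depends on the server response
if latest:
    model.store_version(latest)
    if model.is_outdated(rhodecode.__version__):
        log.debug('a newer version than %s is available', rhodecode.__version__)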
@@ -1,1047 +1,1046 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 """
20 """
22 users model for RhodeCode
21 users model for RhodeCode
23 """
22 """
24
23
25 import logging
24 import logging
26 import traceback
25 import traceback
27 import datetime
26 import datetime
28 import ipaddress
27 import ipaddress
29
28
30 from pyramid.threadlocal import get_current_request
29 from pyramid.threadlocal import get_current_request
31 from sqlalchemy.exc import DatabaseError
30 from sqlalchemy.exc import DatabaseError
32
31
33 from rhodecode import events
32 from rhodecode import events
34 from rhodecode.lib.user_log_filter import user_log_filter
33 from rhodecode.lib.user_log_filter import user_log_filter
35 from rhodecode.lib.utils2 import (
34 from rhodecode.lib.utils2 import (
36 safe_unicode, get_current_rhodecode_user, action_logger_generic,
35 safe_unicode, get_current_rhodecode_user, action_logger_generic,
37 AttributeDict, str2bool)
36 AttributeDict, str2bool)
38 from rhodecode.lib.exceptions import (
37 from rhodecode.lib.exceptions import (
39 DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException,
38 DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException,
40 UserOwnsUserGroupsException, NotAllowedToCreateUserError,
39 UserOwnsUserGroupsException, NotAllowedToCreateUserError,
41 UserOwnsPullRequestsException, UserOwnsArtifactsException)
40 UserOwnsPullRequestsException, UserOwnsArtifactsException)
42 from rhodecode.lib.caching_query import FromCache
41 from rhodecode.lib.caching_query import FromCache
43 from rhodecode.model import BaseModel
42 from rhodecode.model import BaseModel
44 from rhodecode.model.db import (
43 from rhodecode.model.db import (
45 _hash_key, func, true, false, or_, joinedload, User, UserToPerm,
44 _hash_key, func, true, false, or_, joinedload, User, UserToPerm,
46 UserEmailMap, UserIpMap, UserLog)
45 UserEmailMap, UserIpMap, UserLog)
47 from rhodecode.model.meta import Session
46 from rhodecode.model.meta import Session
48 from rhodecode.model.auth_token import AuthTokenModel
47 from rhodecode.model.auth_token import AuthTokenModel
49 from rhodecode.model.repo_group import RepoGroupModel
48 from rhodecode.model.repo_group import RepoGroupModel
50
49
51 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
52
51
53
52
54 class UserModel(BaseModel):
53 class UserModel(BaseModel):
55 cls = User
54 cls = User
56
55
57 def get(self, user_id, cache=False):
56 def get(self, user_id, cache=False):
58 user = self.sa.query(User)
57 user = self.sa.query(User)
59 if cache:
58 if cache:
60 user = user.options(
59 user = user.options(
61 FromCache("sql_cache_short", f"get_user_{user_id}"))
60 FromCache("sql_cache_short", f"get_user_{user_id}"))
62 return user.get(user_id)
61 return user.get(user_id)
63
62
64 def get_user(self, user):
63 def get_user(self, user):
65 return self._get_user(user)
64 return self._get_user(user)
66
65
67 def _serialize_user(self, user):
66 def _serialize_user(self, user):
68 import rhodecode.lib.helpers as h
67 import rhodecode.lib.helpers as h
69
68
70 return {
69 return {
71 'id': user.user_id,
70 'id': user.user_id,
72 'first_name': user.first_name,
71 'first_name': user.first_name,
73 'last_name': user.last_name,
72 'last_name': user.last_name,
74 'username': user.username,
73 'username': user.username,
75 'email': user.email,
74 'email': user.email,
76 'icon_link': h.gravatar_url(user.email, 30),
75 'icon_link': h.gravatar_url(user.email, 30),
77 'profile_link': h.link_to_user(user),
76 'profile_link': h.link_to_user(user),
78 'value_display': h.escape(h.person(user)),
77 'value_display': h.escape(h.person(user)),
79 'value': user.username,
78 'value': user.username,
80 'value_type': 'user',
79 'value_type': 'user',
81 'active': user.active,
80 'active': user.active,
82 }
81 }
83
82
84 def get_users(self, name_contains=None, limit=20, only_active=True):
83 def get_users(self, name_contains=None, limit=20, only_active=True):
85
84
86 query = self.sa.query(User)
85 query = self.sa.query(User)
87 if only_active:
86 if only_active:
88 query = query.filter(User.active == true())
87 query = query.filter(User.active == true())
89
88
90 if name_contains:
89 if name_contains:
91 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
90 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
92 query = query.filter(
91 query = query.filter(
93 or_(
92 or_(
94 User.name.ilike(ilike_expression),
93 User.name.ilike(ilike_expression),
95 User.lastname.ilike(ilike_expression),
94 User.lastname.ilike(ilike_expression),
96 User.username.ilike(ilike_expression)
95 User.username.ilike(ilike_expression)
97 )
96 )
98 )
97 )
99 # sort by username length so the shortest (closest) matches come first
98 # sort by username length so the shortest (closest) matches come first
100 query = query.order_by(func.length(User.username))\
99 query = query.order_by(func.length(User.username))\
101 .order_by(User.username)
100 .order_by(User.username)
102 query = query.limit(limit)
101 query = query.limit(limit)
103
102
104 users = query.all()
103 users = query.all()
105
104
106 _users = [
105 _users = [
107 self._serialize_user(user) for user in users
106 self._serialize_user(user) for user in users
108 ]
107 ]
109 return _users
108 return _users
110
109
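For example, an autocomplete-style lookup through the helper above might look like this (the values in the comment are illustrative):

matches = UserModel().get_users(name_contains='adm', limit=10)
# each entry is the dict produced by _serialize_user(), e.g.
# {'id': 2, 'username': 'admin', 'value': 'admin', 'value_type': 'user', 'active': True, ...}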
111 def get_by_username(self, username, cache=False, case_insensitive=False):
110 def get_by_username(self, username, cache=False, case_insensitive=False):
112
111
113 if case_insensitive:
112 if case_insensitive:
114 user = self.sa.query(User).filter(User.username.ilike(username))
113 user = self.sa.query(User).filter(User.username.ilike(username))
115 else:
114 else:
116 user = self.sa.query(User)\
115 user = self.sa.query(User)\
117 .filter(User.username == username)
116 .filter(User.username == username)
118 if cache:
117 if cache:
119 name_key = _hash_key(username)
118 name_key = _hash_key(username)
120 user = user.options(
119 user = user.options(
121 FromCache("sql_cache_short", f"get_user_{name_key}"))
120 FromCache("sql_cache_short", f"get_user_{name_key}"))
122 return user.scalar()
121 return user.scalar()
123
122
124 def get_by_email(self, email, cache=False, case_insensitive=False):
123 def get_by_email(self, email, cache=False, case_insensitive=False):
125 return User.get_by_email(email, case_insensitive, cache)
124 return User.get_by_email(email, case_insensitive, cache)
126
125
127 def get_by_auth_token(self, auth_token, cache=False):
126 def get_by_auth_token(self, auth_token, cache=False):
128 return User.get_by_auth_token(auth_token, cache)
127 return User.get_by_auth_token(auth_token, cache)
129
128
130 def get_active_user_count(self, cache=False):
129 def get_active_user_count(self, cache=False):
131 qry = User.query().filter(
130 qry = User.query().filter(
132 User.active == true()).filter(
131 User.active == true()).filter(
133 User.username != User.DEFAULT_USER)
132 User.username != User.DEFAULT_USER)
134 if cache:
133 if cache:
135 qry = qry.options(
134 qry = qry.options(
136 FromCache("sql_cache_short", "get_active_users"))
135 FromCache("sql_cache_short", "get_active_users"))
137 return qry.count()
136 return qry.count()
138
137
139 def create(self, form_data, cur_user=None):
138 def create(self, form_data, cur_user=None):
140 if not cur_user:
139 if not cur_user:
141 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
140 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
142
141
143 user_data = {
142 user_data = {
144 'username': form_data['username'],
143 'username': form_data['username'],
145 'password': form_data['password'],
144 'password': form_data['password'],
146 'email': form_data['email'],
145 'email': form_data['email'],
147 'firstname': form_data['firstname'],
146 'firstname': form_data['firstname'],
148 'lastname': form_data['lastname'],
147 'lastname': form_data['lastname'],
149 'active': form_data['active'],
148 'active': form_data['active'],
150 'extern_type': form_data['extern_type'],
149 'extern_type': form_data['extern_type'],
151 'extern_name': form_data['extern_name'],
150 'extern_name': form_data['extern_name'],
152 'admin': False,
151 'admin': False,
153 'cur_user': cur_user
152 'cur_user': cur_user
154 }
153 }
155
154
156 if 'create_repo_group' in form_data:
155 if 'create_repo_group' in form_data:
157 user_data['create_repo_group'] = str2bool(
156 user_data['create_repo_group'] = str2bool(
158 form_data.get('create_repo_group'))
157 form_data.get('create_repo_group'))
159
158
160 try:
159 try:
161 if form_data.get('password_change'):
160 if form_data.get('password_change'):
162 user_data['force_password_change'] = True
161 user_data['force_password_change'] = True
163 return UserModel().create_or_update(**user_data)
162 return UserModel().create_or_update(**user_data)
164 except Exception:
163 except Exception:
165 log.error(traceback.format_exc())
164 log.error(traceback.format_exc())
166 raise
165 raise
167
166
168 def update_user(self, user, skip_attrs=None, **kwargs):
167 def update_user(self, user, skip_attrs=None, **kwargs):
169 from rhodecode.lib.auth import get_crypt_password
168 from rhodecode.lib.auth import get_crypt_password
170
169
171 user = self._get_user(user)
170 user = self._get_user(user)
172 if user.username == User.DEFAULT_USER:
171 if user.username == User.DEFAULT_USER:
173 raise DefaultUserException(
172 raise DefaultUserException(
174 "You can't edit this user (`%(username)s`) since it's "
173 "You can't edit this user (`%(username)s`) since it's "
175 "crucial for entire application" % {
174 "crucial for entire application" % {
176 'username': user.username})
175 'username': user.username})
177
176
178 # first store only defaults
177 # first store only defaults
179 user_attrs = {
178 user_attrs = {
180 'updating_user_id': user.user_id,
179 'updating_user_id': user.user_id,
181 'username': user.username,
180 'username': user.username,
182 'password': user.password,
181 'password': user.password,
183 'email': user.email,
182 'email': user.email,
184 'firstname': user.name,
183 'firstname': user.name,
185 'lastname': user.lastname,
184 'lastname': user.lastname,
186 'description': user.description,
185 'description': user.description,
187 'active': user.active,
186 'active': user.active,
188 'admin': user.admin,
187 'admin': user.admin,
189 'extern_name': user.extern_name,
188 'extern_name': user.extern_name,
190 'extern_type': user.extern_type,
189 'extern_type': user.extern_type,
191 'language': user.user_data.get('language')
190 'language': user.user_data.get('language')
192 }
191 }
193
192
194 # in case there's new_password, that comes from form, use it to
193 # in case there's new_password, that comes from form, use it to
195 # store password
194 # store password
196 if kwargs.get('new_password'):
195 if kwargs.get('new_password'):
197 kwargs['password'] = kwargs['new_password']
196 kwargs['password'] = kwargs['new_password']
198
197
199 # cleanups, my_account password change form
198 # cleanups, my_account password change form
200 kwargs.pop('current_password', None)
199 kwargs.pop('current_password', None)
201 kwargs.pop('new_password', None)
200 kwargs.pop('new_password', None)
202
201
203 # cleanups, user edit password change form
202 # cleanups, user edit password change form
204 kwargs.pop('password_confirmation', None)
203 kwargs.pop('password_confirmation', None)
205 kwargs.pop('password_change', None)
204 kwargs.pop('password_change', None)
206
205
207 # create repo group on user creation
206 # create repo group on user creation
208 kwargs.pop('create_repo_group', None)
207 kwargs.pop('create_repo_group', None)
209
208
210 # legacy forms send name, which is the firstname
209 # legacy forms send name, which is the firstname
211 firstname = kwargs.pop('name', None)
210 firstname = kwargs.pop('name', None)
212 if firstname:
211 if firstname:
213 kwargs['firstname'] = firstname
212 kwargs['firstname'] = firstname
214
213
215 for k, v in kwargs.items():
214 for k, v in kwargs.items():
216 # skip if we don't want to update this
215 # skip if we don't want to update this
217 if skip_attrs and k in skip_attrs:
216 if skip_attrs and k in skip_attrs:
218 continue
217 continue
219
218
220 user_attrs[k] = v
219 user_attrs[k] = v
221
220
222 try:
221 try:
223 return self.create_or_update(**user_attrs)
222 return self.create_or_update(**user_attrs)
224 except Exception:
223 except Exception:
225 log.error(traceback.format_exc())
224 log.error(traceback.format_exc())
226 raise
225 raise
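A usage sketch for update_user(): keyword arguments overlay the defaults read from the user row, and skip_attrs protects fields from being overwritten. The username, the values and the Session import path (rhodecode.model.meta) are assumptions for illustration.

from rhodecode.model.meta import Session
from rhodecode.model.user import UserModel

user_model = UserModel()
user_model.update_user(
    'jdoe',                           # username, user_id or User instance
    skip_attrs=['admin'],             # never touch the admin flag here
    email='new.address@example.com',
    name='Johnny',                    # legacy form field, mapped to firstname
    new_password='rotated-secret',    # copied into `password` by the method
)
Session().commit()                    # the model only adds to the session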
227
226
228 def create_or_update(
227 def create_or_update(
229 self, username, password, email, firstname='', lastname='',
228 self, username, password, email, firstname='', lastname='',
230 active=True, admin=False, extern_type=None, extern_name=None,
229 active=True, admin=False, extern_type=None, extern_name=None,
231 cur_user=None, plugin=None, force_password_change=False,
230 cur_user=None, plugin=None, force_password_change=False,
232 allow_to_create_user=True, create_repo_group=None,
231 allow_to_create_user=True, create_repo_group=None,
233 updating_user_id=None, language=None, description='',
232 updating_user_id=None, language=None, description='',
234 strict_creation_check=True):
233 strict_creation_check=True):
235 """
234 """
236 Creates a new instance if not found, or updates current one
235 Creates a new instance if not found, or updates current one
237
236
238 :param username:
237 :param username:
239 :param password:
238 :param password:
240 :param email:
239 :param email:
241 :param firstname:
240 :param firstname:
242 :param lastname:
241 :param lastname:
243 :param active:
242 :param active:
244 :param admin:
243 :param admin:
245 :param extern_type:
244 :param extern_type:
246 :param extern_name:
245 :param extern_name:
247 :param cur_user:
246 :param cur_user:
248 :param plugin: optional plugin this method was called from
247 :param plugin: optional plugin this method was called from
249 :param force_password_change: sets the flag on the new or existing
248 :param force_password_change: sets the flag on the new or existing
250 user that forces a password change
249 user that forces a password change
251 :param allow_to_create_user: Defines if the method can actually create
250 :param allow_to_create_user: Defines if the method can actually create
252 new users
251 new users
253 :param create_repo_group: Defines if the method should also
252 :param create_repo_group: Defines if the method should also
254 create a repo group named after the user, with the user as owner
253 create a repo group named after the user, with the user as owner
255 :param updating_user_id: if set, this is the user we want to
254 :param updating_user_id: if set, this is the user we want to
256 update; this allows editing the username.
255 update; this allows editing the username.
257 :param language: language of user from interface.
256 :param language: language of user from interface.
258 :param description: user description
257 :param description: user description
259 :param strict_creation_check: checks for allowed creation license wise etc.
258 :param strict_creation_check: checks for allowed creation license wise etc.
260
259
261 :returns: new User object with injected `is_new_user` attribute.
260 :returns: new User object with injected `is_new_user` attribute.
262 """
261 """
263
262
264 if not cur_user:
263 if not cur_user:
265 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
264 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
266
265
267 from rhodecode.lib.auth import (
266 from rhodecode.lib.auth import (
268 get_crypt_password, check_password)
267 get_crypt_password, check_password)
269 from rhodecode.lib import hooks_base
268 from rhodecode.lib import hooks_base
270
269
271 def _password_change(new_user, password):
270 def _password_change(new_user, password):
272 old_password = new_user.password or ''
271 old_password = new_user.password or ''
273 # empty password
272 # empty password
274 if not old_password:
273 if not old_password:
275 return False
274 return False
276
275
277 # password check is only needed for RhodeCode internal auth calls
276 # password check is only needed for RhodeCode internal auth calls
278 # in case it's a plugin we don't care
277 # in case it's a plugin we don't care
279 if not plugin:
278 if not plugin:
280
279
281 # first check if we were given the crypted password back; if it
280 # first check if we were given the crypted password back; if it
282 # matches, it's not a password change
281 # matches, it's not a password change
283 if new_user.password == password:
282 if new_user.password == password:
284 return False
283 return False
285
284
286 password_match = check_password(password, old_password)
285 password_match = check_password(password, old_password)
287 if not password_match:
286 if not password_match:
288 return True
287 return True
289
288
290 return False
289 return False
291
290
292 # read settings on default personal repo group creation
291 # read settings on default personal repo group creation
293 if create_repo_group is None:
292 if create_repo_group is None:
294 default_create_repo_group = RepoGroupModel()\
293 default_create_repo_group = RepoGroupModel()\
295 .get_default_create_personal_repo_group()
294 .get_default_create_personal_repo_group()
296 create_repo_group = default_create_repo_group
295 create_repo_group = default_create_repo_group
297
296
298 user_data = {
297 user_data = {
299 'username': username,
298 'username': username,
300 'password': password,
299 'password': password,
301 'email': email,
300 'email': email,
302 'firstname': firstname,
301 'firstname': firstname,
303 'lastname': lastname,
302 'lastname': lastname,
304 'active': active,
303 'active': active,
305 'admin': admin
304 'admin': admin
306 }
305 }
307
306
308 if updating_user_id:
307 if updating_user_id:
309 log.debug('Checking for existing account in RhodeCode '
308 log.debug('Checking for existing account in RhodeCode '
310 'database with user_id `%s` ', updating_user_id)
309 'database with user_id `%s` ', updating_user_id)
311 user = User.get(updating_user_id)
310 user = User.get(updating_user_id)
312 else:
311 else:
313 log.debug('Checking for existing account in RhodeCode '
312 log.debug('Checking for existing account in RhodeCode '
314 'database with username `%s` ', username)
313 'database with username `%s` ', username)
315 user = User.get_by_username(username, case_insensitive=True)
314 user = User.get_by_username(username, case_insensitive=True)
316
315
317 if user is None:
316 if user is None:
318 # we check internal flag if this method is actually allowed to
317 # we check internal flag if this method is actually allowed to
319 # create new user
318 # create new user
320 if not allow_to_create_user:
319 if not allow_to_create_user:
321 msg = ('Method wants to create new user, but it is not '
320 msg = ('Method wants to create new user, but it is not '
322 'allowed to do so')
321 'allowed to do so')
323 log.warning(msg)
322 log.warning(msg)
324 raise NotAllowedToCreateUserError(msg)
323 raise NotAllowedToCreateUserError(msg)
325
324
326 log.debug('Creating new user %s', username)
325 log.debug('Creating new user %s', username)
327
326
328 # only if we create a user that is active
327 # only if we create a user that is active
329 new_active_user = active
328 new_active_user = active
330 if new_active_user and strict_creation_check:
329 if new_active_user and strict_creation_check:
331 # raises UserCreationError if it's not allowed for any reason to
330 # raises UserCreationError if it's not allowed for any reason to
332 # create new active user, this also executes pre-create hooks
331 # create new active user, this also executes pre-create hooks
333 hooks_base.check_allowed_create_user(user_data, cur_user, strict_check=True)
332 hooks_base.check_allowed_create_user(user_data, cur_user, strict_check=True)
334 events.trigger(events.UserPreCreate(user_data))
333 events.trigger(events.UserPreCreate(user_data))
335 new_user = User()
334 new_user = User()
336 edit = False
335 edit = False
337 else:
336 else:
338 log.debug('updating user `%s`', username)
337 log.debug('updating user `%s`', username)
339 events.trigger(events.UserPreUpdate(user, user_data))
338 events.trigger(events.UserPreUpdate(user, user_data))
340 new_user = user
339 new_user = user
341 edit = True
340 edit = True
342
341
343 # we're not allowed to edit default user
342 # we're not allowed to edit default user
344 if user.username == User.DEFAULT_USER:
343 if user.username == User.DEFAULT_USER:
345 raise DefaultUserException(
344 raise DefaultUserException(
346 "You can't edit this user (`%(username)s`) since it's "
345 "You can't edit this user (`%(username)s`) since it's "
347 "crucial for entire application"
346 "crucial for entire application"
348 % {'username': user.username})
347 % {'username': user.username})
349
348
350 # inject special attribute that will tell us if User is new or old
349 # inject special attribute that will tell us if User is new or old
351 new_user.is_new_user = not edit
350 new_user.is_new_user = not edit
352 # for users that didn't specify an auth type, we use the RhodeCode built-in one
351 # for users that didn't specify an auth type, we use the RhodeCode built-in one
353 from rhodecode.authentication.plugins import auth_rhodecode
352 from rhodecode.authentication.plugins import auth_rhodecode
354 extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.uid
353 extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.uid
355 extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.uid
354 extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.uid
356
355
357 try:
356 try:
358 new_user.username = username
357 new_user.username = username
359 new_user.admin = admin
358 new_user.admin = admin
360 new_user.email = email
359 new_user.email = email
361 new_user.active = active
360 new_user.active = active
362 new_user.extern_name = safe_unicode(extern_name)
361 new_user.extern_name = safe_unicode(extern_name)
363 new_user.extern_type = safe_unicode(extern_type)
362 new_user.extern_type = safe_unicode(extern_type)
364 new_user.name = firstname
363 new_user.name = firstname
365 new_user.lastname = lastname
364 new_user.lastname = lastname
366 new_user.description = description
365 new_user.description = description
367
366
368 # set password only if creating a user or the password has changed
367 # set password only if creating a user or the password has changed
369 if not edit or _password_change(new_user, password):
368 if not edit or _password_change(new_user, password):
370 reason = 'new password' if edit else 'new user'
369 reason = 'new password' if edit else 'new user'
371 log.debug('Updating password reason=>%s', reason)
370 log.debug('Updating password reason=>%s', reason)
372 new_user.password = get_crypt_password(password) if password else None
371 new_user.password = get_crypt_password(password) if password else None
373
372
374 if force_password_change:
373 if force_password_change:
375 new_user.update_userdata(force_password_change=True)
374 new_user.update_userdata(force_password_change=True)
376 if language:
375 if language:
377 new_user.update_userdata(language=language)
376 new_user.update_userdata(language=language)
378 new_user.update_userdata(notification_status=True)
377 new_user.update_userdata(notification_status=True)
379
378
380 self.sa.add(new_user)
379 self.sa.add(new_user)
381
380
382 if not edit and create_repo_group:
381 if not edit and create_repo_group:
383 RepoGroupModel().create_personal_repo_group(
382 RepoGroupModel().create_personal_repo_group(
384 new_user, commit_early=False)
383 new_user, commit_early=False)
385
384
386 if not edit:
385 if not edit:
387 # add the RSS token
386 # add the RSS token
388 self.add_auth_token(
387 self.add_auth_token(
389 user=username, lifetime_minutes=-1,
388 user=username, lifetime_minutes=-1,
390 role=self.auth_token_role.ROLE_FEED,
389 role=self.auth_token_role.ROLE_FEED,
391 description=u'Generated feed token')
390 description=u'Generated feed token')
392
391
393 kwargs = new_user.get_dict()
392 kwargs = new_user.get_dict()
394 # backward compat, require api_keys present
393 # backward compat, require api_keys present
395 kwargs['api_keys'] = kwargs['auth_tokens']
394 kwargs['api_keys'] = kwargs['auth_tokens']
396 hooks_base.create_user(created_by=cur_user, **kwargs)
395 hooks_base.create_user(created_by=cur_user, **kwargs)
397 events.trigger(events.UserPostCreate(user_data))
396 events.trigger(events.UserPostCreate(user_data))
398 return new_user
397 return new_user
399 except (DatabaseError,):
398 except (DatabaseError,):
400 log.error(traceback.format_exc())
399 log.error(traceback.format_exc())
401 raise
400 raise
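Two hedged sketches of create_or_update() above: one creating a fresh account and one renaming it via updating_user_id, which switches the lookup from username to primary key. All values are invented; only the parameter names and the described behaviour come from the method itself.

# create a brand-new account (the result carries is_new_user == True)
user = UserModel().create_or_update(
    username='service-bot',
    password='generated-secret',
    email='bot@example.com',
    firstname='Service',
    lastname='Bot',
    active=True,
    admin=False,
    create_repo_group=False,
    description='automation account',
)

# rename the same account: updating_user_id makes the method load the row
# by primary key, which is what allows the username to change
UserModel().create_or_update(
    username='service-bot-renamed',
    password='generated-secret',   # unchanged plaintext; _password_change()
                                   # sees it still matches and keeps the hash
    email='bot@example.com',
    updating_user_id=user.user_id,
)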
402
401
403 def create_registration(self, form_data,
402 def create_registration(self, form_data,
404 extern_name='rhodecode', extern_type='rhodecode'):
403 extern_name='rhodecode', extern_type='rhodecode'):
405 from rhodecode.model.notification import NotificationModel
404 from rhodecode.model.notification import NotificationModel
406 from rhodecode.model.notification import EmailNotificationModel
405 from rhodecode.model.notification import EmailNotificationModel
407
406
408 try:
407 try:
409 form_data['admin'] = False
408 form_data['admin'] = False
410 form_data['extern_name'] = extern_name
409 form_data['extern_name'] = extern_name
411 form_data['extern_type'] = extern_type
410 form_data['extern_type'] = extern_type
412 new_user = self.create(form_data)
411 new_user = self.create(form_data)
413
412
414 self.sa.add(new_user)
413 self.sa.add(new_user)
415 self.sa.flush()
414 self.sa.flush()
416
415
417 user_data = new_user.get_dict()
416 user_data = new_user.get_dict()
418 user_data.update({
417 user_data.update({
419 'first_name': user_data.get('firstname'),
418 'first_name': user_data.get('firstname'),
420 'last_name': user_data.get('lastname'),
419 'last_name': user_data.get('lastname'),
421 })
420 })
422 kwargs = {
421 kwargs = {
423 # use SQLALCHEMY safe dump of user data
422 # use SQLALCHEMY safe dump of user data
424 'user': AttributeDict(user_data),
423 'user': AttributeDict(user_data),
425 'date': datetime.datetime.now()
424 'date': datetime.datetime.now()
426 }
425 }
427 notification_type = EmailNotificationModel.TYPE_REGISTRATION
426 notification_type = EmailNotificationModel.TYPE_REGISTRATION
428
427
429 # create notification objects, and emails
428 # create notification objects, and emails
430 NotificationModel().create(
429 NotificationModel().create(
431 created_by=new_user,
430 created_by=new_user,
432 notification_subject='', # Filled in based on the notification_type
431 notification_subject='', # Filled in based on the notification_type
433 notification_body='', # Filled in based on the notification_type
432 notification_body='', # Filled in based on the notification_type
434 notification_type=notification_type,
433 notification_type=notification_type,
435 recipients=None, # all admins
434 recipients=None, # all admins
436 email_kwargs=kwargs,
435 email_kwargs=kwargs,
437 )
436 )
438
437
439 return new_user
438 return new_user
440 except Exception:
439 except Exception:
441 log.error(traceback.format_exc())
440 log.error(traceback.format_exc())
442 raise
441 raise
443
442
444 def _handle_user_repos(self, username, repositories, handle_user,
443 def _handle_user_repos(self, username, repositories, handle_user,
445 handle_mode=None):
444 handle_mode=None):
446
445
447 left_overs = True
446 left_overs = True
448
447
449 from rhodecode.model.repo import RepoModel
448 from rhodecode.model.repo import RepoModel
450
449
451 if handle_mode == 'detach':
450 if handle_mode == 'detach':
452 for obj in repositories:
451 for obj in repositories:
453 obj.user = handle_user
452 obj.user = handle_user
454 # set a description so we know why the super-admin now owns
453 # set a description so we know why the super-admin now owns
455 # additional repositories that were orphaned
454 # additional repositories that were orphaned
456 obj.description += ' \n::detached repository from deleted user: %s' % (username,)
455 obj.description += ' \n::detached repository from deleted user: %s' % (username,)
457 self.sa.add(obj)
456 self.sa.add(obj)
458 left_overs = False
457 left_overs = False
459 elif handle_mode == 'delete':
458 elif handle_mode == 'delete':
460 for obj in repositories:
459 for obj in repositories:
461 RepoModel().delete(obj, forks='detach')
460 RepoModel().delete(obj, forks='detach')
462 left_overs = False
461 left_overs = False
463
462
464 # if nothing was done, we still have leftovers
463 # if nothing was done, we still have leftovers
465 return left_overs
464 return left_overs
466
465
467 def _handle_user_repo_groups(self, username, repository_groups, handle_user,
466 def _handle_user_repo_groups(self, username, repository_groups, handle_user,
468 handle_mode=None):
467 handle_mode=None):
469
468
470 left_overs = True
469 left_overs = True
471
470
472 from rhodecode.model.repo_group import RepoGroupModel
471 from rhodecode.model.repo_group import RepoGroupModel
473
472
474 if handle_mode == 'detach':
473 if handle_mode == 'detach':
475 for r in repository_groups:
474 for r in repository_groups:
476 r.user = handle_user
475 r.user = handle_user
477 # set a description so we know why the super-admin now owns
476 # set a description so we know why the super-admin now owns
478 # additional repository groups that were orphaned
477 # additional repository groups that were orphaned
479 r.group_description += ' \n::detached repository group from deleted user: %s' % (username,)
478 r.group_description += ' \n::detached repository group from deleted user: %s' % (username,)
480 r.personal = False
479 r.personal = False
481 self.sa.add(r)
480 self.sa.add(r)
482 left_overs = False
481 left_overs = False
483 elif handle_mode == 'delete':
482 elif handle_mode == 'delete':
484 for r in repository_groups:
483 for r in repository_groups:
485 RepoGroupModel().delete(r)
484 RepoGroupModel().delete(r)
486 left_overs = False
485 left_overs = False
487
486
488 # if nothing was done, we still have leftovers
487 # if nothing was done, we still have leftovers
489 return left_overs
488 return left_overs
490
489
491 def _handle_user_user_groups(self, username, user_groups, handle_user,
490 def _handle_user_user_groups(self, username, user_groups, handle_user,
492 handle_mode=None):
491 handle_mode=None):
493
492
494 left_overs = True
493 left_overs = True
495
494
496 from rhodecode.model.user_group import UserGroupModel
495 from rhodecode.model.user_group import UserGroupModel
497
496
498 if handle_mode == 'detach':
497 if handle_mode == 'detach':
499 for r in user_groups:
498 for r in user_groups:
500 for user_user_group_to_perm in r.user_user_group_to_perm:
499 for user_user_group_to_perm in r.user_user_group_to_perm:
501 if user_user_group_to_perm.user.username == username:
500 if user_user_group_to_perm.user.username == username:
502 user_user_group_to_perm.user = handle_user
501 user_user_group_to_perm.user = handle_user
503 r.user = handle_user
502 r.user = handle_user
504 # set a description so we know why the super-admin now owns
503 # set a description so we know why the super-admin now owns
505 # additional user groups that were orphaned
504 # additional user groups that were orphaned
506 r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,)
505 r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,)
507 self.sa.add(r)
506 self.sa.add(r)
508 left_overs = False
507 left_overs = False
509 elif handle_mode == 'delete':
508 elif handle_mode == 'delete':
510 for r in user_groups:
509 for r in user_groups:
511 UserGroupModel().delete(r)
510 UserGroupModel().delete(r)
512 left_overs = False
511 left_overs = False
513
512
514 # if nothing was done, we still have leftovers
513 # if nothing was done, we still have leftovers
515 return left_overs
514 return left_overs
516
515
517 def _handle_user_pull_requests(self, username, pull_requests, handle_user,
516 def _handle_user_pull_requests(self, username, pull_requests, handle_user,
518 handle_mode=None):
517 handle_mode=None):
519 left_overs = True
518 left_overs = True
520
519
521 from rhodecode.model.pull_request import PullRequestModel
520 from rhodecode.model.pull_request import PullRequestModel
522
521
523 if handle_mode == 'detach':
522 if handle_mode == 'detach':
524 for pr in pull_requests:
523 for pr in pull_requests:
525 pr.user_id = handle_user.user_id
524 pr.user_id = handle_user.user_id
526 # set a description so we know why the super-admin now owns
525 # set a description so we know why the super-admin now owns
527 # additional pull requests that were orphaned
526 # additional pull requests that were orphaned
528 pr.description += ' \n::detached pull requests from deleted user: %s' % (username,)
527 pr.description += ' \n::detached pull requests from deleted user: %s' % (username,)
529 self.sa.add(pr)
528 self.sa.add(pr)
530 left_overs = False
529 left_overs = False
531 elif handle_mode == 'delete':
530 elif handle_mode == 'delete':
532 for pr in pull_requests:
531 for pr in pull_requests:
533 PullRequestModel().delete(pr)
532 PullRequestModel().delete(pr)
534
533
535 left_overs = False
534 left_overs = False
536
535
537 # if nothing was done, we still have leftovers
536 # if nothing was done, we still have leftovers
538 return left_overs
537 return left_overs
539
538
540 def _handle_user_artifacts(self, username, artifacts, handle_user,
539 def _handle_user_artifacts(self, username, artifacts, handle_user,
541 handle_mode=None):
540 handle_mode=None):
542
541
543 left_overs = True
542 left_overs = True
544
543
545 if handle_mode == 'detach':
544 if handle_mode == 'detach':
546 for a in artifacts:
545 for a in artifacts:
547 a.upload_user = handle_user
546 a.upload_user = handle_user
548 # set a description so we know why the super-admin now owns
547 # set a description so we know why the super-admin now owns
549 # additional artifacts that were orphaned
548 # additional artifacts that were orphaned
550 a.file_description += ' \n::detached artifact from deleted user: %s' % (username,)
549 a.file_description += ' \n::detached artifact from deleted user: %s' % (username,)
551 self.sa.add(a)
550 self.sa.add(a)
552 left_overs = False
551 left_overs = False
553 elif handle_mode == 'delete':
552 elif handle_mode == 'delete':
554 from rhodecode.apps.file_store import utils as store_utils
553 from rhodecode.apps.file_store import utils as store_utils
555 request = get_current_request()
554 request = get_current_request()
556 storage = store_utils.get_file_storage(request.registry.settings)
555 storage = store_utils.get_file_storage(request.registry.settings)
557 for a in artifacts:
556 for a in artifacts:
558 file_uid = a.file_uid
557 file_uid = a.file_uid
559 storage.delete(file_uid)
558 storage.delete(file_uid)
560 self.sa.delete(a)
559 self.sa.delete(a)
561
560
562 left_overs = False
561 left_overs = False
563
562
564 # if nothing was done, we still have leftovers
563 # if nothing was done, we still have leftovers
565 return left_overs
564 return left_overs
566
565
567 def delete(self, user, cur_user=None, handle_repos=None,
566 def delete(self, user, cur_user=None, handle_repos=None,
568 handle_repo_groups=None, handle_user_groups=None,
567 handle_repo_groups=None, handle_user_groups=None,
569 handle_pull_requests=None, handle_artifacts=None, handle_new_owner=None):
568 handle_pull_requests=None, handle_artifacts=None, handle_new_owner=None):
570 from rhodecode.lib import hooks_base
569 from rhodecode.lib import hooks_base
571
570
572 if not cur_user:
571 if not cur_user:
573 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
572 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
574
573
575 user = self._get_user(user)
574 user = self._get_user(user)
576
575
577 try:
576 try:
578 if user.username == User.DEFAULT_USER:
577 if user.username == User.DEFAULT_USER:
579 raise DefaultUserException(
578 raise DefaultUserException(
580 u"You can't remove this user since it's"
579 u"You can't remove this user since it's"
581 u" crucial for entire application")
580 u" crucial for entire application")
582 handle_user = handle_new_owner or self.cls.get_first_super_admin()
581 handle_user = handle_new_owner or self.cls.get_first_super_admin()
583 log.debug('New detached objects owner %s', handle_user)
582 log.debug('New detached objects owner %s', handle_user)
584
583
585 left_overs = self._handle_user_repos(
584 left_overs = self._handle_user_repos(
586 user.username, user.repositories, handle_user, handle_repos)
585 user.username, user.repositories, handle_user, handle_repos)
587 if left_overs and user.repositories:
586 if left_overs and user.repositories:
588 repos = [x.repo_name for x in user.repositories]
587 repos = [x.repo_name for x in user.repositories]
589 raise UserOwnsReposException(
588 raise UserOwnsReposException(
590 u'user "%(username)s" still owns %(len_repos)s repositories and cannot be '
589 u'user "%(username)s" still owns %(len_repos)s repositories and cannot be '
591 u'removed. Switch owners or remove those repositories:%(list_repos)s'
590 u'removed. Switch owners or remove those repositories:%(list_repos)s'
592 % {'username': user.username, 'len_repos': len(repos),
591 % {'username': user.username, 'len_repos': len(repos),
593 'list_repos': ', '.join(repos)})
592 'list_repos': ', '.join(repos)})
594
593
595 left_overs = self._handle_user_repo_groups(
594 left_overs = self._handle_user_repo_groups(
596 user.username, user.repository_groups, handle_user, handle_repo_groups)
595 user.username, user.repository_groups, handle_user, handle_repo_groups)
597 if left_overs and user.repository_groups:
596 if left_overs and user.repository_groups:
598 repo_groups = [x.group_name for x in user.repository_groups]
597 repo_groups = [x.group_name for x in user.repository_groups]
599 raise UserOwnsRepoGroupsException(
598 raise UserOwnsRepoGroupsException(
600 u'user "%(username)s" still owns %(len_repo_groups)s repository groups and cannot be '
599 u'user "%(username)s" still owns %(len_repo_groups)s repository groups and cannot be '
601 u'removed. Switch owners or remove those repository groups:%(list_repo_groups)s'
600 u'removed. Switch owners or remove those repository groups:%(list_repo_groups)s'
602 % {'username': user.username, 'len_repo_groups': len(repo_groups),
601 % {'username': user.username, 'len_repo_groups': len(repo_groups),
603 'list_repo_groups': ', '.join(repo_groups)})
602 'list_repo_groups': ', '.join(repo_groups)})
604
603
605 left_overs = self._handle_user_user_groups(
604 left_overs = self._handle_user_user_groups(
606 user.username, user.user_groups, handle_user, handle_user_groups)
605 user.username, user.user_groups, handle_user, handle_user_groups)
607 if left_overs and user.user_groups:
606 if left_overs and user.user_groups:
608 user_groups = [x.users_group_name for x in user.user_groups]
607 user_groups = [x.users_group_name for x in user.user_groups]
609 raise UserOwnsUserGroupsException(
608 raise UserOwnsUserGroupsException(
610 u'user "%s" still owns %s user groups and cannot be '
609 u'user "%s" still owns %s user groups and cannot be '
611 u'removed. Switch owners or remove those user groups:%s'
610 u'removed. Switch owners or remove those user groups:%s'
612 % (user.username, len(user_groups), ', '.join(user_groups)))
611 % (user.username, len(user_groups), ', '.join(user_groups)))
613
612
614 left_overs = self._handle_user_pull_requests(
613 left_overs = self._handle_user_pull_requests(
615 user.username, user.user_pull_requests, handle_user, handle_pull_requests)
614 user.username, user.user_pull_requests, handle_user, handle_pull_requests)
616 if left_overs and user.user_pull_requests:
615 if left_overs and user.user_pull_requests:
617 pull_requests = ['!{}'.format(x.pull_request_id) for x in user.user_pull_requests]
616 pull_requests = ['!{}'.format(x.pull_request_id) for x in user.user_pull_requests]
618 raise UserOwnsPullRequestsException(
617 raise UserOwnsPullRequestsException(
619 u'user "%s" still owns %s pull requests and cannot be '
618 u'user "%s" still owns %s pull requests and cannot be '
620 u'removed. Switch owners or remove those pull requests:%s'
619 u'removed. Switch owners or remove those pull requests:%s'
621 % (user.username, len(pull_requests), ', '.join(pull_requests)))
620 % (user.username, len(pull_requests), ', '.join(pull_requests)))
622
621
623 left_overs = self._handle_user_artifacts(
622 left_overs = self._handle_user_artifacts(
624 user.username, user.artifacts, handle_user, handle_artifacts)
623 user.username, user.artifacts, handle_user, handle_artifacts)
625 if left_overs and user.artifacts:
624 if left_overs and user.artifacts:
626 artifacts = [x.file_uid for x in user.artifacts]
625 artifacts = [x.file_uid for x in user.artifacts]
627 raise UserOwnsArtifactsException(
626 raise UserOwnsArtifactsException(
628 u'user "%s" still owns %s artifacts and cannot be '
627 u'user "%s" still owns %s artifacts and cannot be '
629 u'removed. Switch owners or remove those artifacts:%s'
628 u'removed. Switch owners or remove those artifacts:%s'
630 % (user.username, len(artifacts), ', '.join(artifacts)))
629 % (user.username, len(artifacts), ', '.join(artifacts)))
631
630
632 user_data = user.get_dict() # fetch user data before expire
631 user_data = user.get_dict() # fetch user data before expire
633
632
634 # we might change the user data with detach/delete, make sure
633 # we might change the user data with detach/delete, make sure
635 # the object is marked as expired before actually deleting !
634 # the object is marked as expired before actually deleting !
636 self.sa.expire(user)
635 self.sa.expire(user)
637 self.sa.delete(user)
636 self.sa.delete(user)
638
637
639 hooks_base.delete_user(deleted_by=cur_user, **user_data)
638 hooks_base.delete_user(deleted_by=cur_user, **user_data)
640 except Exception:
639 except Exception:
641 log.error(traceback.format_exc())
640 log.error(traceback.format_exc())
642 raise
641 raise
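A sketch of delete() above with explicit handle modes. If a mode is left as None while the user still owns objects of that kind, the corresponding UserOwns*Exception is raised; 'detach' re-owns the objects (annotating their descriptions as shown in the handlers) and 'delete' removes them. The username and the Session import path are illustrative assumptions.

from rhodecode.model.meta import Session

UserModel().delete(
    'jdoe',
    handle_repos='detach',         # hand repositories over to the new owner
    handle_repo_groups='detach',
    handle_user_groups='detach',
    handle_pull_requests='detach',
    handle_artifacts='delete',     # artifacts are removed from file storage
    handle_new_owner=None,         # None falls back to the first super-admin
)
Session().commit()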
643
642
644 def reset_password_link(self, data, pwd_reset_url):
643 def reset_password_link(self, data, pwd_reset_url):
645 from rhodecode.lib.celerylib import tasks, run_task
644 from rhodecode.lib.celerylib import tasks, run_task
646 from rhodecode.model.notification import EmailNotificationModel
645 from rhodecode.model.notification import EmailNotificationModel
647 user_email = data['email']
646 user_email = data['email']
648 try:
647 try:
649 user = User.get_by_email(user_email)
648 user = User.get_by_email(user_email)
650 if user:
649 if user:
651 log.debug('password reset user found %s', user)
650 log.debug('password reset user found %s', user)
652
651
653 email_kwargs = {
652 email_kwargs = {
654 'password_reset_url': pwd_reset_url,
653 'password_reset_url': pwd_reset_url,
655 'user': user,
654 'user': user,
656 'email': user_email,
655 'email': user_email,
657 'date': datetime.datetime.now(),
656 'date': datetime.datetime.now(),
658 'first_admin_email': User.get_first_super_admin().email
657 'first_admin_email': User.get_first_super_admin().email
659 }
658 }
660
659
661 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
660 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
662 EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs)
661 EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs)
663
662
664 recipients = [user_email]
663 recipients = [user_email]
665
664
666 action_logger_generic(
665 action_logger_generic(
667 'sending password reset email to user: {}'.format(
666 'sending password reset email to user: {}'.format(
668 user), namespace='security.password_reset')
667 user), namespace='security.password_reset')
669
668
670 run_task(tasks.send_email, recipients, subject,
669 run_task(tasks.send_email, recipients, subject,
671 email_body_plaintext, email_body)
670 email_body_plaintext, email_body)
672
671
673 else:
672 else:
674 log.debug("password reset email %s not found", user_email)
673 log.debug("password reset email %s not found", user_email)
675 except Exception:
674 except Exception:
676 log.error(traceback.format_exc())
675 log.error(traceback.format_exc())
677 return False
676 return False
678
677
679 return True
678 return True
680
679
681 def reset_password(self, data):
680 def reset_password(self, data):
682 from rhodecode.lib.celerylib import tasks, run_task
681 from rhodecode.lib.celerylib import tasks, run_task
683 from rhodecode.model.notification import EmailNotificationModel
682 from rhodecode.model.notification import EmailNotificationModel
684 from rhodecode.lib import auth
683 from rhodecode.lib import auth
685 user_email = data['email']
684 user_email = data['email']
686 pre_db = True
685 pre_db = True
687 try:
686 try:
688 user = User.get_by_email(user_email)
687 user = User.get_by_email(user_email)
689 new_passwd = auth.PasswordGenerator().gen_password(
688 new_passwd = auth.PasswordGenerator().gen_password(
690 12, auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
689 12, auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
691 if user:
690 if user:
692 user.password = auth.get_crypt_password(new_passwd)
691 user.password = auth.get_crypt_password(new_passwd)
693 # also force this user to reset his password !
692 # also force this user to reset his password !
694 user.update_userdata(force_password_change=True)
693 user.update_userdata(force_password_change=True)
695
694
696 Session().add(user)
695 Session().add(user)
697
696
698 # now delete the token in question
697 # now delete the token in question
699 UserApiKeys = AuthTokenModel.cls
698 UserApiKeys = AuthTokenModel.cls
700 UserApiKeys().query().filter(
699 UserApiKeys().query().filter(
701 UserApiKeys.api_key == data['token']).delete()
700 UserApiKeys.api_key == data['token']).delete()
702
701
703 Session().commit()
702 Session().commit()
704 log.info('successfully reset password for `%s`', user_email)
703 log.info('successfully reset password for `%s`', user_email)
705
704
706 if new_passwd is None:
705 if new_passwd is None:
707 raise Exception('unable to generate new password')
706 raise Exception('unable to generate new password')
708
707
709 pre_db = False
708 pre_db = False
710
709
711 email_kwargs = {
710 email_kwargs = {
712 'new_password': new_passwd,
711 'new_password': new_passwd,
713 'user': user,
712 'user': user,
714 'email': user_email,
713 'email': user_email,
715 'date': datetime.datetime.now(),
714 'date': datetime.datetime.now(),
716 'first_admin_email': User.get_first_super_admin().email
715 'first_admin_email': User.get_first_super_admin().email
717 }
716 }
718
717
719 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
718 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
720 EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION,
719 EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION,
721 **email_kwargs)
720 **email_kwargs)
722
721
723 recipients = [user_email]
722 recipients = [user_email]
724
723
725 action_logger_generic(
724 action_logger_generic(
726 'sent new password to user: {} with email: {}'.format(
725 'sent new password to user: {} with email: {}'.format(
727 user, user_email), namespace='security.password_reset')
726 user, user_email), namespace='security.password_reset')
728
727
729 run_task(tasks.send_email, recipients, subject,
728 run_task(tasks.send_email, recipients, subject,
730 email_body_plaintext, email_body)
729 email_body_plaintext, email_body)
731
730
732 except Exception:
731 except Exception:
733 log.error('Failed to update user password')
732 log.error('Failed to update user password')
734 log.error(traceback.format_exc())
733 log.error(traceback.format_exc())
735 if pre_db:
734 if pre_db:
736 # we roll back only if the local db operations fail. Once it goes into
735 # we roll back only if the local db operations fail. Once it goes into
737 # run_task we're past the rollback point, so rolling back wouldn't work
736 # run_task we're past the rollback point, so rolling back wouldn't work
738 Session().rollback()
737 Session().rollback()
739
738
740 return True
739 return True
741
740
742 def fill_data(self, auth_user, user_id=None, api_key=None, username=None):
741 def fill_data(self, auth_user, user_id=None, api_key=None, username=None):
743 """
742 """
744 Fetches auth_user by user_id, or api_key if present.
743 Fetches auth_user by user_id, or api_key if present.
745 Fills auth_user attributes with those taken from the database.
744 Fills auth_user attributes with those taken from the database.
746 Additionally sets is_authenticated to False if the lookup
745 Additionally sets is_authenticated to False if the lookup
747 raises an error; returns False if the user is missing or inactive.
746 raises an error; returns False if the user is missing or inactive.
748
747
749 :param auth_user: instance of user to set attributes
748 :param auth_user: instance of user to set attributes
750 :param user_id: user id to fetch by
749 :param user_id: user id to fetch by
751 :param api_key: api key to fetch by
750 :param api_key: api key to fetch by
752 :param username: username to fetch by
751 :param username: username to fetch by
753 """
752 """
754 def token_obfuscate(token):
753 def token_obfuscate(token):
755 if token:
754 if token:
756 return token[:4] + "****"
755 return token[:4] + "****"
757
756
758 if user_id is None and api_key is None and username is None:
757 if user_id is None and api_key is None and username is None:
759 raise Exception('You need to pass user_id, api_key or username')
758 raise Exception('You need to pass user_id, api_key or username')
760
759
761 log.debug(
760 log.debug(
762 'AuthUser: fill data execution based on: '
761 'AuthUser: fill data execution based on: '
763 'user_id:%s api_key:%s username:%s', user_id, api_key, username)
762 'user_id:%s api_key:%s username:%s', user_id, api_key, username)
764 try:
763 try:
765 dbuser = None
764 dbuser = None
766 if user_id:
765 if user_id:
767 dbuser = self.get(user_id)
766 dbuser = self.get(user_id)
768 elif api_key:
767 elif api_key:
769 dbuser = self.get_by_auth_token(api_key)
768 dbuser = self.get_by_auth_token(api_key)
770 elif username:
769 elif username:
771 dbuser = self.get_by_username(username)
770 dbuser = self.get_by_username(username)
772
771
773 if not dbuser:
772 if not dbuser:
774 log.warning(
773 log.warning(
775 'Unable to lookup user by id:%s api_key:%s username:%s',
774 'Unable to lookup user by id:%s api_key:%s username:%s',
776 user_id, token_obfuscate(api_key), username)
775 user_id, token_obfuscate(api_key), username)
777 return False
776 return False
778 if not dbuser.active:
777 if not dbuser.active:
779 log.debug('User `%s:%s` is inactive, skipping fill data',
778 log.debug('User `%s:%s` is inactive, skipping fill data',
780 username, user_id)
779 username, user_id)
781 return False
780 return False
782
781
783 log.debug('AuthUser: filling found user:%s data', dbuser)
782 log.debug('AuthUser: filling found user:%s data', dbuser)
784
783
785 attrs = {
784 attrs = {
786 'user_id': dbuser.user_id,
785 'user_id': dbuser.user_id,
787 'username': dbuser.username,
786 'username': dbuser.username,
788 'name': dbuser.name,
787 'name': dbuser.name,
789 'first_name': dbuser.first_name,
788 'first_name': dbuser.first_name,
790 'firstname': dbuser.firstname,
789 'firstname': dbuser.firstname,
791 'last_name': dbuser.last_name,
790 'last_name': dbuser.last_name,
792 'lastname': dbuser.lastname,
791 'lastname': dbuser.lastname,
793 'admin': dbuser.admin,
792 'admin': dbuser.admin,
794 'active': dbuser.active,
793 'active': dbuser.active,
795
794
796 'email': dbuser.email,
795 'email': dbuser.email,
797 'emails': dbuser.emails_cached(),
796 'emails': dbuser.emails_cached(),
798 'short_contact': dbuser.short_contact,
797 'short_contact': dbuser.short_contact,
799 'full_contact': dbuser.full_contact,
798 'full_contact': dbuser.full_contact,
800 'full_name': dbuser.full_name,
799 'full_name': dbuser.full_name,
801 'full_name_or_username': dbuser.full_name_or_username,
800 'full_name_or_username': dbuser.full_name_or_username,
802
801
803 '_api_key': dbuser._api_key,
802 '_api_key': dbuser._api_key,
804 '_user_data': dbuser._user_data,
803 '_user_data': dbuser._user_data,
805
804
806 'created_on': dbuser.created_on,
805 'created_on': dbuser.created_on,
807 'extern_name': dbuser.extern_name,
806 'extern_name': dbuser.extern_name,
808 'extern_type': dbuser.extern_type,
807 'extern_type': dbuser.extern_type,
809
808
810 'inherit_default_permissions': dbuser.inherit_default_permissions,
809 'inherit_default_permissions': dbuser.inherit_default_permissions,
811
810
812 'language': dbuser.language,
811 'language': dbuser.language,
813 'last_activity': dbuser.last_activity,
812 'last_activity': dbuser.last_activity,
814 'last_login': dbuser.last_login,
813 'last_login': dbuser.last_login,
815 'password': dbuser.password,
814 'password': dbuser.password,
816 }
815 }
817 auth_user.__dict__.update(attrs)
816 auth_user.__dict__.update(attrs)
818 except Exception:
817 except Exception:
819 log.error(traceback.format_exc())
818 log.error(traceback.format_exc())
820 auth_user.is_authenticated = False
819 auth_user.is_authenticated = False
821 return False
820 return False
822
821
823 return True
822 return True
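A small sketch of the fill_data() contract above, using a throwaway stand-in object instead of the real rhodecode.lib.auth.AuthUser (which normally calls this method itself); the stub class and the username are purely illustrative.

class _StubAuthUser(object):
    # stand-in carrying only the attribute fill_data() may flip on failure
    is_authenticated = True

auth_user = _StubAuthUser()
if UserModel().fill_data(auth_user, username='jdoe'):
    # attributes were copied straight from the database row
    print(auth_user.username, auth_user.email, auth_user.admin)
else:
    # user missing or inactive; on exceptions the method additionally
    # sets auth_user.is_authenticated = False
    print('could not fill auth user data')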
824
823
825 def has_perm(self, user, perm):
824 def has_perm(self, user, perm):
826 perm = self._get_perm(perm)
825 perm = self._get_perm(perm)
827 user = self._get_user(user)
826 user = self._get_user(user)
828
827
829 return UserToPerm.query().filter(UserToPerm.user == user)\
828 return UserToPerm.query().filter(UserToPerm.user == user)\
830 .filter(UserToPerm.permission == perm).scalar() is not None
829 .filter(UserToPerm.permission == perm).scalar() is not None
831
830
832 def grant_perm(self, user, perm):
831 def grant_perm(self, user, perm):
833 """
832 """
834 Grant user global permissions
833 Grant user global permissions
835
834
836 :param user:
835 :param user:
837 :param perm:
836 :param perm:
838 """
837 """
839 user = self._get_user(user)
838 user = self._get_user(user)
840 perm = self._get_perm(perm)
839 perm = self._get_perm(perm)
841 # if this permission is already granted skip it
840 # if this permission is already granted skip it
842 _perm = UserToPerm.query()\
841 _perm = UserToPerm.query()\
843 .filter(UserToPerm.user == user)\
842 .filter(UserToPerm.user == user)\
844 .filter(UserToPerm.permission == perm)\
843 .filter(UserToPerm.permission == perm)\
845 .scalar()
844 .scalar()
846 if _perm:
845 if _perm:
847 return
846 return
848 new = UserToPerm()
847 new = UserToPerm()
849 new.user = user
848 new.user = user
850 new.permission = perm
849 new.permission = perm
851 self.sa.add(new)
850 self.sa.add(new)
852 return new
851 return new
853
852
854 def revoke_perm(self, user, perm):
853 def revoke_perm(self, user, perm):
855 """
854 """
856 Revoke users global permissions
855 Revoke users global permissions
857
856
858 :param user:
857 :param user:
859 :param perm:
858 :param perm:
860 """
859 """
861 user = self._get_user(user)
860 user = self._get_user(user)
862 perm = self._get_perm(perm)
861 perm = self._get_perm(perm)
863
862
864 obj = UserToPerm.query()\
863 obj = UserToPerm.query()\
865 .filter(UserToPerm.user == user)\
864 .filter(UserToPerm.user == user)\
866 .filter(UserToPerm.permission == perm)\
865 .filter(UserToPerm.permission == perm)\
867 .scalar()
866 .scalar()
868 if obj:
867 if obj:
869 self.sa.delete(obj)
868 self.sa.delete(obj)
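A short sketch tying together grant_perm(), has_perm() and revoke_perm() above. The permission name is an illustrative guess at a global Permission key, and the Session import path is assumed.

from rhodecode.model.meta import Session

model = UserModel()
model.grant_perm('jdoe', 'hg.create.repository')   # no-op if already granted
Session().commit()
assert model.has_perm('jdoe', 'hg.create.repository')
model.revoke_perm('jdoe', 'hg.create.repository')
Session().commit()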
870
869
871 def add_extra_email(self, user, email):
870 def add_extra_email(self, user, email):
872 """
871 """
873 Adds email address to UserEmailMap
872 Adds email address to UserEmailMap
874
873
875 :param user:
874 :param user:
876 :param email:
875 :param email:
877 """
876 """
878
877
879 user = self._get_user(user)
878 user = self._get_user(user)
880
879
881 obj = UserEmailMap()
880 obj = UserEmailMap()
882 obj.user = user
881 obj.user = user
883 obj.email = email
882 obj.email = email
884 self.sa.add(obj)
883 self.sa.add(obj)
885 return obj
884 return obj
886
885
887 def delete_extra_email(self, user, email_id):
886 def delete_extra_email(self, user, email_id):
888 """
887 """
889 Removes email address from UserEmailMap
888 Removes email address from UserEmailMap
890
889
891 :param user:
890 :param user:
892 :param email_id:
891 :param email_id:
893 """
892 """
894 user = self._get_user(user)
893 user = self._get_user(user)
895 obj = UserEmailMap.query().get(email_id)
894 obj = UserEmailMap.query().get(email_id)
896 if obj and obj.user_id == user.user_id:
895 if obj and obj.user_id == user.user_id:
897 self.sa.delete(obj)
896 self.sa.delete(obj)
898
897
899 def parse_ip_range(self, ip_range):
898 def parse_ip_range(self, ip_range):
900 ip_list = []
899 ip_list = []
901
900
902 def make_unique(value):
901 def make_unique(value):
903 seen = []
902 seen = []
904 return [c for c in value if not (c in seen or seen.append(c))]
903 return [c for c in value if not (c in seen or seen.append(c))]
905
904
906 # first split by commas
905 # first split by commas
907 for ip_range in ip_range.split(','):
906 for ip_range in ip_range.split(','):
908 if not ip_range:
907 if not ip_range:
909 continue
908 continue
910 ip_range = ip_range.strip()
909 ip_range = ip_range.strip()
911 if '-' in ip_range:
910 if '-' in ip_range:
912 start_ip, end_ip = ip_range.split('-', 1)
911 start_ip, end_ip = ip_range.split('-', 1)
913 start_ip = ipaddress.ip_address(safe_unicode(start_ip.strip()))
912 start_ip = ipaddress.ip_address(safe_unicode(start_ip.strip()))
914 end_ip = ipaddress.ip_address(safe_unicode(end_ip.strip()))
913 end_ip = ipaddress.ip_address(safe_unicode(end_ip.strip()))
915 parsed_ip_range = []
914 parsed_ip_range = []
916
915
917 for index in range(int(start_ip), int(end_ip) + 1):
916 for index in range(int(start_ip), int(end_ip) + 1):
918 new_ip = ipaddress.ip_address(index)
917 new_ip = ipaddress.ip_address(index)
919 parsed_ip_range.append(str(new_ip))
918 parsed_ip_range.append(str(new_ip))
920 ip_list.extend(parsed_ip_range)
919 ip_list.extend(parsed_ip_range)
921 else:
920 else:
922 ip_list.append(ip_range)
921 ip_list.append(ip_range)
923
922
924 return make_unique(ip_list)
923 return make_unique(ip_list)
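An illustrative call to parse_ip_range() above: entries are split on commas, dash-separated entries are expanded address by address (inclusive), and duplicates are dropped while preserving order.

UserModel().parse_ip_range('192.168.1.10, 10.0.0.1-10.0.0.3, 192.168.1.10')
# -> ['192.168.1.10', '10.0.0.1', '10.0.0.2', '10.0.0.3']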
925
924
926 def add_extra_ip(self, user, ip, description=None):
925 def add_extra_ip(self, user, ip, description=None):
927 """
926 """
928 Adds ip address to UserIpMap
927 Adds ip address to UserIpMap
929
928
930 :param user:
929 :param user:
931 :param ip:
930 :param ip:
932 """
931 """
933
932
934 user = self._get_user(user)
933 user = self._get_user(user)
935 obj = UserIpMap()
934 obj = UserIpMap()
936 obj.user = user
935 obj.user = user
937 obj.ip_addr = ip
936 obj.ip_addr = ip
938 obj.description = description
937 obj.description = description
939 self.sa.add(obj)
938 self.sa.add(obj)
940 return obj
939 return obj
941
940
942 auth_token_role = AuthTokenModel.cls
941 auth_token_role = AuthTokenModel.cls
943
942
944 def add_auth_token(self, user, lifetime_minutes, role, description=u'',
943 def add_auth_token(self, user, lifetime_minutes, role, description=u'',
945 scope_callback=None):
944 scope_callback=None):
946 """
945 """
947 Add AuthToken for user.
946 Add AuthToken for user.
948
947
949 :param user: username/user_id
948 :param user: username/user_id
950 :param lifetime_minutes: in minutes the lifetime for token, -1 equals no limit
949 :param lifetime_minutes: in minutes the lifetime for token, -1 equals no limit
951 :param role: one of AuthTokenModel.cls.ROLE_*
950 :param role: one of AuthTokenModel.cls.ROLE_*
952 :param description: optional string description
951 :param description: optional string description
953 """
952 """
954
953
955 token = AuthTokenModel().create(
954 token = AuthTokenModel().create(
956 user, description, lifetime_minutes, role)
955 user, description, lifetime_minutes, role)
957 if scope_callback and callable(scope_callback):
956 if scope_callback and callable(scope_callback):
958 # call the callback if we provide, used to attach scope for EE edition
957 # call the callback if we provide, used to attach scope for EE edition
959 scope_callback(token)
958 scope_callback(token)
960 return token
959 return token
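A hedged sketch of add_auth_token() above, reusing the feed role that the user-creation path registers; the lifetime and description are invented, and scope_callback is the optional EE hook mentioned in the code.

model = UserModel()
token = model.add_auth_token(
    user='jdoe',
    lifetime_minutes=60 * 24,                 # one day; -1 means no limit
    role=model.auth_token_role.ROLE_FEED,     # same role as the RSS token above
    description=u'temporary feed token',
    scope_callback=None,
)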
961
960
962 def delete_extra_ip(self, user, ip_id):
961 def delete_extra_ip(self, user, ip_id):
963 """
962 """
964 Removes ip address from UserIpMap
963 Removes ip address from UserIpMap
965
964
966 :param user:
965 :param user:
967 :param ip_id:
966 :param ip_id:
968 """
967 """
969 user = self._get_user(user)
968 user = self._get_user(user)
970 obj = UserIpMap.query().get(ip_id)
969 obj = UserIpMap.query().get(ip_id)
971 if obj and obj.user_id == user.user_id:
970 if obj and obj.user_id == user.user_id:
972 self.sa.delete(obj)
971 self.sa.delete(obj)
973
972
974 def get_accounts_in_creation_order(self, current_user=None):
973 def get_accounts_in_creation_order(self, current_user=None):
975 """
974 """
976 Get accounts in order of creation, for deactivation when over license limits
975 Get accounts in order of creation, for deactivation when over license limits
977
976
978 pick the currently logged-in user and put them at position 0 of the list,
977 pick the currently logged-in user and put them at position 0 of the list,
979 pick all super-admins in order of creation date and add them to the list,
978 pick all super-admins in order of creation date and add them to the list,
980 pick all other accounts in order of creation and add them to the list.
979 pick all other accounts in order of creation and add them to the list.
981
980
982 Based on that list, the accounts at its end can be disabled: they were
981 Based on that list, the accounts at its end can be disabled: they were
983 created last, and the tail never includes the super-admins or the
982 created last, and the tail never includes the super-admins or the
984 current user.
983 current user.
985
984
986 :param current_user: optionally current user running this operation
985 :param current_user: optionally current user running this operation
987 """
986 """
988
987
989 if not current_user:
988 if not current_user:
990 current_user = get_current_rhodecode_user()
989 current_user = get_current_rhodecode_user()
991 active_super_admins = [
990 active_super_admins = [
992 x.user_id for x in User.query()
991 x.user_id for x in User.query()
993 .filter(User.user_id != current_user.user_id)
992 .filter(User.user_id != current_user.user_id)
994 .filter(User.active == true())
993 .filter(User.active == true())
995 .filter(User.admin == true())
994 .filter(User.admin == true())
996 .order_by(User.created_on.asc())]
995 .order_by(User.created_on.asc())]
997
996
998 active_regular_users = [
997 active_regular_users = [
999 x.user_id for x in User.query()
998 x.user_id for x in User.query()
1000 .filter(User.user_id != current_user.user_id)
999 .filter(User.user_id != current_user.user_id)
1001 .filter(User.active == true())
1000 .filter(User.active == true())
1002 .filter(User.admin == false())
1001 .filter(User.admin == false())
1003 .order_by(User.created_on.asc())]
1002 .order_by(User.created_on.asc())]
1004
1003
1005 list_of_accounts = [current_user.user_id]
1004 list_of_accounts = [current_user.user_id]
1006 list_of_accounts += active_super_admins
1005 list_of_accounts += active_super_admins
1007 list_of_accounts += active_regular_users
1006 list_of_accounts += active_regular_users
1008
1007
1009 return list_of_accounts
1008 return list_of_accounts
1010
1009
1011 def deactivate_last_users(self, expected_users, current_user=None):
1010 def deactivate_last_users(self, expected_users, current_user=None):
1012 """
1011 """
1013 Deactivate accounts that are over the license limits.
1012 Deactivate accounts that are over the license limits.
1014 The algorithm for choosing which accounts to disable is:
1013 The algorithm for choosing which accounts to disable is:
1015
1014
1016 Get current user, then super admins in creation order, then regular
1015 Get current user, then super admins in creation order, then regular
1017 active users in creation order.
1016 active users in creation order.
1018
1017
1019 Using that list we mark all accounts from the end of it as inactive.
1018 Using that list we mark all accounts from the end of it as inactive.
1020 This way we block only the most recently created accounts.
1019 This way we block only the most recently created accounts.
1021
1020
1022 :param expected_users: number of accounts expected to stay active; we
1021 :param expected_users: number of accounts expected to stay active; we
1023 deactivate the accounts past that count at the end of the ordered list
1022 deactivate the accounts past that count at the end of the ordered list
1024 """
1023 """
1025
1024
1026 list_of_accounts = self.get_accounts_in_creation_order(
1025 list_of_accounts = self.get_accounts_in_creation_order(
1027 current_user=current_user)
1026 current_user=current_user)
1028
1027
1029 for acc_id in list_of_accounts[expected_users + 1:]:
1028 for acc_id in list_of_accounts[expected_users + 1:]:
1030 user = User.get(acc_id)
1029 user = User.get(acc_id)
1031 log.info('Deactivating account %s for license unlock', user)
1030 log.info('Deactivating account %s for license unlock', user)
1032 user.active = False
1031 user.active = False
1033 Session().add(user)
1032 Session().add(user)
1034 Session().commit()
1033 Session().commit()
1035
1034
1036 return
1035 return
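A sketch of the license-limit helpers above: get_accounts_in_creation_order() builds the ordered list (current user first, then super-admins, then regular users) and deactivate_last_users() disables everything past the expected count; the count of 25 is illustrative.

model = UserModel()
ordered = model.get_accounts_in_creation_order()
# disable the accounts created last, keeping the first entries of the
# ordered list active; the method commits each change itself
model.deactivate_last_users(expected_users=25)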
1037
1036
1038 def get_user_log(self, user, filter_term):
1037 def get_user_log(self, user, filter_term):
1039 user_log = UserLog.query()\
1038 user_log = UserLog.query()\
1040 .filter(or_(UserLog.user_id == user.user_id,
1039 .filter(or_(UserLog.user_id == user.user_id,
1041 UserLog.username == user.username))\
1040 UserLog.username == user.username))\
1042 .options(joinedload(UserLog.user))\
1041 .options(joinedload(UserLog.user))\
1043 .options(joinedload(UserLog.repository))\
1042 .options(joinedload(UserLog.repository))\
1044 .order_by(UserLog.action_date.desc())
1043 .order_by(UserLog.action_date.desc())
1045
1044
1046 user_log = user_log_filter(user_log, filter_term)
1045 user_log = user_log_filter(user_log, filter_term)
1047 return user_log
1046 return user_log
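As a rough illustration of the deactivation flow above — a minimal sketch, assuming these methods live on UserModel and that `admin_user` and the licensed seat count are supplied by the caller:

    from rhodecode.model.user import UserModel

    user_model = UserModel()
    licensed_seats = 25  # hypothetical license limit
    # accounts come back ordered: current user, super-admins, then regular
    # users, each bucket sorted by creation date ascending
    ordered = user_model.get_accounts_in_creation_order(current_user=admin_user)
    if len(ordered) > licensed_seats:
        # everything past the licensed count (newest accounts go first)
        user_model.deactivate_last_users(licensed_seats, current_user=admin_user)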
@@ -1,754 +1,754 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import traceback
22 import traceback
23
23
24 from rhodecode.lib.utils2 import safe_str, safe_unicode
24 from rhodecode.lib.utils2 import safe_str, safe_unicode
25 from rhodecode.lib.exceptions import (
25 from rhodecode.lib.exceptions import (
26 UserGroupAssignedException, RepoGroupAssignmentError)
26 UserGroupAssignedException, RepoGroupAssignmentError)
27 from rhodecode.lib.utils2 import (
27 from rhodecode.lib.utils2 import (
28 get_current_rhodecode_user, action_logger_generic)
28 get_current_rhodecode_user, action_logger_generic)
29 from rhodecode.model import BaseModel
29 from rhodecode.model import BaseModel
30 from rhodecode.model.scm import UserGroupList
30 from rhodecode.model.scm import UserGroupList
31 from rhodecode.model.db import (
31 from rhodecode.model.db import (
32 joinedload, true, func, User, UserGroupMember, UserGroup,
32 joinedload, true, func, User, UserGroupMember, UserGroup,
33 UserGroupRepoToPerm, Permission, UserGroupToPerm, UserUserGroupToPerm,
33 UserGroupRepoToPerm, Permission, UserGroupToPerm, UserUserGroupToPerm,
34 UserGroupUserGroupToPerm, UserGroupRepoGroupToPerm)
34 UserGroupUserGroupToPerm, UserGroupRepoGroupToPerm)
35
35
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 class UserGroupModel(BaseModel):
40 class UserGroupModel(BaseModel):
41
41
42 cls = UserGroup
42 cls = UserGroup
43
43
44 def _get_user_group(self, user_group):
44 def _get_user_group(self, user_group):
45 return self._get_instance(UserGroup, user_group,
45 return self._get_instance(UserGroup, user_group,
46 callback=UserGroup.get_by_group_name)
46 callback=UserGroup.get_by_group_name)
47
47
48 def _create_default_perms(self, user_group):
48 def _create_default_perms(self, user_group):
49 # create default permission
49 # create default permission
50 default_perm = 'usergroup.read'
50 default_perm = 'usergroup.read'
51 def_user = User.get_default_user()
51 def_user = User.get_default_user()
52 for p in def_user.user_perms:
52 for p in def_user.user_perms:
53 if p.permission.permission_name.startswith('usergroup.'):
53 if p.permission.permission_name.startswith('usergroup.'):
54 default_perm = p.permission.permission_name
54 default_perm = p.permission.permission_name
55 break
55 break
56
56
57 user_group_to_perm = UserUserGroupToPerm()
57 user_group_to_perm = UserUserGroupToPerm()
58 user_group_to_perm.permission = Permission.get_by_key(default_perm)
58 user_group_to_perm.permission = Permission.get_by_key(default_perm)
59
59
60 user_group_to_perm.user_group = user_group
60 user_group_to_perm.user_group = user_group
61 user_group_to_perm.user_id = def_user.user_id
61 user_group_to_perm.user_id = def_user.user_id
62 return user_group_to_perm
62 return user_group_to_perm
63
63
64 def update_permissions(
64 def update_permissions(
65 self, user_group, perm_additions=None, perm_updates=None,
65 self, user_group, perm_additions=None, perm_updates=None,
66 perm_deletions=None, check_perms=True, cur_user=None):
66 perm_deletions=None, check_perms=True, cur_user=None):
67
67
68 from rhodecode.lib.auth import HasUserGroupPermissionAny
68 from rhodecode.lib.auth import HasUserGroupPermissionAny
69 if not perm_additions:
69 if not perm_additions:
70 perm_additions = []
70 perm_additions = []
71 if not perm_updates:
71 if not perm_updates:
72 perm_updates = []
72 perm_updates = []
73 if not perm_deletions:
73 if not perm_deletions:
74 perm_deletions = []
74 perm_deletions = []
75
75
76 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
76 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
77
77
78 changes = {
78 changes = {
79 'added': [],
79 'added': [],
80 'updated': [],
80 'updated': [],
81 'deleted': []
81 'deleted': []
82 }
82 }
83 change_obj = user_group.get_api_data()
83 change_obj = user_group.get_api_data()
84 # update permissions
84 # update permissions
85 for member_id, perm, member_type in perm_updates:
85 for member_id, perm, member_type in perm_updates:
86 member_id = int(member_id)
86 member_id = int(member_id)
87 if member_type == 'user':
87 if member_type == 'user':
88 member_name = User.get(member_id).username
88 member_name = User.get(member_id).username
89 # this updates existing one
89 # this updates existing one
90 self.grant_user_permission(
90 self.grant_user_permission(
91 user_group=user_group, user=member_id, perm=perm
91 user_group=user_group, user=member_id, perm=perm
92 )
92 )
93 elif member_type == 'user_group':
93 elif member_type == 'user_group':
94 # check if we have permissions to alter this usergroup
94 # check if we have permissions to alter this usergroup
95 member_name = UserGroup.get(member_id).users_group_name
95 member_name = UserGroup.get(member_id).users_group_name
96 if not check_perms or HasUserGroupPermissionAny(
96 if not check_perms or HasUserGroupPermissionAny(
97 *req_perms)(member_name, user=cur_user):
97 *req_perms)(member_name, user=cur_user):
98 self.grant_user_group_permission(
98 self.grant_user_group_permission(
99 target_user_group=user_group, user_group=member_id, perm=perm)
99 target_user_group=user_group, user_group=member_id, perm=perm)
100 else:
100 else:
101 raise ValueError("member_type must be 'user' or 'user_group' "
101 raise ValueError("member_type must be 'user' or 'user_group' "
102 "got {} instead".format(member_type))
102 "got {} instead".format(member_type))
103
103
104 changes['updated'].append({
104 changes['updated'].append({
105 'change_obj': change_obj,
105 'change_obj': change_obj,
106 'type': member_type, 'id': member_id,
106 'type': member_type, 'id': member_id,
107 'name': member_name, 'new_perm': perm})
107 'name': member_name, 'new_perm': perm})
108
108
109 # set new permissions
109 # set new permissions
110 for member_id, perm, member_type in perm_additions:
110 for member_id, perm, member_type in perm_additions:
111 member_id = int(member_id)
111 member_id = int(member_id)
112 if member_type == 'user':
112 if member_type == 'user':
113 member_name = User.get(member_id).username
113 member_name = User.get(member_id).username
114 self.grant_user_permission(
114 self.grant_user_permission(
115 user_group=user_group, user=member_id, perm=perm)
115 user_group=user_group, user=member_id, perm=perm)
116 elif member_type == 'user_group':
116 elif member_type == 'user_group':
117 # check if we have permissions to alter this usergroup
117 # check if we have permissions to alter this usergroup
118 member_name = UserGroup.get(member_id).users_group_name
118 member_name = UserGroup.get(member_id).users_group_name
119 if not check_perms or HasUserGroupPermissionAny(
119 if not check_perms or HasUserGroupPermissionAny(
120 *req_perms)(member_name, user=cur_user):
120 *req_perms)(member_name, user=cur_user):
121 self.grant_user_group_permission(
121 self.grant_user_group_permission(
122 target_user_group=user_group, user_group=member_id, perm=perm)
122 target_user_group=user_group, user_group=member_id, perm=perm)
123 else:
123 else:
124 raise ValueError("member_type must be 'user' or 'user_group' "
124 raise ValueError("member_type must be 'user' or 'user_group' "
125 "got {} instead".format(member_type))
125 "got {} instead".format(member_type))
126
126
127 changes['added'].append({
127 changes['added'].append({
128 'change_obj': change_obj,
128 'change_obj': change_obj,
129 'type': member_type, 'id': member_id,
129 'type': member_type, 'id': member_id,
130 'name': member_name, 'new_perm': perm})
130 'name': member_name, 'new_perm': perm})
131
131
132 # delete permissions
132 # delete permissions
133 for member_id, perm, member_type in perm_deletions:
133 for member_id, perm, member_type in perm_deletions:
134 member_id = int(member_id)
134 member_id = int(member_id)
135 if member_type == 'user':
135 if member_type == 'user':
136 member_name = User.get(member_id).username
136 member_name = User.get(member_id).username
137 self.revoke_user_permission(user_group=user_group, user=member_id)
137 self.revoke_user_permission(user_group=user_group, user=member_id)
138 elif member_type == 'user_group':
138 elif member_type == 'user_group':
139 # check if we have permissions to alter this usergroup
139 # check if we have permissions to alter this usergroup
140 member_name = UserGroup.get(member_id).users_group_name
140 member_name = UserGroup.get(member_id).users_group_name
141 if not check_perms or HasUserGroupPermissionAny(
141 if not check_perms or HasUserGroupPermissionAny(
142 *req_perms)(member_name, user=cur_user):
142 *req_perms)(member_name, user=cur_user):
143 self.revoke_user_group_permission(
143 self.revoke_user_group_permission(
144 target_user_group=user_group, user_group=member_id)
144 target_user_group=user_group, user_group=member_id)
145 else:
145 else:
146 raise ValueError("member_type must be 'user' or 'user_group' "
146 raise ValueError("member_type must be 'user' or 'user_group' "
147 "got {} instead".format(member_type))
147 "got {} instead".format(member_type))
148
148
149 changes['deleted'].append({
149 changes['deleted'].append({
150 'change_obj': change_obj,
150 'change_obj': change_obj,
151 'type': member_type, 'id': member_id,
151 'type': member_type, 'id': member_id,
152 'name': member_name, 'new_perm': perm})
152 'name': member_name, 'new_perm': perm})
153
153
154 return changes
154 return changes
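A hedged usage sketch for update_permissions — `devs_group`, `acting_user` and the ids below are made up; each tuple is (member_id, permission_name, member_type):

    model = UserGroupModel()
    changes = model.update_permissions(
        user_group=devs_group,                         # UserGroup instance (assumed)
        perm_additions=[(2, 'usergroup.read', 'user')],
        perm_updates=[(5, 'usergroup.write', 'user_group')],
        perm_deletions=[(9, 'usergroup.none', 'user')],
        check_perms=True,
        cur_user=acting_user,                          # acting request user (assumed)
    )
    # changes -> {'added': [...], 'updated': [...], 'deleted': [...]}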
155
155
156 def get(self, user_group_id, cache=False):
156 def get(self, user_group_id, cache=False):
157 return UserGroup.get(user_group_id)
157 return UserGroup.get(user_group_id)
158
158
159 def get_group(self, user_group):
159 def get_group(self, user_group):
160 return self._get_user_group(user_group)
160 return self._get_user_group(user_group)
161
161
162 def get_by_name(self, name, cache=False, case_insensitive=False):
162 def get_by_name(self, name, cache=False, case_insensitive=False):
163 return UserGroup.get_by_group_name(name, cache, case_insensitive)
163 return UserGroup.get_by_group_name(name, cache, case_insensitive)
164
164
165 def create(self, name, description, owner, active=True, group_data=None):
165 def create(self, name, description, owner, active=True, group_data=None):
166 try:
166 try:
167 new_user_group = UserGroup()
167 new_user_group = UserGroup()
168 new_user_group.user = self._get_user(owner)
168 new_user_group.user = self._get_user(owner)
169 new_user_group.users_group_name = name
169 new_user_group.users_group_name = name
170 new_user_group.user_group_description = description
170 new_user_group.user_group_description = description
171 new_user_group.users_group_active = active
171 new_user_group.users_group_active = active
172 if group_data:
172 if group_data:
173 new_user_group.group_data = group_data
173 new_user_group.group_data = group_data
174 self.sa.add(new_user_group)
174 self.sa.add(new_user_group)
175 perm_obj = self._create_default_perms(new_user_group)
175 perm_obj = self._create_default_perms(new_user_group)
176 self.sa.add(perm_obj)
176 self.sa.add(perm_obj)
177
177
178 self.grant_user_permission(user_group=new_user_group,
178 self.grant_user_permission(user_group=new_user_group,
179 user=owner, perm='usergroup.admin')
179 user=owner, perm='usergroup.admin')
180
180
181 return new_user_group
181 return new_user_group
182 except Exception:
182 except Exception:
183 log.error(traceback.format_exc())
183 log.error(traceback.format_exc())
184 raise
184 raise
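A minimal sketch of creating a group through this model; the names and the commit step are assumptions, since the model only adds objects to the session:

    from rhodecode.model.meta import Session   # import path assumed

    model = UserGroupModel()
    devs_group = model.create(
        name='developers',
        description='all developers',
        owner='admin',        # username, user_id or User instance
        active=True,
    )
    Session().commit()        # caller is responsible for the commit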
185
185
186 def _get_memberships_for_user_ids(self, user_group, user_id_list):
186 def _get_memberships_for_user_ids(self, user_group, user_id_list):
187 members = []
187 members = []
188 for user_id in user_id_list:
188 for user_id in user_id_list:
189 member = self._get_membership(user_group.users_group_id, user_id)
189 member = self._get_membership(user_group.users_group_id, user_id)
190 members.append(member)
190 members.append(member)
191 return members
191 return members
192
192
193 def _get_added_and_removed_user_ids(self, user_group, user_id_list):
193 def _get_added_and_removed_user_ids(self, user_group, user_id_list):
194 current_members = user_group.members or []
194 current_members = user_group.members or []
195 current_members_ids = [m.user.user_id for m in current_members]
195 current_members_ids = [m.user.user_id for m in current_members]
196
196
197 added_members = [
197 added_members = [
198 user_id for user_id in user_id_list
198 user_id for user_id in user_id_list
199 if user_id not in current_members_ids]
199 if user_id not in current_members_ids]
200 if user_id_list == []:
200 if user_id_list == []:
201 # all members were deleted
201 # all members were deleted
202 deleted_members = current_members_ids
202 deleted_members = current_members_ids
203 else:
203 else:
204 deleted_members = [
204 deleted_members = [
205 user_id for user_id in current_members_ids
205 user_id for user_id in current_members_ids
206 if user_id not in user_id_list]
206 if user_id not in user_id_list]
207
207
208 return added_members, deleted_members
208 return added_members, deleted_members
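The membership diff above is plain list bookkeeping; for example, with current members [1, 2, 3] and an incoming id list [2, 4]:

    current_members_ids = [1, 2, 3]
    user_id_list = [2, 4]
    added = [u for u in user_id_list if u not in current_members_ids]      # [4]
    removed = [u for u in current_members_ids if u not in user_id_list]    # [1, 3]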
209
209
210 def _set_users_as_members(self, user_group, user_ids):
210 def _set_users_as_members(self, user_group, user_ids):
211 user_group.members = []
211 user_group.members = []
212 self.sa.flush()
212 self.sa.flush()
213 members = self._get_memberships_for_user_ids(
213 members = self._get_memberships_for_user_ids(
214 user_group, user_ids)
214 user_group, user_ids)
215 user_group.members = members
215 user_group.members = members
216 self.sa.add(user_group)
216 self.sa.add(user_group)
217
217
218 def _update_members_from_user_ids(self, user_group, user_ids):
218 def _update_members_from_user_ids(self, user_group, user_ids):
219 added, removed = self._get_added_and_removed_user_ids(
219 added, removed = self._get_added_and_removed_user_ids(
220 user_group, user_ids)
220 user_group, user_ids)
221 self._set_users_as_members(user_group, user_ids)
221 self._set_users_as_members(user_group, user_ids)
222 self._log_user_changes('added to', user_group, added)
222 self._log_user_changes('added to', user_group, added)
223 self._log_user_changes('removed from', user_group, removed)
223 self._log_user_changes('removed from', user_group, removed)
224 return added, removed
224 return added, removed
225
225
226 def _clean_members_data(self, members_data):
226 def _clean_members_data(self, members_data):
227 if not members_data:
227 if not members_data:
228 members_data = []
228 members_data = []
229
229
230 members = []
230 members = []
231 for user in members_data:
231 for user in members_data:
232 uid = int(user['member_user_id'])
232 uid = int(user['member_user_id'])
233 if uid not in members and user['type'] in ['new', 'existing']:
233 if uid not in members and user['type'] in ['new', 'existing']:
234 members.append(uid)
234 members.append(uid)
235 return members
235 return members
236
236
237 def update(self, user_group, form_data, group_data=None):
237 def update(self, user_group, form_data, group_data=None):
238 user_group = self._get_user_group(user_group)
238 user_group = self._get_user_group(user_group)
239 if 'users_group_name' in form_data:
239 if 'users_group_name' in form_data:
240 user_group.users_group_name = form_data['users_group_name']
240 user_group.users_group_name = form_data['users_group_name']
241 if 'users_group_active' in form_data:
241 if 'users_group_active' in form_data:
242 user_group.users_group_active = form_data['users_group_active']
242 user_group.users_group_active = form_data['users_group_active']
243 if 'user_group_description' in form_data:
243 if 'user_group_description' in form_data:
244 user_group.user_group_description = form_data[
244 user_group.user_group_description = form_data[
245 'user_group_description']
245 'user_group_description']
246
246
247 # handle owner change
247 # handle owner change
248 if 'user' in form_data:
248 if 'user' in form_data:
249 owner = form_data['user']
249 owner = form_data['user']
250 if isinstance(owner, str):
250 if isinstance(owner, str):
251 owner = User.get_by_username(form_data['user'])
251 owner = User.get_by_username(form_data['user'])
252
252
253 if not isinstance(owner, User):
253 if not isinstance(owner, User):
254 raise ValueError(
254 raise ValueError(
255 'invalid owner for user group: %s' % form_data['user'])
255 'invalid owner for user group: %s' % form_data['user'])
256
256
257 user_group.user = owner
257 user_group.user = owner
258
258
259 added_user_ids = []
259 added_user_ids = []
260 removed_user_ids = []
260 removed_user_ids = []
261 if 'users_group_members' in form_data:
261 if 'users_group_members' in form_data:
262 members_id_list = self._clean_members_data(
262 members_id_list = self._clean_members_data(
263 form_data['users_group_members'])
263 form_data['users_group_members'])
264 added_user_ids, removed_user_ids = \
264 added_user_ids, removed_user_ids = \
265 self._update_members_from_user_ids(user_group, members_id_list)
265 self._update_members_from_user_ids(user_group, members_id_list)
266
266
267 if group_data:
267 if group_data:
268 new_group_data = {}
268 new_group_data = {}
269 new_group_data.update(group_data)
269 new_group_data.update(group_data)
270 user_group.group_data = new_group_data
270 user_group.group_data = new_group_data
271
271
272 self.sa.add(user_group)
272 self.sa.add(user_group)
273 return user_group, added_user_ids, removed_user_ids
273 return user_group, added_user_ids, removed_user_ids
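A hedged example of a form_data payload for update; all values are invented, `devs_group` stands for an existing UserGroup, and the member entries follow the shape expected by _clean_members_data:

    form_data = {
        'users_group_name': 'developers-renamed',
        'users_group_active': True,
        'user_group_description': 'core developers',
        'user': 'admin',                                  # new owner
        'users_group_members': [
            {'member_user_id': '2', 'type': 'existing'},
            {'member_user_id': '7', 'type': 'new'},
        ],
    }
    group, added_ids, removed_ids = UserGroupModel().update(devs_group, form_data)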
274
274
275 def delete(self, user_group, force=False):
275 def delete(self, user_group, force=False):
276 """
276 """
 277 Deletes the given user group, unless the force flag is used.
 277 Deletes the given user group, unless the force flag is used.
 278 Raises an exception if the group is still assigned to any repository
 278 Raises an exception if the group is still assigned to any repository
 279 or repository group, otherwise deletes the group.
 279 or repository group, otherwise deletes the group.
280
280
281 :param user_group:
281 :param user_group:
282 :param force:
282 :param force:
283 """
283 """
284 user_group = self._get_user_group(user_group)
284 user_group = self._get_user_group(user_group)
285 if not user_group:
285 if not user_group:
286 return
286 return
287
287
288 try:
288 try:
289 # check if this group is not assigned to repo
289 # check if this group is not assigned to repo
290 assigned_to_repo = [x.repository for x in UserGroupRepoToPerm.query()\
290 assigned_to_repo = [x.repository for x in UserGroupRepoToPerm.query()\
291 .filter(UserGroupRepoToPerm.users_group == user_group).all()]
291 .filter(UserGroupRepoToPerm.users_group == user_group).all()]
 292 # check if this group is not assigned to a repo group
 292 # check if this group is not assigned to a repo group
293 assigned_to_repo_group = [x.group for x in UserGroupRepoGroupToPerm.query()\
293 assigned_to_repo_group = [x.group for x in UserGroupRepoGroupToPerm.query()\
294 .filter(UserGroupRepoGroupToPerm.users_group == user_group).all()]
294 .filter(UserGroupRepoGroupToPerm.users_group == user_group).all()]
295
295
296 if (assigned_to_repo or assigned_to_repo_group) and not force:
296 if (assigned_to_repo or assigned_to_repo_group) and not force:
297 assigned = ','.join(map(safe_str,
297 assigned = ','.join(map(safe_str,
298 assigned_to_repo+assigned_to_repo_group))
298 assigned_to_repo+assigned_to_repo_group))
299
299
300 raise UserGroupAssignedException(
300 raise UserGroupAssignedException(
301 'UserGroup assigned to %s' % (assigned,))
301 'UserGroup assigned to %s' % (assigned,))
302 self.sa.delete(user_group)
302 self.sa.delete(user_group)
303 except Exception:
303 except Exception:
304 log.error(traceback.format_exc())
304 log.error(traceback.format_exc())
305 raise
305 raise
306
306
307 def _log_user_changes(self, action, user_group, user_or_users):
307 def _log_user_changes(self, action, user_group, user_or_users):
308 users = user_or_users
308 users = user_or_users
309 if not isinstance(users, (list, tuple)):
309 if not isinstance(users, (list, tuple)):
310 users = [users]
310 users = [users]
311
311
312 group_name = user_group.users_group_name
312 group_name = user_group.users_group_name
313
313
314 for user_or_user_id in users:
314 for user_or_user_id in users:
315 user = self._get_user(user_or_user_id)
315 user = self._get_user(user_or_user_id)
316 log_text = 'User {user} {action} {group}'.format(
316 log_text = 'User {user} {action} {group}'.format(
317 action=action, user=user.username, group=group_name)
317 action=action, user=user.username, group=group_name)
318 action_logger_generic(log_text)
318 action_logger_generic(log_text)
319
319
320 def _find_user_in_group(self, user, user_group):
320 def _find_user_in_group(self, user, user_group):
321 user_group_member = None
321 user_group_member = None
322 for m in user_group.members:
322 for m in user_group.members:
323 if m.user_id == user.user_id:
323 if m.user_id == user.user_id:
324 # Found this user's membership row
324 # Found this user's membership row
325 user_group_member = m
325 user_group_member = m
326 break
326 break
327
327
328 return user_group_member
328 return user_group_member
329
329
330 def _get_membership(self, user_group_id, user_id):
330 def _get_membership(self, user_group_id, user_id):
331 user_group_member = UserGroupMember(user_group_id, user_id)
331 user_group_member = UserGroupMember(user_group_id, user_id)
332 return user_group_member
332 return user_group_member
333
333
334 def add_user_to_group(self, user_group, user):
334 def add_user_to_group(self, user_group, user):
335 user_group = self._get_user_group(user_group)
335 user_group = self._get_user_group(user_group)
336 user = self._get_user(user)
336 user = self._get_user(user)
337 user_member = self._find_user_in_group(user, user_group)
337 user_member = self._find_user_in_group(user, user_group)
338 if user_member:
338 if user_member:
339 # user already in the group, skip
339 # user already in the group, skip
340 return True
340 return True
341
341
342 member = self._get_membership(
342 member = self._get_membership(
343 user_group.users_group_id, user.user_id)
343 user_group.users_group_id, user.user_id)
344 user_group.members.append(member)
344 user_group.members.append(member)
345
345
346 try:
346 try:
347 self.sa.add(member)
347 self.sa.add(member)
348 except Exception:
348 except Exception:
349 # what could go wrong here?
349 # what could go wrong here?
350 log.error(traceback.format_exc())
350 log.error(traceback.format_exc())
351 raise
351 raise
352
352
353 self._log_user_changes('added to', user_group, user)
353 self._log_user_changes('added to', user_group, user)
354 return member
354 return member
355
355
356 def remove_user_from_group(self, user_group, user):
356 def remove_user_from_group(self, user_group, user):
357 user_group = self._get_user_group(user_group)
357 user_group = self._get_user_group(user_group)
358 user = self._get_user(user)
358 user = self._get_user(user)
359 user_group_member = self._find_user_in_group(user, user_group)
359 user_group_member = self._find_user_in_group(user, user_group)
360
360
361 if not user_group_member:
361 if not user_group_member:
362 # User isn't in that group
362 # User isn't in that group
363 return False
363 return False
364
364
365 try:
365 try:
366 self.sa.delete(user_group_member)
366 self.sa.delete(user_group_member)
367 except Exception:
367 except Exception:
368 log.error(traceback.format_exc())
368 log.error(traceback.format_exc())
369 raise
369 raise
370
370
371 self._log_user_changes('removed from', user_group, user)
371 self._log_user_changes('removed from', user_group, user)
372 return True
372 return True
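Both membership helpers accept a group name, id or instance; a small sketch with invented names:

    model = UserGroupModel()
    member = model.add_user_to_group('developers', 'john')
    # returns the new UserGroupMember, or True if 'john' was already a member
    model.remove_user_from_group('developers', 'john')
    # returns True on removal, False if 'john' was not a member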
373
373
374 def has_perm(self, user_group, perm):
374 def has_perm(self, user_group, perm):
375 user_group = self._get_user_group(user_group)
375 user_group = self._get_user_group(user_group)
376 perm = self._get_perm(perm)
376 perm = self._get_perm(perm)
377
377
378 return UserGroupToPerm.query()\
378 return UserGroupToPerm.query()\
379 .filter(UserGroupToPerm.users_group == user_group)\
379 .filter(UserGroupToPerm.users_group == user_group)\
380 .filter(UserGroupToPerm.permission == perm).scalar() is not None
380 .filter(UserGroupToPerm.permission == perm).scalar() is not None
381
381
382 def grant_perm(self, user_group, perm):
382 def grant_perm(self, user_group, perm):
383 user_group = self._get_user_group(user_group)
383 user_group = self._get_user_group(user_group)
384 perm = self._get_perm(perm)
384 perm = self._get_perm(perm)
385
385
386 # if this permission is already granted skip it
386 # if this permission is already granted skip it
387 _perm = UserGroupToPerm.query()\
387 _perm = UserGroupToPerm.query()\
388 .filter(UserGroupToPerm.users_group == user_group)\
388 .filter(UserGroupToPerm.users_group == user_group)\
389 .filter(UserGroupToPerm.permission == perm)\
389 .filter(UserGroupToPerm.permission == perm)\
390 .scalar()
390 .scalar()
391 if _perm:
391 if _perm:
392 return
392 return
393
393
394 new = UserGroupToPerm()
394 new = UserGroupToPerm()
395 new.users_group = user_group
395 new.users_group = user_group
396 new.permission = perm
396 new.permission = perm
397 self.sa.add(new)
397 self.sa.add(new)
398 return new
398 return new
399
399
400 def revoke_perm(self, user_group, perm):
400 def revoke_perm(self, user_group, perm):
401 user_group = self._get_user_group(user_group)
401 user_group = self._get_user_group(user_group)
402 perm = self._get_perm(perm)
402 perm = self._get_perm(perm)
403
403
404 obj = UserGroupToPerm.query()\
404 obj = UserGroupToPerm.query()\
405 .filter(UserGroupToPerm.users_group == user_group)\
405 .filter(UserGroupToPerm.users_group == user_group)\
406 .filter(UserGroupToPerm.permission == perm).scalar()
406 .filter(UserGroupToPerm.permission == perm).scalar()
407 if obj:
407 if obj:
408 self.sa.delete(obj)
408 self.sa.delete(obj)
409
409
410 def grant_user_permission(self, user_group, user, perm):
410 def grant_user_permission(self, user_group, user, perm):
411 """
411 """
412 Grant permission for user on given user group, or update
412 Grant permission for user on given user group, or update
413 existing one if found
413 existing one if found
414
414
415 :param user_group: Instance of UserGroup, users_group_id,
415 :param user_group: Instance of UserGroup, users_group_id,
416 or users_group_name
416 or users_group_name
417 :param user: Instance of User, user_id or username
417 :param user: Instance of User, user_id or username
418 :param perm: Instance of Permission, or permission_name
418 :param perm: Instance of Permission, or permission_name
419 """
419 """
420 changes = {
420 changes = {
421 'added': [],
421 'added': [],
422 'updated': [],
422 'updated': [],
423 'deleted': []
423 'deleted': []
424 }
424 }
425
425
426 user_group = self._get_user_group(user_group)
426 user_group = self._get_user_group(user_group)
427 user = self._get_user(user)
427 user = self._get_user(user)
428 permission = self._get_perm(perm)
428 permission = self._get_perm(perm)
429 perm_name = permission.permission_name
429 perm_name = permission.permission_name
430 member_id = user.user_id
430 member_id = user.user_id
431 member_name = user.username
431 member_name = user.username
432
432
433 # check if we have that permission already
433 # check if we have that permission already
434 obj = self.sa.query(UserUserGroupToPerm)\
434 obj = self.sa.query(UserUserGroupToPerm)\
435 .filter(UserUserGroupToPerm.user == user)\
435 .filter(UserUserGroupToPerm.user == user)\
436 .filter(UserUserGroupToPerm.user_group == user_group)\
436 .filter(UserUserGroupToPerm.user_group == user_group)\
437 .scalar()
437 .scalar()
438 if obj is None:
438 if obj is None:
439 # create new !
439 # create new !
440 obj = UserUserGroupToPerm()
440 obj = UserUserGroupToPerm()
441 obj.user_group = user_group
441 obj.user_group = user_group
442 obj.user = user
442 obj.user = user
443 obj.permission = permission
443 obj.permission = permission
444 self.sa.add(obj)
444 self.sa.add(obj)
445 log.debug('Granted perm %s to %s on %s', perm, user, user_group)
445 log.debug('Granted perm %s to %s on %s', perm, user, user_group)
446 action_logger_generic(
446 action_logger_generic(
447 'granted permission: {} to user: {} on usergroup: {}'.format(
447 'granted permission: {} to user: {} on usergroup: {}'.format(
448 perm, user, user_group), namespace='security.usergroup')
448 perm, user, user_group), namespace='security.usergroup')
449
449
450 changes['added'].append({
450 changes['added'].append({
451 'change_obj': user_group.get_api_data(),
451 'change_obj': user_group.get_api_data(),
452 'type': 'user', 'id': member_id,
452 'type': 'user', 'id': member_id,
453 'name': member_name, 'new_perm': perm_name})
453 'name': member_name, 'new_perm': perm_name})
454
454
455 return changes
455 return changes
456
456
457 def revoke_user_permission(self, user_group, user):
457 def revoke_user_permission(self, user_group, user):
458 """
458 """
459 Revoke permission for user on given user group
459 Revoke permission for user on given user group
460
460
461 :param user_group: Instance of UserGroup, users_group_id,
461 :param user_group: Instance of UserGroup, users_group_id,
462 or users_group name
462 or users_group name
463 :param user: Instance of User, user_id or username
463 :param user: Instance of User, user_id or username
464 """
464 """
465 changes = {
465 changes = {
466 'added': [],
466 'added': [],
467 'updated': [],
467 'updated': [],
468 'deleted': []
468 'deleted': []
469 }
469 }
470
470
471 user_group = self._get_user_group(user_group)
471 user_group = self._get_user_group(user_group)
472 user = self._get_user(user)
472 user = self._get_user(user)
473 perm_name = 'usergroup.none'
473 perm_name = 'usergroup.none'
474 member_id = user.user_id
474 member_id = user.user_id
475 member_name = user.username
475 member_name = user.username
476
476
477 obj = self.sa.query(UserUserGroupToPerm)\
477 obj = self.sa.query(UserUserGroupToPerm)\
478 .filter(UserUserGroupToPerm.user == user)\
478 .filter(UserUserGroupToPerm.user == user)\
479 .filter(UserUserGroupToPerm.user_group == user_group)\
479 .filter(UserUserGroupToPerm.user_group == user_group)\
480 .scalar()
480 .scalar()
481 if obj:
481 if obj:
482 self.sa.delete(obj)
482 self.sa.delete(obj)
483 log.debug('Revoked perm on %s on %s', user_group, user)
483 log.debug('Revoked perm on %s on %s', user_group, user)
484 action_logger_generic(
484 action_logger_generic(
485 'revoked permission from user: {} on usergroup: {}'.format(
485 'revoked permission from user: {} on usergroup: {}'.format(
486 user, user_group), namespace='security.usergroup')
486 user, user_group), namespace='security.usergroup')
487
487
488 changes['deleted'].append({
488 changes['deleted'].append({
489 'change_obj': user_group.get_api_data(),
489 'change_obj': user_group.get_api_data(),
490 'type': 'user', 'id': member_id,
490 'type': 'user', 'id': member_id,
491 'name': member_name, 'new_perm': perm_name})
491 'name': member_name, 'new_perm': perm_name})
492
492
493 return changes
493 return changes
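A short sketch of granting and then revoking a user permission on a group (names invented):

    model = UserGroupModel()
    changes = model.grant_user_permission(
        user_group='developers', user='john', perm='usergroup.write')
    # changes['added'][0] -> {'type': 'user', 'name': 'john',
    #                         'new_perm': 'usergroup.write', ...}
    model.revoke_user_permission(user_group='developers', user='john')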
494
494
495 def grant_user_group_permission(self, target_user_group, user_group, perm):
495 def grant_user_group_permission(self, target_user_group, user_group, perm):
496 """
496 """
497 Grant user group permission for given target_user_group
497 Grant user group permission for given target_user_group
498
498
499 :param target_user_group:
499 :param target_user_group:
500 :param user_group:
500 :param user_group:
501 :param perm:
501 :param perm:
502 """
502 """
503 changes = {
503 changes = {
504 'added': [],
504 'added': [],
505 'updated': [],
505 'updated': [],
506 'deleted': []
506 'deleted': []
507 }
507 }
508
508
509 target_user_group = self._get_user_group(target_user_group)
509 target_user_group = self._get_user_group(target_user_group)
510 user_group = self._get_user_group(user_group)
510 user_group = self._get_user_group(user_group)
511 permission = self._get_perm(perm)
511 permission = self._get_perm(perm)
512 perm_name = permission.permission_name
512 perm_name = permission.permission_name
513 member_id = user_group.users_group_id
513 member_id = user_group.users_group_id
514 member_name = user_group.users_group_name
514 member_name = user_group.users_group_name
515
515
516 # forbid assigning same user group to itself
516 # forbid assigning same user group to itself
517 if target_user_group == user_group:
517 if target_user_group == user_group:
 518 raise RepoGroupAssignmentError('target user group:%s cannot be '
 518 raise RepoGroupAssignmentError('target user group:%s cannot be '
519 'assigned to itself' % target_user_group)
519 'assigned to itself' % target_user_group)
520
520
521 # check if we have that permission already
521 # check if we have that permission already
522 obj = self.sa.query(UserGroupUserGroupToPerm)\
522 obj = self.sa.query(UserGroupUserGroupToPerm)\
523 .filter(UserGroupUserGroupToPerm.target_user_group == target_user_group)\
523 .filter(UserGroupUserGroupToPerm.target_user_group == target_user_group)\
524 .filter(UserGroupUserGroupToPerm.user_group == user_group)\
524 .filter(UserGroupUserGroupToPerm.user_group == user_group)\
525 .scalar()
525 .scalar()
526 if obj is None:
526 if obj is None:
527 # create new !
527 # create new !
528 obj = UserGroupUserGroupToPerm()
528 obj = UserGroupUserGroupToPerm()
529 obj.user_group = user_group
529 obj.user_group = user_group
530 obj.target_user_group = target_user_group
530 obj.target_user_group = target_user_group
531 obj.permission = permission
531 obj.permission = permission
532 self.sa.add(obj)
532 self.sa.add(obj)
533 log.debug(
533 log.debug(
534 'Granted perm %s to %s on %s', perm, target_user_group, user_group)
534 'Granted perm %s to %s on %s', perm, target_user_group, user_group)
535 action_logger_generic(
535 action_logger_generic(
536 'granted permission: {} to usergroup: {} on usergroup: {}'.format(
536 'granted permission: {} to usergroup: {} on usergroup: {}'.format(
537 perm, user_group, target_user_group),
537 perm, user_group, target_user_group),
538 namespace='security.usergroup')
538 namespace='security.usergroup')
539
539
540 changes['added'].append({
540 changes['added'].append({
541 'change_obj': target_user_group.get_api_data(),
541 'change_obj': target_user_group.get_api_data(),
542 'type': 'user_group', 'id': member_id,
542 'type': 'user_group', 'id': member_id,
543 'name': member_name, 'new_perm': perm_name})
543 'name': member_name, 'new_perm': perm_name})
544
544
545 return changes
545 return changes
546
546
547 def revoke_user_group_permission(self, target_user_group, user_group):
547 def revoke_user_group_permission(self, target_user_group, user_group):
548 """
548 """
549 Revoke user group permission for given target_user_group
549 Revoke user group permission for given target_user_group
550
550
551 :param target_user_group:
551 :param target_user_group:
552 :param user_group:
552 :param user_group:
553 """
553 """
554 changes = {
554 changes = {
555 'added': [],
555 'added': [],
556 'updated': [],
556 'updated': [],
557 'deleted': []
557 'deleted': []
558 }
558 }
559
559
560 target_user_group = self._get_user_group(target_user_group)
560 target_user_group = self._get_user_group(target_user_group)
561 user_group = self._get_user_group(user_group)
561 user_group = self._get_user_group(user_group)
562 perm_name = 'usergroup.none'
562 perm_name = 'usergroup.none'
563 member_id = user_group.users_group_id
563 member_id = user_group.users_group_id
564 member_name = user_group.users_group_name
564 member_name = user_group.users_group_name
565
565
566 obj = self.sa.query(UserGroupUserGroupToPerm)\
566 obj = self.sa.query(UserGroupUserGroupToPerm)\
567 .filter(UserGroupUserGroupToPerm.target_user_group == target_user_group)\
567 .filter(UserGroupUserGroupToPerm.target_user_group == target_user_group)\
568 .filter(UserGroupUserGroupToPerm.user_group == user_group)\
568 .filter(UserGroupUserGroupToPerm.user_group == user_group)\
569 .scalar()
569 .scalar()
570 if obj:
570 if obj:
571 self.sa.delete(obj)
571 self.sa.delete(obj)
572 log.debug(
572 log.debug(
573 'Revoked perm on %s on %s', target_user_group, user_group)
573 'Revoked perm on %s on %s', target_user_group, user_group)
574 action_logger_generic(
574 action_logger_generic(
575 'revoked permission from usergroup: {} on usergroup: {}'.format(
575 'revoked permission from usergroup: {} on usergroup: {}'.format(
576 user_group, target_user_group),
576 user_group, target_user_group),
 577 namespace='security.usergroup')
 577 namespace='security.usergroup')
578
578
579 changes['deleted'].append({
579 changes['deleted'].append({
580 'change_obj': target_user_group.get_api_data(),
580 'change_obj': target_user_group.get_api_data(),
581 'type': 'user_group', 'id': member_id,
581 'type': 'user_group', 'id': member_id,
582 'name': member_name, 'new_perm': perm_name})
582 'name': member_name, 'new_perm': perm_name})
583
583
584 return changes
584 return changes
585
585
586 def get_perms_summary(self, user_group_id):
586 def get_perms_summary(self, user_group_id):
587 permissions = {
587 permissions = {
588 'repositories': {},
588 'repositories': {},
589 'repositories_groups': {},
589 'repositories_groups': {},
590 }
590 }
591 ugroup_repo_perms = UserGroupRepoToPerm.query()\
591 ugroup_repo_perms = UserGroupRepoToPerm.query()\
592 .options(joinedload(UserGroupRepoToPerm.permission))\
592 .options(joinedload(UserGroupRepoToPerm.permission))\
593 .options(joinedload(UserGroupRepoToPerm.repository))\
593 .options(joinedload(UserGroupRepoToPerm.repository))\
594 .filter(UserGroupRepoToPerm.users_group_id == user_group_id)\
594 .filter(UserGroupRepoToPerm.users_group_id == user_group_id)\
595 .all()
595 .all()
596
596
597 for gr in ugroup_repo_perms:
597 for gr in ugroup_repo_perms:
598 permissions['repositories'][gr.repository.repo_name] \
598 permissions['repositories'][gr.repository.repo_name] \
599 = gr.permission.permission_name
599 = gr.permission.permission_name
600
600
601 ugroup_group_perms = UserGroupRepoGroupToPerm.query()\
601 ugroup_group_perms = UserGroupRepoGroupToPerm.query()\
602 .options(joinedload(UserGroupRepoGroupToPerm.permission))\
602 .options(joinedload(UserGroupRepoGroupToPerm.permission))\
603 .options(joinedload(UserGroupRepoGroupToPerm.group))\
603 .options(joinedload(UserGroupRepoGroupToPerm.group))\
604 .filter(UserGroupRepoGroupToPerm.users_group_id == user_group_id)\
604 .filter(UserGroupRepoGroupToPerm.users_group_id == user_group_id)\
605 .all()
605 .all()
606
606
607 for gr in ugroup_group_perms:
607 for gr in ugroup_group_perms:
608 permissions['repositories_groups'][gr.group.group_name] \
608 permissions['repositories_groups'][gr.group.group_name] \
609 = gr.permission.permission_name
609 = gr.permission.permission_name
610 return permissions
610 return permissions
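The summary is a plain dict keyed by repository and repository group names; an illustrative, made-up result (permission values shown only as examples):

    summary = UserGroupModel().get_perms_summary(user_group_id=5)
    # {
    #     'repositories':        {'project/backend': 'repository.write'},
    #     'repositories_groups': {'project': 'group.read'},
    # }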
611
611
612 def enforce_groups(self, user, groups, extern_type=None):
612 def enforce_groups(self, user, groups, extern_type=None):
613 user = self._get_user(user)
613 user = self._get_user(user)
614 current_groups = user.group_member
614 current_groups = user.group_member
615
615
 616 # find the externally created groups, i.e. automatically created ones
 616 # find the externally created groups, i.e. automatically created ones
617 log.debug('Enforcing user group set `%s` on user %s', groups, user)
617 log.debug('Enforcing user group set `%s` on user %s', groups, user)
618 # calculate from what groups user should be removed
618 # calculate from what groups user should be removed
619 # external_groups that are not in groups
619 # external_groups that are not in groups
620 for gr in [x.users_group for x in current_groups]:
620 for gr in [x.users_group for x in current_groups]:
621 managed = gr.group_data.get('extern_type')
621 managed = gr.group_data.get('extern_type')
622 if managed:
622 if managed:
623 if gr.users_group_name not in groups:
623 if gr.users_group_name not in groups:
624 log.debug('Removing user %s from user group %s. '
624 log.debug('Removing user %s from user group %s. '
625 'Group sync managed by: %s', user, gr, managed)
625 'Group sync managed by: %s', user, gr, managed)
626 self.remove_user_from_group(gr, user)
626 self.remove_user_from_group(gr, user)
627 else:
627 else:
628 log.debug('Skipping removal from group %s since it is '
628 log.debug('Skipping removal from group %s since it is '
629 'not set to be automatically synchronized', gr)
629 'not set to be automatically synchronized', gr)
630
630
631 # now we calculate in which groups user should be == groups params
631 # now we calculate in which groups user should be == groups params
632 owner = User.get_first_super_admin().username
632 owner = User.get_first_super_admin().username
633 for gr in set(groups):
633 for gr in set(groups):
634 existing_group = UserGroup.get_by_group_name(gr)
634 existing_group = UserGroup.get_by_group_name(gr)
635 if not existing_group:
635 if not existing_group:
636 desc = 'Automatically created from plugin:%s' % extern_type
636 desc = 'Automatically created from plugin:%s' % extern_type
637 # we use first admin account to set the owner of the group
637 # we use first admin account to set the owner of the group
638 existing_group = UserGroupModel().create(
638 existing_group = UserGroupModel().create(
639 gr, desc, owner, group_data={'extern_type': extern_type})
639 gr, desc, owner, group_data={'extern_type': extern_type})
640
640
641 # we can only add users to groups which have set sync flag via
641 # we can only add users to groups which have set sync flag via
642 # extern_type attribute.
642 # extern_type attribute.
643 # This is either set and created via plugins, or manually
643 # This is either set and created via plugins, or manually
644 managed = existing_group.group_data.get('extern_type')
644 managed = existing_group.group_data.get('extern_type')
645 if managed:
645 if managed:
646 log.debug('Adding user %s to user group %s', user, gr)
646 log.debug('Adding user %s to user group %s', user, gr)
647 UserGroupModel().add_user_to_group(existing_group, user)
647 UserGroupModel().add_user_to_group(existing_group, user)
648 else:
648 else:
649 log.debug('Skipping addition to group %s since it is '
649 log.debug('Skipping addition to group %s since it is '
650 'not set to be automatically synchronized', gr)
650 'not set to be automatically synchronized', gr)
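A hedged sketch of how an authentication plugin might use enforce_groups after login (plugin type and group names invented):

    external_groups = ['ldap-devs', 'ldap-ops']   # as reported by the provider
    UserGroupModel().enforce_groups(user, external_groups, extern_type='ldap')
    # only groups whose group_data carries 'extern_type' are synchronized;
    # manually created groups with matching names are left untouched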
651
651
652 def change_groups(self, user, groups):
652 def change_groups(self, user, groups):
653 """
653 """
654 This method changes user group assignment
654 This method changes user group assignment
655 :param user: User
655 :param user: User
 656 :param groups: iterable of UserGroup instances
 656 :param groups: iterable of UserGroup instances
657 """
657 """
658 user = self._get_user(user)
658 user = self._get_user(user)
659 log.debug('Changing user(%s) assignment to groups(%s)', user, groups)
659 log.debug('Changing user(%s) assignment to groups(%s)', user, groups)
660 current_groups = user.group_member
660 current_groups = user.group_member
661 current_groups = [x.users_group for x in current_groups]
661 current_groups = [x.users_group for x in current_groups]
662
662
663 # calculate from what groups user should be removed/add
663 # calculate from what groups user should be removed/add
664 groups = set(groups)
664 groups = set(groups)
665 current_groups = set(current_groups)
665 current_groups = set(current_groups)
666
666
667 groups_to_remove = current_groups - groups
667 groups_to_remove = current_groups - groups
668 groups_to_add = groups - current_groups
668 groups_to_add = groups - current_groups
669
669
670 removed_from_groups = []
670 removed_from_groups = []
671 added_to_groups = []
671 added_to_groups = []
672 for gr in groups_to_remove:
672 for gr in groups_to_remove:
673 log.debug('Removing user %s from user group %s',
673 log.debug('Removing user %s from user group %s',
674 user.username, gr.users_group_name)
674 user.username, gr.users_group_name)
675 removed_from_groups.append(gr.users_group_id)
675 removed_from_groups.append(gr.users_group_id)
676 self.remove_user_from_group(gr.users_group_name, user.username)
676 self.remove_user_from_group(gr.users_group_name, user.username)
677 for gr in groups_to_add:
677 for gr in groups_to_add:
678 log.debug('Adding user %s to user group %s',
678 log.debug('Adding user %s to user group %s',
679 user.username, gr.users_group_name)
679 user.username, gr.users_group_name)
680 added_to_groups.append(gr.users_group_id)
680 added_to_groups.append(gr.users_group_id)
681 UserGroupModel().add_user_to_group(
681 UserGroupModel().add_user_to_group(
682 gr.users_group_name, user.username)
682 gr.users_group_name, user.username)
683
683
684 return added_to_groups, removed_from_groups
684 return added_to_groups, removed_from_groups
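change_groups works directly on UserGroup instances; a minimal sketch with invented group names:

    target_groups = [UserGroup.get_by_group_name('developers'),
                     UserGroup.get_by_group_name('ops')]
    added, removed = UserGroupModel().change_groups(user, target_groups)
    # added/removed are lists of users_group_id values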
685
685
686 def _serialize_user_group(self, user_group):
686 def _serialize_user_group(self, user_group):
687 import rhodecode.lib.helpers as h
687 import rhodecode.lib.helpers as h
688 return {
688 return {
689 'id': user_group.users_group_id,
689 'id': user_group.users_group_id,
690 # TODO: marcink figure out a way to generate the url for the
690 # TODO: marcink figure out a way to generate the url for the
691 # icon
691 # icon
692 'icon_link': '',
692 'icon_link': '',
693 'value_display': 'Group: %s (%d members)' % (
693 'value_display': 'Group: %s (%d members)' % (
694 user_group.users_group_name, len(user_group.members),),
694 user_group.users_group_name, len(user_group.members),),
695 'value': user_group.users_group_name,
695 'value': user_group.users_group_name,
696 'description': user_group.user_group_description,
696 'description': user_group.user_group_description,
697 'owner': user_group.user.username,
697 'owner': user_group.user.username,
698
698
699 'owner_icon': h.gravatar_url(user_group.user.email, 30),
699 'owner_icon': h.gravatar_url(user_group.user.email, 30),
700 'value_display_owner': h.person(user_group.user.email),
700 'value_display_owner': h.person(user_group.user.email),
701
701
702 'value_type': 'user_group',
702 'value_type': 'user_group',
703 'active': user_group.users_group_active,
703 'active': user_group.users_group_active,
704 }
704 }
705
705
706 def get_user_groups(self, name_contains=None, limit=20, only_active=True,
706 def get_user_groups(self, name_contains=None, limit=20, only_active=True,
707 expand_groups=False):
707 expand_groups=False):
708 query = self.sa.query(UserGroup)
708 query = self.sa.query(UserGroup)
709 if only_active:
709 if only_active:
710 query = query.filter(UserGroup.users_group_active == true())
710 query = query.filter(UserGroup.users_group_active == true())
711
711
712 if name_contains:
712 if name_contains:
713 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
713 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
714 query = query.filter(
714 query = query.filter(
715 UserGroup.users_group_name.ilike(ilike_expression))\
715 UserGroup.users_group_name.ilike(ilike_expression))\
716 .order_by(func.length(UserGroup.users_group_name))\
716 .order_by(func.length(UserGroup.users_group_name))\
717 .order_by(UserGroup.users_group_name)
717 .order_by(UserGroup.users_group_name)
718
718
719 query = query.limit(limit)
719 query = query.limit(limit)
720 user_groups = query.all()
720 user_groups = query.all()
721 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
721 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
722 user_groups = UserGroupList(user_groups, perm_set=perm_set)
722 user_groups = UserGroupList(user_groups, perm_set=perm_set)
723
723
 724 # reuse the same serialize method to extract data from User
 724 # reuse the same serialize method to extract data from User
725 from rhodecode.model.user import UserModel
725 from rhodecode.model.user import UserModel
726 serialize_user = UserModel()._serialize_user
726 serialize_user = UserModel()._serialize_user
727
727
728 _groups = []
728 _groups = []
729 for group in user_groups:
729 for group in user_groups:
730 entry = self._serialize_user_group(group)
730 entry = self._serialize_user_group(group)
731 if expand_groups:
731 if expand_groups:
732 expanded_members = []
732 expanded_members = []
733 for member in group.members:
733 for member in group.members:
734 expanded_members.append(serialize_user(member.user))
734 expanded_members.append(serialize_user(member.user))
735 entry['members'] = expanded_members
735 entry['members'] = expanded_members
736 _groups.append(entry)
736 _groups.append(entry)
737 return _groups
737 return _groups
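A quick sketch of the search helper (search term invented):

    entries = UserGroupModel().get_user_groups(
        name_contains='dev', limit=10, only_active=True, expand_groups=True)
    for entry in entries:
        # each entry is the serialized dict built above, plus 'members'
        print(entry['value'], entry['active'], len(entry['members']))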
738
738
739 @staticmethod
739 @staticmethod
740 def get_user_groups_as_dict(user_group):
740 def get_user_groups_as_dict(user_group):
741 import rhodecode.lib.helpers as h
741 import rhodecode.lib.helpers as h
742
742
743 data = {
743 data = {
744 'users_group_id': user_group.users_group_id,
744 'users_group_id': user_group.users_group_id,
745 'group_name': h.link_to_group(user_group.users_group_name),
745 'group_name': h.link_to_group(user_group.users_group_name),
746 'group_description': user_group.user_group_description,
746 'group_description': user_group.user_group_description,
747 'active': user_group.users_group_active,
747 'active': user_group.users_group_active,
748 "owner": user_group.user.username,
748 "owner": user_group.user.username,
749 'owner_icon': h.gravatar_url(user_group.user.email, 30),
749 'owner_icon': h.gravatar_url(user_group.user.email, 30),
750 "owner_data": {
750 "owner_data": {
751 'owner': user_group.user.username,
751 'owner': user_group.user.username,
752 'owner_icon': h.gravatar_url(user_group.user.email, 30)}
752 'owner_icon': h.gravatar_url(user_group.user.email, 30)}
753 }
753 }
754 return data
754 return data
@@ -1,24 +1,24 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import colander
21 import colander
22
22
23 from colander import Invalid # pragma: no cover
23 from colander import Invalid # pragma: no cover
24
24
@@ -1,88 +1,88 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import unicodedata
21 import unicodedata
22
22
23
23
24 def strip_preparer(value):
24 def strip_preparer(value):
25 """
25 """
 26 Strips the given value using the .strip() method
 26 Strips the given value using the .strip() method
27 """
27 """
28
28
29 if value:
29 if value:
30 value = value.strip()
30 value = value.strip()
31 return value
31 return value
32
32
33
33
34 def slugify_preparer(value, keep_case=True):
34 def slugify_preparer(value, keep_case=True):
35 """
35 """
36 Slugify given value to a safe representation for url/id
36 Slugify given value to a safe representation for url/id
37 """
37 """
38 from rhodecode.lib.utils import repo_name_slug
38 from rhodecode.lib.utils import repo_name_slug
39 if value:
39 if value:
40 value = repo_name_slug(value if keep_case else value.lower())
40 value = repo_name_slug(value if keep_case else value.lower())
41 return value
41 return value
42
42
43
43
44 def non_ascii_strip_preparer(value):
44 def non_ascii_strip_preparer(value):
45 """
45 """
 46 Tries to replace non-ascii letters with their ascii representation
 46 Tries to replace non-ascii letters with their ascii representation
47 eg::
47 eg::
48
48
49 `żołw` converts into `zolw`
49 `żołw` converts into `zolw`
50 """
50 """
51 if value:
51 if value:
52 value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore')
52 value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore')
53 return value
53 return value
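For instance, with an accented input (note that the result of .encode() is a bytes object on Python 3):

    non_ascii_strip_preparer(u'café')   # -> b'cafe'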
54
54
55
55
56 def unique_list_preparer(value):
56 def unique_list_preparer(value):
57 """
57 """
58 Converts a list to a list with only unique values
58 Converts a list to a list with only unique values
59 """
59 """
60
60
61 def make_unique(value):
61 def make_unique(value):
62 seen = []
62 seen = []
63 return [c for c in value if
63 return [c for c in value if
64 not (c in seen or seen.append(c))]
64 not (c in seen or seen.append(c))]
65
65
66 if isinstance(value, list):
66 if isinstance(value, list):
67 ret_val = make_unique(value)
67 ret_val = make_unique(value)
68 elif isinstance(value, set):
68 elif isinstance(value, set):
69 ret_val = list(value)
69 ret_val = list(value)
70 elif isinstance(value, tuple):
70 elif isinstance(value, tuple):
71 ret_val = make_unique(value)
71 ret_val = make_unique(value)
72 elif value is None:
72 elif value is None:
73 ret_val = []
73 ret_val = []
74 else:
74 else:
75 ret_val = [value]
75 ret_val = [value]
76
76
77 return ret_val
77 return ret_val
78
78
79
79
80 def unique_list_from_str_preparer(value):
80 def unique_list_from_str_preparer(value):
81 """
81 """
82 Converts a comma-separated string into a list with only unique values
82 Converts a comma-separated string into a list with only unique values
83 """
83 """
84 from rhodecode.lib.utils2 import aslist
84 from rhodecode.lib.utils2 import aslist
85
85
86 if isinstance(value, str):
86 if isinstance(value, str):
87 value = aslist(value, ',')
87 value = aslist(value, ',')
88 return unique_list_preparer(value) No newline at end of file
88 return unique_list_preparer(value)
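The preparers above are small, composable callables. A minimal usage sketch follows (illustrative only: the expected outputs are inferred from the function bodies, the slugify result depends on repo_name_slug(), and running it assumes a RhodeCode checkout on the import path):

    from rhodecode.model.validation_schema.preparers import (
        strip_preparer, slugify_preparer, non_ascii_strip_preparer,
        unique_list_preparer, unique_list_from_str_preparer)

    # surrounding whitespace is stripped; falsy values pass through unchanged
    strip_preparer('  hg-repo  ')           # -> 'hg-repo'

    # slugify delegates to repo_name_slug(); case is kept unless keep_case=False
    slugify_preparer('My Repo Name')        # -> a url/id-safe slug produced by repo_name_slug()

    # NFKD-normalize, then drop anything outside ascii; note the result is bytes
    non_ascii_strip_preparer(u'café')       # -> b'cafe'

    # order-preserving de-duplication; scalars get wrapped in a list, None becomes []
    unique_list_preparer(['a', 'b', 'a'])   # -> ['a', 'b']
    unique_list_from_str_preparer('a,b,a')  # -> ['a', 'b']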
@@ -1,25 +1,25 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Colander Schema nodes
22 Colander Schema nodes
23 http://docs.pylonsproject.org/projects/colander/en/latest/basics.html#schema-node-objects
23 http://docs.pylonsproject.org/projects/colander/en/latest/basics.html#schema-node-objects
24 """
24 """
25
25
@@ -1,74 +1,74 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2017-2020 RhodeCode GmbH
3 # Copyright (C) 2017-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22
22
23 import colander
23 import colander
24
24
25 from rhodecode.translation import _
25 from rhodecode.translation import _
26 from rhodecode.model.validation_schema import preparers
26 from rhodecode.model.validation_schema import preparers
27 from rhodecode.model.validation_schema import types
27 from rhodecode.model.validation_schema import types
28
28
29
29
30 @colander.deferred
30 @colander.deferred
31 def deferred_lifetime_validator(node, kw):
31 def deferred_lifetime_validator(node, kw):
32 options = kw.get('lifetime_options', [])
32 options = kw.get('lifetime_options', [])
33 return colander.All(
33 return colander.All(
34 colander.Range(min=-1, max=60 * 24 * 30 * 12),
34 colander.Range(min=-1, max=60 * 24 * 30 * 12),
35 colander.OneOf([x for x in options]))
35 colander.OneOf([x for x in options]))
36
36
37
37
38 def unique_gist_validator(node, value):
38 def unique_gist_validator(node, value):
39 from rhodecode.model.db import Gist
39 from rhodecode.model.db import Gist
40 existing = Gist.get_by_access_id(value)
40 existing = Gist.get_by_access_id(value)
41 if existing:
41 if existing:
42 msg = _(u'Gist with name {} already exists').format(value)
42 msg = _(u'Gist with name {} already exists').format(value)
43 raise colander.Invalid(node, msg)
43 raise colander.Invalid(node, msg)
44
44
45
45
46 def filename_validator(node, value):
46 def filename_validator(node, value):
47 if value != os.path.basename(value):
47 if value != os.path.basename(value):
48 msg = _(u'Filename {} cannot be inside a directory').format(value)
48 msg = _(u'Filename {} cannot be inside a directory').format(value)
49 raise colander.Invalid(node, msg)
49 raise colander.Invalid(node, msg)
50
50
51
51
52 comment_types = ['note', 'todo']
52 comment_types = ['note', 'todo']
53
53
54
54
55 class CommentSchema(colander.MappingSchema):
55 class CommentSchema(colander.MappingSchema):
56 from rhodecode.model.db import ChangesetComment, ChangesetStatus
56 from rhodecode.model.db import ChangesetComment, ChangesetStatus
57
57
58 comment_body = colander.SchemaNode(colander.String())
58 comment_body = colander.SchemaNode(colander.String())
59 comment_type = colander.SchemaNode(
59 comment_type = colander.SchemaNode(
60 colander.String(),
60 colander.String(),
61 validator=colander.OneOf(ChangesetComment.COMMENT_TYPES),
61 validator=colander.OneOf(ChangesetComment.COMMENT_TYPES),
62 missing=ChangesetComment.COMMENT_TYPE_NOTE)
62 missing=ChangesetComment.COMMENT_TYPE_NOTE)
63 is_draft = colander.SchemaNode(colander.Boolean(), missing=False)
63 is_draft = colander.SchemaNode(colander.Boolean(), missing=False)
64 comment_file = colander.SchemaNode(colander.String(), missing=None)
64 comment_file = colander.SchemaNode(colander.String(), missing=None)
65 comment_line = colander.SchemaNode(colander.String(), missing=None)
65 comment_line = colander.SchemaNode(colander.String(), missing=None)
66 status_change = colander.SchemaNode(
66 status_change = colander.SchemaNode(
67 colander.String(), missing=None,
67 colander.String(), missing=None,
68 validator=colander.OneOf([x[0] for x in ChangesetStatus.STATUSES]))
68 validator=colander.OneOf([x[0] for x in ChangesetStatus.STATUSES]))
69 renderer_type = colander.SchemaNode(colander.String())
69 renderer_type = colander.SchemaNode(colander.String())
70
70
71 resolves_comment_id = colander.SchemaNode(colander.Integer(), missing=None)
71 resolves_comment_id = colander.SchemaNode(colander.Integer(), missing=None)
72
72
73 user = colander.SchemaNode(types.StrOrIntType())
73 user = colander.SchemaNode(types.StrOrIntType())
74 repo = colander.SchemaNode(types.StrOrIntType())
74 repo = colander.SchemaNode(types.StrOrIntType())
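A hedged sketch of feeding data through the CommentSchema defined above (the import path, renderer name, and field values are assumptions; the db-backed pieces need a configured RhodeCode environment):

    import colander
    # module path assumed; the diff does not show the file location
    from rhodecode.model.validation_schema.schemas.comment_schema import CommentSchema

    schema = CommentSchema()
    try:
        comment = schema.deserialize({
            'comment_body': 'Looks good to me',
            'comment_type': 'note',            # must be in ChangesetComment.COMMENT_TYPES
            'renderer_type': 'markdown',       # renderer name is an assumption
            'user': 2,                         # StrOrIntType: user id or username
            'repo': 'some-group/some-repo',    # StrOrIntType: repo id or repo name
        })
    except colander.Invalid as err:
        print(err.asdict())                    # per-node validation errors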
@@ -1,183 +1,183 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22
22
23 import colander
23 import colander
24
24
25 from rhodecode.translation import _
25 from rhodecode.translation import _
26 from rhodecode.model.validation_schema import preparers
26 from rhodecode.model.validation_schema import preparers
27
27
28
28
29 def nodes_to_sequence(nodes, colander_node=None):
29 def nodes_to_sequence(nodes, colander_node=None):
30 """
30 """
31 Converts old-style dict nodes to a new list of dicts
31 Converts old-style dict nodes to a new list of dicts
32
32
33 :param nodes: dict with key being the name of the file
33 :param nodes: dict with key being the name of the file
34
34
35 """
35 """
36 if not isinstance(nodes, dict):
36 if not isinstance(nodes, dict):
37 msg = 'Nodes needs to be a dict, got {}'.format(type(nodes))
37 msg = 'Nodes needs to be a dict, got {}'.format(type(nodes))
38 raise colander.Invalid(colander_node, msg)
38 raise colander.Invalid(colander_node, msg)
39 out = []
39 out = []
40
40
41 for key, val in nodes.items():
41 for key, val in nodes.items():
42 val = (isinstance(val, dict) and val) or {}
42 val = (isinstance(val, dict) and val) or {}
43 out.append(dict(
43 out.append(dict(
44 filename=key,
44 filename=key,
45 content=val.get('content'),
45 content=val.get('content'),
46 mimetype=val.get('mimetype')
46 mimetype=val.get('mimetype')
47 ))
47 ))
48
48
49 out = Nodes().deserialize(out)
49 out = Nodes().deserialize(out)
50 return out
50 return out
51
51
52
52
53 def sequence_to_nodes(nodes, colander_node=None):
53 def sequence_to_nodes(nodes, colander_node=None):
54 if not isinstance(nodes, list):
54 if not isinstance(nodes, list):
55 msg = 'Nodes needs to be a list, got {}'.format(type(nodes))
55 msg = 'Nodes needs to be a list, got {}'.format(type(nodes))
56 raise colander.Invalid(colander_node, msg)
56 raise colander.Invalid(colander_node, msg)
57 nodes = Nodes().deserialize(nodes)
57 nodes = Nodes().deserialize(nodes)
58
58
59 out = {}
59 out = {}
60 try:
60 try:
61 for file_data in nodes:
61 for file_data in nodes:
62 file_data_skip = file_data.copy()
62 file_data_skip = file_data.copy()
63 # if we got filename_org we use it as the key so we keep the old
63 # if we got filename_org we use it as the key so we keep the old
64 # name as input; the rename is reflected inside the values as the
64 # name as input; the rename is reflected inside the values as the
65 # difference between filename and filename_org.
65 # difference between filename and filename_org.
66 filename_org = file_data.get('filename_org')
66 filename_org = file_data.get('filename_org')
67 filename = filename_org or file_data['filename']
67 filename = filename_org or file_data['filename']
68 out[filename] = {}
68 out[filename] = {}
69 out[filename].update(file_data_skip)
69 out[filename].update(file_data_skip)
70
70
71 except Exception as e:
71 except Exception as e:
72 msg = 'Invalid data format org_exc:`{}`'.format(repr(e))
72 msg = 'Invalid data format org_exc:`{}`'.format(repr(e))
73 raise colander.Invalid(colander_node, msg)
73 raise colander.Invalid(colander_node, msg)
74 return out
74 return out
75
75
76
76
77 @colander.deferred
77 @colander.deferred
78 def deferred_lifetime_validator(node, kw):
78 def deferred_lifetime_validator(node, kw):
79 options = kw.get('lifetime_options', [])
79 options = kw.get('lifetime_options', [])
80 return colander.All(
80 return colander.All(
81 colander.Range(min=-1, max=60 * 24 * 30 * 12),
81 colander.Range(min=-1, max=60 * 24 * 30 * 12),
82 colander.OneOf([x for x in options]))
82 colander.OneOf([x for x in options]))
83
83
84
84
85 def unique_gist_validator(node, value):
85 def unique_gist_validator(node, value):
86 from rhodecode.model.db import Gist
86 from rhodecode.model.db import Gist
87 existing = Gist.get_by_access_id(value)
87 existing = Gist.get_by_access_id(value)
88 if existing:
88 if existing:
89 msg = _(u'Gist with name {} already exists').format(value)
89 msg = _(u'Gist with name {} already exists').format(value)
90 raise colander.Invalid(node, msg)
90 raise colander.Invalid(node, msg)
91
91
92
92
93 def filename_validator(node, value):
93 def filename_validator(node, value):
94 if value != os.path.basename(value):
94 if value != os.path.basename(value):
95 msg = _(u'Filename {} cannot be inside a directory').format(value)
95 msg = _(u'Filename {} cannot be inside a directory').format(value)
96 raise colander.Invalid(node, msg)
96 raise colander.Invalid(node, msg)
97
97
98
98
99 class NodeSchema(colander.MappingSchema):
99 class NodeSchema(colander.MappingSchema):
100 # if we perform a rename, this will be the original filename
100 # if we perform a rename, this will be the original filename
101 filename_org = colander.SchemaNode(
101 filename_org = colander.SchemaNode(
102 colander.String(),
102 colander.String(),
103 preparer=[preparers.strip_preparer,
103 preparer=[preparers.strip_preparer,
104 preparers.non_ascii_strip_preparer],
104 preparers.non_ascii_strip_preparer],
105 validator=filename_validator,
105 validator=filename_validator,
106 missing=None)
106 missing=None)
107
107
108 filename = colander.SchemaNode(
108 filename = colander.SchemaNode(
109 colander.String(),
109 colander.String(),
110 preparer=[preparers.strip_preparer,
110 preparer=[preparers.strip_preparer,
111 preparers.non_ascii_strip_preparer],
111 preparers.non_ascii_strip_preparer],
112 validator=filename_validator)
112 validator=filename_validator)
113
113
114 content = colander.SchemaNode(
114 content = colander.SchemaNode(
115 colander.String())
115 colander.String())
116 mimetype = colander.SchemaNode(
116 mimetype = colander.SchemaNode(
117 colander.String(),
117 colander.String(),
118 missing=None)
118 missing=None)
119
119
120
120
121 class Nodes(colander.SequenceSchema):
121 class Nodes(colander.SequenceSchema):
122 filenames = NodeSchema()
122 filenames = NodeSchema()
123
123
124 def validator(self, node, cstruct):
124 def validator(self, node, cstruct):
125 if not isinstance(cstruct, list):
125 if not isinstance(cstruct, list):
126 return
126 return
127
127
128 found_filenames = []
128 found_filenames = []
129 for data in cstruct:
129 for data in cstruct:
130 filename = data['filename']
130 filename = data['filename']
131 if filename in found_filenames:
131 if filename in found_filenames:
132 msg = _('Duplicated value for filename found: `{}`').format(
132 msg = _('Duplicated value for filename found: `{}`').format(
133 filename)
133 filename)
134 raise colander.Invalid(node, msg)
134 raise colander.Invalid(node, msg)
135 found_filenames.append(filename)
135 found_filenames.append(filename)
136
136
137
137
138 class GistSchema(colander.MappingSchema):
138 class GistSchema(colander.MappingSchema):
139 """
139 """
140 schema = GistSchema()
140 schema = GistSchema()
141 schema = schema.bind(
141 schema = schema.bind(
142 lifetime_options = [1,2,3]
142 lifetime_options = [1,2,3]
143 )
143 )
144 out = schema.deserialize(dict(
144 out = schema.deserialize(dict(
145 nodes=[
145 nodes=[
146 {'filename': 'x', 'content': 'xxx', },
146 {'filename': 'x', 'content': 'xxx', },
147 {'filename': 'docs/Z', 'content': 'xxx', 'mimetype': 'x'},
147 {'filename': 'docs/Z', 'content': 'xxx', 'mimetype': 'x'},
148 ]
148 ]
149 ))
149 ))
150 """
150 """
151
151
152 from rhodecode.model.db import Gist
152 from rhodecode.model.db import Gist
153
153
154 gistid = colander.SchemaNode(
154 gistid = colander.SchemaNode(
155 colander.String(),
155 colander.String(),
156 missing=None,
156 missing=None,
157 preparer=[preparers.strip_preparer,
157 preparer=[preparers.strip_preparer,
158 preparers.non_ascii_strip_preparer,
158 preparers.non_ascii_strip_preparer,
159 preparers.slugify_preparer],
159 preparers.slugify_preparer],
160 validator=colander.All(
160 validator=colander.All(
161 colander.Length(min=3),
161 colander.Length(min=3),
162 unique_gist_validator
162 unique_gist_validator
163 ))
163 ))
164
164
165 description = colander.SchemaNode(
165 description = colander.SchemaNode(
166 colander.String(),
166 colander.String(),
167 missing=u'')
167 missing=u'')
168
168
169 lifetime = colander.SchemaNode(
169 lifetime = colander.SchemaNode(
170 colander.Integer(),
170 colander.Integer(),
171 validator=deferred_lifetime_validator)
171 validator=deferred_lifetime_validator)
172
172
173 gist_acl_level = colander.SchemaNode(
173 gist_acl_level = colander.SchemaNode(
174 colander.String(),
174 colander.String(),
175 validator=colander.OneOf([Gist.ACL_LEVEL_PUBLIC,
175 validator=colander.OneOf([Gist.ACL_LEVEL_PUBLIC,
176 Gist.ACL_LEVEL_PRIVATE]))
176 Gist.ACL_LEVEL_PRIVATE]))
177
177
178 gist_type = colander.SchemaNode(
178 gist_type = colander.SchemaNode(
179 colander.String(),
179 colander.String(),
180 missing=Gist.GIST_PUBLIC,
180 missing=Gist.GIST_PUBLIC,
181 validator=colander.OneOf([Gist.GIST_PRIVATE, Gist.GIST_PUBLIC]))
181 validator=colander.OneOf([Gist.GIST_PRIVATE, Gist.GIST_PUBLIC]))
182
182
183 nodes = Nodes()
183 nodes = Nodes()
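For orientation, a small sketch of the node-conversion helpers and the GistSchema defined above, assuming they are imported from this module (the file path is not shown in the diff) and all values are invented:

    from rhodecode.model.db import Gist

    # old-style dict keyed by filename -> new-style list of dicts (and back)
    old_style = {'README.rst': {'content': 'docs here'}}
    as_sequence = nodes_to_sequence(old_style)   # list of dicts with filename/content/mimetype keys
    as_nodes = sequence_to_nodes(as_sequence)    # dict keyed by filename again

    # bind() returns a new schema with the deferred lifetime validator resolved
    schema = GistSchema().bind(lifetime_options=[-1, 60, 60 * 24])
    gist = schema.deserialize({
        'description': 'scratch notes',
        'lifetime': 60,                          # must be one of the bound lifetime_options
        'gist_acl_level': Gist.ACL_LEVEL_PUBLIC,
        'gist_type': Gist.GIST_PUBLIC,
        'nodes': [{'filename': 'notes.txt', 'content': 'hello'}],
    })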
@@ -1,223 +1,223 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22
22
23 import deform
23 import deform
24 import colander
24 import colander
25
25
26 from rhodecode.translation import _
26 from rhodecode.translation import _
27 from rhodecode.model.db import Repository, RepoGroup
27 from rhodecode.model.db import Repository, RepoGroup
28 from rhodecode.model.validation_schema import validators, preparers
28 from rhodecode.model.validation_schema import validators, preparers
29
29
30
30
31 def integration_scope_choices(permissions):
31 def integration_scope_choices(permissions):
32 """
32 """
33 Return list of (value, label) choices for integration scopes depending on
33 Return list of (value, label) choices for integration scopes depending on
34 the permissions
34 the permissions
35 """
35 """
36 result = [('', _('Pick a scope:'))]
36 result = [('', _('Pick a scope:'))]
37 if 'hg.admin' in permissions['global']:
37 if 'hg.admin' in permissions['global']:
38 result.extend([
38 result.extend([
39 ('global', _('Global (all repositories)')),
39 ('global', _('Global (all repositories)')),
40 ('root-repos', _('Top level repositories only')),
40 ('root-repos', _('Top level repositories only')),
41 ])
41 ])
42
42
43 repo_choices = [
43 repo_choices = [
44 ('repo:%s' % repo_name, '/' + repo_name)
44 ('repo:%s' % repo_name, '/' + repo_name)
45 for repo_name, repo_perm
45 for repo_name, repo_perm
46 in list(permissions['repositories'].items())
46 in list(permissions['repositories'].items())
47 if repo_perm == 'repository.admin'
47 if repo_perm == 'repository.admin'
48 ]
48 ]
49 repogroup_choices = [
49 repogroup_choices = [
50 ('repogroup:%s' % repo_group_name, '/' + repo_group_name + '/ (child repos only)')
50 ('repogroup:%s' % repo_group_name, '/' + repo_group_name + '/ (child repos only)')
51 for repo_group_name, repo_group_perm
51 for repo_group_name, repo_group_perm
52 in list(permissions['repositories_groups'].items())
52 in list(permissions['repositories_groups'].items())
53 if repo_group_perm == 'group.admin'
53 if repo_group_perm == 'group.admin'
54 ]
54 ]
55 repogroup_recursive_choices = [
55 repogroup_recursive_choices = [
56 ('repogroup-recursive:%s' % repo_group_name, '/' + repo_group_name + '/ (recursive)')
56 ('repogroup-recursive:%s' % repo_group_name, '/' + repo_group_name + '/ (recursive)')
57 for repo_group_name, repo_group_perm
57 for repo_group_name, repo_group_perm
58 in list(permissions['repositories_groups'].items())
58 in list(permissions['repositories_groups'].items())
59 if repo_group_perm == 'group.admin'
59 if repo_group_perm == 'group.admin'
60 ]
60 ]
61 result.extend(
61 result.extend(
62 sorted(repogroup_recursive_choices + repogroup_choices + repo_choices,
62 sorted(repogroup_recursive_choices + repogroup_choices + repo_choices,
63 key=lambda choice_label: choice_label[0].split(':', 1)[1]
63 key=lambda choice_label: choice_label[0].split(':', 1)[1]
64 )
64 )
65 )
65 )
66 return result
66 return result
67
67
68
68
69 @colander.deferred
69 @colander.deferred
70 def deferred_integration_scopes_validator(node, kw):
70 def deferred_integration_scopes_validator(node, kw):
71 perms = kw.get('permissions')
71 perms = kw.get('permissions')
72 def _scope_validator(_node, scope):
72 def _scope_validator(_node, scope):
73 is_super_admin = 'hg.admin' in perms['global']
73 is_super_admin = 'hg.admin' in perms['global']
74
74
75 if scope.get('repo'):
75 if scope.get('repo'):
76 if (is_super_admin or perms['repositories'].get(
76 if (is_super_admin or perms['repositories'].get(
77 scope['repo'].repo_name) == 'repository.admin'):
77 scope['repo'].repo_name) == 'repository.admin'):
78 return True
78 return True
79 msg = _('Only repo admins can create integrations')
79 msg = _('Only repo admins can create integrations')
80 raise colander.Invalid(_node, msg)
80 raise colander.Invalid(_node, msg)
81 elif scope.get('repo_group'):
81 elif scope.get('repo_group'):
82 if (is_super_admin or perms['repositories_groups'].get(
82 if (is_super_admin or perms['repositories_groups'].get(
83 scope['repo_group'].group_name) == 'group.admin'):
83 scope['repo_group'].group_name) == 'group.admin'):
84 return True
84 return True
85
85
86 msg = _('Only repogroup admins can create integrations')
86 msg = _('Only repogroup admins can create integrations')
87 raise colander.Invalid(_node, msg)
87 raise colander.Invalid(_node, msg)
88 else:
88 else:
89 if is_super_admin:
89 if is_super_admin:
90 return True
90 return True
91 msg = _('Only superadmins can create global integrations')
91 msg = _('Only superadmins can create global integrations')
92 raise colander.Invalid(_node, msg)
92 raise colander.Invalid(_node, msg)
93
93
94 return _scope_validator
94 return _scope_validator
95
95
96
96
97 @colander.deferred
97 @colander.deferred
98 def deferred_integration_scopes_widget(node, kw):
98 def deferred_integration_scopes_widget(node, kw):
99 if kw.get('no_scope'):
99 if kw.get('no_scope'):
100 return deform.widget.TextInputWidget(readonly=True)
100 return deform.widget.TextInputWidget(readonly=True)
101
101
102 choices = integration_scope_choices(kw.get('permissions'))
102 choices = integration_scope_choices(kw.get('permissions'))
103 widget = deform.widget.Select2Widget(values=choices)
103 widget = deform.widget.Select2Widget(values=choices)
104 return widget
104 return widget
105
105
106
106
107 class IntegrationScopeType(colander.SchemaType):
107 class IntegrationScopeType(colander.SchemaType):
108 def serialize(self, node, appstruct):
108 def serialize(self, node, appstruct):
109 if appstruct is colander.null:
109 if appstruct is colander.null:
110 return colander.null
110 return colander.null
111
111
112 if appstruct.get('repo'):
112 if appstruct.get('repo'):
113 return 'repo:%s' % appstruct['repo'].repo_name
113 return 'repo:%s' % appstruct['repo'].repo_name
114 elif appstruct.get('repo_group'):
114 elif appstruct.get('repo_group'):
115 if appstruct.get('child_repos_only'):
115 if appstruct.get('child_repos_only'):
116 return 'repogroup:%s' % appstruct['repo_group'].group_name
116 return 'repogroup:%s' % appstruct['repo_group'].group_name
117 else:
117 else:
118 return 'repogroup-recursive:%s' % (
118 return 'repogroup-recursive:%s' % (
119 appstruct['repo_group'].group_name)
119 appstruct['repo_group'].group_name)
120 else:
120 else:
121 if appstruct.get('child_repos_only'):
121 if appstruct.get('child_repos_only'):
122 return 'root-repos'
122 return 'root-repos'
123 else:
123 else:
124 return 'global'
124 return 'global'
125
125
126 def deserialize(self, node, cstruct):
126 def deserialize(self, node, cstruct):
127 if cstruct is colander.null:
127 if cstruct is colander.null:
128 return colander.null
128 return colander.null
129
129
130 if cstruct.startswith('repo:'):
130 if cstruct.startswith('repo:'):
131 repo = Repository.get_by_repo_name(cstruct.split(':')[1])
131 repo = Repository.get_by_repo_name(cstruct.split(':')[1])
132 if repo:
132 if repo:
133 return {
133 return {
134 'repo': repo,
134 'repo': repo,
135 'repo_group': None,
135 'repo_group': None,
136 'child_repos_only': False,
136 'child_repos_only': False,
137 }
137 }
138 elif cstruct.startswith('repogroup-recursive:'):
138 elif cstruct.startswith('repogroup-recursive:'):
139 repo_group = RepoGroup.get_by_group_name(cstruct.split(':')[1])
139 repo_group = RepoGroup.get_by_group_name(cstruct.split(':')[1])
140 if repo_group:
140 if repo_group:
141 return {
141 return {
142 'repo': None,
142 'repo': None,
143 'repo_group': repo_group,
143 'repo_group': repo_group,
144 'child_repos_only': False
144 'child_repos_only': False
145 }
145 }
146 elif cstruct.startswith('repogroup:'):
146 elif cstruct.startswith('repogroup:'):
147 repo_group = RepoGroup.get_by_group_name(cstruct.split(':')[1])
147 repo_group = RepoGroup.get_by_group_name(cstruct.split(':')[1])
148 if repo_group:
148 if repo_group:
149 return {
149 return {
150 'repo': None,
150 'repo': None,
151 'repo_group': repo_group,
151 'repo_group': repo_group,
152 'child_repos_only': True
152 'child_repos_only': True
153 }
153 }
154 elif cstruct == 'global':
154 elif cstruct == 'global':
155 return {
155 return {
156 'repo': None,
156 'repo': None,
157 'repo_group': None,
157 'repo_group': None,
158 'child_repos_only': False
158 'child_repos_only': False
159 }
159 }
160 elif cstruct == 'root-repos':
160 elif cstruct == 'root-repos':
161 return {
161 return {
162 'repo': None,
162 'repo': None,
163 'repo_group': None,
163 'repo_group': None,
164 'child_repos_only': True
164 'child_repos_only': True
165 }
165 }
166
166
167 raise colander.Invalid(node, '%r is not a valid scope' % cstruct)
167 raise colander.Invalid(node, '%r is not a valid scope' % cstruct)
168
168
169
169
170 class IntegrationOptionsSchemaBase(colander.MappingSchema):
170 class IntegrationOptionsSchemaBase(colander.MappingSchema):
171
171
172 name = colander.SchemaNode(
172 name = colander.SchemaNode(
173 colander.String(),
173 colander.String(),
174 description=_('Short name for this integration.'),
174 description=_('Short name for this integration.'),
175 missing=colander.required,
175 missing=colander.required,
176 title=_('Integration name'),
176 title=_('Integration name'),
177 )
177 )
178
178
179 scope = colander.SchemaNode(
179 scope = colander.SchemaNode(
180 IntegrationScopeType(),
180 IntegrationScopeType(),
181 description=_(
181 description=_(
182 'Scope of the integration. Recursive means the integration '
182 'Scope of the integration. Recursive means the integration '
183 'runs on all repos of that group and children recursively.'),
183 'runs on all repos of that group and children recursively.'),
184 title=_('Integration scope'),
184 title=_('Integration scope'),
185 validator=deferred_integration_scopes_validator,
185 validator=deferred_integration_scopes_validator,
186 widget=deferred_integration_scopes_widget,
186 widget=deferred_integration_scopes_widget,
187 missing=colander.required,
187 missing=colander.required,
188 )
188 )
189
189
190 enabled = colander.SchemaNode(
190 enabled = colander.SchemaNode(
191 colander.Bool(),
191 colander.Bool(),
192 default=True,
192 default=True,
193 description=_('Enable or disable this integration.'),
193 description=_('Enable or disable this integration.'),
194 missing=False,
194 missing=False,
195 title=_('Enabled'),
195 title=_('Enabled'),
196 )
196 )
197
197
198
198
199 def make_integration_schema(IntegrationType, settings=None):
199 def make_integration_schema(IntegrationType, settings=None):
200 """
200 """
201 Return a colander schema for an integration type
201 Return a colander schema for an integration type
202
202
203 :param IntegrationType: the integration type class
203 :param IntegrationType: the integration type class
204 :param settings: existing integration settings dict (optional)
204 :param settings: existing integration settings dict (optional)
205 """
205 """
206
206
207 settings = settings or {}
207 settings = settings or {}
208 settings_schema = IntegrationType(settings=settings).settings_schema()
208 settings_schema = IntegrationType(settings=settings).settings_schema()
209
209
210 class IntegrationSchema(colander.Schema):
210 class IntegrationSchema(colander.Schema):
211 options = IntegrationOptionsSchemaBase()
211 options = IntegrationOptionsSchemaBase()
212
212
213 schema = IntegrationSchema()
213 schema = IntegrationSchema()
214 schema['options'].title = _('General integration options')
214 schema['options'].title = _('General integration options')
215
215
216 settings_schema.name = 'settings'
216 settings_schema.name = 'settings'
217 settings_schema.title = _('{integration_type} settings').format(
217 settings_schema.title = _('{integration_type} settings').format(
218 integration_type=IntegrationType.display_name)
218 integration_type=IntegrationType.display_name)
219 schema.add(settings_schema)
219 schema.add(settings_schema)
220
220
221 return schema
221 return schema
222
222
223
223
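A sketch of how make_integration_schema() above might be driven. SomeIntegrationType, existing_settings, and request_user_permissions are placeholders, not RhodeCode names; the deferred scope validator and widget only resolve after bind():

    # build the combined options + per-type settings schema
    schema = make_integration_schema(SomeIntegrationType, settings=existing_settings)

    # bind resolves deferred_integration_scopes_validator / _widget
    schema = schema.bind(permissions=request_user_permissions, no_scope=False)

    appstruct = schema.deserialize({
        'options': {
            'name': 'Notify chat room',
            # parsed by IntegrationScopeType.deserialize; must name an existing
            # repository, otherwise colander.Invalid is raised
            'scope': 'repo:some-group/some-repo',
            'enabled': 'true',
        },
        'settings': {},   # shape depends entirely on the integration type's settings_schema()
    })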
@@ -1,313 +1,313 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import colander
22 import colander
23 import deform.widget
23 import deform.widget
24
24
25 from rhodecode.model.validation_schema.utils import username_converter
25 from rhodecode.model.validation_schema.utils import username_converter
26 from rhodecode.translation import _
26 from rhodecode.translation import _
27 from rhodecode.model.validation_schema import validators, preparers, types
27 from rhodecode.model.validation_schema import validators, preparers, types
28
28
29
29
30 def get_group_and_repo(repo_name):
30 def get_group_and_repo(repo_name):
31 from rhodecode.model.repo_group import RepoGroupModel
31 from rhodecode.model.repo_group import RepoGroupModel
32 return RepoGroupModel()._get_group_name_and_parent(
32 return RepoGroupModel()._get_group_name_and_parent(
33 repo_name, get_object=True)
33 repo_name, get_object=True)
34
34
35
35
36 def get_repo_group(repo_group_id):
36 def get_repo_group(repo_group_id):
37 from rhodecode.model.repo_group import RepoGroup
37 from rhodecode.model.repo_group import RepoGroup
38 return RepoGroup.get(repo_group_id), RepoGroup.CHOICES_SEPARATOR
38 return RepoGroup.get(repo_group_id), RepoGroup.CHOICES_SEPARATOR
39
39
40
40
41 @colander.deferred
41 @colander.deferred
42 def deferred_can_write_to_group_validator(node, kw):
42 def deferred_can_write_to_group_validator(node, kw):
43 old_values = kw.get('old_values') or {}
43 old_values = kw.get('old_values') or {}
44 request_user = kw.get('user')
44 request_user = kw.get('user')
45
45
46 def can_write_group_validator(node, value):
46 def can_write_group_validator(node, value):
47 from rhodecode.lib.auth import (
47 from rhodecode.lib.auth import (
48 HasPermissionAny, HasRepoGroupPermissionAny)
48 HasPermissionAny, HasRepoGroupPermissionAny)
49 from rhodecode.model.repo_group import RepoGroupModel
49 from rhodecode.model.repo_group import RepoGroupModel
50
50
51 messages = {
51 messages = {
52 'invalid_parent_repo_group':
52 'invalid_parent_repo_group':
53 _(u"Parent repository group `{}` does not exist"),
53 _(u"Parent repository group `{}` does not exist"),
54 # permissions denied we expose as not existing, to prevent
54 # permissions denied we expose as not existing, to prevent
55 # resource discovery
55 # resource discovery
56 'permission_denied_parent_group':
56 'permission_denied_parent_group':
57 _(u"You do not have the permissions to store "
57 _(u"You do not have the permissions to store "
58 u"repository groups inside repository group `{}`"),
58 u"repository groups inside repository group `{}`"),
59 'permission_denied_root':
59 'permission_denied_root':
60 _(u"You do not have the permission to store "
60 _(u"You do not have the permission to store "
61 u"repository groups in the root location.")
61 u"repository groups in the root location.")
62 }
62 }
63
63
64 value = value['repo_group_name']
64 value = value['repo_group_name']
65 parent_group_name = value
65 parent_group_name = value
66
66
67 is_root_location = value is types.RootLocation
67 is_root_location = value is types.RootLocation
68
68
69 # validators are NOT initialized here, we must call them
69 # validators are NOT initialized here, we must call them
70 can_create_repo_groups_at_root = HasPermissionAny(
70 can_create_repo_groups_at_root = HasPermissionAny(
71 'hg.admin', 'hg.repogroup.create.true')
71 'hg.admin', 'hg.repogroup.create.true')
72
72
73 if is_root_location:
73 if is_root_location:
74 if can_create_repo_groups_at_root(user=request_user):
74 if can_create_repo_groups_at_root(user=request_user):
75 # we can create a repo group at the top level. No more checks
75 # we can create a repo group at the top level. No more checks
76 # are required
76 # are required
77 return
77 return
78 else:
78 else:
79 raise colander.Invalid(node, messages['permission_denied_root'])
79 raise colander.Invalid(node, messages['permission_denied_root'])
80
80
81 # check if the parent repo group actually exists
81 # check if the parent repo group actually exists
82 parent_group = None
82 parent_group = None
83 if parent_group_name:
83 if parent_group_name:
84 parent_group = RepoGroupModel().get_by_group_name(parent_group_name)
84 parent_group = RepoGroupModel().get_by_group_name(parent_group_name)
85 if value and not parent_group:
85 if value and not parent_group:
86 raise colander.Invalid(
86 raise colander.Invalid(
87 node, messages['invalid_parent_repo_group'].format(
87 node, messages['invalid_parent_repo_group'].format(
88 parent_group_name))
88 parent_group_name))
89
89
90 # check if we have permissions to create new groups under
90 # check if we have permissions to create new groups under
91 # parent repo group
91 # parent repo group
92 # create repositories with write permission on group is set to true
92 # create repositories with write permission on group is set to true
93 create_on_write = HasPermissionAny(
93 create_on_write = HasPermissionAny(
94 'hg.create.write_on_repogroup.true')(user=request_user)
94 'hg.create.write_on_repogroup.true')(user=request_user)
95
95
96 group_admin = HasRepoGroupPermissionAny('group.admin')(
96 group_admin = HasRepoGroupPermissionAny('group.admin')(
97 parent_group_name, 'can write into group validator', user=request_user)
97 parent_group_name, 'can write into group validator', user=request_user)
98 group_write = HasRepoGroupPermissionAny('group.write')(
98 group_write = HasRepoGroupPermissionAny('group.write')(
99 parent_group_name, 'can write into group validator', user=request_user)
99 parent_group_name, 'can write into group validator', user=request_user)
100
100
101 # creation by write access is currently disabled. Needs thinking if
101 # creation by write access is currently disabled. Needs thinking if
102 # we want to allow this...
102 # we want to allow this...
103 forbidden = not (group_admin or (group_write and create_on_write and 0))
103 forbidden = not (group_admin or (group_write and create_on_write and 0))
104
104
105 old_name = old_values.get('group_name')
105 old_name = old_values.get('group_name')
106 if old_name and old_name == old_values.get('submitted_repo_group_name'):
106 if old_name and old_name == old_values.get('submitted_repo_group_name'):
107 # we're editing a repository group, we didn't change the name
107 # we're editing a repository group, we didn't change the name
108 # we skip the check for write into parent group now
108 # we skip the check for write into parent group now
109 # this allows changing settings for this repo group
109 # this allows changing settings for this repo group
110 return
110 return
111
111
112 if parent_group and forbidden:
112 if parent_group and forbidden:
113 msg = messages['permission_denied_parent_group'].format(parent_group_name)
113 msg = messages['permission_denied_parent_group'].format(parent_group_name)
114 raise colander.Invalid(node, msg)
114 raise colander.Invalid(node, msg)
115
115
116 return can_write_group_validator
116 return can_write_group_validator
117
117
118
118
119 @colander.deferred
119 @colander.deferred
120 def deferred_repo_group_owner_validator(node, kw):
120 def deferred_repo_group_owner_validator(node, kw):
121
121
122 def repo_owner_validator(node, value):
122 def repo_owner_validator(node, value):
123 from rhodecode.model.db import User
123 from rhodecode.model.db import User
124 value = username_converter(value)
124 value = username_converter(value)
125 existing = User.get_by_username(value)
125 existing = User.get_by_username(value)
126 if not existing:
126 if not existing:
127 msg = _(u'Repo group owner with id `{}` does not exist').format(
127 msg = _(u'Repo group owner with id `{}` does not exist').format(
128 value)
128 value)
129 raise colander.Invalid(node, msg)
129 raise colander.Invalid(node, msg)
130
130
131 return repo_owner_validator
131 return repo_owner_validator
132
132
133
133
134 @colander.deferred
134 @colander.deferred
135 def deferred_unique_name_validator(node, kw):
135 def deferred_unique_name_validator(node, kw):
136 request_user = kw.get('user')
136 request_user = kw.get('user')
137 old_values = kw.get('old_values') or {}
137 old_values = kw.get('old_values') or {}
138
138
139 def unique_name_validator(node, value):
139 def unique_name_validator(node, value):
140 from rhodecode.model.db import Repository, RepoGroup
140 from rhodecode.model.db import Repository, RepoGroup
141 name_changed = value != old_values.get('group_name')
141 name_changed = value != old_values.get('group_name')
142
142
143 existing = Repository.get_by_repo_name(value)
143 existing = Repository.get_by_repo_name(value)
144 if name_changed and existing:
144 if name_changed and existing:
145 msg = _(u'Repository with name `{}` already exists').format(value)
145 msg = _(u'Repository with name `{}` already exists').format(value)
146 raise colander.Invalid(node, msg)
146 raise colander.Invalid(node, msg)
147
147
148 existing_group = RepoGroup.get_by_group_name(value)
148 existing_group = RepoGroup.get_by_group_name(value)
149 if name_changed and existing_group:
149 if name_changed and existing_group:
150 msg = _(u'Repository group with name `{}` already exists').format(
150 msg = _(u'Repository group with name `{}` already exists').format(
151 value)
151 value)
152 raise colander.Invalid(node, msg)
152 raise colander.Invalid(node, msg)
153 return unique_name_validator
153 return unique_name_validator
154
154
155
155
156 @colander.deferred
156 @colander.deferred
157 def deferred_repo_group_name_validator(node, kw):
157 def deferred_repo_group_name_validator(node, kw):
158 return validators.valid_name_validator
158 return validators.valid_name_validator
159
159
160
160
161 @colander.deferred
161 @colander.deferred
162 def deferred_repo_group_validator(node, kw):
162 def deferred_repo_group_validator(node, kw):
163 options = kw.get(
163 options = kw.get(
164 'repo_group_repo_group_options')
164 'repo_group_repo_group_options')
165 return colander.OneOf([x for x in options])
165 return colander.OneOf([x for x in options])
166
166
167
167
168 @colander.deferred
168 @colander.deferred
169 def deferred_repo_group_widget(node, kw):
169 def deferred_repo_group_widget(node, kw):
170 items = kw.get('repo_group_repo_group_items')
170 items = kw.get('repo_group_repo_group_items')
171 return deform.widget.Select2Widget(values=items)
171 return deform.widget.Select2Widget(values=items)
172
172
173
173
174 class GroupType(colander.Mapping):
174 class GroupType(colander.Mapping):
175 def _validate(self, node, value):
175 def _validate(self, node, value):
176 try:
176 try:
177 return dict(repo_group_name=value)
177 return dict(repo_group_name=value)
178 except Exception as e:
178 except Exception as e:
179 raise colander.Invalid(
179 raise colander.Invalid(
180 node, '"${val}" is not a mapping type: ${err}'.format(
180 node, '"${val}" is not a mapping type: ${err}'.format(
181 val=value, err=e))
181 val=value, err=e))
182
182
183 def deserialize(self, node, cstruct):
183 def deserialize(self, node, cstruct):
184 if cstruct is colander.null:
184 if cstruct is colander.null:
185 return cstruct
185 return cstruct
186
186
187 appstruct = super(GroupType, self).deserialize(node, cstruct)
187 appstruct = super(GroupType, self).deserialize(node, cstruct)
188 validated_name = appstruct['repo_group_name']
188 validated_name = appstruct['repo_group_name']
189
189
190 # inject group based on the already-deserialized data
190 # inject group based on the already-deserialized data
191 (repo_group_name_without_group,
191 (repo_group_name_without_group,
192 parent_group_name,
192 parent_group_name,
193 parent_group) = get_group_and_repo(validated_name)
193 parent_group) = get_group_and_repo(validated_name)
194
194
195 appstruct['repo_group_name_with_group'] = validated_name
195 appstruct['repo_group_name_with_group'] = validated_name
196 appstruct['repo_group_name_without_group'] = repo_group_name_without_group
196 appstruct['repo_group_name_without_group'] = repo_group_name_without_group
197 appstruct['repo_group_name'] = parent_group_name or types.RootLocation
197 appstruct['repo_group_name'] = parent_group_name or types.RootLocation
198 if parent_group:
198 if parent_group:
199 appstruct['repo_group_id'] = parent_group.group_id
199 appstruct['repo_group_id'] = parent_group.group_id
200
200
201 return appstruct
201 return appstruct
202
202
203
203
204 class GroupSchema(colander.SchemaNode):
204 class GroupSchema(colander.SchemaNode):
205 schema_type = GroupType
205 schema_type = GroupType
206 validator = deferred_can_write_to_group_validator
206 validator = deferred_can_write_to_group_validator
207 missing = colander.null
207 missing = colander.null
208
208
209
209
210 class RepoGroup(GroupSchema):
210 class RepoGroup(GroupSchema):
211 repo_group_name = colander.SchemaNode(
211 repo_group_name = colander.SchemaNode(
212 types.GroupNameType())
212 types.GroupNameType())
213 repo_group_id = colander.SchemaNode(
213 repo_group_id = colander.SchemaNode(
214 colander.String(), missing=None)
214 colander.String(), missing=None)
215 repo_group_name_without_group = colander.SchemaNode(
215 repo_group_name_without_group = colander.SchemaNode(
216 colander.String(), missing=None)
216 colander.String(), missing=None)
217
217
218
218
219 class RepoGroupAccessSchema(colander.MappingSchema):
219 class RepoGroupAccessSchema(colander.MappingSchema):
220 repo_group = RepoGroup()
220 repo_group = RepoGroup()
221
221
222
222
223 class RepoGroupNameUniqueSchema(colander.MappingSchema):
223 class RepoGroupNameUniqueSchema(colander.MappingSchema):
224 unique_repo_group_name = colander.SchemaNode(
224 unique_repo_group_name = colander.SchemaNode(
225 colander.String(),
225 colander.String(),
226 validator=deferred_unique_name_validator)
226 validator=deferred_unique_name_validator)
227
227
228
228
229 class RepoGroupSchema(colander.Schema):
229 class RepoGroupSchema(colander.Schema):
230
230
231 repo_group_name = colander.SchemaNode(
231 repo_group_name = colander.SchemaNode(
232 types.GroupNameType(),
232 types.GroupNameType(),
233 validator=deferred_repo_group_name_validator)
233 validator=deferred_repo_group_name_validator)
234
234
235 repo_group_owner = colander.SchemaNode(
235 repo_group_owner = colander.SchemaNode(
236 colander.String(),
236 colander.String(),
237 validator=deferred_repo_group_owner_validator)
237 validator=deferred_repo_group_owner_validator)
238
238
239 repo_group_description = colander.SchemaNode(
239 repo_group_description = colander.SchemaNode(
240 colander.String(), missing='', widget=deform.widget.TextAreaWidget())
240 colander.String(), missing='', widget=deform.widget.TextAreaWidget())
241
241
242 repo_group_copy_permissions = colander.SchemaNode(
242 repo_group_copy_permissions = colander.SchemaNode(
243 types.StringBooleanType(),
243 types.StringBooleanType(),
244 missing=False, widget=deform.widget.CheckboxWidget())
244 missing=False, widget=deform.widget.CheckboxWidget())
245
245
246 repo_group_enable_locking = colander.SchemaNode(
246 repo_group_enable_locking = colander.SchemaNode(
247 types.StringBooleanType(),
247 types.StringBooleanType(),
248 missing=False, widget=deform.widget.CheckboxWidget())
248 missing=False, widget=deform.widget.CheckboxWidget())
249
249
250 def deserialize(self, cstruct):
250 def deserialize(self, cstruct):
251 """
251 """
252 Custom deserialize that allows chaining validation, verifying
252 Custom deserialize that allows chaining validation, verifying
253 permissions, and, as a last step, uniqueness
253 permissions, and, as a last step, uniqueness
254 """
254 """
255
255
256 appstruct = super(RepoGroupSchema, self).deserialize(cstruct)
256 appstruct = super(RepoGroupSchema, self).deserialize(cstruct)
257 validated_name = appstruct['repo_group_name']
257 validated_name = appstruct['repo_group_name']
258
258
259 # second pass to validate permissions to repo_group
259 # second pass to validate permissions to repo_group
260 if 'old_values' in self.bindings:
260 if 'old_values' in self.bindings:
261 # save current repo name for name change checks
261 # save current repo name for name change checks
262 self.bindings['old_values']['submitted_repo_group_name'] = validated_name
262 self.bindings['old_values']['submitted_repo_group_name'] = validated_name
263 second = RepoGroupAccessSchema().bind(**self.bindings)
263 second = RepoGroupAccessSchema().bind(**self.bindings)
264 appstruct_second = second.deserialize({'repo_group': validated_name})
264 appstruct_second = second.deserialize({'repo_group': validated_name})
265 # save result
265 # save result
266 appstruct['repo_group'] = appstruct_second['repo_group']
266 appstruct['repo_group'] = appstruct_second['repo_group']
267
267
268 # third pass to validate uniqueness
268 # third pass to validate uniqueness
269 third = RepoGroupNameUniqueSchema().bind(**self.bindings)
269 third = RepoGroupNameUniqueSchema().bind(**self.bindings)
270 third.deserialize({'unique_repo_group_name': validated_name})
270 third.deserialize({'unique_repo_group_name': validated_name})
271
271
272 return appstruct
272 return appstruct
273
273
274
274
275 class RepoGroupSettingsSchema(RepoGroupSchema):
275 class RepoGroupSettingsSchema(RepoGroupSchema):
276 repo_group = colander.SchemaNode(
276 repo_group = colander.SchemaNode(
277 colander.Integer(),
277 colander.Integer(),
278 validator=deferred_repo_group_validator,
278 validator=deferred_repo_group_validator,
279 widget=deferred_repo_group_widget,
279 widget=deferred_repo_group_widget,
280 missing='')
280 missing='')
281
281
282 def deserialize(self, cstruct):
282 def deserialize(self, cstruct):
283 """
283 """
284 Custom deserialize that allows chaining validation, verifying
284 Custom deserialize that allows chaining validation, verifying
285 permissions, and, as a last step, uniqueness
285 permissions, and, as a last step, uniqueness
286 """
286 """
287
287
288 # first pass, to validate given data
288 # first pass, to validate given data
289 appstruct = super(RepoGroupSchema, self).deserialize(cstruct)
289 appstruct = super(RepoGroupSchema, self).deserialize(cstruct)
290 validated_name = appstruct['repo_group_name']
290 validated_name = appstruct['repo_group_name']
291
291
292 # because repoSchema adds repo-group as an ID, we inject it as the
292 # because repoSchema adds repo-group as an ID, we inject it as the
293 # full name here because validators require it; it's unwrapped later,
293 # full name here because validators require it; it's unwrapped later,
294 # so it's safe to use and the final name is going to be without the group anyway
294 # so it's safe to use and the final name is going to be without the group anyway
295
295
296 group, separator = get_repo_group(appstruct['repo_group'])
296 group, separator = get_repo_group(appstruct['repo_group'])
297 if group:
297 if group:
298 validated_name = separator.join([group.group_name, validated_name])
298 validated_name = separator.join([group.group_name, validated_name])
299
299
300 # second pass to validate permissions to repo_group
300 # second pass to validate permissions to repo_group
301 if 'old_values' in self.bindings:
301 if 'old_values' in self.bindings:
302 # save current repo name for name change checks
302 # save current repo name for name change checks
303 self.bindings['old_values']['submitted_repo_group_name'] = validated_name
303 self.bindings['old_values']['submitted_repo_group_name'] = validated_name
304 second = RepoGroupAccessSchema().bind(**self.bindings)
304 second = RepoGroupAccessSchema().bind(**self.bindings)
305 appstruct_second = second.deserialize({'repo_group': validated_name})
305 appstruct_second = second.deserialize({'repo_group': validated_name})
306 # save result
306 # save result
307 appstruct['repo_group'] = appstruct_second['repo_group']
307 appstruct['repo_group'] = appstruct_second['repo_group']
308
308
309 # third pass to validate uniqueness
309 # third pass to validate uniqueness
310 third = RepoGroupNameUniqueSchema().bind(**self.bindings)
310 third = RepoGroupNameUniqueSchema().bind(**self.bindings)
311 third.deserialize({'unique_repo_group_name': validated_name})
311 third.deserialize({'unique_repo_group_name': validated_name})
312
312
313 return appstruct
313 return appstruct
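Finally, an illustrative (not authoritative) run through RepoGroupSchema above. The bind keywords mirror what the deferred validators read; request_user and all field values are invented, and the owner and parent group would have to exist with the bound user holding group.admin on the parent:

    schema = RepoGroupSchema().bind(
        user=request_user,   # consulted by the permission validators
        old_values={},       # empty dict means "create", not "edit"
    )

    appstruct = schema.deserialize({
        'repo_group_name': 'parent-group/new-group',
        'repo_group_owner': 'admin',
        'repo_group_description': 'groups for internal projects',
        'repo_group_copy_permissions': 'false',
        'repo_group_enable_locking': 'false',
    })
    # the second and third passes ran inside deserialize();
    # appstruct['repo_group'] now carries the parsed parent-group information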
@@ -1,450 +1,450 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import colander
21 import colander
22 import deform.widget
22 import deform.widget
23
23
24 from rhodecode.translation import _
24 from rhodecode.translation import _
25 from rhodecode.model.validation_schema.utils import convert_to_optgroup, username_converter
25 from rhodecode.model.validation_schema.utils import convert_to_optgroup, username_converter
26 from rhodecode.model.validation_schema import validators, preparers, types
26 from rhodecode.model.validation_schema import validators, preparers, types
27
27
28 DEFAULT_LANDING_REF = 'rev:tip'
28 DEFAULT_LANDING_REF = 'rev:tip'
29
29
30
30
31 def get_group_and_repo(repo_name):
31 def get_group_and_repo(repo_name):
32 from rhodecode.model.repo_group import RepoGroupModel
32 from rhodecode.model.repo_group import RepoGroupModel
33 return RepoGroupModel()._get_group_name_and_parent(
33 return RepoGroupModel()._get_group_name_and_parent(
34 repo_name, get_object=True)
34 repo_name, get_object=True)
35
35
36
36
37 def get_repo_group(repo_group_id):
37 def get_repo_group(repo_group_id):
38 from rhodecode.model.repo_group import RepoGroup
38 from rhodecode.model.repo_group import RepoGroup
39 return RepoGroup.get(repo_group_id), RepoGroup.CHOICES_SEPARATOR
39 return RepoGroup.get(repo_group_id), RepoGroup.CHOICES_SEPARATOR
40
40
41
41
42 @colander.deferred
42 @colander.deferred
43 def deferred_repo_type_validator(node, kw):
43 def deferred_repo_type_validator(node, kw):
44 options = kw.get('repo_type_options', [])
44 options = kw.get('repo_type_options', [])
45 return colander.OneOf([x for x in options])
45 return colander.OneOf([x for x in options])
46
46
47
47
48 @colander.deferred
48 @colander.deferred
49 def deferred_repo_owner_validator(node, kw):
49 def deferred_repo_owner_validator(node, kw):
50
50
51 def repo_owner_validator(node, value):
51 def repo_owner_validator(node, value):
52 from rhodecode.model.db import User
52 from rhodecode.model.db import User
53 value = username_converter(value)
53 value = username_converter(value)
54 existing = User.get_by_username(value)
54 existing = User.get_by_username(value)
55 if not existing:
55 if not existing:
56 msg = _(u'Repo owner with id `{}` does not exist').format(value)
56 msg = _(u'Repo owner with id `{}` does not exist').format(value)
57 raise colander.Invalid(node, msg)
57 raise colander.Invalid(node, msg)
58
58
59 return repo_owner_validator
59 return repo_owner_validator
60
60
61
61
62 @colander.deferred
62 @colander.deferred
63 def deferred_landing_ref_validator(node, kw):
63 def deferred_landing_ref_validator(node, kw):
64 options = kw.get(
64 options = kw.get(
65 'repo_ref_options', [DEFAULT_LANDING_REF])
65 'repo_ref_options', [DEFAULT_LANDING_REF])
66 return colander.OneOf([x for x in options])
66 return colander.OneOf([x for x in options])
67
67
68
68
69 @colander.deferred
69 @colander.deferred
70 def deferred_sync_uri_validator(node, kw):
70 def deferred_sync_uri_validator(node, kw):
71 repo_type = kw.get('repo_type')
71 repo_type = kw.get('repo_type')
72 validator = validators.CloneUriValidator(repo_type)
72 validator = validators.CloneUriValidator(repo_type)
73 return validator
73 return validator
74
74
75
75
76 @colander.deferred
76 @colander.deferred
77 def deferred_landing_ref_widget(node, kw):
77 def deferred_landing_ref_widget(node, kw):
78 from rhodecode.model.scm import ScmModel
78 from rhodecode.model.scm import ScmModel
79
79
80 repo_type = kw.get('repo_type')
80 repo_type = kw.get('repo_type')
81 default_opts = []
81 default_opts = []
82 if repo_type:
82 if repo_type:
83 default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
83 default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
84 default_opts.append((default_landing_ref, default_landing_ref))
84 default_opts.append((default_landing_ref, default_landing_ref))
85
85
86 items = kw.get('repo_ref_items', default_opts)
86 items = kw.get('repo_ref_items', default_opts)
87 items = convert_to_optgroup(items)
87 items = convert_to_optgroup(items)
88 return deform.widget.Select2Widget(values=items)
88 return deform.widget.Select2Widget(values=items)
89
89
90
90
91 @colander.deferred
91 @colander.deferred
92 def deferred_fork_of_validator(node, kw):
92 def deferred_fork_of_validator(node, kw):
93 old_values = kw.get('old_values') or {}
93 old_values = kw.get('old_values') or {}
94
94
95 def fork_of_validator(node, value):
95 def fork_of_validator(node, value):
96 from rhodecode.model.db import Repository, RepoGroup
96 from rhodecode.model.db import Repository, RepoGroup
97 existing = Repository.get_by_repo_name(value)
97 existing = Repository.get_by_repo_name(value)
98 if not existing:
98 if not existing:
99 msg = _(u'Fork with id `{}` does not exist').format(value)
99 msg = _(u'Fork with id `{}` does not exist').format(value)
100 raise colander.Invalid(node, msg)
100 raise colander.Invalid(node, msg)
101 elif old_values['repo_name'] == existing.repo_name:
101 elif old_values['repo_name'] == existing.repo_name:
102 msg = _(u'Cannot set the fork '
102 msg = _(u'Cannot set the fork '
103 u'parameter of this repository to itself').format(value)
103 u'parameter of this repository to itself').format(value)
104 raise colander.Invalid(node, msg)
104 raise colander.Invalid(node, msg)
105
105
106 return fork_of_validator
106 return fork_of_validator
107
107
108
108
109 @colander.deferred
109 @colander.deferred
110 def deferred_can_write_to_group_validator(node, kw):
110 def deferred_can_write_to_group_validator(node, kw):
111 request_user = kw.get('user')
111 request_user = kw.get('user')
112 old_values = kw.get('old_values') or {}
112 old_values = kw.get('old_values') or {}
113
113
114 def can_write_to_group_validator(node, value):
114 def can_write_to_group_validator(node, value):
115 """
115 """
116 Checks if given repo path is writable by user. This includes checks if
116 Checks if given repo path is writable by user. This includes checks if
117 user is allowed to create repositories under root path or under
117 user is allowed to create repositories under root path or under
118 repo group paths
118 repo group paths
119 """
119 """
120
120
121 from rhodecode.lib.auth import (
121 from rhodecode.lib.auth import (
122 HasPermissionAny, HasRepoGroupPermissionAny)
122 HasPermissionAny, HasRepoGroupPermissionAny)
123 from rhodecode.model.repo_group import RepoGroupModel
123 from rhodecode.model.repo_group import RepoGroupModel
124
124
125 messages = {
125 messages = {
126 'invalid_repo_group':
126 'invalid_repo_group':
127 _(u"Repository group `{}` does not exist"),
127 _(u"Repository group `{}` does not exist"),
128 # permission denied is exposed as not existing, to prevent
128 # permission denied is exposed as not existing, to prevent
129 # resource discovery
129 # resource discovery
130 'permission_denied':
130 'permission_denied':
131 _(u"Repository group `{}` does not exist"),
131 _(u"Repository group `{}` does not exist"),
132 'permission_denied_root':
132 'permission_denied_root':
133 _(u"You do not have the permission to store "
133 _(u"You do not have the permission to store "
134 u"repositories in the root location.")
134 u"repositories in the root location.")
135 }
135 }
136
136
137 value = value['repo_group_name']
137 value = value['repo_group_name']
138
138
139 is_root_location = value is types.RootLocation
139 is_root_location = value is types.RootLocation
140 # these validators are NOT initialized yet, we must call them explicitly
140 # these validators are NOT initialized yet, we must call them explicitly
141 can_create_repos_at_root = HasPermissionAny('hg.admin', 'hg.create.repository')
141 can_create_repos_at_root = HasPermissionAny('hg.admin', 'hg.create.repository')
142
142
143 # if the value is the root location, we simply need to check if we can
143 # if the value is the root location, we simply need to check if we can
144 # write to the root location!
144 # write to the root location!
145 if is_root_location:
145 if is_root_location:
146
146
147 if can_create_repos_at_root(user=request_user):
147 if can_create_repos_at_root(user=request_user):
148 # we can create repos at the top (root) level. No more checks
148 # we can create repos at the top (root) level. No more checks
149 # are required
149 # are required
150 return
150 return
151 else:
151 else:
152 old_name = old_values.get('repo_name')
152 old_name = old_values.get('repo_name')
153 if old_name and old_name == old_values.get('submitted_repo_name'):
153 if old_name and old_name == old_values.get('submitted_repo_name'):
154 # since we didn't change the name, we can skip validation and
154 # since we didn't change the name, we can skip validation and
155 # allow current users without store-in-root permissions to update
155 # allow current users without store-in-root permissions to update
156 return
156 return
157
157
158 # "fake" node name as repo_name, otherwise we oddly report
158 # "fake" node name as repo_name, otherwise we oddly report
159 # the error as if it was coming from repo_group,
159 # the error as if it was coming from repo_group,
160 # however repo_group is empty when using root location.
160 # however repo_group is empty when using root location.
161 node.name = 'repo_name'
161 node.name = 'repo_name'
162 raise colander.Invalid(node, messages['permission_denied_root'])
162 raise colander.Invalid(node, messages['permission_denied_root'])
163
163
164 # parent group does not exist? throw an error
164 # parent group does not exist? throw an error
165 repo_group = RepoGroupModel().get_by_group_name(value)
165 repo_group = RepoGroupModel().get_by_group_name(value)
166 if value and not repo_group:
166 if value and not repo_group:
167 raise colander.Invalid(
167 raise colander.Invalid(
168 node, messages['invalid_repo_group'].format(value))
168 node, messages['invalid_repo_group'].format(value))
169
169
170 gr_name = repo_group.group_name
170 gr_name = repo_group.group_name
171
171
172 # check if 'create repositories with write permission on group' is set to true
172 # check if 'create repositories with write permission on group' is set to true
173 create_on_write = HasPermissionAny(
173 create_on_write = HasPermissionAny(
174 'hg.create.write_on_repogroup.true')(user=request_user)
174 'hg.create.write_on_repogroup.true')(user=request_user)
175
175
176 group_admin = HasRepoGroupPermissionAny('group.admin')(
176 group_admin = HasRepoGroupPermissionAny('group.admin')(
177 gr_name, 'can write into group validator', user=request_user)
177 gr_name, 'can write into group validator', user=request_user)
178 group_write = HasRepoGroupPermissionAny('group.write')(
178 group_write = HasRepoGroupPermissionAny('group.write')(
179 gr_name, 'can write into group validator', user=request_user)
179 gr_name, 'can write into group validator', user=request_user)
180
180
181 forbidden = not (group_admin or (group_write and create_on_write))
181 forbidden = not (group_admin or (group_write and create_on_write))
182
182
183 # TODO: handling of old values, and detecting no-change in path
183 # TODO: handling of old values, and detecting no-change in path
184 # to skip permission checks in such cases. This only needs to be
184 # to skip permission checks in such cases. This only needs to be
185 # implemented if we use this schema in forms as well
185 # implemented if we use this schema in forms as well
186
186
187 # gid = (old_data['repo_group'].get('group_id')
187 # gid = (old_data['repo_group'].get('group_id')
188 # if (old_data and 'repo_group' in old_data) else None)
188 # if (old_data and 'repo_group' in old_data) else None)
189 # value_changed = gid != safe_int(value)
189 # value_changed = gid != safe_int(value)
190 # new = not old_data
190 # new = not old_data
191
191
192 # do check if we changed the value; there's a case where someone had
192 # do check if we changed the value; there's a case where someone had
193 # their write permissions to a repository revoked after creating it; we
193 # their write permissions to a repository revoked after creating it; we
194 # don't need to check permissions if they didn't change the value of
194 # don't need to check permissions if they didn't change the value of
195 # the groups in the form box
195 # the groups in the form box
196 # if value_changed or new:
196 # if value_changed or new:
197 # # parent group need to be existing
197 # # parent group need to be existing
198 # TODO: ENDS HERE
198 # TODO: ENDS HERE
199
199
200 if repo_group and forbidden:
200 if repo_group and forbidden:
201 msg = messages['permission_denied'].format(value)
201 msg = messages['permission_denied'].format(value)
202 raise colander.Invalid(node, msg)
202 raise colander.Invalid(node, msg)
203
203
204 return can_write_to_group_validator
204 return can_write_to_group_validator
205
205
206
206
207 @colander.deferred
207 @colander.deferred
208 def deferred_unique_name_validator(node, kw):
208 def deferred_unique_name_validator(node, kw):
209 request_user = kw.get('user')
209 request_user = kw.get('user')
210 old_values = kw.get('old_values') or {}
210 old_values = kw.get('old_values') or {}
211
211
212 def unique_name_validator(node, value):
212 def unique_name_validator(node, value):
213 from rhodecode.model.db import Repository, RepoGroup
213 from rhodecode.model.db import Repository, RepoGroup
214 name_changed = value != old_values.get('repo_name')
214 name_changed = value != old_values.get('repo_name')
215
215
216 existing = Repository.get_by_repo_name(value)
216 existing = Repository.get_by_repo_name(value)
217 if name_changed and existing:
217 if name_changed and existing:
218 msg = _(u'Repository with name `{}` already exists').format(value)
218 msg = _(u'Repository with name `{}` already exists').format(value)
219 raise colander.Invalid(node, msg)
219 raise colander.Invalid(node, msg)
220
220
221 existing_group = RepoGroup.get_by_group_name(value)
221 existing_group = RepoGroup.get_by_group_name(value)
222 if name_changed and existing_group:
222 if name_changed and existing_group:
223 msg = _(u'Repository group with name `{}` already exists').format(
223 msg = _(u'Repository group with name `{}` already exists').format(
224 value)
224 value)
225 raise colander.Invalid(node, msg)
225 raise colander.Invalid(node, msg)
226 return unique_name_validator
226 return unique_name_validator
227
227
228
228
229 @colander.deferred
229 @colander.deferred
230 def deferred_repo_name_validator(node, kw):
230 def deferred_repo_name_validator(node, kw):
231 def no_git_suffix_validator(node, value):
231 def no_git_suffix_validator(node, value):
232 if value.endswith('.git'):
232 if value.endswith('.git'):
233 msg = _('Repository name cannot end with .git')
233 msg = _('Repository name cannot end with .git')
234 raise colander.Invalid(node, msg)
234 raise colander.Invalid(node, msg)
235 return colander.All(
235 return colander.All(
236 no_git_suffix_validator, validators.valid_name_validator)
236 no_git_suffix_validator, validators.valid_name_validator)
237
237
238
238
239 @colander.deferred
239 @colander.deferred
240 def deferred_repo_group_validator(node, kw):
240 def deferred_repo_group_validator(node, kw):
241 options = kw.get(
241 options = kw.get(
242 'repo_repo_group_options')
242 'repo_repo_group_options')
243 return colander.OneOf([x for x in options])
243 return colander.OneOf([x for x in options])
244
244
245
245
246 @colander.deferred
246 @colander.deferred
247 def deferred_repo_group_widget(node, kw):
247 def deferred_repo_group_widget(node, kw):
248 items = kw.get('repo_repo_group_items')
248 items = kw.get('repo_repo_group_items')
249 return deform.widget.Select2Widget(values=items)
249 return deform.widget.Select2Widget(values=items)
250
250
251
251
252 class GroupType(colander.Mapping):
252 class GroupType(colander.Mapping):
253 def _validate(self, node, value):
253 def _validate(self, node, value):
254 try:
254 try:
255 return dict(repo_group_name=value)
255 return dict(repo_group_name=value)
256 except Exception as e:
256 except Exception as e:
257 raise colander.Invalid(
257 raise colander.Invalid(
258 node, '"${val}" is not a mapping type: ${err}'.format(
258 node, '"${val}" is not a mapping type: ${err}'.format(
259 val=value, err=e))
259 val=value, err=e))
260
260
261 def deserialize(self, node, cstruct):
261 def deserialize(self, node, cstruct):
262 if cstruct is colander.null:
262 if cstruct is colander.null:
263 return cstruct
263 return cstruct
264
264
265 appstruct = super(GroupType, self).deserialize(node, cstruct)
265 appstruct = super(GroupType, self).deserialize(node, cstruct)
266 validated_name = appstruct['repo_group_name']
266 validated_name = appstruct['repo_group_name']
267
267
268 # inject group info based on the already-deserialized data
268 # inject group info based on the already-deserialized data
269 (repo_name_without_group,
269 (repo_name_without_group,
270 parent_group_name,
270 parent_group_name,
271 parent_group) = get_group_and_repo(validated_name)
271 parent_group) = get_group_and_repo(validated_name)
272
272
273 appstruct['repo_name_with_group'] = validated_name
273 appstruct['repo_name_with_group'] = validated_name
274 appstruct['repo_name_without_group'] = repo_name_without_group
274 appstruct['repo_name_without_group'] = repo_name_without_group
275 appstruct['repo_group_name'] = parent_group_name or types.RootLocation
275 appstruct['repo_group_name'] = parent_group_name or types.RootLocation
276
276
277 if parent_group:
277 if parent_group:
278 appstruct['repo_group_id'] = parent_group.group_id
278 appstruct['repo_group_id'] = parent_group.group_id
279
279
280 return appstruct
280 return appstruct
281
281
282
282
283 class GroupSchema(colander.SchemaNode):
283 class GroupSchema(colander.SchemaNode):
284 schema_type = GroupType
284 schema_type = GroupType
285 validator = deferred_can_write_to_group_validator
285 validator = deferred_can_write_to_group_validator
286 missing = colander.null
286 missing = colander.null
287
287
288
288
289 class RepoGroup(GroupSchema):
289 class RepoGroup(GroupSchema):
290 repo_group_name = colander.SchemaNode(
290 repo_group_name = colander.SchemaNode(
291 types.GroupNameType())
291 types.GroupNameType())
292 repo_group_id = colander.SchemaNode(
292 repo_group_id = colander.SchemaNode(
293 colander.String(), missing=None)
293 colander.String(), missing=None)
294 repo_name_without_group = colander.SchemaNode(
294 repo_name_without_group = colander.SchemaNode(
295 colander.String(), missing=None)
295 colander.String(), missing=None)
296
296
297
297
298 class RepoGroupAccessSchema(colander.MappingSchema):
298 class RepoGroupAccessSchema(colander.MappingSchema):
299 repo_group = RepoGroup()
299 repo_group = RepoGroup()
300
300
301
301
302 class RepoNameUniqueSchema(colander.MappingSchema):
302 class RepoNameUniqueSchema(colander.MappingSchema):
303 unique_repo_name = colander.SchemaNode(
303 unique_repo_name = colander.SchemaNode(
304 colander.String(),
304 colander.String(),
305 validator=deferred_unique_name_validator)
305 validator=deferred_unique_name_validator)
306
306
307
307
308 class RepoSchema(colander.MappingSchema):
308 class RepoSchema(colander.MappingSchema):
309
309
310 repo_name = colander.SchemaNode(
310 repo_name = colander.SchemaNode(
311 types.RepoNameType(),
311 types.RepoNameType(),
312 validator=deferred_repo_name_validator)
312 validator=deferred_repo_name_validator)
313
313
314 repo_type = colander.SchemaNode(
314 repo_type = colander.SchemaNode(
315 colander.String(),
315 colander.String(),
316 validator=deferred_repo_type_validator)
316 validator=deferred_repo_type_validator)
317
317
318 repo_owner = colander.SchemaNode(
318 repo_owner = colander.SchemaNode(
319 colander.String(),
319 colander.String(),
320 validator=deferred_repo_owner_validator,
320 validator=deferred_repo_owner_validator,
321 widget=deform.widget.TextInputWidget())
321 widget=deform.widget.TextInputWidget())
322
322
323 repo_description = colander.SchemaNode(
323 repo_description = colander.SchemaNode(
324 colander.String(), missing='',
324 colander.String(), missing='',
325 widget=deform.widget.TextAreaWidget())
325 widget=deform.widget.TextAreaWidget())
326
326
327 repo_landing_commit_ref = colander.SchemaNode(
327 repo_landing_commit_ref = colander.SchemaNode(
328 colander.String(),
328 colander.String(),
329 validator=deferred_landing_ref_validator,
329 validator=deferred_landing_ref_validator,
330 preparers=[preparers.strip_preparer],
330 preparers=[preparers.strip_preparer],
331 missing=DEFAULT_LANDING_REF,
331 missing=DEFAULT_LANDING_REF,
332 widget=deferred_landing_ref_widget)
332 widget=deferred_landing_ref_widget)
333
333
334 repo_clone_uri = colander.SchemaNode(
334 repo_clone_uri = colander.SchemaNode(
335 colander.String(),
335 colander.String(),
336 validator=deferred_sync_uri_validator,
336 validator=deferred_sync_uri_validator,
337 preparers=[preparers.strip_preparer],
337 preparers=[preparers.strip_preparer],
338 missing='')
338 missing='')
339
339
340 repo_push_uri = colander.SchemaNode(
340 repo_push_uri = colander.SchemaNode(
341 colander.String(),
341 colander.String(),
342 validator=deferred_sync_uri_validator,
342 validator=deferred_sync_uri_validator,
343 preparers=[preparers.strip_preparer],
343 preparers=[preparers.strip_preparer],
344 missing='')
344 missing='')
345
345
346 repo_fork_of = colander.SchemaNode(
346 repo_fork_of = colander.SchemaNode(
347 colander.String(),
347 colander.String(),
348 validator=deferred_fork_of_validator,
348 validator=deferred_fork_of_validator,
349 missing=None)
349 missing=None)
350
350
351 repo_private = colander.SchemaNode(
351 repo_private = colander.SchemaNode(
352 types.StringBooleanType(),
352 types.StringBooleanType(),
353 missing=False, widget=deform.widget.CheckboxWidget())
353 missing=False, widget=deform.widget.CheckboxWidget())
354 repo_copy_permissions = colander.SchemaNode(
354 repo_copy_permissions = colander.SchemaNode(
355 types.StringBooleanType(),
355 types.StringBooleanType(),
356 missing=False, widget=deform.widget.CheckboxWidget())
356 missing=False, widget=deform.widget.CheckboxWidget())
357 repo_enable_statistics = colander.SchemaNode(
357 repo_enable_statistics = colander.SchemaNode(
358 types.StringBooleanType(),
358 types.StringBooleanType(),
359 missing=False, widget=deform.widget.CheckboxWidget())
359 missing=False, widget=deform.widget.CheckboxWidget())
360 repo_enable_downloads = colander.SchemaNode(
360 repo_enable_downloads = colander.SchemaNode(
361 types.StringBooleanType(),
361 types.StringBooleanType(),
362 missing=False, widget=deform.widget.CheckboxWidget())
362 missing=False, widget=deform.widget.CheckboxWidget())
363 repo_enable_locking = colander.SchemaNode(
363 repo_enable_locking = colander.SchemaNode(
364 types.StringBooleanType(),
364 types.StringBooleanType(),
365 missing=False, widget=deform.widget.CheckboxWidget())
365 missing=False, widget=deform.widget.CheckboxWidget())
366
366
367 def deserialize(self, cstruct):
367 def deserialize(self, cstruct):
368 """
368 """
369 Custom deserialize that allows chaining validation, verifying
369 Custom deserialize that allows chaining validation, verifying
370 permissions, and, as a last step, uniqueness
370 permissions, and, as a last step, uniqueness
371 """
371 """
372
372
373 # first pass, to validate given data
373 # first pass, to validate given data
374 appstruct = super(RepoSchema, self).deserialize(cstruct)
374 appstruct = super(RepoSchema, self).deserialize(cstruct)
375 validated_name = appstruct['repo_name']
375 validated_name = appstruct['repo_name']
376
376
377 # second pass to validate permissions to repo_group
377 # second pass to validate permissions to repo_group
378 if 'old_values' in self.bindings:
378 if 'old_values' in self.bindings:
379 # save current repo name for name change checks
379 # save current repo name for name change checks
380 self.bindings['old_values']['submitted_repo_name'] = validated_name
380 self.bindings['old_values']['submitted_repo_name'] = validated_name
381 second = RepoGroupAccessSchema().bind(**self.bindings)
381 second = RepoGroupAccessSchema().bind(**self.bindings)
382 appstruct_second = second.deserialize({'repo_group': validated_name})
382 appstruct_second = second.deserialize({'repo_group': validated_name})
383 # save result
383 # save result
384 appstruct['repo_group'] = appstruct_second['repo_group']
384 appstruct['repo_group'] = appstruct_second['repo_group']
385
385
386 # third pass to validate uniqueness
386 # third pass to validate uniqueness
387 third = RepoNameUniqueSchema().bind(**self.bindings)
387 third = RepoNameUniqueSchema().bind(**self.bindings)
388 third.deserialize({'unique_repo_name': validated_name})
388 third.deserialize({'unique_repo_name': validated_name})
389
389
390 return appstruct
390 return appstruct
391
391
392
392
393 class RepoSettingsSchema(RepoSchema):
393 class RepoSettingsSchema(RepoSchema):
394 repo_group = colander.SchemaNode(
394 repo_group = colander.SchemaNode(
395 colander.Integer(),
395 colander.Integer(),
396 validator=deferred_repo_group_validator,
396 validator=deferred_repo_group_validator,
397 widget=deferred_repo_group_widget,
397 widget=deferred_repo_group_widget,
398 missing='')
398 missing='')
399
399
400 repo_clone_uri_change = colander.SchemaNode(
400 repo_clone_uri_change = colander.SchemaNode(
401 colander.String(),
401 colander.String(),
402 missing='NEW')
402 missing='NEW')
403
403
404 repo_clone_uri = colander.SchemaNode(
404 repo_clone_uri = colander.SchemaNode(
405 colander.String(),
405 colander.String(),
406 preparers=[preparers.strip_preparer],
406 preparers=[preparers.strip_preparer],
407 validator=deferred_sync_uri_validator,
407 validator=deferred_sync_uri_validator,
408 missing='')
408 missing='')
409
409
410 repo_push_uri_change = colander.SchemaNode(
410 repo_push_uri_change = colander.SchemaNode(
411 colander.String(),
411 colander.String(),
412 missing='NEW')
412 missing='NEW')
413
413
414 repo_push_uri = colander.SchemaNode(
414 repo_push_uri = colander.SchemaNode(
415 colander.String(),
415 colander.String(),
416 preparers=[preparers.strip_preparer],
416 preparers=[preparers.strip_preparer],
417 validator=deferred_sync_uri_validator,
417 validator=deferred_sync_uri_validator,
418 missing='')
418 missing='')
419
419
420 def deserialize(self, cstruct):
420 def deserialize(self, cstruct):
421 """
421 """
422 Custom deserialize that allows chaining validation, verifying
422 Custom deserialize that allows chaining validation, verifying
423 permissions, and, as a last step, uniqueness
423 permissions, and, as a last step, uniqueness
424 """
424 """
425
425
426 # first pass, to validate given data
426 # first pass, to validate given data
427 appstruct = super(RepoSchema, self).deserialize(cstruct)
427 appstruct = super(RepoSchema, self).deserialize(cstruct)
428 validated_name = appstruct['repo_name']
428 validated_name = appstruct['repo_name']
429 # because RepoSchema adds repo-group as an ID, we inject it here as the
429 # because RepoSchema adds repo-group as an ID, we inject it here as the
430 # full name, since the validators require it; it's unwrapped later,
430 # full name, since the validators require it; it's unwrapped later,
431 # so it's safe to use and the final name will be without the group anyway
431 # so it's safe to use and the final name will be without the group anyway
432
432
433 group, separator = get_repo_group(appstruct['repo_group'])
433 group, separator = get_repo_group(appstruct['repo_group'])
434 if group:
434 if group:
435 validated_name = separator.join([group.group_name, validated_name])
435 validated_name = separator.join([group.group_name, validated_name])
436
436
437 # second pass to validate permissions to repo_group
437 # second pass to validate permissions to repo_group
438 if 'old_values' in self.bindings:
438 if 'old_values' in self.bindings:
439 # save current repo name for name change checks
439 # save current repo name for name change checks
440 self.bindings['old_values']['submitted_repo_name'] = validated_name
440 self.bindings['old_values']['submitted_repo_name'] = validated_name
441 second = RepoGroupAccessSchema().bind(**self.bindings)
441 second = RepoGroupAccessSchema().bind(**self.bindings)
442 appstruct_second = second.deserialize({'repo_group': validated_name})
442 appstruct_second = second.deserialize({'repo_group': validated_name})
443 # save result
443 # save result
444 appstruct['repo_group'] = appstruct_second['repo_group']
444 appstruct['repo_group'] = appstruct_second['repo_group']
445
445
446 # third pass to validate uniqueness
446 # third pass to validate uniqueness
447 third = RepoNameUniqueSchema().bind(**self.bindings)
447 third = RepoNameUniqueSchema().bind(**self.bindings)
448 third.deserialize({'unique_repo_name': validated_name})
448 third.deserialize({'unique_repo_name': validated_name})
449
449
450 return appstruct
450 return appstruct
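All of the deferred_* helpers in this file resolve to a concrete validator or widget only when the schema is bound, which is how request-specific data (user, repo type, available refs, old values) reaches the validators. A small self-contained illustration of that bind mechanism with plain colander, mirroring deferred_repo_type_validator above (toy names, not part of the codebase):

    import colander


    @colander.deferred
    def deferred_type_validator(node, kw):
        # options are supplied at bind() time, mirroring repo_type_options above
        options = kw.get('type_options', [])
        return colander.OneOf(options)


    class ToySchema(colander.MappingSchema):
        repo_type = colander.SchemaNode(colander.String(),
                                        validator=deferred_type_validator)


    schema = ToySchema().bind(type_options=['hg', 'git', 'svn'])
    print(schema.deserialize({'repo_type': 'git'}))   # {'repo_type': 'git'}

    try:
        schema.deserialize({'repo_type': 'cvs'})
    except colander.Invalid as exc:
        print(exc.asdict())                           # error reported for repo_type

In RhodeCode itself the bindings carry keys such as repo_type_options, repo_ref_options, user and old_values, as read by the kw.get() calls in the deferred helpers above.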
@@ -1,40 +1,40 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import colander
21 import colander
22 from rhodecode.model.validation_schema import validators, preparers, types
22 from rhodecode.model.validation_schema import validators, preparers, types
23
23
24 DEFAULT_ROLE = 'reviewer'
24 DEFAULT_ROLE = 'reviewer'
25 VALID_ROLES = ['reviewer', 'observer']
25 VALID_ROLES = ['reviewer', 'observer']
26
26
27
27
28 class ReviewerSchema(colander.MappingSchema):
28 class ReviewerSchema(colander.MappingSchema):
29 username = colander.SchemaNode(types.StrOrIntType())
29 username = colander.SchemaNode(types.StrOrIntType())
30 reasons = colander.SchemaNode(colander.List(), missing=['no reason specified'])
30 reasons = colander.SchemaNode(colander.List(), missing=['no reason specified'])
31 mandatory = colander.SchemaNode(colander.Boolean(), missing=False)
31 mandatory = colander.SchemaNode(colander.Boolean(), missing=False)
32 rules = colander.SchemaNode(colander.List(), missing=[])
32 rules = colander.SchemaNode(colander.List(), missing=[])
33 role = colander.SchemaNode(colander.String(), missing=DEFAULT_ROLE,
33 role = colander.SchemaNode(colander.String(), missing=DEFAULT_ROLE,
34 validator=colander.OneOf(VALID_ROLES))
34 validator=colander.OneOf(VALID_ROLES))
35
35
36
36
37 class ReviewerListSchema(colander.SequenceSchema):
37 class ReviewerListSchema(colander.SequenceSchema):
38 reviewers = ReviewerSchema()
38 reviewers = ReviewerSchema()
39
39
40
40
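ReviewerListSchema is a plain colander SequenceSchema, so a list of reviewer dicts deserializes into a list of appstructs with the missing values above filled in. A rough, runnable stand-in (username simplified to a plain string because StrOrIntType is RhodeCode-specific, and the reasons/rules fields omitted for brevity):

    import colander

    DEFAULT_ROLE = 'reviewer'
    VALID_ROLES = ['reviewer', 'observer']


    class ToyReviewerSchema(colander.MappingSchema):
        username = colander.SchemaNode(colander.String())
        mandatory = colander.SchemaNode(colander.Boolean(), missing=False)
        role = colander.SchemaNode(colander.String(), missing=DEFAULT_ROLE,
                                   validator=colander.OneOf(VALID_ROLES))


    class ToyReviewerListSchema(colander.SequenceSchema):
        reviewers = ToyReviewerSchema()


    data = [{'username': 'alice'},
            {'username': 'bob', 'role': 'observer', 'mandatory': 'true'}]
    print(ToyReviewerListSchema().deserialize(data))
    # first entry gets mandatory=False and role='reviewer' from the defaults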
@@ -1,58 +1,58 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import colander
22 import colander
23
23
24
24
25 def sort_validator(node, value):
25 def sort_validator(node, value):
26 if value in ['oldfirst', 'newfirst']:
26 if value in ['oldfirst', 'newfirst']:
27 return value
27 return value
28 if value.startswith('asc:'):
28 if value.startswith('asc:'):
29 return value
29 return value
30 if value.startswith('desc:'):
30 if value.startswith('desc:'):
31 return value
31 return value
32
32
33 msg = u'Invalid search sort, must be `oldfirst`, `newfirst`, or start with asc: or desc:'
33 msg = u'Invalid search sort, must be `oldfirst`, `newfirst`, or start with asc: or desc:'
34 raise colander.Invalid(node, msg)
34 raise colander.Invalid(node, msg)
35
35
36
36
37 class SearchParamsSchema(colander.MappingSchema):
37 class SearchParamsSchema(colander.MappingSchema):
38 search_query = colander.SchemaNode(
38 search_query = colander.SchemaNode(
39 colander.String(),
39 colander.String(),
40 missing='')
40 missing='')
41 search_type = colander.SchemaNode(
41 search_type = colander.SchemaNode(
42 colander.String(),
42 colander.String(),
43 missing='content',
43 missing='content',
44 validator=colander.OneOf(['content', 'path', 'commit', 'repository']))
44 validator=colander.OneOf(['content', 'path', 'commit', 'repository']))
45 search_sort = colander.SchemaNode(
45 search_sort = colander.SchemaNode(
46 colander.String(),
46 colander.String(),
47 missing='desc:date',
47 missing='desc:date',
48 validator=sort_validator)
48 validator=sort_validator)
49 search_max_lines = colander.SchemaNode(
49 search_max_lines = colander.SchemaNode(
50 colander.Integer(),
50 colander.Integer(),
51 missing=10)
51 missing=10)
52 page_limit = colander.SchemaNode(
52 page_limit = colander.SchemaNode(
53 colander.Integer(),
53 colander.Integer(),
54 missing=10,
54 missing=10,
55 validator=colander.Range(1, 500))
55 validator=colander.Range(1, 500))
56 requested_page = colander.SchemaNode(
56 requested_page = colander.SchemaNode(
57 colander.Integer(),
57 colander.Integer(),
58 missing=1)
58 missing=1)
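SearchParamsSchema uses only stock colander types plus the sort_validator above, so raw query-string values deserialize into typed values with the defaults shown. A trimmed-down, runnable sketch covering two of the fields with the same sort rule:

    import colander


    def sort_validator(node, value):
        # same rule as above: fixed keywords or an asc:/desc: prefix
        if value in ('oldfirst', 'newfirst') or value.startswith(('asc:', 'desc:')):
            return value
        raise colander.Invalid(node, 'Invalid search sort')


    class ToySearchParamsSchema(colander.MappingSchema):
        search_sort = colander.SchemaNode(colander.String(),
                                          missing='desc:date',
                                          validator=sort_validator)
        page_limit = colander.SchemaNode(colander.Integer(),
                                         missing=10,
                                         validator=colander.Range(1, 500))


    print(ToySearchParamsSchema().deserialize({'page_limit': '25'}))
    # {'search_sort': 'desc:date', 'page_limit': 25}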
@@ -1,80 +1,80 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import re
20 import re
21 import colander
21 import colander
22
22
23 from rhodecode.model.validation_schema import types, validators
23 from rhodecode.model.validation_schema import types, validators
24 from rhodecode.model.validation_schema.utils import username_converter
24 from rhodecode.model.validation_schema.utils import username_converter
25 from rhodecode.translation import _
25 from rhodecode.translation import _
26
26
27
27
28 @colander.deferred
28 @colander.deferred
29 def deferred_user_group_name_validator(node, kw):
29 def deferred_user_group_name_validator(node, kw):
30
30
31 def name_validator(node, value):
31 def name_validator(node, value):
32
32
33 msg = _('Allowed characters in the name are letters, numbers, and `-`, `_`, `.`. '
33 msg = _('Allowed characters in the name are letters, numbers, and `-`, `_`, `.`. '
34 'The name must start with a letter or number. Got `{}`').format(value)
34 'The name must start with a letter or number. Got `{}`').format(value)
35
35
36 if not re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value):
36 if not re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value):
37 raise colander.Invalid(node, msg)
37 raise colander.Invalid(node, msg)
38
38
39 return name_validator
39 return name_validator
40
40
41
41
42 @colander.deferred
42 @colander.deferred
43 def deferred_user_group_owner_validator(node, kw):
43 def deferred_user_group_owner_validator(node, kw):
44
44
45 def owner_validator(node, value):
45 def owner_validator(node, value):
46 from rhodecode.model.db import User
46 from rhodecode.model.db import User
47 value = username_converter(value)
47 value = username_converter(value)
48 existing = User.get_by_username(value)
48 existing = User.get_by_username(value)
49 if not existing:
49 if not existing:
50 msg = _(u'User group owner with id `{}` does not exist').format(value)
50 msg = _(u'User group owner with id `{}` does not exist').format(value)
51 raise colander.Invalid(node, msg)
51 raise colander.Invalid(node, msg)
52
52
53 return owner_validator
53 return owner_validator
54
54
55
55
56 class UserGroupSchema(colander.Schema):
56 class UserGroupSchema(colander.Schema):
57
57
58 user_group_name = colander.SchemaNode(
58 user_group_name = colander.SchemaNode(
59 colander.String(),
59 colander.String(),
60 validator=deferred_user_group_name_validator)
60 validator=deferred_user_group_name_validator)
61
61
62 user_group_description = colander.SchemaNode(
62 user_group_description = colander.SchemaNode(
63 colander.String(), missing='')
63 colander.String(), missing='')
64
64
65 user_group_owner = colander.SchemaNode(
65 user_group_owner = colander.SchemaNode(
66 colander.String(),
66 colander.String(),
67 validator=deferred_user_group_owner_validator)
67 validator=deferred_user_group_owner_validator)
68
68
69 user_group_active = colander.SchemaNode(
69 user_group_active = colander.SchemaNode(
70 types.StringBooleanType(),
70 types.StringBooleanType(),
71 missing=False)
71 missing=False)
72
72
73 def deserialize(self, cstruct):
73 def deserialize(self, cstruct):
74 """
74 """
75 Custom deserialize that allows chaining validation, verifying
75 Custom deserialize that allows chaining validation, verifying
76 permissions, and, as a last step, uniqueness
76 permissions, and, as a last step, uniqueness
77 """
77 """
78
78
79 appstruct = super(UserGroupSchema, self).deserialize(cstruct)
79 appstruct = super(UserGroupSchema, self).deserialize(cstruct)
80 return appstruct
80 return appstruct
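The name_validator above is an anchored regex: the first character must be a letter or digit, and at least one further character from letters, digits, `-`, `_`, `.` must follow, so single-character names are rejected as well. A quick standalone check with a few hypothetical names:

    import re

    NAME_RE = re.compile(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$')

    for name in ('devs', 'qa-team', '.hidden', 'a', 'team_1.beta'):
        print(name, bool(NAME_RE.match(name)))
    # devs True, qa-team True, .hidden False, a False, team_1.beta True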
@@ -1,198 +1,198 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import re
21 import re
22 import colander
22 import colander
23
23
24 from rhodecode import forms
24 from rhodecode import forms
25 from rhodecode.model.db import User, UserEmailMap
25 from rhodecode.model.db import User, UserEmailMap
26 from rhodecode.model.validation_schema import types, validators
26 from rhodecode.model.validation_schema import types, validators
27 from rhodecode.translation import _
27 from rhodecode.translation import _
28 from rhodecode.lib.auth import check_password
28 from rhodecode.lib.auth import check_password
29 from rhodecode.lib import helpers as h
29 from rhodecode.lib import helpers as h
30
30
31
31
32 @colander.deferred
32 @colander.deferred
33 def deferred_user_password_validator(node, kw):
33 def deferred_user_password_validator(node, kw):
34 username = kw.get('username')
34 username = kw.get('username')
35 user = User.get_by_username(username)
35 user = User.get_by_username(username)
36
36
37 def _user_password_validator(node, value):
37 def _user_password_validator(node, value):
38 if not check_password(value, user.password):
38 if not check_password(value, user.password):
39 msg = _('Password is incorrect')
39 msg = _('Password is incorrect')
40 raise colander.Invalid(node, msg)
40 raise colander.Invalid(node, msg)
41 return _user_password_validator
41 return _user_password_validator
42
42
43
43
44
44
45 class ChangePasswordSchema(colander.Schema):
45 class ChangePasswordSchema(colander.Schema):
46
46
47 current_password = colander.SchemaNode(
47 current_password = colander.SchemaNode(
48 colander.String(),
48 colander.String(),
49 missing=colander.required,
49 missing=colander.required,
50 widget=forms.widget.PasswordWidget(redisplay=True),
50 widget=forms.widget.PasswordWidget(redisplay=True),
51 validator=deferred_user_password_validator)
51 validator=deferred_user_password_validator)
52
52
53 new_password = colander.SchemaNode(
53 new_password = colander.SchemaNode(
54 colander.String(),
54 colander.String(),
55 missing=colander.required,
55 missing=colander.required,
56 widget=forms.widget.CheckedPasswordWidget(redisplay=True),
56 widget=forms.widget.CheckedPasswordWidget(redisplay=True),
57 validator=colander.Length(min=6))
57 validator=colander.Length(min=6))
58
58
59 def validator(self, form, values):
59 def validator(self, form, values):
60 if values['current_password'] == values['new_password']:
60 if values['current_password'] == values['new_password']:
61 exc = colander.Invalid(form)
61 exc = colander.Invalid(form)
62 exc['new_password'] = _('New password must be different '
62 exc['new_password'] = _('New password must be different '
63 'from the old password')
63 'from the old password')
64 raise exc
64 raise exc
65
65
66
66
67 @colander.deferred
67 @colander.deferred
68 def deferred_username_validator(node, kw):
68 def deferred_username_validator(node, kw):
69
69
70 def name_validator(node, value):
70 def name_validator(node, value):
71 msg = _(
71 msg = _(
72 u'Username may only contain alphanumeric characters, '
72 u'Username may only contain alphanumeric characters, '
73 u'underscores, periods or dashes, and must begin with an '
73 u'underscores, periods or dashes, and must begin with an '
74 u'alphanumeric character or underscore')
74 u'alphanumeric character or underscore')
75
75
76 if not re.match(r'^[\w]{1}[\w\-\.]{0,254}$', value):
76 if not re.match(r'^[\w]{1}[\w\-\.]{0,254}$', value):
77 raise colander.Invalid(node, msg)
77 raise colander.Invalid(node, msg)
78
78
79 return name_validator
79 return name_validator
80
80
81
81
82 @colander.deferred
82 @colander.deferred
83 def deferred_email_validator(node, kw):
83 def deferred_email_validator(node, kw):
84 # NOTE(marcink): we might provide uniqueness validation later here...
84 # NOTE(marcink): we might provide uniqueness validation later here...
85 return colander.Email()
85 return colander.Email()
86
86
87
87
88 class UserSchema(colander.Schema):
88 class UserSchema(colander.Schema):
89 username = colander.SchemaNode(
89 username = colander.SchemaNode(
90 colander.String(),
90 colander.String(),
91 validator=deferred_username_validator)
91 validator=deferred_username_validator)
92
92
93 email = colander.SchemaNode(
93 email = colander.SchemaNode(
94 colander.String(),
94 colander.String(),
95 validator=deferred_email_validator)
95 validator=deferred_email_validator)
96
96
97 password = colander.SchemaNode(
97 password = colander.SchemaNode(
98 colander.String(), missing='')
98 colander.String(), missing='')
99
99
100 first_name = colander.SchemaNode(
100 first_name = colander.SchemaNode(
101 colander.String(), missing='')
101 colander.String(), missing='')
102
102
103 last_name = colander.SchemaNode(
103 last_name = colander.SchemaNode(
104 colander.String(), missing='')
104 colander.String(), missing='')
105
105
106 description = colander.SchemaNode(
106 description = colander.SchemaNode(
107 colander.String(), missing='')
107 colander.String(), missing='')
108
108
109 active = colander.SchemaNode(
109 active = colander.SchemaNode(
110 types.StringBooleanType(),
110 types.StringBooleanType(),
111 missing=False)
111 missing=False)
112
112
113 admin = colander.SchemaNode(
113 admin = colander.SchemaNode(
114 types.StringBooleanType(),
114 types.StringBooleanType(),
115 missing=False)
115 missing=False)
116
116
117 extern_name = colander.SchemaNode(
117 extern_name = colander.SchemaNode(
118 colander.String(), missing='')
118 colander.String(), missing='')
119
119
120 extern_type = colander.SchemaNode(
120 extern_type = colander.SchemaNode(
121 colander.String(), missing='')
121 colander.String(), missing='')
122
122
123 def deserialize(self, cstruct):
123 def deserialize(self, cstruct):
124 """
124 """
125 Custom deserialize that allows chaining validation, verifying
125 Custom deserialize that allows chaining validation, verifying
126 permissions, and, as a last step, uniqueness
126 permissions, and, as a last step, uniqueness
127 """
127 """
128
128
129 appstruct = super(UserSchema, self).deserialize(cstruct)
129 appstruct = super(UserSchema, self).deserialize(cstruct)
130 return appstruct
130 return appstruct
131
131
132
132
133 @colander.deferred
133 @colander.deferred
134 def deferred_user_email_in_emails_validator(node, kw):
134 def deferred_user_email_in_emails_validator(node, kw):
135 return colander.OneOf(kw.get('user_emails'))
135 return colander.OneOf(kw.get('user_emails'))
136
136
137
137
138 @colander.deferred
138 @colander.deferred
139 def deferred_additional_email_validator(node, kw):
139 def deferred_additional_email_validator(node, kw):
140 emails = kw.get('user_emails')
140 emails = kw.get('user_emails')
141
141
142 def name_validator(node, value):
142 def name_validator(node, value):
143 if value in emails:
143 if value in emails:
144 msg = _('This e-mail address is already taken')
144 msg = _('This e-mail address is already taken')
145 raise colander.Invalid(node, msg)
145 raise colander.Invalid(node, msg)
146 user = User.get_by_email(value, case_insensitive=True)
146 user = User.get_by_email(value, case_insensitive=True)
147 if user:
147 if user:
148 msg = _(u'This e-mail address is already taken')
148 msg = _(u'This e-mail address is already taken')
149 raise colander.Invalid(node, msg)
149 raise colander.Invalid(node, msg)
150 c = colander.Email()
150 c = colander.Email()
151 return c(node, value)
151 return c(node, value)
152 return name_validator
152 return name_validator
153
153
154
154
155 @colander.deferred
155 @colander.deferred
156 def deferred_user_email_in_emails_widget(node, kw):
156 def deferred_user_email_in_emails_widget(node, kw):
157 import deform.widget
157 import deform.widget
158 emails = [(email, email) for email in kw.get('user_emails')]
158 emails = [(email, email) for email in kw.get('user_emails')]
159 return deform.widget.Select2Widget(values=emails)
159 return deform.widget.Select2Widget(values=emails)
160
160
161
161
162 class UserProfileSchema(colander.Schema):
162 class UserProfileSchema(colander.Schema):
163 username = colander.SchemaNode(
163 username = colander.SchemaNode(
164 colander.String(),
164 colander.String(),
165 validator=deferred_username_validator)
165 validator=deferred_username_validator)
166
166
167 firstname = colander.SchemaNode(
167 firstname = colander.SchemaNode(
168 colander.String(), missing='', title='First name')
168 colander.String(), missing='', title='First name')
169
169
170 lastname = colander.SchemaNode(
170 lastname = colander.SchemaNode(
171 colander.String(), missing='', title='Last name')
171 colander.String(), missing='', title='Last name')
172
172
173 description = colander.SchemaNode(
173 description = colander.SchemaNode(
174 colander.String(), missing='', title='Personal Description',
174 colander.String(), missing='', title='Personal Description',
175 widget=forms.widget.TextAreaWidget(),
175 widget=forms.widget.TextAreaWidget(),
176 validator=colander.Length(max=250)
176 validator=colander.Length(max=250)
177 )
177 )
178
178
179 email = colander.SchemaNode(
179 email = colander.SchemaNode(
180 colander.String(), widget=deferred_user_email_in_emails_widget,
180 colander.String(), widget=deferred_user_email_in_emails_widget,
181 validator=deferred_user_email_in_emails_validator,
181 validator=deferred_user_email_in_emails_validator,
182 description=h.literal(
182 description=h.literal(
183 _('Additional emails can be specified at <a href="{}">extra emails</a> page.').format(
183 _('Additional emails can be specified at <a href="{}">extra emails</a> page.').format(
184 '/_admin/my_account/emails')),
184 '/_admin/my_account/emails')),
185 )
185 )
186
186
187
187
188
188
189 class AddEmailSchema(colander.Schema):
189 class AddEmailSchema(colander.Schema):
190 current_password = colander.SchemaNode(
190 current_password = colander.SchemaNode(
191 colander.String(),
191 colander.String(),
192 missing=colander.required,
192 missing=colander.required,
193 widget=forms.widget.PasswordWidget(redisplay=True),
193 widget=forms.widget.PasswordWidget(redisplay=True),
194 validator=deferred_user_password_validator)
194 validator=deferred_user_password_validator)
195
195
196 email = colander.SchemaNode(
196 email = colander.SchemaNode(
197 colander.String(), title='New Email',
197 colander.String(), title='New Email',
198 validator=deferred_additional_email_validator)
198 validator=deferred_additional_email_validator)
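ChangePasswordSchema above attaches a schema-level validator method that runs after the per-node validators and reports its error on a specific child node via Invalid.__setitem__. The same pattern in a self-contained toy schema (no deform widgets and no password hashing, so it runs with colander alone):

    import colander


    class ToyChangePasswordSchema(colander.MappingSchema):
        current_password = colander.SchemaNode(colander.String())
        new_password = colander.SchemaNode(colander.String(),
                                           validator=colander.Length(min=6))

        def validator(self, form, values):
            # runs after individual nodes validate; attach the error to a child node
            if values['current_password'] == values['new_password']:
                exc = colander.Invalid(form)
                exc['new_password'] = 'New password must be different from the old password'
                raise exc


    schema = ToyChangePasswordSchema()
    print(schema.deserialize({'current_password': 'old-secret',
                              'new_password': 'new-secret'}))

    try:
        schema.deserialize({'current_password': 'secret123',
                            'new_password': 'secret123'})
    except colander.Invalid as exc:
        print(exc.asdict())   # error keyed under 'new_password'

exc.asdict() keys the message under new_password, which is how the form layer can highlight the offending field.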
@@ -1,196 +1,196 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import re
21 import re
22 import colander
22 import colander
23
23
24 from rhodecode.model.validation_schema import preparers
24 from rhodecode.model.validation_schema import preparers
25 from rhodecode.model.db import User, UserGroup
25 from rhodecode.model.db import User, UserGroup
26
26
27
27
28 class _RootLocation(object):
28 class _RootLocation(object):
29 pass
29 pass
30
30
31 RootLocation = _RootLocation()
31 RootLocation = _RootLocation()
32
32
33
33
34 def _normalize(seperator, path):
34 def _normalize(seperator, path):
35
35
36 if not path:
36 if not path:
37 return ''
37 return ''
38 elif path is colander.null:
38 elif path is colander.null:
39 return colander.null
39 return colander.null
40
40
41 parts = path.split(seperator)
41 parts = path.split(seperator)
42
42
43 def bad_parts(value):
43 def bad_parts(value):
44 if not value:
44 if not value:
45 return False
45 return False
46 if re.match(r'^[.]+$', value):
46 if re.match(r'^[.]+$', value):
47 return False
47 return False
48
48
49 return True
49 return True
50
50
51 def slugify(value):
51 def slugify(value):
52 value = preparers.slugify_preparer(value)
52 value = preparers.slugify_preparer(value)
53 value = re.sub(r'[.]{2,}', '.', value)
53 value = re.sub(r'[.]{2,}', '.', value)
54 return value
54 return value
55
55
56 clean_parts = [slugify(item) for item in parts if item]
56 clean_parts = [slugify(item) for item in parts if item]
57 path = filter(bad_parts, clean_parts)
57 path = filter(bad_parts, clean_parts)
58 return seperator.join(path)
58 return seperator.join(path)
59
59
60
60
61 class RepoNameType(colander.String):
61 class RepoNameType(colander.String):
62 SEPARATOR = '/'
62 SEPARATOR = '/'
63
63
64 def deserialize(self, node, cstruct):
64 def deserialize(self, node, cstruct):
65 result = super(RepoNameType, self).deserialize(node, cstruct)
65 result = super(RepoNameType, self).deserialize(node, cstruct)
66 if cstruct is colander.null:
66 if cstruct is colander.null:
67 return colander.null
67 return colander.null
68 return self._normalize(result)
68 return self._normalize(result)
69
69
70 def _normalize(self, path):
70 def _normalize(self, path):
71 return _normalize(self.SEPARATOR, path)
71 return _normalize(self.SEPARATOR, path)
72
72
73
73
74 class GroupNameType(colander.String):
74 class GroupNameType(colander.String):
75 SEPARATOR = '/'
75 SEPARATOR = '/'
76
76
77 def deserialize(self, node, cstruct):
77 def deserialize(self, node, cstruct):
78 if cstruct is RootLocation:
78 if cstruct is RootLocation:
79 return cstruct
79 return cstruct
80
80
81 result = super(GroupNameType, self).deserialize(node, cstruct)
81 result = super(GroupNameType, self).deserialize(node, cstruct)
82 if cstruct is colander.null:
82 if cstruct is colander.null:
83 return colander.null
83 return colander.null
84 return self._normalize(result)
84 return self._normalize(result)
85
85
86 def _normalize(self, path):
86 def _normalize(self, path):
87 return _normalize(self.SEPARATOR, path)
87 return _normalize(self.SEPARATOR, path)
88
88
89
89
90 class StringBooleanType(colander.String):
90 class StringBooleanType(colander.String):
91 true_values = ['true', 't', 'yes', 'y', 'on', '1']
91 true_values = ['true', 't', 'yes', 'y', 'on', '1']
92 false_values = ['false', 'f', 'no', 'n', 'off', '0']
92 false_values = ['false', 'f', 'no', 'n', 'off', '0']
93
93
94 def serialize(self, node, appstruct):
94 def serialize(self, node, appstruct):
95 if appstruct is colander.null:
95 if appstruct is colander.null:
96 return colander.null
96 return colander.null
97 if not isinstance(appstruct, bool):
97 if not isinstance(appstruct, bool):
98 raise colander.Invalid(node, '%r is not a boolean' % appstruct)
98 raise colander.Invalid(node, '%r is not a boolean' % appstruct)
99
99
100 return appstruct and 'true' or 'false'
100 return appstruct and 'true' or 'false'
101
101
102 def deserialize(self, node, cstruct):
102 def deserialize(self, node, cstruct):
103 if cstruct is colander.null:
103 if cstruct is colander.null:
104 return colander.null
104 return colander.null
105
105
106 if isinstance(cstruct, bool):
106 if isinstance(cstruct, bool):
107 return cstruct
107 return cstruct
108
108
109 if not isinstance(cstruct, str):
109 if not isinstance(cstruct, str):
110 raise colander.Invalid(node, '%r is not a string' % cstruct)
110 raise colander.Invalid(node, '%r is not a string' % cstruct)
111
111
112 value = cstruct.lower()
112 value = cstruct.lower()
113 if value in self.true_values:
113 if value in self.true_values:
114 return True
114 return True
115 elif value in self.false_values:
115 elif value in self.false_values:
116 return False
116 return False
117 else:
117 else:
118 raise colander.Invalid(
118 raise colander.Invalid(
119 node, '{} value cannot be translated to bool'.format(value))
119 node, '{} value cannot be translated to bool'.format(value))
120
120
121
121
122 class UserOrUserGroupType(colander.SchemaType):
122 class UserOrUserGroupType(colander.SchemaType):
123 """ colander Schema type for valid rhodecode user and/or usergroup """
123 """ colander Schema type for valid rhodecode user and/or usergroup """
124 scopes = ('user', 'usergroup')
124 scopes = ('user', 'usergroup')
125
125
126 def __init__(self):
126 def __init__(self):
127 self.users = 'user' in self.scopes
127 self.users = 'user' in self.scopes
128 self.usergroups = 'usergroup' in self.scopes
128 self.usergroups = 'usergroup' in self.scopes
129
129
130 def serialize(self, node, appstruct):
130 def serialize(self, node, appstruct):
131 if appstruct is colander.null:
131 if appstruct is colander.null:
132 return colander.null
132 return colander.null
133
133
134 if self.users:
134 if self.users:
135 if isinstance(appstruct, User):
135 if isinstance(appstruct, User):
136 if self.usergroups:
136 if self.usergroups:
137 return 'user:%s' % appstruct.username
137 return 'user:%s' % appstruct.username
138 return appstruct.username
138 return appstruct.username
139
139
140 if self.usergroups:
140 if self.usergroups:
141 if isinstance(appstruct, UserGroup):
141 if isinstance(appstruct, UserGroup):
142 if self.users:
142 if self.users:
143 return 'usergroup:%s' % appstruct.users_group_name
143 return 'usergroup:%s' % appstruct.users_group_name
144 return appstruct.users_group_name
144 return appstruct.users_group_name
145
145
146 raise colander.Invalid(
146 raise colander.Invalid(
147 node, '%s is not a valid %s' % (appstruct, ' or '.join(self.scopes)))
147 node, '%s is not a valid %s' % (appstruct, ' or '.join(self.scopes)))
148
148
149 def deserialize(self, node, cstruct):
149 def deserialize(self, node, cstruct):
150 if cstruct is colander.null:
150 if cstruct is colander.null:
151 return colander.null
151 return colander.null
152
152
153 user, usergroup = None, None
153 user, usergroup = None, None
154 if self.users:
154 if self.users:
155 if cstruct.startswith('user:'):
155 if cstruct.startswith('user:'):
156 user = User.get_by_username(cstruct.split(':')[1])
156 user = User.get_by_username(cstruct.split(':')[1])
157 else:
157 else:
158 user = User.get_by_username(cstruct)
158 user = User.get_by_username(cstruct)
159
159
160 if self.usergroups:
160 if self.usergroups:
161 if cstruct.startswith('usergroup:'):
161 if cstruct.startswith('usergroup:'):
162 usergroup = UserGroup.get_by_group_name(cstruct.split(':')[1])
162 usergroup = UserGroup.get_by_group_name(cstruct.split(':')[1])
163 else:
163 else:
164 usergroup = UserGroup.get_by_group_name(cstruct)
164 usergroup = UserGroup.get_by_group_name(cstruct)
165
165
166 if self.users and self.usergroups:
166 if self.users and self.usergroups:
167 if user and usergroup:
167 if user and usergroup:
168 raise colander.Invalid(node, (
168 raise colander.Invalid(node, (
169 '%s is both a user and usergroup, specify which '
169 '%s is both a user and usergroup, specify which '
170 'one is wanted by prepending user: or usergroup: to the '
170 'one is wanted by prepending user: or usergroup: to the '
171 'name') % cstruct)
171 'name') % cstruct)
172
172
173 if self.users and user:
173 if self.users and user:
174 return user
174 return user
175
175
176 if self.usergroups and usergroup:
176 if self.usergroups and usergroup:
177 return usergroup
177 return usergroup
178
178
179 raise colander.Invalid(
179 raise colander.Invalid(
180 node, '%s is not a valid %s' % (cstruct, ' or '.join(self.scopes)))
180 node, '%s is not a valid %s' % (cstruct, ' or '.join(self.scopes)))
181
181
182
182
183 class UserType(UserOrUserGroupType):
183 class UserType(UserOrUserGroupType):
184 scopes = ('user',)
184 scopes = ('user',)
185
185
186
186
187 class UserGroupType(UserOrUserGroupType):
187 class UserGroupType(UserOrUserGroupType):
188 scopes = ('usergroup',)
188 scopes = ('usergroup',)
189
189
190
190
191 class StrOrIntType(colander.String):
191 class StrOrIntType(colander.String):
192 def deserialize(self, node, cstruct):
192 def deserialize(self, node, cstruct):
193 if isinstance(cstruct, str):
193 if isinstance(cstruct, str):
194 return super(StrOrIntType, self).deserialize(node, cstruct)
194 return super(StrOrIntType, self).deserialize(node, cstruct)
195 else:
195 else:
196 return colander.Integer().deserialize(node, cstruct)
196 return colander.Integer().deserialize(node, cstruct)
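As a rough usage sketch of the custom types above (the `RepoSchema` container is invented for illustration; the exact repo-name output depends on `preparers.slugify_preparer`):

    import colander

    class RepoSchema(colander.Schema):
        repo_name = colander.SchemaNode(RepoNameType())
        private = colander.SchemaNode(StringBooleanType())

    data = RepoSchema().deserialize(
        {'repo_name': 'group/my-repo', 'private': 'yes'})
    # data['private'] is True ('no', 'off', '0', ... would give False);
    # data['repo_name'] comes back with each path segment slugified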
@@ -1,56 +1,56 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2016-2020 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import deform.widget
21 import deform.widget
22
22
23
23
24 def convert_to_optgroup(items):
24 def convert_to_optgroup(items):
25 """
25 """
26 Convert such format::
26 Convert such format::
27
27
28 [
28 [
29 ['rev:tip', u'latest tip'],
29 ['rev:tip', u'latest tip'],
30 ([(u'branch:default', u'default')], u'Branches'),
30 ([(u'branch:default', u'default')], u'Branches'),
31 ]
31 ]
32
32
33 into one used by deform Select widget::
33 into one used by deform Select widget::
34
34
35 (
35 (
36 ('rev:tip', 'latest tip'),
36 ('rev:tip', 'latest tip'),
37 OptGroup('Branches',
37 OptGroup('Branches',
38 ('branch:default', 'default'),
38 ('branch:default', 'default'),
39 )
39 )
40 """
40 """
41 result = []
41 result = []
42 for value, label in items:
42 for value, label in items:
43 # option group
43 # option group
44 if isinstance(value, (tuple, list)):
44 if isinstance(value, (tuple, list)):
45 result.append(deform.widget.OptGroup(label, *value))
45 result.append(deform.widget.OptGroup(label, *value))
46 else:
46 else:
47 result.append((value, label))
47 result.append((value, label))
48
48
49 return result
49 return result
50
50
51
51
52 def username_converter(value):
52 def username_converter(value):
53 for noise in ('/', ',', '*', '"', "'", '<', '>', '(', ')', '[', ']', ';'):
53 for noise in ('/', ',', '*', '"', "'", '<', '>', '(', ')', '[', ']', ';'):
54 value = value.replace(noise, '')
54 value = value.replace(noise, '')
55
55
56 return value
56 return value
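A short sketch tying the two helpers above together; the choice values are the same ones used in the `convert_to_optgroup` docstring and are purely illustrative.

    import deform.widget

    choices = convert_to_optgroup([
        ['rev:tip', u'latest tip'],
        ([(u'branch:default', u'default')], u'Branches'),
    ])
    widget = deform.widget.SelectWidget(values=choices)

    # username_converter strips characters considered noise in usernames
    username_converter('john.doe (admin)')  # -> 'john.doe admin'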
@@ -1,160 +1,160 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import re
22 import re
23 import logging
23 import logging
24
24
25
25
26 import ipaddress
26 import ipaddress
27 import colander
27 import colander
28
28
29 from rhodecode.translation import _
29 from rhodecode.translation import _
30 from rhodecode.lib.utils2 import glob2re, safe_unicode
30 from rhodecode.lib.utils2 import glob2re, safe_unicode
31 from rhodecode.lib.ext_json import json
31 from rhodecode.lib.ext_json import json
32
32
33 log = logging.getLogger(__name__)
33 log = logging.getLogger(__name__)
34
34
35
35
36 def ip_addr_validator(node, value):
36 def ip_addr_validator(node, value):
37 try:
37 try:
38 # this raises a ValueError if the address is not IPv4 or IPv6
38 # this raises a ValueError if the address is not IPv4 or IPv6
39 ipaddress.ip_network(safe_unicode(value), strict=False)
39 ipaddress.ip_network(safe_unicode(value), strict=False)
40 except ValueError:
40 except ValueError:
41 msg = _(u'Please enter a valid IPv4 or IPv6 address')
41 msg = _(u'Please enter a valid IPv4 or IPv6 address')
42 raise colander.Invalid(node, msg)
42 raise colander.Invalid(node, msg)
43
43
44
44
45 class IpAddrValidator(object):
45 class IpAddrValidator(object):
46 def __init__(self, strict=True):
46 def __init__(self, strict=True):
47 self.strict = strict
47 self.strict = strict
48
48
49 def __call__(self, node, value):
49 def __call__(self, node, value):
50 try:
50 try:
51 # this raises a ValueError if the address is not IPv4 or IPv6
51 # this raises a ValueError if the address is not IPv4 or IPv6
52 ipaddress.ip_network(safe_unicode(value), strict=self.strict)
52 ipaddress.ip_network(safe_unicode(value), strict=self.strict)
53 except ValueError:
53 except ValueError:
54 msg = _(u'Please enter a valid IPv4 or IPv6 address')
54 msg = _(u'Please enter a valid IPv4 or IPv6 address')
55 raise colander.Invalid(node, msg)
55 raise colander.Invalid(node, msg)
56
56
57
57
58 def glob_validator(node, value):
58 def glob_validator(node, value):
59 try:
59 try:
60 re.compile('^' + glob2re(value) + '$')
60 re.compile('^' + glob2re(value) + '$')
61 except Exception:
61 except Exception:
62 msg = _(u'Invalid glob pattern')
62 msg = _(u'Invalid glob pattern')
63 raise colander.Invalid(node, msg)
63 raise colander.Invalid(node, msg)
64
64
65
65
66 def valid_name_validator(node, value):
66 def valid_name_validator(node, value):
67 from rhodecode.model.validation_schema import types
67 from rhodecode.model.validation_schema import types
68 if value is types.RootLocation:
68 if value is types.RootLocation:
69 return
69 return
70
70
71 msg = _('Name must start with a letter or number. Got `{}`').format(value)
71 msg = _('Name must start with a letter or number. Got `{}`').format(value)
72 if not re.match(r'^[a-zA-Z0-9]{1,}', value):
72 if not re.match(r'^[a-zA-Z0-9]{1,}', value):
73 raise colander.Invalid(node, msg)
73 raise colander.Invalid(node, msg)
74
74
75
75
76 class InvalidCloneUrl(Exception):
76 class InvalidCloneUrl(Exception):
77 allowed_prefixes = ()
77 allowed_prefixes = ()
78
78
79
79
80 def url_validator(url, repo_type, config):
80 def url_validator(url, repo_type, config):
81 from rhodecode.lib.vcs.backends.hg import MercurialRepository
81 from rhodecode.lib.vcs.backends.hg import MercurialRepository
82 from rhodecode.lib.vcs.backends.git import GitRepository
82 from rhodecode.lib.vcs.backends.git import GitRepository
83 from rhodecode.lib.vcs.backends.svn import SubversionRepository
83 from rhodecode.lib.vcs.backends.svn import SubversionRepository
84
84
85 if repo_type == 'hg':
85 if repo_type == 'hg':
86 allowed_prefixes = ('http', 'svn+http', 'git+http')
86 allowed_prefixes = ('http', 'svn+http', 'git+http')
87
87
88 if 'http' in url[:4]:
88 if 'http' in url[:4]:
89 # initially check if it's at least the proper URL
89 # initially check if it's at least the proper URL
90 # or does it pass basic auth
90 # or does it pass basic auth
91
91
92 return MercurialRepository.check_url(url, config)
92 return MercurialRepository.check_url(url, config)
93 elif 'svn+http' in url[:8]: # svn->hg import
93 elif 'svn+http' in url[:8]: # svn->hg import
94 SubversionRepository.check_url(url, config)
94 SubversionRepository.check_url(url, config)
95 elif 'git+http' in url[:8]: # git->hg import
95 elif 'git+http' in url[:8]: # git->hg import
96 raise NotImplementedError()
96 raise NotImplementedError()
97 else:
97 else:
98 exc = InvalidCloneUrl('Clone from URI %s not allowed. '
98 exc = InvalidCloneUrl('Clone from URI %s not allowed. '
99 'Allowed url must start with one of %s'
99 'Allowed url must start with one of %s'
100 % (url, ','.join(allowed_prefixes)))
100 % (url, ','.join(allowed_prefixes)))
101 exc.allowed_prefixes = allowed_prefixes
101 exc.allowed_prefixes = allowed_prefixes
102 raise exc
102 raise exc
103
103
104 elif repo_type == 'git':
104 elif repo_type == 'git':
105 allowed_prefixes = ('http', 'svn+http', 'hg+http')
105 allowed_prefixes = ('http', 'svn+http', 'hg+http')
106 if 'http' in url[:4]:
106 if 'http' in url[:4]:
107 # initially check if it's at least the proper URL
107 # initially check if it's at least the proper URL
108 # or does it pass basic auth
108 # or does it pass basic auth
109 return GitRepository.check_url(url, config)
109 return GitRepository.check_url(url, config)
110 elif 'svn+http' in url[:8]: # svn->git import
110 elif 'svn+http' in url[:8]: # svn->git import
111 raise NotImplementedError()
111 raise NotImplementedError()
112 elif 'hg+http' in url[:8]: # hg->git import
112 elif 'hg+http' in url[:8]: # hg->git import
113 raise NotImplementedError()
113 raise NotImplementedError()
114 else:
114 else:
115 exc = InvalidCloneUrl('Clone from URI %s not allowed. '
115 exc = InvalidCloneUrl('Clone from URI %s not allowed. '
116 'Allowed url must start with one of %s'
116 'Allowed url must start with one of %s'
117 % (url, ','.join(allowed_prefixes)))
117 % (url, ','.join(allowed_prefixes)))
118 exc.allowed_prefixes = allowed_prefixes
118 exc.allowed_prefixes = allowed_prefixes
119 raise exc
119 raise exc
120 elif repo_type == 'svn':
120 elif repo_type == 'svn':
121 # no validation for SVN yet
121 # no validation for SVN yet
122 return
122 return
123
123
124 raise InvalidCloneUrl('Invalid repo type specified: `{}`'.format(repo_type))
124 raise InvalidCloneUrl('Invalid repo type specified: `{}`'.format(repo_type))
125
125
126
126
127 class CloneUriValidator(object):
127 class CloneUriValidator(object):
128 def __init__(self, repo_type):
128 def __init__(self, repo_type):
129 self.repo_type = repo_type
129 self.repo_type = repo_type
130
130
131 def __call__(self, node, value):
131 def __call__(self, node, value):
132
132
133 from rhodecode.lib.utils import make_db_config
133 from rhodecode.lib.utils import make_db_config
134 try:
134 try:
135 config = make_db_config(clear_session=False)
135 config = make_db_config(clear_session=False)
136 url_validator(value, self.repo_type, config)
136 url_validator(value, self.repo_type, config)
137 except InvalidCloneUrl as e:
137 except InvalidCloneUrl as e:
138 log.warning(e)
138 log.warning(e)
139 raise colander.Invalid(node, e.message)
139 raise colander.Invalid(node, e.message)
140 except Exception:
140 except Exception:
141 log.exception('Url validation failed')
141 log.exception('Url validation failed')
142 msg = _(u'invalid clone url or credentials for {repo_type} repository').format(
142 msg = _(u'invalid clone url or credentials for {repo_type} repository').format(
143 repo_type=self.repo_type)
143 repo_type=self.repo_type)
144 raise colander.Invalid(node, msg)
144 raise colander.Invalid(node, msg)
145
145
146
146
147 def json_validator(node, value):
147 def json_validator(node, value):
148 try:
148 try:
149 json.loads(value)
149 json.loads(value)
150 except (Exception,) as e:
150 except (Exception,) as e:
151 msg = _(u'Please enter a valid json object')
151 msg = _(u'Please enter a valid json object')
152 raise colander.Invalid(node, msg)
152 raise colander.Invalid(node, msg)
153
153
154
154
155 def json_validator_with_exc(node, value):
155 def json_validator_with_exc(node, value):
156 try:
156 try:
157 json.loads(value)
157 json.loads(value)
158 except (Exception,) as e:
158 except (Exception,) as e:
159 msg = _(u'Please enter a valid json object: `{}`'.format(e))
159 msg = _(u'Please enter a valid json object: `{}`'.format(e))
160 raise colander.Invalid(node, msg)
160 raise colander.Invalid(node, msg)
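A brief sketch of how the validators in this module attach to colander nodes (the `WhitelistEntry` schema is a made-up example, not part of this changeset):

    import colander

    class WhitelistEntry(colander.Schema):
        ip = colander.SchemaNode(
            colander.String(), validator=ip_addr_validator)
        rules = colander.SchemaNode(
            colander.String(), validator=json_validator)

    WhitelistEntry().deserialize(
        {'ip': '192.168.0.0/24', 'rules': '{"allow": true}'})  # passes
    # an invalid address or malformed JSON would raise colander.Invalid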
@@ -1,59 +1,59 b''
1 # -*- coding: utf-8 -*-
1
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22
22
23 import deform.widget
23 import deform.widget
24 from deform.widget import null, OptGroup
24 from deform.widget import null, OptGroup
25
25
26 log = logging.getLogger(__name__)
26 log = logging.getLogger(__name__)
27
27
28
28
29 def _normalize_choices(values):
29 def _normalize_choices(values):
30 result = []
30 result = []
31 for item in values:
31 for item in values:
32 if isinstance(item, OptGroup):
32 if isinstance(item, OptGroup):
33 normalized_options = _normalize_choices(item.options)
33 normalized_options = _normalize_choices(item.options)
34 result.append(OptGroup(item.label, *normalized_options))
34 result.append(OptGroup(item.label, *normalized_options))
35 else:
35 else:
36 value, description, help_block = item
36 value, description, help_block = item
37 if not isinstance(value, str):
37 if not isinstance(value, str):
38 value = str(value)
38 value = str(value)
39 result.append((value, description, help_block))
39 result.append((value, description, help_block))
40 return result
40 return result
41
41
42
42
43 class CodeMirrorWidget(deform.widget.TextAreaWidget):
43 class CodeMirrorWidget(deform.widget.TextAreaWidget):
44 template = 'codemirror'
44 template = 'codemirror'
45 requirements = (('deform', None), ('codemirror', None))
45 requirements = (('deform', None), ('codemirror', None))
46
46
47
47
48 class CheckboxChoiceWidgetDesc(deform.widget.CheckboxChoiceWidget):
48 class CheckboxChoiceWidgetDesc(deform.widget.CheckboxChoiceWidget):
49 template = "checkbox_choice_desc"
49 template = "checkbox_choice_desc"
50
50
51 def serialize(self, field, cstruct, **kw):
51 def serialize(self, field, cstruct, **kw):
52 if cstruct in (null, None):
52 if cstruct in (null, None):
53 cstruct = ()
53 cstruct = ()
54 readonly = kw.get("readonly", self.readonly)
54 readonly = kw.get("readonly", self.readonly)
55 values = kw.get("values", self.values)
55 values = kw.get("values", self.values)
56 kw["values"] = _normalize_choices(values)
56 kw["values"] = _normalize_choices(values)
57 template = readonly and self.readonly_template or self.template
57 template = readonly and self.readonly_template or self.template
58 tmpl_values = self.get_template_values(field, cstruct, kw)
58 tmpl_values = self.get_template_values(field, cstruct, kw)
59 return field.renderer(template, **tmpl_values)
59 return field.renderer(template, **tmpl_values)
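For illustration only, the widgets above could be wired into schema nodes roughly like this, assuming the custom 'codemirror' and 'checkbox_choice_desc' templates are on the deform renderer's search path; the permission values are invented.

    import colander

    hook_body = colander.SchemaNode(
        colander.String(),
        widget=CodeMirrorWidget())

    permissions = colander.SchemaNode(
        colander.Set(),
        widget=CheckboxChoiceWidgetDesc(values=[
            # (value, label, help text rendered by the *_desc template)
            ('create', 'Create repositories', 'Allow creating new repositories'),
            ('fork', 'Fork repositories', 'Allow forking existing repositories'),
        ]))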
@@ -1,1115 +1,1116 b''
1 # -*- coding: utf-8 -*-
2
1
3 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
3 #
5 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
8 #
7 #
9 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
11 # GNU General Public License for more details.
13 #
12 #
14 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
15 #
17 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
19
21 """
20 """
22 Set of generic validators
21 Set of generic validators
23 """
22 """
24
23
25
24
26 import os
25 import os
27 import re
26 import re
28 import logging
27 import logging
29 import collections
28 import collections
30
29
31 import formencode
30 import formencode
32 import ipaddress
31 import ipaddress
33 from formencode.validators import (
32 from formencode.validators import (
34 UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set,
33 UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set,
35 NotEmpty, IPAddress, CIDR, String, FancyValidator
34 NotEmpty, IPAddress, CIDR, String, FancyValidator
36 )
35 )
37
36
38 from sqlalchemy.sql.expression import true
37 from sqlalchemy.sql.expression import true
39 from sqlalchemy.util import OrderedSet
38 from sqlalchemy.util import OrderedSet
40
39
41 from rhodecode.authentication import (
40 from rhodecode.authentication import (
42 legacy_plugin_prefix, _import_legacy_plugin)
41 legacy_plugin_prefix, _import_legacy_plugin)
43 from rhodecode.authentication.base import loadplugin
42 from rhodecode.authentication.base import loadplugin
44 from rhodecode.apps._base import ADMIN_PREFIX
43 from rhodecode.apps._base import ADMIN_PREFIX
45 from rhodecode.lib.auth import HasRepoGroupPermissionAny, HasPermissionAny
44 from rhodecode.lib.auth import HasRepoGroupPermissionAny, HasPermissionAny
46 from rhodecode.lib.utils import repo_name_slug, make_db_config
45 from rhodecode.lib.utils import repo_name_slug, make_db_config
47 from rhodecode.lib.utils2 import safe_int, str2bool, aslist
46 from rhodecode.lib.utils2 import safe_int, str2bool, aslist
48 from rhodecode.lib.str_utils import safe_str
47 from rhodecode.lib.str_utils import safe_str
49 from rhodecode.lib.hash_utils import md5_safe
48 from rhodecode.lib.hash_utils import md5_safe
50 from rhodecode.lib.vcs.backends.git.repository import GitRepository
49 from rhodecode.lib.vcs.backends.git.repository import GitRepository
51 from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository
50 from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository
52 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
51 from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository
53 from rhodecode.model.db import (
52 from rhodecode.model.db import (
54 RepoGroup, Repository, UserGroup, User, ChangesetStatus, Gist)
53 RepoGroup, Repository, UserGroup, User, ChangesetStatus, Gist)
55 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.settings import VcsSettingsModel
56
55
57 # silence warnings and pylint
56 # silence warnings and pylint
58 UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set, \
57 UnicodeString, OneOf, Int, Number, Regex, Email, Bool, StringBoolean, Set, \
59 NotEmpty, IPAddress, CIDR, String, FancyValidator
58 NotEmpty, IPAddress, CIDR, String, FancyValidator
60
59
61 log = logging.getLogger(__name__)
60 log = logging.getLogger(__name__)
62
61
63
62
64 class _Missing(object):
63 class _Missing(object):
65 pass
64 pass
66
65
67
66
68 Missing = _Missing()
67 Missing = _Missing()
69
68
70
69
71 def M(self, key, state, **kwargs):
70 def M(self, key, state, **kwargs):
72 """
71 """
73 returns string from self.message based on given key,
72 returns string from self.message based on given key,
74 passed kw params are used to substitute %(named)s params inside
73 passed kw params are used to substitute %(named)s params inside
75 translated strings
74 translated strings
76
75
77 :param key:
76 :param key:
78 :param state:
77 :param state:
79 """
78 """
80
79
81 #state._ = staticmethod(_)
80 #state._ = staticmethod(_)
82 # inject validator into state object
81 # inject validator into state object
83 return self.message(key, state, **kwargs)
82 return self.message(key, state, **kwargs)
84
83
85
84
86 def UniqueList(localizer, convert=None):
85 def UniqueList(localizer, convert=None):
87 _ = localizer
86 _ = localizer
88
87
89 class _validator(formencode.FancyValidator):
88 class _validator(formencode.FancyValidator):
90 """
89 """
91 Unique List !
90 Unique List !
92 """
91 """
92 accept_iterator = True
93
93 messages = {
94 messages = {
94 'empty': _('Value cannot be an empty list'),
95 'empty': _('Value cannot be an empty list'),
95 'missing_value': _('Value cannot be an empty list'),
96 'missing_value': _('Value cannot be an empty list'),
96 }
97 }
97
98
98 def _convert_to_python(self, value, state):
99 def _convert_to_python(self, value, state):
99 ret_val = []
100
100
101 def make_unique(value):
101 def make_unique(_value):
102 seen = []
102 seen = []
103 return [c for c in value if not (c in seen or seen.append(c))]
103 return [c for c in _value if not (c in seen or seen.append(c))]
104
104
105 if isinstance(value, list):
105 if isinstance(value, list):
106 ret_val = make_unique(value)
106 ret_val = make_unique(value)
107 elif isinstance(value, set):
107 elif isinstance(value, set):
108 ret_val = make_unique(list(value))
108 ret_val = make_unique(list(value))
109 elif isinstance(value, tuple):
109 elif isinstance(value, tuple):
110 ret_val = make_unique(list(value))
110 ret_val = make_unique(list(value))
111 elif value is None:
111 elif value is None:
112 ret_val = []
112 ret_val = []
113 else:
113 else:
114 ret_val = [value]
114 ret_val = [value]
115
115
116 if convert:
116 if convert:
117 ret_val = list(map(convert, ret_val))
117 ret_val = list(map(convert, ret_val))
118 return ret_val
118 return ret_val
119
119
120 def empty_value(self, value):
120 def empty_value(self, value):
121 return []
121 return []
122
122 return _validator
123 return _validator
123
124
124
125
125 def UniqueListFromString(localizer):
126 def UniqueListFromString(localizer):
126 _ = localizer
127 _ = localizer
127
128
128 class _validator(UniqueList(localizer)):
129 class _validator(UniqueList(localizer)):
129 def _convert_to_python(self, value, state):
130 def _convert_to_python(self, value, state):
130 if isinstance(value, str):
131 if isinstance(value, str):
131 value = aslist(value, ',')
132 value = aslist(value, ',')
132 return super(_validator, self)._convert_to_python(value, state)
133 return super(_validator, self)._convert_to_python(value, state)
133 return _validator
134 return _validator
134
135
135
136
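To illustrate how these validator factories are consumed (a sketch following formencode's FancyValidator API; `_` stands for whatever localizer callable the caller has at hand):

    # UniqueListFromString splits a comma-separated string and de-duplicates it
    validator = UniqueListFromString(_)(not_empty=True)
    validator.to_python('docs,src,docs')  # -> ['docs', 'src']

    # UniqueList can additionally coerce each item, e.g. to int
    UniqueList(_, convert=int)().to_python(['1', '2', '2'])  # -> [1, 2]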
136 def ValidSvnPattern(localizer, section, repo_name=None):
137 def ValidSvnPattern(localizer, section, repo_name=None):
137 _ = localizer
138 _ = localizer
138
139
139 class _validator(formencode.validators.FancyValidator):
140 class _validator(formencode.validators.FancyValidator):
140 messages = {
141 messages = {
141 'pattern_exists': _('Pattern already exists'),
142 'pattern_exists': _('Pattern already exists'),
142 }
143 }
143
144
144 def _validate_python(self, value, state):
145 def _validate_python(self, value, state):
145 if not value:
146 if not value:
146 return
147 return
147 model = VcsSettingsModel(repo=repo_name)
148 model = VcsSettingsModel(repo=repo_name)
148 ui_settings = model.get_svn_patterns(section=section)
149 ui_settings = model.get_svn_patterns(section=section)
149 for entry in ui_settings:
150 for entry in ui_settings:
150 if value == entry.value:
151 if value == entry.value:
151 msg = M(self, 'pattern_exists', state)
152 msg = M(self, 'pattern_exists', state)
152 raise formencode.Invalid(msg, value, state)
153 raise formencode.Invalid(msg, value, state)
153 return _validator
154 return _validator
154
155
155
156
156 def ValidUsername(localizer, edit=False, old_data=None):
157 def ValidUsername(localizer, edit=False, old_data=None):
157 _ = localizer
158 _ = localizer
158 old_data = old_data or {}
159 old_data = old_data or {}
159
160
160 class _validator(formencode.validators.FancyValidator):
161 class _validator(formencode.validators.FancyValidator):
161 messages = {
162 messages = {
162 'username_exists': _('Username "%(username)s" already exists'),
163 'username_exists': _('Username "%(username)s" already exists'),
163 'system_invalid_username':
164 'system_invalid_username':
164 _('Username "%(username)s" is forbidden'),
165 _('Username "%(username)s" is forbidden'),
165 'invalid_username':
166 'invalid_username':
166 _('Username may only contain alphanumeric characters, '
167 _('Username may only contain alphanumeric characters, '
167 'underscores, periods or dashes and must begin with an '
168 'underscores, periods or dashes and must begin with an '
168 'alphanumeric character or underscore')
169 'alphanumeric character or underscore')
169 }
170 }
170
171
171 def _validate_python(self, value, state):
172 def _validate_python(self, value, state):
172 if value in ['default', 'new_user']:
173 if value in ['default', 'new_user']:
173 msg = M(self, 'system_invalid_username', state, username=value)
174 msg = M(self, 'system_invalid_username', state, username=value)
174 raise formencode.Invalid(msg, value, state)
175 raise formencode.Invalid(msg, value, state)
175 # check if user is unique
176 # check if user is unique
176 old_un = None
177 old_un = None
177 if edit:
178 if edit:
178 old_un = User.get(old_data.get('user_id')).username
179 old_un = User.get(old_data.get('user_id')).username
179
180
180 if old_un != value or not edit:
181 if old_un != value or not edit:
181 if User.get_by_username(value, case_insensitive=True):
182 if User.get_by_username(value, case_insensitive=True):
182 msg = M(self, 'username_exists', state, username=value)
183 msg = M(self, 'username_exists', state, username=value)
183 raise formencode.Invalid(msg, value, state)
184 raise formencode.Invalid(msg, value, state)
184
185
185 if (re.match(r'^[\w]{1}[\w\-\.]{0,254}$', value)
186 if (re.match(r'^[\w]{1}[\w\-\.]{0,254}$', value)
186 is None):
187 is None):
187 msg = M(self, 'invalid_username', state)
188 msg = M(self, 'invalid_username', state)
188 raise formencode.Invalid(msg, value, state)
189 raise formencode.Invalid(msg, value, state)
189 return _validator
190 return _validator
190
191
191
192
192 def ValidRepoUser(localizer, allow_disabled=False):
193 def ValidRepoUser(localizer, allow_disabled=False):
193 _ = localizer
194 _ = localizer
194
195
195 class _validator(formencode.validators.FancyValidator):
196 class _validator(formencode.validators.FancyValidator):
196 messages = {
197 messages = {
197 'invalid_username': _('Username %(username)s is not valid'),
198 'invalid_username': _('Username %(username)s is not valid'),
198 'disabled_username': _('Username %(username)s is disabled')
199 'disabled_username': _('Username %(username)s is disabled')
199 }
200 }
200
201
201 def _validate_python(self, value, state):
202 def _validate_python(self, value, state):
202 try:
203 try:
203 user = User.query().filter(User.username == value).one()
204 user = User.query().filter(User.username == value).one()
204 except Exception:
205 except Exception:
205 msg = M(self, 'invalid_username', state, username=value)
206 msg = M(self, 'invalid_username', state, username=value)
206 raise formencode.Invalid(
207 raise formencode.Invalid(
207 msg, value, state, error_dict={'username': msg}
208 msg, value, state, error_dict={'username': msg}
208 )
209 )
209 if user and (not allow_disabled and not user.active):
210 if user and (not allow_disabled and not user.active):
210 msg = M(self, 'disabled_username', state, username=value)
211 msg = M(self, 'disabled_username', state, username=value)
211 raise formencode.Invalid(
212 raise formencode.Invalid(
212 msg, value, state, error_dict={'username': msg}
213 msg, value, state, error_dict={'username': msg}
213 )
214 )
214 return _validator
215 return _validator
215
216
216
217
217 def ValidUserGroup(localizer, edit=False, old_data=None):
218 def ValidUserGroup(localizer, edit=False, old_data=None):
218 _ = localizer
219 _ = localizer
219 old_data = old_data or {}
220 old_data = old_data or {}
220
221
221 class _validator(formencode.validators.FancyValidator):
222 class _validator(formencode.validators.FancyValidator):
222 messages = {
223 messages = {
223 'invalid_group': _('Invalid user group name'),
224 'invalid_group': _('Invalid user group name'),
224 'group_exist': _('User group `%(usergroup)s` already exists'),
225 'group_exist': _('User group `%(usergroup)s` already exists'),
225 'invalid_usergroup_name':
226 'invalid_usergroup_name':
226 _('user group name may only contain alphanumeric '
227 _('user group name may only contain alphanumeric '
227 'characters, underscores, periods or dashes and must begin '
228 'characters, underscores, periods or dashes and must begin '
228 'with an alphanumeric character')
229 'with an alphanumeric character')
229 }
230 }
230
231
231 def _validate_python(self, value, state):
232 def _validate_python(self, value, state):
232 if value in ['default']:
233 if value in ['default']:
233 msg = M(self, 'invalid_group', state)
234 msg = M(self, 'invalid_group', state)
234 raise formencode.Invalid(
235 raise formencode.Invalid(
235 msg, value, state, error_dict={'users_group_name': msg}
236 msg, value, state, error_dict={'users_group_name': msg}
236 )
237 )
237 # check if group is unique
238 # check if group is unique
238 old_ugname = None
239 old_ugname = None
239 if edit:
240 if edit:
240 old_id = old_data.get('users_group_id')
241 old_id = old_data.get('users_group_id')
241 old_ugname = UserGroup.get(old_id).users_group_name
242 old_ugname = UserGroup.get(old_id).users_group_name
242
243
243 if old_ugname != value or not edit:
244 if old_ugname != value or not edit:
244 is_existing_group = UserGroup.get_by_group_name(
245 is_existing_group = UserGroup.get_by_group_name(
245 value, case_insensitive=True)
246 value, case_insensitive=True)
246 if is_existing_group:
247 if is_existing_group:
247 msg = M(self, 'group_exist', state, usergroup=value)
248 msg = M(self, 'group_exist', state, usergroup=value)
248 raise formencode.Invalid(
249 raise formencode.Invalid(
249 msg, value, state, error_dict={'users_group_name': msg}
250 msg, value, state, error_dict={'users_group_name': msg}
250 )
251 )
251
252
252 if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value) is None:
253 if re.match(r'^[a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+$', value) is None:
253 msg = M(self, 'invalid_usergroup_name', state)
254 msg = M(self, 'invalid_usergroup_name', state)
254 raise formencode.Invalid(
255 raise formencode.Invalid(
255 msg, value, state, error_dict={'users_group_name': msg}
256 msg, value, state, error_dict={'users_group_name': msg}
256 )
257 )
257 return _validator
258 return _validator
258
259
259
260
260 def ValidRepoGroup(localizer, edit=False, old_data=None, can_create_in_root=False):
261 def ValidRepoGroup(localizer, edit=False, old_data=None, can_create_in_root=False):
261 _ = localizer
262 _ = localizer
262 old_data = old_data or {}
263 old_data = old_data or {}
263
264
264 class _validator(formencode.validators.FancyValidator):
265 class _validator(formencode.validators.FancyValidator):
265 messages = {
266 messages = {
266 'group_parent_id': _('Cannot assign this group as parent'),
267 'group_parent_id': _('Cannot assign this group as parent'),
267 'group_exists': _('Group "%(group_name)s" already exists'),
268 'group_exists': _('Group "%(group_name)s" already exists'),
268 'repo_exists': _('Repository with name "%(group_name)s" '
269 'repo_exists': _('Repository with name "%(group_name)s" '
269 'already exists'),
270 'already exists'),
270 'permission_denied': _("no permission to store repository group"
271 'permission_denied': _("no permission to store repository group"
271 "in this location"),
272 "in this location"),
272 'permission_denied_root': _(
273 'permission_denied_root': _(
273 "no permission to store repository group "
274 "no permission to store repository group "
274 "in root location")
275 "in root location")
275 }
276 }
276
277
277 def _convert_to_python(self, value, state):
278 def _convert_to_python(self, value, state):
278 group_name = repo_name_slug(value.get('group_name', ''))
279 group_name = repo_name_slug(value.get('group_name', ''))
279 group_parent_id = safe_int(value.get('group_parent_id'))
280 group_parent_id = safe_int(value.get('group_parent_id'))
280 gr = RepoGroup.get(group_parent_id)
281 gr = RepoGroup.get(group_parent_id)
281 if gr:
282 if gr:
282 parent_group_path = gr.full_path
283 parent_group_path = gr.full_path
283 # value needs to be aware of the group name in order to check
284 # value needs to be aware of the group name in order to check
284 # the db key; this is actually just the name to store in the
285 # the db key; this is actually just the name to store in the
285 # database
286 # database
286 group_name_full = (
287 group_name_full = (
287 parent_group_path + RepoGroup.url_sep() + group_name)
288 parent_group_path + RepoGroup.url_sep() + group_name)
288 else:
289 else:
289 group_name_full = group_name
290 group_name_full = group_name
290
291
291 value['group_name'] = group_name
292 value['group_name'] = group_name
292 value['group_name_full'] = group_name_full
293 value['group_name_full'] = group_name_full
293 value['group_parent_id'] = group_parent_id
294 value['group_parent_id'] = group_parent_id
294 return value
295 return value
295
296
296 def _validate_python(self, value, state):
297 def _validate_python(self, value, state):
297
298
298 old_group_name = None
299 old_group_name = None
299 group_name = value.get('group_name')
300 group_name = value.get('group_name')
300 group_name_full = value.get('group_name_full')
301 group_name_full = value.get('group_name_full')
301 group_parent_id = safe_int(value.get('group_parent_id'))
302 group_parent_id = safe_int(value.get('group_parent_id'))
302 if group_parent_id == -1:
303 if group_parent_id == -1:
303 group_parent_id = None
304 group_parent_id = None
304
305
305 group_obj = RepoGroup.get(old_data.get('group_id'))
306 group_obj = RepoGroup.get(old_data.get('group_id'))
306 parent_group_changed = False
307 parent_group_changed = False
307 if edit:
308 if edit:
308 old_group_name = group_obj.group_name
309 old_group_name = group_obj.group_name
309 old_group_parent_id = group_obj.group_parent_id
310 old_group_parent_id = group_obj.group_parent_id
310
311
311 if group_parent_id != old_group_parent_id:
312 if group_parent_id != old_group_parent_id:
312 parent_group_changed = True
313 parent_group_changed = True
313
314
314 # TODO: mikhail: the following if statement is not reached
315 # TODO: mikhail: the following if statement is not reached
315 # since group_parent_id's OneOf validation fails before.
316 # since group_parent_id's OneOf validation fails before.
316 # Can be removed.
317 # Can be removed.
317
318
318 # check against setting a parent of self
319 # check against setting a parent of self
319 parent_of_self = (
320 parent_of_self = (
320 old_data['group_id'] == group_parent_id
321 old_data['group_id'] == group_parent_id
321 if group_parent_id else False
322 if group_parent_id else False
322 )
323 )
323 if parent_of_self:
324 if parent_of_self:
324 msg = M(self, 'group_parent_id', state)
325 msg = M(self, 'group_parent_id', state)
325 raise formencode.Invalid(
326 raise formencode.Invalid(
326 msg, value, state, error_dict={'group_parent_id': msg}
327 msg, value, state, error_dict={'group_parent_id': msg}
327 )
328 )
328
329
329 # group we're moving current group inside
330 # group we're moving current group inside
330 child_group = None
331 child_group = None
331 if group_parent_id:
332 if group_parent_id:
332 child_group = RepoGroup.query().filter(
333 child_group = RepoGroup.query().filter(
333 RepoGroup.group_id == group_parent_id).scalar()
334 RepoGroup.group_id == group_parent_id).scalar()
334
335
335 # do a special check that we cannot move a group to one of
336 # do a special check that we cannot move a group to one of
336 # it's children
337 # it's children
337 if edit and child_group:
338 if edit and child_group:
338 parents = [x.group_id for x in child_group.parents]
339 parents = [x.group_id for x in child_group.parents]
339 move_to_children = old_data['group_id'] in parents
340 move_to_children = old_data['group_id'] in parents
340 if move_to_children:
341 if move_to_children:
341 msg = M(self, 'group_parent_id', state)
342 msg = M(self, 'group_parent_id', state)
342 raise formencode.Invalid(
343 raise formencode.Invalid(
343 msg, value, state, error_dict={'group_parent_id': msg})
344 msg, value, state, error_dict={'group_parent_id': msg})
344
345
345 # Check if we have permission to store in the parent.
346 # Check if we have permission to store in the parent.
346 # Only check if the parent group changed.
347 # Only check if the parent group changed.
347 if parent_group_changed:
348 if parent_group_changed:
348 if child_group is None:
349 if child_group is None:
349 if not can_create_in_root:
350 if not can_create_in_root:
350 msg = M(self, 'permission_denied_root', state)
351 msg = M(self, 'permission_denied_root', state)
351 raise formencode.Invalid(
352 raise formencode.Invalid(
352 msg, value, state,
353 msg, value, state,
353 error_dict={'group_parent_id': msg})
354 error_dict={'group_parent_id': msg})
354 else:
355 else:
355 valid = HasRepoGroupPermissionAny('group.admin')
356 valid = HasRepoGroupPermissionAny('group.admin')
356 forbidden = not valid(
357 forbidden = not valid(
357 child_group.group_name, 'can create group validator')
358 child_group.group_name, 'can create group validator')
358 if forbidden:
359 if forbidden:
359 msg = M(self, 'permission_denied', state)
360 msg = M(self, 'permission_denied', state)
360 raise formencode.Invalid(
361 raise formencode.Invalid(
361 msg, value, state,
362 msg, value, state,
362 error_dict={'group_parent_id': msg})
363 error_dict={'group_parent_id': msg})
363
364
364 # if we change the name or it's new group, check for existing names
365 # if we change the name or it's new group, check for existing names
365 # or repositories with the same name
366 # or repositories with the same name
366 if old_group_name != group_name_full or not edit:
367 if old_group_name != group_name_full or not edit:
367 # check group
368 # check group
368 gr = RepoGroup.get_by_group_name(group_name_full)
369 gr = RepoGroup.get_by_group_name(group_name_full)
369 if gr:
370 if gr:
370 msg = M(self, 'group_exists', state, group_name=group_name)
371 msg = M(self, 'group_exists', state, group_name=group_name)
371 raise formencode.Invalid(
372 raise formencode.Invalid(
372 msg, value, state, error_dict={'group_name': msg})
373 msg, value, state, error_dict={'group_name': msg})
373
374
374 # check for same repo
375 # check for same repo
375 repo = Repository.get_by_repo_name(group_name_full)
376 repo = Repository.get_by_repo_name(group_name_full)
376 if repo:
377 if repo:
377 msg = M(self, 'repo_exists', state, group_name=group_name)
378 msg = M(self, 'repo_exists', state, group_name=group_name)
378 raise formencode.Invalid(
379 raise formencode.Invalid(
379 msg, value, state, error_dict={'group_name': msg})
380 msg, value, state, error_dict={'group_name': msg})
380 return _validator
381 return _validator
381
382
382
383
383 def ValidPassword(localizer):
384 def ValidPassword(localizer):
384 _ = localizer
385 _ = localizer
385
386
386 class _validator(formencode.validators.FancyValidator):
387 class _validator(formencode.validators.FancyValidator):
387 messages = {
388 messages = {
388 'invalid_password':
389 'invalid_password':
389 _('Invalid characters (non-ascii) in password')
390 _('Invalid characters (non-ascii) in password')
390 }
391 }
391
392
392 def _validate_python(self, value, state):
393 def _validate_python(self, value, state):
393 if value and not value.isascii():
394 if value and not value.isascii():
394 msg = M(self, 'invalid_password', state)
395 msg = M(self, 'invalid_password', state)
395 raise formencode.Invalid(msg, value, state,)
396 raise formencode.Invalid(msg, value, state,)
396 return _validator
397 return _validator
397
398
398
399
399 def ValidPasswordsMatch(
400 def ValidPasswordsMatch(
400 localizer, passwd='new_password',
401 localizer, passwd='new_password',
401 passwd_confirmation='password_confirmation'):
402 passwd_confirmation='password_confirmation'):
402 _ = localizer
403 _ = localizer
403
404
404 class _validator(formencode.validators.FancyValidator):
405 class _validator(formencode.validators.FancyValidator):
405 messages = {
406 messages = {
406 'password_mismatch': _('Passwords do not match'),
407 'password_mismatch': _('Passwords do not match'),
407 }
408 }
408
409
409 def _validate_python(self, value, state):
410 def _validate_python(self, value, state):
410
411
411 pass_val = value.get('password') or value.get(passwd)
412 pass_val = value.get('password') or value.get(passwd)
412 if pass_val != value[passwd_confirmation]:
413 if pass_val != value[passwd_confirmation]:
413 msg = M(self, 'password_mismatch', state)
414 msg = M(self, 'password_mismatch', state)
414 raise formencode.Invalid(
415 raise formencode.Invalid(
415 msg, value, state,
416 msg, value, state,
416 error_dict={passwd: msg, passwd_confirmation: msg}
417 error_dict={passwd: msg, passwd_confirmation: msg}
417 )
418 )
418 return _validator
419 return _validator
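A sketch of how a cross-field validator such as ValidPasswordsMatch is typically attached to a formencode schema; the schema class and field set below are illustrative, and `_` again stands for a localizer callable.

    import formencode
    from formencode import validators

    class _PasswordChangeForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True
        new_password = validators.UnicodeString(not_empty=True)
        password_confirmation = validators.UnicodeString(not_empty=True)
        # runs after the per-field validators, over the whole value dict
        chained_validators = [ValidPasswordsMatch(_)()]

    _PasswordChangeForm().to_python(
        {'new_password': 'x', 'password_confirmation': 'x'})  # passes
    # mismatching values raise formencode.Invalid with both fields in error_dict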
419
420
420
421
421 def ValidAuth(localizer):
422 def ValidAuth(localizer):
422 _ = localizer
423 _ = localizer
423
424
424 class _validator(formencode.validators.FancyValidator):
425 class _validator(formencode.validators.FancyValidator):
425 messages = {
426 messages = {
426 'invalid_password': _('invalid password'),
427 'invalid_password': _('invalid password'),
427 'invalid_username': _('invalid user name'),
428 'invalid_username': _('invalid user name'),
428 'disabled_account': _('Your account is disabled')
429 'disabled_account': _('Your account is disabled')
429 }
430 }
430
431
431 def _validate_python(self, value, state):
432 def _validate_python(self, value, state):
432 from rhodecode.authentication.base import authenticate, HTTP_TYPE
433 from rhodecode.authentication.base import authenticate, HTTP_TYPE
433
434
434 password = value['password']
435 password = value['password']
435 username = value['username']
436 username = value['username']
436
437
437 if not authenticate(username, password, '', HTTP_TYPE,
438 if not authenticate(username, password, '', HTTP_TYPE,
438 skip_missing=True):
439 skip_missing=True):
439 user = User.get_by_username(username)
440 user = User.get_by_username(username)
440 if user and not user.active:
441 if user and not user.active:
441 log.warning('user %s is disabled', username)
442 log.warning('user %s is disabled', username)
442 msg = M(self, 'disabled_account', state)
443 msg = M(self, 'disabled_account', state)
443 raise formencode.Invalid(
444 raise formencode.Invalid(
444 msg, value, state, error_dict={'username': msg}
445 msg, value, state, error_dict={'username': msg}
445 )
446 )
446 else:
447 else:
447 log.warning('user `%s` failed to authenticate', username)
448 log.warning('user `%s` failed to authenticate', username)
448 msg = M(self, 'invalid_username', state)
449 msg = M(self, 'invalid_username', state)
449 msg2 = M(self, 'invalid_password', state)
450 msg2 = M(self, 'invalid_password', state)
450 raise formencode.Invalid(
451 raise formencode.Invalid(
451 msg, value, state,
452 msg, value, state,
452 error_dict={'username': msg, 'password': msg2}
453 error_dict={'username': msg, 'password': msg2}
453 )
454 )
454 return _validator
455 return _validator
455
456
456
457
def ValidRepoName(localizer, edit=False, old_data=None):
    old_data = old_data or {}
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'invalid_repo_name':
                _('Repository name %(repo)s is disallowed'),
            # top level
            'repository_exists': _('Repository with name %(repo)s '
                                   'already exists'),
            'group_exists': _('Repository group with name "%(repo)s" '
                              'already exists'),
            # inside a group
            'repository_in_group_exists': _('Repository with name %(repo)s '
                                            'exists in group "%(group)s"'),
            'group_in_group_exists': _(
                'Repository group with name "%(repo)s" '
                'exists in group "%(group)s"'),
        }

        def _convert_to_python(self, value, state):
            repo_name = repo_name_slug(value.get('repo_name', ''))
            repo_group = value.get('repo_group')
            if repo_group:
                gr = RepoGroup.get(repo_group)
                group_path = gr.full_path
                group_name = gr.group_name
                # the value needs to be aware of the group name in order to
                # check the db key; this is just the name that gets stored
                # in the database
                repo_name_full = group_path + RepoGroup.url_sep() + repo_name
            else:
                group_name = group_path = ''
                repo_name_full = repo_name

            value['repo_name'] = repo_name
            value['repo_name_full'] = repo_name_full
            value['group_path'] = group_path
            value['group_name'] = group_name
            return value

        def _validate_python(self, value, state):

            repo_name = value.get('repo_name')
            repo_name_full = value.get('repo_name_full')
            group_path = value.get('group_path')
            group_name = value.get('group_name')

            if repo_name in [ADMIN_PREFIX, '']:
                msg = M(self, 'invalid_repo_name', state, repo=repo_name)
                raise formencode.Invalid(
                    msg, value, state, error_dict={'repo_name': msg})

            rename = old_data.get('repo_name') != repo_name_full
            create = not edit
            if rename or create:

                if group_path:
                    if Repository.get_by_repo_name(repo_name_full):
                        msg = M(self, 'repository_in_group_exists', state,
                                repo=repo_name, group=group_name)
                        raise formencode.Invalid(
                            msg, value, state, error_dict={'repo_name': msg})
                    if RepoGroup.get_by_group_name(repo_name_full):
                        msg = M(self, 'group_in_group_exists', state,
                                repo=repo_name, group=group_name)
                        raise formencode.Invalid(
                            msg, value, state, error_dict={'repo_name': msg})
                else:
                    if RepoGroup.get_by_group_name(repo_name_full):
                        msg = M(self, 'group_exists', state, repo=repo_name)
                        raise formencode.Invalid(
                            msg, value, state, error_dict={'repo_name': msg})

                    if Repository.get_by_repo_name(repo_name_full):
                        msg = M(
                            self, 'repository_exists', state, repo=repo_name)
                        raise formencode.Invalid(
                            msg, value, state, error_dict={'repo_name': msg})
            return value
    return _validator


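# Illustrative sketch only: the expected flow through ValidRepoName(). The
# input keys follow the code above; the repository name is a made-up example
# and the collision checks in _validate_python() require a configured
# database session.
def _example_validate_repo_name(localizer):
    validator = ValidRepoName(localizer, edit=False)()
    # to_python() first runs _convert_to_python(), which slugifies
    # 'repo_name' and derives 'repo_name_full', 'group_path' and
    # 'group_name'; _validate_python() then checks for name collisions.
    form_value = {'repo_name': 'My Repo', 'repo_group': None}
    return validator.to_python(form_value)

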
def ValidForkName(localizer, *args, **kwargs):
    _ = localizer

    return ValidRepoName(localizer, *args, **kwargs)


def SlugifyName(localizer):
    _ = localizer

    class _validator(formencode.validators.FancyValidator):

        def _convert_to_python(self, value, state):
            return repo_name_slug(value)

        def _validate_python(self, value, state):
            pass
    return _validator


def CannotHaveGitSuffix(localizer):
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'has_git_suffix':
                _('Repository name cannot end with .git'),
        }

        def _convert_to_python(self, value, state):
            return value

        def _validate_python(self, value, state):
            if value and value.endswith('.git'):
                msg = M(
                    self, 'has_git_suffix', state)
                raise formencode.Invalid(
                    msg, value, state, error_dict={'repo_name': msg})
    return _validator


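# Illustrative sketch only: single-value validators such as SlugifyName() and
# CannotHaveGitSuffix() can be combined with formencode.All so the same field
# value must satisfy both. The combination shown is an assumption for
# demonstration, not something this module does itself.
def _example_repo_name_field(localizer):
    import formencode
    return formencode.All(
        SlugifyName(localizer)(),
        CannotHaveGitSuffix(localizer)())

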
def ValidCloneUri(localizer):
    _ = localizer

    class InvalidCloneUrl(Exception):
        allowed_prefixes = ()

    def url_handler(repo_type, url):
        config = make_db_config(clear_session=False)
        if repo_type == 'hg':
            allowed_prefixes = ('http', 'svn+http', 'git+http')

            if 'http' in url[:4]:
                # initially check that it's at least a proper URL
                # and that it passes basic auth
                MercurialRepository.check_url(url, config)
            elif 'svn+http' in url[:8]:  # svn->hg import
                SubversionRepository.check_url(url, config)
            elif 'git+http' in url[:8]:  # git->hg import
                raise NotImplementedError()
            else:
                exc = InvalidCloneUrl('Clone from URI %s not allowed. '
                                      'Allowed URLs must start with one of %s'
                                      % (url, ','.join(allowed_prefixes)))
                exc.allowed_prefixes = allowed_prefixes
                raise exc

        elif repo_type == 'git':
            allowed_prefixes = ('http', 'svn+http', 'hg+http')
            if 'http' in url[:4]:
                # initially check that it's at least a proper URL
                # and that it passes basic auth
                GitRepository.check_url(url, config)
            elif 'svn+http' in url[:8]:  # svn->git import
                raise NotImplementedError()
            elif 'hg+http' in url[:8]:  # hg->git import
                raise NotImplementedError()
            else:
                exc = InvalidCloneUrl('Clone from URI %s not allowed. '
                                      'Allowed URLs must start with one of %s'
                                      % (url, ','.join(allowed_prefixes)))
                exc.allowed_prefixes = allowed_prefixes
                raise exc

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'clone_uri': _('invalid clone url or credentials for %(rtype)s repository'),
            'invalid_clone_uri': _(
                'Invalid clone url, provide a valid clone '
                'url starting with one of %(allowed_prefixes)s')
        }

        def _validate_python(self, value, state):
            repo_type = value.get('repo_type')
            url = value.get('clone_uri')

            if url:
                try:
                    url_handler(repo_type, url)
                except InvalidCloneUrl as e:
                    log.warning(e)
                    msg = M(self, 'invalid_clone_uri', state, rtype=repo_type,
                            allowed_prefixes=','.join(e.allowed_prefixes))
                    raise formencode.Invalid(msg, value, state,
                                             error_dict={'clone_uri': msg})
                except Exception:
                    log.exception('Url validation failed')
                    msg = M(self, 'clone_uri', state, rtype=repo_type)
                    raise formencode.Invalid(msg, value, state,
                                             error_dict={'clone_uri': msg})
    return _validator


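# Illustrative sketch only: ValidCloneUri() validates a dict carrying both
# the repo type and the optional clone URL. The URL below is made up, and
# check_url() will actually try to reach it, so running this needs a
# configured database and network access.
def _example_check_clone_uri(localizer):
    validator = ValidCloneUri(localizer)()
    return validator.to_python(
        {'repo_type': 'git', 'clone_uri': 'https://example.com/repo.git'})

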
def ValidForkType(localizer, old_data=None):
    _ = localizer
    old_data = old_data or {}

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'invalid_fork_type': _('Fork has to be the same type as parent')
        }

        def _validate_python(self, value, state):
            if old_data['repo_type'] != value:
                msg = M(self, 'invalid_fork_type', state)
                raise formencode.Invalid(
                    msg, value, state, error_dict={'repo_type': msg}
                )
    return _validator


def CanWriteGroup(localizer, old_data=None):
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'permission_denied': _(
                "You do not have the permission "
                "to create repositories in this group."),
            'permission_denied_root': _(
                "You do not have the permission to store repositories in "
                "the root location.")
        }

        def _convert_to_python(self, value, state):
            # root location
            if value in [-1, "-1"]:
                return None
            return value

        def _validate_python(self, value, state):
            gr = RepoGroup.get(value)
            gr_name = gr.group_name if gr else None  # None means ROOT location
            # is "create repositories with write permission on group" enabled
            create_on_write = HasPermissionAny(
                'hg.create.write_on_repogroup.true')()
            group_admin = HasRepoGroupPermissionAny('group.admin')(
                gr_name, 'can write into group validator')
            group_write = HasRepoGroupPermissionAny('group.write')(
                gr_name, 'can write into group validator')
            forbidden = not (group_admin or (group_write and create_on_write))
            can_create_repos = HasPermissionAny(
                'hg.admin', 'hg.create.repository')
            gid = (old_data['repo_group'].get('group_id')
                   if (old_data and 'repo_group' in old_data) else None)
            value_changed = gid != safe_int(value)
            new = not old_data
            # only check when the value changed; there's a case where someone
            # had write permissions revoked on a repository they had already
            # created, and we don't need to re-check permissions if the group
            # selected in the form box didn't change
            if value_changed or new:
                # the parent group needs to exist
                if gr and forbidden:
                    msg = M(self, 'permission_denied', state)
                    raise formencode.Invalid(
                        msg, value, state, error_dict={'repo_type': msg}
                    )
                # check if we can write to the root location!
                elif gr is None and not can_create_repos():
                    msg = M(self, 'permission_denied_root', state)
                    raise formencode.Invalid(
                        msg, value, state, error_dict={'repo_type': msg}
                    )
    return _validator


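# Illustrative sketch only: the repo-group choice field uses -1 (or "-1") as
# the sentinel for the root location; _convert_to_python() above maps it to
# None before the permission checks run. The permission checks themselves
# need an authenticated request context and a database.
def _example_root_location_sentinel(localizer):
    validator = CanWriteGroup(localizer)()
    assert validator._convert_to_python("-1", None) is None
    assert validator._convert_to_python("42", None) == "42"
    return validator

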
def ValidPerms(localizer, type_='repo'):
    _ = localizer
    if type_ == 'repo_group':
        EMPTY_PERM = 'group.none'
    elif type_ == 'repo':
        EMPTY_PERM = 'repository.none'
    elif type_ == 'user_group':
        EMPTY_PERM = 'usergroup.none'

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'perm_new_member_name':
                _('This username or user group name is not valid')
        }

        def _convert_to_python(self, value, state):
            perm_updates = OrderedSet()
            perm_additions = OrderedSet()
            perm_deletions = OrderedSet()
            # build lists of permissions to update/delete and new permissions

            # read the perm_new_member/perm_del_member attributes and group
            # them by their IDs
            new_perms_group = collections.defaultdict(dict)
            del_perms_group = collections.defaultdict(dict)
            for k, v in list(value.copy().items()):
                if k.startswith('perm_del_member'):
                    # delete from original storage so we don't process it again later
                    del value[k]
                    # part is `id`, `type`
                    _type, part = k.split('perm_del_member_')
                    args = part.split('_')
                    if len(args) == 2:
                        _key, pos = args
                        del_perms_group[pos][_key] = v
                if k.startswith('perm_new_member'):
                    # delete from original storage so we don't process it again later
                    del value[k]
                    # part is `id`, `type`, `perm`
                    _type, part = k.split('perm_new_member_')
                    args = part.split('_')
                    if len(args) == 2:
                        _key, pos = args
                        new_perms_group[pos][_key] = v

            # store the deletes
            for k in sorted(del_perms_group.keys()):
                perm_dict = del_perms_group[k]
                del_member = perm_dict.get('id')
                del_type = perm_dict.get('type')
                if del_member and del_type:
                    perm_deletions.add(
                        (del_member, None, del_type))

            # store additions in the order they were added in the web form
            for k in sorted(new_perms_group.keys()):
                perm_dict = new_perms_group[k]
                new_member = perm_dict.get('id')
                new_type = perm_dict.get('type')
                new_perm = perm_dict.get('perm')
                if new_member and new_perm and new_type:
                    perm_additions.add(
                        (new_member, new_perm, new_type))

            # get updates of permissions
            # (read the existing radio button states)
            default_user_id = User.get_default_user_id()

            for k, update_value in list(value.items()):
                if k.startswith('u_perm_') or k.startswith('g_perm_'):
                    obj_type = k[0]
                    obj_id = k[7:]
                    update_type = {'u': 'user',
                                   'g': 'user_group'}[obj_type]

                    if obj_type == 'u' and safe_int(obj_id) == default_user_id:
                        if str2bool(value.get('repo_private')):
                            # prevent updating default user permissions
                            # when this repository is marked as private
                            update_value = EMPTY_PERM

                    perm_updates.add(
                        (obj_id, update_value, update_type))

            value['perm_additions'] = []  # propagated later
            value['perm_updates'] = list(perm_updates)
            value['perm_deletions'] = list(perm_deletions)

            updates_map = dict(
                (x[0], (x[1], x[2])) for x in value['perm_updates'])
            # make sure additions don't override updates.
            for member_id, perm, member_type in list(perm_additions):
                if member_id in updates_map:
                    perm = updates_map[member_id][0]
                value['perm_additions'].append((member_id, perm, member_type))

                # for new entries, validate that the users/user groups exist
                # and are active; this leaves feedback for the form
                try:
                    if member_type == 'user':
                        User.query()\
                            .filter(User.active == true())\
                            .filter(User.user_id == member_id).one()
                    if member_type == 'user_group':
                        UserGroup.query()\
                            .filter(UserGroup.users_group_active == true())\
                            .filter(UserGroup.users_group_id == member_id)\
                            .one()

                except Exception:
                    log.exception('Updated permission failed: org_exc:')
                    msg = M(self, 'perm_new_member_name', state)
                    raise formencode.Invalid(
                        msg, value, state, error_dict={
                            'perm_new_member_name': msg}
                    )
            return value
    return _validator


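# Illustrative sketch only: the shape of the web-form keys that
# ValidPerms()._convert_to_python() parses. The IDs and permission names
# below are made-up examples; converting them into update/addition/deletion
# lists still requires a database for the user/user-group existence checks.
def _example_perms_form_value():
    return {
        # existing entries: u_perm_<user_id> / g_perm_<user_group_id>
        'u_perm_2': 'repository.write',
        'g_perm_5': 'repository.read',
        # new entry number 1: perm_new_member_<field>_<position>
        'perm_new_member_id_1': '7',
        'perm_new_member_type_1': 'user',
        'perm_new_member_perm_1': 'repository.admin',
        # deletion number 2: perm_del_member_<field>_<position>
        'perm_del_member_id_2': '9',
        'perm_del_member_type_2': 'user_group',
        'repo_private': 'False',
    }

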
def ValidPath(localizer):
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'invalid_path': _('This is not a valid path')
        }

        def _validate_python(self, value, state):
            if not os.path.isdir(value):
                msg = M(self, 'invalid_path', state)
                raise formencode.Invalid(
                    msg, value, state, error_dict={'paths_root_path': msg}
                )
    return _validator


def UniqSystemEmail(localizer, old_data=None):
    _ = localizer
    old_data = old_data or {}

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'email_taken': _('This e-mail address is already taken')
        }

        def _convert_to_python(self, value, state):
            return value.lower()

        def _validate_python(self, value, state):
            if (old_data.get('email') or '').lower() != value:
                user = User.get_by_email(value, case_insensitive=True)
                if user:
                    msg = M(self, 'email_taken', state)
                    raise formencode.Invalid(
                        msg, value, state, error_dict={'email': msg}
                    )
    return _validator


def ValidSystemEmail(localizer):
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'non_existing_email': _('e-mail "%(email)s" does not exist.')
        }

        def _convert_to_python(self, value, state):
            return value.lower()

        def _validate_python(self, value, state):
            user = User.get_by_email(value, case_insensitive=True)
            if user is None:
                msg = M(self, 'non_existing_email', state, email=value)
                raise formencode.Invalid(
                    msg, value, state, error_dict={'email': msg}
                )
    return _validator


def NotReviewedRevisions(localizer, repo_id):
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'rev_already_reviewed':
                _('Revisions %(revs)s are already part of a pull request '
                  'or have their status set'),
        }

        def _validate_python(self, value, state):
            # check that revisions are not already reviewed, or part of
            # another pull request
            statuses = ChangesetStatus.query()\
                .filter(ChangesetStatus.revision.in_(value))\
                .filter(ChangesetStatus.repo_id == repo_id)\
                .all()

            errors = []
            for status in statuses:
                if status.pull_request_id:
                    errors.append(['pull_req', status.revision[:12]])
                elif status.status:
                    errors.append(['status', status.revision[:12]])

            if errors:
                revs = ','.join([x[1] for x in errors])
                msg = M(self, 'rev_already_reviewed', state, revs=revs)
                raise formencode.Invalid(
                    msg, value, state, error_dict={'revisions': revs})

    return _validator


def ValidIp(localizer):
    _ = localizer

    class _validator(CIDR):
        messages = {
            'badFormat': _('Please enter a valid IPv4 or IPv6 address'),
            'illegalBits': _(
                'The network size (bits) must be within the range '
                'of 0-32 (not %(bits)r)'),
        }

        # we override the default to_python() call
        def to_python(self, value, state):
            v = super(_validator, self).to_python(value, state)
            v = safe_str(v.strip())
            net = ipaddress.ip_network(address=v, strict=False)
            return str(net)

        def _validate_python(self, value, state):
            try:
                addr = safe_str(value.strip())
                # this raises a ValueError if the address is neither IPv4 nor IPv6
                ipaddress.ip_network(addr, strict=False)
            except ValueError:
                raise formencode.Invalid(self.message('badFormat', state),
                                         value, state)
    return _validator


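# Illustrative sketch only: the normalization that the overridden to_python()
# above relies on -- ipaddress.ip_network() accepts bare addresses as well as
# CIDR ranges and returns a canonical network string.
def _example_ip_normalization():
    import ipaddress
    assert str(ipaddress.ip_network('192.168.1.1', strict=False)) == '192.168.1.1/32'
    assert str(ipaddress.ip_network('192.168.1.0/24', strict=False)) == '192.168.1.0/24'
    assert str(ipaddress.ip_network('::1', strict=False)) == '::1/128'

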
def FieldKey(localizer):
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'badFormat': _(
                'Key name can only consist of letters, '
                'underscore, dash or numbers'),
        }

        def _validate_python(self, value, state):
            if not re.match('[a-zA-Z0-9_-]+$', value):
                raise formencode.Invalid(self.message('badFormat', state),
                                         value, state)
    return _validator


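# Illustrative sketch only: which custom-field key names the FieldKey()
# pattern accepts; the keys shown are made-up examples.
def _example_field_keys():
    import re
    pattern = '[a-zA-Z0-9_-]+$'
    assert re.match(pattern, 'release_ticket-1')   # letters, digits, _ and -
    assert re.match(pattern, 'some key') is None   # spaces are rejected

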
def ValidAuthPlugins(localizer):
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'import_duplicate': _(
                'Plugins %(loaded)s and %(next_to_load)s '
                'both export the same name'),
            'missing_includeme': _(
                'The plugin "%(plugin_id)s" is missing an includeme '
                'function.'),
            'import_error': _(
                'Can not load plugin "%(plugin_id)s"'),
            'no_plugin': _(
                'No plugin available with ID "%(plugin_id)s"'),
        }

        def _convert_to_python(self, value, state):
            # filter empty values
            return [s for s in value if s not in [None, '']]

        def _validate_legacy_plugin_id(self, plugin_id, value, state):
            """
            Validates that the plugin import works. It also checks that the
            plugin has an includeme attribute.
            """
            try:
                plugin = _import_legacy_plugin(plugin_id)
            except Exception as e:
                log.exception(
                    'Exception during import of auth legacy plugin "{}"'
                    .format(plugin_id))
                msg = M(self, 'import_error', state, plugin_id=plugin_id)
                raise formencode.Invalid(msg, value, state)

            if not hasattr(plugin, 'includeme'):
                msg = M(self, 'missing_includeme', state, plugin_id=plugin_id)
                raise formencode.Invalid(msg, value, state)

            return plugin

        def _validate_plugin_id(self, plugin_id, value, state):
            """
            Plugins are already imported during app start up. Therefore this
            validation only retrieves the plugin from the plugin registry and
            if it returns something not None everything is OK.
            """
            plugin = loadplugin(plugin_id)

            if plugin is None:
                msg = M(self, 'no_plugin', state, plugin_id=plugin_id)
                raise formencode.Invalid(msg, value, state)

            return plugin

        def _validate_python(self, value, state):
            unique_names = {}
            for plugin_id in value:

                # Validate legacy or normal plugin.
                if plugin_id.startswith(legacy_plugin_prefix):
                    plugin = self._validate_legacy_plugin_id(
                        plugin_id, value, state)
                else:
                    plugin = self._validate_plugin_id(plugin_id, value, state)

                # Only allow unique plugin names.
                if plugin.name in unique_names:
                    msg = M(self, 'import_duplicate', state,
                            loaded=unique_names[plugin.name],
                            next_to_load=plugin)
                    raise formencode.Invalid(msg, value, state)
                unique_names[plugin.name] = plugin
    return _validator


def ValidPattern(localizer):
    _ = localizer

    class _validator(formencode.validators.FancyValidator):
        messages = {
            'bad_format': _('Url must start with http or /'),
        }

        def _convert_to_python(self, value, state):
            patterns = []

            prefix = 'new_pattern'
            for name, v in list(value.items()):
                pattern_name = '_'.join((prefix, 'pattern'))
                if name.startswith(pattern_name):
                    new_item_id = name[len(pattern_name)+1:]

                    def _field(name):
                        return '%s_%s_%s' % (prefix, name, new_item_id)

                    values = {
                        'issuetracker_pat': value.get(_field('pattern')),
                        'issuetracker_url': value.get(_field('url')),
                        'issuetracker_pref': value.get(_field('prefix')),
                        'issuetracker_desc': value.get(_field('description'))
                    }
                    new_uid = md5_safe(values['issuetracker_pat'])

                    has_required_fields = (
                        values['issuetracker_pat']
                        and values['issuetracker_url'])

                    if has_required_fields:
                        # validate that the url starts with http or /,
                        # otherwise it can lead to JS injection,
                        # e.g. specifying javascript:<malicious code>
                        if not values['issuetracker_url'].startswith(('http', '/')):
                            raise formencode.Invalid(
                                self.message('bad_format', state),
                                value, state)

                        settings = [
                            ('_'.join((key, new_uid)), values[key], 'unicode')
                            for key in values]
                        patterns.append(settings)

            value['patterns'] = patterns
            delete_patterns = value.get('uid') or []
            if not isinstance(delete_patterns, (list, tuple)):
                delete_patterns = [delete_patterns]
            value['delete_patterns'] = delete_patterns
            return value
    return _validator


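# Illustrative sketch only: the new_pattern_* form keys that
# ValidPattern()._convert_to_python() collects into issue-tracker settings.
# The pattern, URL and ids below are made-up examples.
def _example_issuetracker_form_value():
    return {
        # all fields of one new entry share the trailing id ("1" here)
        'new_pattern_pattern_1': r'#(?P<issue_id>\d+)',
        'new_pattern_url_1': 'https://bugs.example.com/${issue_id}',
        'new_pattern_prefix_1': '#',
        'new_pattern_description_1': 'Example bug tracker',
        # existing uids listed under 'uid' are scheduled for deletion
        'uid': [],
    }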