@@ -1,135 +1,134 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
|
5 | 4 | # This program is free software: you can redistribute it and/or modify |
|
6 | 5 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 6 | # (only), as published by the Free Software Foundation. |
|
8 | 7 | # |
|
9 | 8 | # This program is distributed in the hope that it will be useful, |
|
10 | 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 11 | # GNU General Public License for more details. |
|
13 | 12 | # |
|
14 | 13 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 15 | # |
|
17 | 16 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 17 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 19 | |
|
21 | 20 | |
|
22 | 21 | import logging |
|
23 | 22 | |
|
24 | 23 | import rhodecode |
|
25 | 24 | from rhodecode.model import meta, db |
|
26 | 25 | from rhodecode.lib.utils2 import obfuscate_url_pw, get_encryption_key |
|
27 | 26 | |
|
28 | 27 | log = logging.getLogger(__name__) |
|
29 | 28 | |
|
30 | 29 | |
|
31 | 30 | def init_model(engine, encryption_key=None): |
|
32 | 31 | """ |
|
33 | 32 | Initializes the db session and binds the engine to the metadata. |
|
34 | 33 | Call this before using any of the tables or classes in the model, |
|
35 | 34 | preferably once at application start. |
|
36 | 35 | |
|
37 | 36 | :param engine: engine to bind to |
|
38 | 37 | """ |
|
39 | 38 | engine_str = obfuscate_url_pw(str(engine.url)) |
|
40 | 39 | log.info("RhodeCode %s initializing db for %s", rhodecode.__version__, engine_str) |
|
41 | 40 | meta.Base.metadata.bind = engine |
|
42 | 41 | db.ENCRYPTION_KEY = encryption_key |
|
43 | 42 | |
|
44 | 43 | |
|
45 | 44 | def init_model_encryption(migration_models, config=None): |
|
46 | 45 | from pyramid.threadlocal import get_current_registry |
|
47 | 46 | config = config or get_current_registry().settings |
|
48 | 47 | migration_models.ENCRYPTION_KEY = get_encryption_key(config) |
|
49 | 48 | db.ENCRYPTION_KEY = get_encryption_key(config) |
|
50 | 49 | |
|
51 | 50 | |
|
52 | 51 | class BaseModel(object): |
|
53 | 52 | """ |
|
54 | 53 | Base Model for all RhodeCode models; it adds a SQLAlchemy session |
|
55 | 54 | to the model instance |
|
56 | 55 | |
|
57 | 56 | :param sa: If passed it reuses this session instead of creating a new one |
|
58 | 57 | """ |
|
59 | 58 | |
|
60 | 59 | cls = None # override in child class |
|
61 | 60 | |
|
62 | 61 | def __init__(self, sa=None): |
|
63 | 62 | if sa is not None: |
|
64 | 63 | self.sa = sa |
|
65 | 64 | else: |
|
66 | 65 | self.sa = meta.Session() |
|
67 | 66 | |
|
68 | 67 | def _get_instance(self, cls, instance, callback=None): |
|
69 | 68 | """ |
|
70 | 69 | Gets instance of given cls using some simple lookup mechanism. |
|
71 | 70 | |
|
72 | 71 | :param cls: classes to fetch |
|
73 | 72 | :param instance: int or Instance |
|
74 | 73 | :param callback: callback to call if all lookups failed |
|
75 | 74 | """ |
|
76 | 75 | |
|
77 | 76 | if isinstance(instance, cls): |
|
78 | 77 | return instance |
|
79 | 78 | elif isinstance(instance, int): |
|
80 | 79 | if isinstance(cls, tuple): |
|
81 | 80 | # if we pass multiple classes we pick the first one for .get() |
|
82 | 81 | cls = cls[0] |
|
83 | 82 | return cls.get(instance) |
|
84 | 83 | else: |
|
85 | 84 | if instance: |
|
86 | 85 | if callback is None: |
|
87 | 86 | raise Exception( |
|
88 | 87 | 'given object must be int or Instance of %s ' |
|
89 | 88 | 'got %s, no callback provided' % (cls, type(instance)) |
|
90 | 89 | ) |
|
91 | 90 | else: |
|
92 | 91 | return callback(instance) |
|
93 | 92 | |
|
94 | 93 | def _get_user(self, user): |
|
95 | 94 | """ |
|
96 | 95 | Helper method to get user by ID, or username fallback |
|
97 | 96 | |
|
98 | 97 | :param user: UserID, username, or User instance |
|
99 | 98 | """ |
|
100 | 99 | return self._get_instance( |
|
101 | 100 | db.User, user, callback=db.User.get_by_username) |
|
102 | 101 | |
|
103 | 102 | def _get_user_group(self, user_group): |
|
104 | 103 | """ |
|
105 | 104 | Helper method to get user group by ID, or user group name fallback |
|
106 | 105 | |
|
107 | 106 | :param user_group: UserGroupID, user_group_name, or UserGroup instance |
|
108 | 107 | """ |
|
109 | 108 | return self._get_instance( |
|
110 | 109 | db.UserGroup, user_group, callback=db.UserGroup.get_by_group_name) |
|
111 | 110 | |
|
112 | 111 | def _get_repo(self, repository): |
|
113 | 112 | """ |
|
114 | 113 | Helper method to get repository by ID, or repository name |
|
115 | 114 | |
|
116 | 115 | :param repository: RepoID, repository name or Repository Instance |
|
117 | 116 | """ |
|
118 | 117 | return self._get_instance( |
|
119 | 118 | db.Repository, repository, callback=db.Repository.get_by_repo_name) |
|
120 | 119 | |
|
121 | 120 | def _get_perm(self, permission): |
|
122 | 121 | """ |
|
123 | 122 | Helper method to get permission by ID, or permission name |
|
124 | 123 | |
|
125 | 124 | :param permission: PermissionID, permission_name or Permission instance |
|
126 | 125 | """ |
|
127 | 126 | return self._get_instance( |
|
128 | 127 | db.Permission, permission, callback=db.Permission.get_by_key) |
|
129 | 128 | |
|
130 | 129 | @classmethod |
|
131 | 130 | def get_all(cls): |
|
132 | 131 | """ |
|
133 | 132 | Returns all instances of what is defined in `cls` class variable |
|
134 | 133 | """ |
|
135 | 134 | return cls.cls.getAll() |
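A minimal usage sketch of the lookup helpers defined in this file (illustrative only, not part of the diff; the user and repository names are made up):

    from rhodecode.model import BaseModel

    model = BaseModel()                   # opens meta.Session() internally
    user_a = model._get_user(4)           # int -> db.User.get(4)
    user_b = model._get_user('admin')     # str -> db.User.get_by_username('admin')
    repo = model._get_repo('some/repo')   # str -> db.Repository.get_by_repo_name('some/repo')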
@@ -1,124 +1,124 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
1 | ||
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2013-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | authentication tokens model for RhodeCode |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import time |
|
26 | 26 | import logging |
|
27 | 27 | import traceback |
|
28 | 28 | from sqlalchemy import or_ |
|
29 | 29 | |
|
30 | 30 | from rhodecode.model import BaseModel |
|
31 | 31 | from rhodecode.model.db import UserApiKeys |
|
32 | 32 | from rhodecode.model.meta import Session |
|
33 | 33 | |
|
34 | 34 | log = logging.getLogger(__name__) |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | class AuthTokenModel(BaseModel): |
|
38 | 38 | cls = UserApiKeys |
|
39 | 39 | |
|
40 | 40 | @classmethod |
|
41 | 41 | def get_lifetime_values(cls, translator): |
|
42 | 42 | from rhodecode.lib import helpers as h |
|
43 | 43 | _ = translator |
|
44 | 44 | |
|
45 | 45 | def date_after_min(mins): |
|
46 | 46 | after = time.time() + (60 * mins) |
|
47 | 47 | return h.format_date(h.time_to_datetime(after)) |
|
48 | 48 | |
|
49 | 49 | return [ |
|
50 | 50 | (str(-1), |
|
51 | 51 | _('forever')), |
|
52 | 52 | (str(5), |
|
53 | 53 | _('5 minutes {end_date}').format(end_date=date_after_min(5))), |
|
54 | 54 | (str(60), |
|
55 | 55 | _('1 hour {end_date}').format(end_date=date_after_min(60))), |
|
56 | 56 | (str(60 * 24), |
|
57 | 57 | _('1 day {end_date}').format(end_date=date_after_min(60 * 24))), |
|
58 | 58 | (str(60 * 24 * 30), |
|
59 | 59 | _('1 month {end_date}').format(end_date=date_after_min(60 * 24 * 30))), |
|
60 | 60 | ] |
|
61 | 61 | |
|
62 | 62 | def create(self, user, description, lifetime=-1, role=UserApiKeys.ROLE_ALL): |
|
63 | 63 | """ |
|
64 | 64 | :param user: user or user_id |
|
65 | 65 | :param description: description of ApiKey |
|
66 | 66 | :param lifetime: expiration time in minutes |
|
67 | 67 | :param role: role for the apikey |
|
68 | 68 | """ |
|
69 | 69 | from rhodecode.lib.auth import generate_auth_token |
|
70 | 70 | |
|
71 | 71 | user = self._get_user(user) |
|
72 | 72 | |
|
73 | 73 | new_auth_token = UserApiKeys() |
|
74 | 74 | new_auth_token.api_key = generate_auth_token(user.username) |
|
75 | 75 | new_auth_token.user_id = user.user_id |
|
76 | 76 | new_auth_token.description = description |
|
77 | 77 | new_auth_token.role = role |
|
78 | 78 | new_auth_token.expires = time.time() + (lifetime * 60) \ |
|
79 | 79 | if lifetime != -1 else -1 |
|
80 | 80 | Session().add(new_auth_token) |
|
81 | 81 | |
|
82 | 82 | return new_auth_token |
|
83 | 83 | |
|
84 | 84 | def delete(self, auth_token_id, user=None): |
|
85 | 85 | """ |
|
86 | 86 | Deletes given api_key, if user is set it also filters the object for |
|
87 | 87 | deletion by given user. |
|
88 | 88 | """ |
|
89 | 89 | auth_token = UserApiKeys.query().filter( |
|
90 | 90 | UserApiKeys.user_api_key_id == auth_token_id) |
|
91 | 91 | |
|
92 | 92 | if user: |
|
93 | 93 | user = self._get_user(user) |
|
94 | 94 | auth_token = auth_token.filter(UserApiKeys.user_id == user.user_id) |
|
95 | 95 | auth_token = auth_token.scalar() |
|
96 | 96 | |
|
97 | 97 | if auth_token: |
|
98 | 98 | try: |
|
99 | 99 | Session().delete(auth_token) |
|
100 | 100 | except Exception: |
|
101 | 101 | log.error(traceback.format_exc()) |
|
102 | 102 | raise |
|
103 | 103 | |
|
104 | 104 | def get_auth_tokens(self, user, show_expired=True): |
|
105 | 105 | user = self._get_user(user) |
|
106 | 106 | user_auth_tokens = UserApiKeys.query()\ |
|
107 | 107 | .filter(UserApiKeys.user_id == user.user_id) |
|
108 | 108 | if not show_expired: |
|
109 | 109 | user_auth_tokens = user_auth_tokens\ |
|
110 | 110 | .filter(or_(UserApiKeys.expires == -1, |
|
111 | 111 | UserApiKeys.expires >= time.time())) |
|
112 | 112 | user_auth_tokens = user_auth_tokens.order_by( |
|
113 | 113 | UserApiKeys.user_api_key_id) |
|
114 | 114 | return user_auth_tokens |
|
115 | 115 | |
|
116 | 116 | def get_auth_token(self, auth_token): |
|
117 | 117 | auth_token = UserApiKeys.query().filter( |
|
118 | 118 | UserApiKeys.api_key == auth_token) |
|
119 | 119 | auth_token = auth_token \ |
|
120 | 120 | .filter(or_(UserApiKeys.expires == -1, |
|
121 | 121 | UserApiKeys.expires >= time.time()))\ |
|
122 | 122 | .first() |
|
123 | 123 | |
|
124 | 124 | return auth_token |
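A short usage sketch of the token model above (illustrative only, not part of the diff; the module path and the username are assumptions, and since create() only add()s to the session, the caller commits):

    from rhodecode.model.auth_token import AuthTokenModel   # assumed module path
    from rhodecode.model.db import UserApiKeys
    from rhodecode.model.meta import Session

    token = AuthTokenModel().create(
        user='admin',                  # resolved via _get_user()
        description='CI token',
        lifetime=60 * 24,              # minutes; -1 means 'forever'
        role=UserApiKeys.ROLE_ALL)
    Session().commit()                 # create() only add()s, the caller commits
    found = AuthTokenModel().get_auth_token(token.api_key)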
@@ -1,403 +1,402 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
|
5 | 4 | # This program is free software: you can redistribute it and/or modify |
|
6 | 5 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 6 | # (only), as published by the Free Software Foundation. |
|
8 | 7 | # |
|
9 | 8 | # This program is distributed in the hope that it will be useful, |
|
10 | 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 11 | # GNU General Public License for more details. |
|
13 | 12 | # |
|
14 | 13 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 15 | # |
|
17 | 16 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 17 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 19 | |
|
21 | 20 | |
|
22 | 21 | import itertools |
|
23 | 22 | import logging |
|
24 | 23 | import collections |
|
25 | 24 | |
|
26 | 25 | from rhodecode.model import BaseModel |
|
27 | 26 | from rhodecode.model.db import ( |
|
28 | 27 | ChangesetStatus, ChangesetComment, PullRequest, PullRequestReviewers, Session) |
|
29 | 28 | from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError |
|
30 | 29 | from rhodecode.lib.markup_renderer import ( |
|
31 | 30 | DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer) |
|
32 | 31 | |
|
33 | 32 | log = logging.getLogger(__name__) |
|
34 | 33 | |
|
35 | 34 | |
|
36 | 35 | class ChangesetStatusModel(BaseModel): |
|
37 | 36 | |
|
38 | 37 | cls = ChangesetStatus |
|
39 | 38 | |
|
40 | 39 | def __get_changeset_status(self, changeset_status): |
|
41 | 40 | return self._get_instance(ChangesetStatus, changeset_status) |
|
42 | 41 | |
|
43 | 42 | def __get_pull_request(self, pull_request): |
|
44 | 43 | return self._get_instance(PullRequest, pull_request) |
|
45 | 44 | |
|
46 | 45 | def _get_status_query(self, repo, revision, pull_request, |
|
47 | 46 | with_revisions=False): |
|
48 | 47 | repo = self._get_repo(repo) |
|
49 | 48 | |
|
50 | 49 | q = ChangesetStatus.query()\ |
|
51 | 50 | .filter(ChangesetStatus.repo == repo) |
|
52 | 51 | if not with_revisions: |
|
53 | 52 | q = q.filter(ChangesetStatus.version == 0) |
|
54 | 53 | |
|
55 | 54 | if revision: |
|
56 | 55 | q = q.filter(ChangesetStatus.revision == revision) |
|
57 | 56 | elif pull_request: |
|
58 | 57 | pull_request = self.__get_pull_request(pull_request) |
|
59 | 58 | # TODO: johbo: Think about the impact of this join, there must |
|
60 | 59 | # be a reason why ChangesetStatus and ChanagesetComment is linked |
|
61 | 60 | # to the pull request. Might be that we want to do the same for |
|
62 | 61 | # the pull_request_version_id. |
|
63 | 62 | q = q.join(ChangesetComment).filter( |
|
64 | 63 | ChangesetStatus.pull_request == pull_request, |
|
65 | 64 | ChangesetComment.pull_request_version_id == None) |
|
66 | 65 | else: |
|
67 | 66 | raise Exception('Please specify revision or pull_request') |
|
68 | 67 | q = q.order_by(ChangesetStatus.version.asc()) |
|
69 | 68 | return q |
|
70 | 69 | |
|
71 | 70 | def calculate_group_vote(self, group_id, group_statuses_by_reviewers, |
|
72 | 71 | trim_votes=True): |
|
73 | 72 | """ |
|
74 | 73 | Calculate status based on given group members, and voting rule |
|
75 | 74 | |
|
76 | 75 | |
|
77 | 76 | group1 - 4 members, 3 required for approval |
|
78 | 77 | user1 - approved |
|
79 | 78 | user2 - reject |
|
80 | 79 | user3 - approved |
|
81 | 80 | user4 - rejected |
|
82 | 81 | |
|
83 | 82 | final_state: rejected, reasons not at least 3 votes |
|
84 | 83 | |
|
85 | 84 | |
|
86 | 85 | group1 - 4 members, 2 required for approval |
|
87 | 86 | user1 - approved |
|
88 | 87 | user2 - reject |
|
89 | 88 | user3 - approved |
|
90 | 89 | user4 - rejected |
|
91 | 90 | |
|
92 | 91 | final_state: approved, reasons got at least 2 approvals |
|
93 | 92 | |
|
94 | 93 | group1 - 4 members, ALL required for approval |
|
95 | 94 | user1 - approved |
|
96 | 95 | user2 - reject |
|
97 | 96 | user3 - approved |
|
98 | 97 | user4 - rejected |
|
99 | 98 | |
|
100 | 99 | final_state: rejected, reasons not all approvals |
|
101 | 100 | |
|
102 | 101 | |
|
103 | 102 | group1 - 4 members, ALL required for approval |
|
104 | 103 | user1 - approved |
|
105 | 104 | user2 - approved |
|
106 | 105 | user3 - approved |
|
107 | 106 | user4 - approved |
|
108 | 107 | |
|
109 | 108 | final_state: approved, reason all approvals received |
|
110 | 109 | |
|
111 | 110 | group1 - 4 members, 5 required for approval |
|
112 | 111 | (approval should be shorted to number of actual members) |
|
113 | 112 | |
|
114 | 113 | user1 - approved |
|
115 | 114 | user2 - approved |
|
116 | 115 | user3 - approved |
|
117 | 116 | user4 - approved |
|
118 | 117 | |
|
119 | 118 | final_state: approved, reason all approvals received |
|
120 | 119 | |
|
121 | 120 | """ |
|
122 | 121 | group_vote_data = {} |
|
123 | 122 | got_rule = False |
|
124 | 123 | members = collections.OrderedDict() |
|
125 | 124 | for review_obj, user, reasons, mandatory, statuses \ |
|
126 | 125 | in group_statuses_by_reviewers: |
|
127 | 126 | |
|
128 | 127 | if not got_rule: |
|
129 | 128 | group_vote_data = review_obj.rule_user_group_data() |
|
130 | 129 | got_rule = bool(group_vote_data) |
|
131 | 130 | |
|
132 | 131 | members[user.user_id] = statuses |
|
133 | 132 | |
|
134 | 133 | if not group_vote_data: |
|
135 | 134 | return [] |
|
136 | 135 | |
|
137 | 136 | required_votes = group_vote_data['vote_rule'] |
|
138 | 137 | if required_votes == -1: |
|
139 | 138 | # -1 means all required, so we replace it with how many people |
|
140 | 139 | # are in the members |
|
141 | 140 | required_votes = len(members) |
|
142 | 141 | |
|
143 | 142 | if trim_votes and required_votes > len(members): |
|
144 | 143 | # we require more votes than we have members in the group |
|
145 | 144 | # in this case we trim the required votes to the number of members |
|
146 | 145 | required_votes = len(members) |
|
147 | 146 | |
|
148 | 147 | approvals = sum([ |
|
149 | 148 | 1 for statuses in members.values() |
|
150 | 149 | if statuses and |
|
151 | 150 | statuses[0][1].status == ChangesetStatus.STATUS_APPROVED]) |
|
152 | 151 | |
|
153 | 152 | calculated_votes = [] |
|
154 | 153 | # we have all votes from users, now check if we have enough votes |
|
155 | 154 | # to fill other |
|
156 | 155 | fill_in = ChangesetStatus.STATUS_UNDER_REVIEW |
|
157 | 156 | if approvals >= required_votes: |
|
158 | 157 | fill_in = ChangesetStatus.STATUS_APPROVED |
|
159 | 158 | |
|
160 | 159 | for member, statuses in members.items(): |
|
161 | 160 | if statuses: |
|
162 | 161 | ver, latest = statuses[0] |
|
163 | 162 | if fill_in == ChangesetStatus.STATUS_APPROVED: |
|
164 | 163 | calculated_votes.append(fill_in) |
|
165 | 164 | else: |
|
166 | 165 | calculated_votes.append(latest.status) |
|
167 | 166 | else: |
|
168 | 167 | calculated_votes.append(fill_in) |
|
169 | 168 | |
|
170 | 169 | return calculated_votes |
|
171 | 170 | |
|
172 | 171 | def calculate_status(self, statuses_by_reviewers): |
|
173 | 172 | """ |
|
174 | 173 | Given the approval statuses from reviewers, calculates final approval |
|
175 | 174 | status. There can only be 3 results, all approved, all rejected. If |
|
176 | 175 | there is no consensus the PR is under review. |
|
177 | 176 | |
|
178 | 177 | :param statuses_by_reviewers: |
|
179 | 178 | """ |
|
180 | 179 | |
|
181 | 180 | def group_rule(element): |
|
182 | 181 | review_obj = element[0] |
|
183 | 182 | rule_data = review_obj.rule_user_group_data() |
|
184 | 183 | if rule_data and rule_data['id']: |
|
185 | 184 | return rule_data['id'] |
|
186 | 185 | |
|
187 | 186 | voting_groups = itertools.groupby( |
|
188 | 187 | sorted(statuses_by_reviewers, key=group_rule), group_rule) |
|
189 | 188 | |
|
190 | 189 | voting_by_groups = [(x, list(y)) for x, y in voting_groups] |
|
191 | 190 | |
|
192 | 191 | reviewers_number = len(statuses_by_reviewers) |
|
193 | 192 | votes = collections.defaultdict(int) |
|
194 | 193 | for group, group_statuses_by_reviewers in voting_by_groups: |
|
195 | 194 | if group: |
|
196 | 195 | # calculate how the "group" voted |
|
197 | 196 | for vote_status in self.calculate_group_vote( |
|
198 | 197 | group, group_statuses_by_reviewers): |
|
199 | 198 | votes[vote_status] += 1 |
|
200 | 199 | else: |
|
201 | 200 | |
|
202 | 201 | for review_obj, user, reasons, mandatory, statuses \ |
|
203 | 202 | in group_statuses_by_reviewers: |
|
204 | 203 | # individual vote |
|
205 | 204 | if statuses: |
|
206 | 205 | ver, latest = statuses[0] |
|
207 | 206 | votes[latest.status] += 1 |
|
208 | 207 | |
|
209 | 208 | approved_votes_count = votes[ChangesetStatus.STATUS_APPROVED] |
|
210 | 209 | rejected_votes_count = votes[ChangesetStatus.STATUS_REJECTED] |
|
211 | 210 | |
|
212 | 211 | # TODO(marcink): with group voting, how does rejected work, |
|
213 | 212 | # do we ever get rejected state ? |
|
214 | 213 | |
|
215 | 214 | if approved_votes_count and (approved_votes_count == reviewers_number): |
|
216 | 215 | return ChangesetStatus.STATUS_APPROVED |
|
217 | 216 | |
|
218 | 217 | if rejected_votes_count and (rejected_votes_count == reviewers_number): |
|
219 | 218 | return ChangesetStatus.STATUS_REJECTED |
|
220 | 219 | |
|
221 | 220 | return ChangesetStatus.STATUS_UNDER_REVIEW |
|
222 | 221 | |
|
223 | 222 | def get_statuses(self, repo, revision=None, pull_request=None, |
|
224 | 223 | with_revisions=False): |
|
225 | 224 | q = self._get_status_query(repo, revision, pull_request, |
|
226 | 225 | with_revisions) |
|
227 | 226 | return q.all() |
|
228 | 227 | |
|
229 | 228 | def get_status(self, repo, revision=None, pull_request=None, as_str=True): |
|
230 | 229 | """ |
|
231 | 230 | Returns latest status of changeset for given revision or for given |
|
232 | 231 | pull request. Statuses are versioned inside a table itself and |
|
233 | 232 | version == 0 is always the current one |
|
234 | 233 | |
|
235 | 234 | :param repo: |
|
236 | 235 | :param revision: 40char hash or None |
|
237 | 236 | :param pull_request: pull_request reference |
|
238 | 237 | :param as_str: return status as string not object |
|
239 | 238 | """ |
|
240 | 239 | q = self._get_status_query(repo, revision, pull_request) |
|
241 | 240 | |
|
242 | 241 | # need to use first here since there can be multiple statuses |
|
243 | 242 | # returned from pull_request |
|
244 | 243 | status = q.first() |
|
245 | 244 | if as_str: |
|
246 | 245 | status = status.status if status else status |
|
247 | 246 | st = status or ChangesetStatus.DEFAULT |
|
248 | 247 | return str(st) |
|
249 | 248 | return status |
|
250 | 249 | |
|
251 | 250 | def _render_auto_status_message( |
|
252 | 251 | self, status, commit_id=None, pull_request=None): |
|
253 | 252 | """ |
|
254 | 253 | render the message using DEFAULT_COMMENTS_RENDERER (RST renderer), |
|
255 | 254 | so it's always looking the same disregarding on which default |
|
256 | 255 | renderer system is using. |
|
257 | 256 | |
|
258 | 257 | :param status: status text to change into |
|
259 | 258 | :param commit_id: the commit_id we change the status for |
|
260 | 259 | :param pull_request: the pull request we change the status for |
|
261 | 260 | """ |
|
262 | 261 | |
|
263 | 262 | new_status = ChangesetStatus.get_status_lbl(status) |
|
264 | 263 | |
|
265 | 264 | params = { |
|
266 | 265 | 'new_status_label': new_status, |
|
267 | 266 | 'pull_request': pull_request, |
|
268 | 267 | 'commit_id': commit_id, |
|
269 | 268 | } |
|
270 | 269 | renderer = RstTemplateRenderer() |
|
271 | 270 | return renderer.render('auto_status_change.mako', **params) |
|
272 | 271 | |
|
273 | 272 | def set_status(self, repo, status, user, comment=None, revision=None, |
|
274 | 273 | pull_request=None, dont_allow_on_closed_pull_request=False): |
|
275 | 274 | """ |
|
276 | 275 | Creates new status for changeset or updates the old ones bumping their |
|
277 | 276 | version, leaving the current status at |
|
278 | 277 | |
|
279 | 278 | :param repo: |
|
280 | 279 | :param revision: |
|
281 | 280 | :param status: |
|
282 | 281 | :param user: |
|
283 | 282 | :param comment: |
|
284 | 283 | :param dont_allow_on_closed_pull_request: don't allow a status change |
|
285 | 284 | if last status was for pull request and it's closed. We shouldn't |
|
286 | 285 | mess around this manually |
|
287 | 286 | """ |
|
288 | 287 | repo = self._get_repo(repo) |
|
289 | 288 | |
|
290 | 289 | q = ChangesetStatus.query() |
|
291 | 290 | |
|
292 | 291 | if revision: |
|
293 | 292 | q = q.filter(ChangesetStatus.repo == repo) |
|
294 | 293 | q = q.filter(ChangesetStatus.revision == revision) |
|
295 | 294 | elif pull_request: |
|
296 | 295 | pull_request = self.__get_pull_request(pull_request) |
|
297 | 296 | q = q.filter(ChangesetStatus.repo == pull_request.source_repo) |
|
298 | 297 | q = q.filter(ChangesetStatus.revision.in_(pull_request.revisions)) |
|
299 | 298 | cur_statuses = q.all() |
|
300 | 299 | |
|
301 | 300 | # if statuses exists and last is associated with a closed pull request |
|
302 | 301 | # we need to check if we can allow this status change |
|
303 | 302 | if (dont_allow_on_closed_pull_request and cur_statuses |
|
304 | 303 | and getattr(cur_statuses[0].pull_request, 'status', '') |
|
305 | 304 | == PullRequest.STATUS_CLOSED): |
|
306 | 305 | raise StatusChangeOnClosedPullRequestError( |
|
307 | 306 | 'Changing status on closed pull request is not allowed' |
|
308 | 307 | ) |
|
309 | 308 | |
|
310 | 309 | # update all current statuses with older version |
|
311 | 310 | if cur_statuses: |
|
312 | 311 | for st in cur_statuses: |
|
313 | 312 | st.version += 1 |
|
314 | 313 | Session().add(st) |
|
315 | 314 | Session().flush() |
|
316 | 315 | |
|
317 | 316 | def _create_status(user, repo, status, comment, revision, pull_request): |
|
318 | 317 | new_status = ChangesetStatus() |
|
319 | 318 | new_status.author = self._get_user(user) |
|
320 | 319 | new_status.repo = self._get_repo(repo) |
|
321 | 320 | new_status.status = status |
|
322 | 321 | new_status.comment = comment |
|
323 | 322 | new_status.revision = revision |
|
324 | 323 | new_status.pull_request = pull_request |
|
325 | 324 | return new_status |
|
326 | 325 | |
|
327 | 326 | if not comment: |
|
328 | 327 | from rhodecode.model.comment import CommentsModel |
|
329 | 328 | comment = CommentsModel().create( |
|
330 | 329 | text=self._render_auto_status_message( |
|
331 | 330 | status, commit_id=revision, pull_request=pull_request), |
|
332 | 331 | repo=repo, |
|
333 | 332 | user=user, |
|
334 | 333 | pull_request=pull_request, |
|
335 | 334 | send_email=False, renderer=DEFAULT_COMMENTS_RENDERER |
|
336 | 335 | ) |
|
337 | 336 | |
|
338 | 337 | if revision: |
|
339 | 338 | new_status = _create_status( |
|
340 | 339 | user=user, repo=repo, status=status, comment=comment, |
|
341 | 340 | revision=revision, pull_request=pull_request) |
|
342 | 341 | Session().add(new_status) |
|
343 | 342 | return new_status |
|
344 | 343 | elif pull_request: |
|
345 | 344 | # pull request can have more than one revision associated to it |
|
346 | 345 | # we need to create new version for each one |
|
347 | 346 | new_statuses = [] |
|
348 | 347 | repo = pull_request.source_repo |
|
349 | 348 | for rev in pull_request.revisions: |
|
350 | 349 | new_status = _create_status( |
|
351 | 350 | user=user, repo=repo, status=status, comment=comment, |
|
352 | 351 | revision=rev, pull_request=pull_request) |
|
353 | 352 | new_statuses.append(new_status) |
|
354 | 353 | Session().add(new_status) |
|
355 | 354 | return new_statuses |
|
356 | 355 | |
|
357 | 356 | def aggregate_votes_by_user(self, commit_statuses, reviewers_data, user=None): |
|
358 | 357 | |
|
359 | 358 | commit_statuses_map = collections.defaultdict(list) |
|
360 | 359 | for st in commit_statuses: |
|
361 | 360 | commit_statuses_map[st.author.username] += [st] |
|
362 | 361 | |
|
363 | 362 | reviewers = [] |
|
364 | 363 | |
|
365 | 364 | def version(commit_status): |
|
366 | 365 | return commit_status.version |
|
367 | 366 | |
|
368 | 367 | for obj in reviewers_data: |
|
369 | 368 | if not obj.user: |
|
370 | 369 | continue |
|
371 | 370 | if user and obj.user.username != user.username: |
|
372 | 371 | # single user filter |
|
373 | 372 | continue |
|
374 | 373 | |
|
375 | 374 | statuses = commit_statuses_map.get(obj.user.username, None) |
|
376 | 375 | if statuses: |
|
377 | 376 | status_groups = itertools.groupby( |
|
378 | 377 | sorted(statuses, key=version), version) |
|
379 | 378 | statuses = [(x, list(y)[0]) for x, y in status_groups] |
|
380 | 379 | |
|
381 | 380 | reviewers.append((obj, obj.user, obj.reasons, obj.mandatory, statuses)) |
|
382 | 381 | |
|
383 | 382 | if user: |
|
384 | 383 | return reviewers[0] if reviewers else reviewers |
|
385 | 384 | else: |
|
386 | 385 | return reviewers |
|
387 | 386 | |
|
388 | 387 | def reviewers_statuses(self, pull_request, user=None): |
|
389 | 388 | _commit_statuses = self.get_statuses( |
|
390 | 389 | pull_request.source_repo, |
|
391 | 390 | pull_request=pull_request, |
|
392 | 391 | with_revisions=True) |
|
393 | 392 | reviewers = pull_request.get_pull_request_reviewers( |
|
394 | 393 | role=PullRequestReviewers.ROLE_REVIEWER) |
|
395 | 394 | return self.aggregate_votes_by_user(_commit_statuses, reviewers, user=user) |
|
396 | 395 | |
|
397 | 396 | def calculated_review_status(self, pull_request): |
|
398 | 397 | """ |
|
399 | 398 | calculate pull request status based on reviewers, it should be a list |
|
400 | 399 | of two element lists. |
|
401 | 400 | """ |
|
402 | 401 | reviewers = self.reviewers_statuses(pull_request) |
|
403 | 402 | return self.calculate_status(reviewers) |
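A short usage sketch of the status model above (illustrative only, not part of the diff; the module path, repository name and commit hash are assumptions, and the call assumes a web request context since, with comment=None, an auto status-change comment is created via CommentsModel):

    from rhodecode.model.changeset_status import ChangesetStatusModel  # assumed module path
    from rhodecode.model.db import ChangesetStatus
    from rhodecode.model.meta import Session

    status_model = ChangesetStatusModel()
    status_model.set_status(
        repo='some/repo',
        status=ChangesetStatus.STATUS_APPROVED,
        user='admin',
        revision='a' * 40)             # 40-char commit hash, per get_status()
    Session().commit()
    current = status_model.get_status('some/repo', revision='a' * 40)  # 'approved'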
@@ -1,857 +1,857 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
1 | ||
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | comments model for RhodeCode |
|
23 | 23 | """ |
|
24 | 24 | import datetime |
|
25 | 25 | |
|
26 | 26 | import logging |
|
27 | 27 | import traceback |
|
28 | 28 | import collections |
|
29 | 29 | |
|
30 | 30 | from pyramid.threadlocal import get_current_registry, get_current_request |
|
31 | 31 | from sqlalchemy.sql.expression import null |
|
32 | 32 | from sqlalchemy.sql.functions import coalesce |
|
33 | 33 | |
|
34 | 34 | from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils |
|
35 | 35 | from rhodecode.lib import audit_logger |
|
36 | 36 | from rhodecode.lib.exceptions import CommentVersionMismatch |
|
37 | 37 | from rhodecode.lib.utils2 import extract_mentioned_users, safe_str, safe_int |
|
38 | 38 | from rhodecode.model import BaseModel |
|
39 | 39 | from rhodecode.model.db import ( |
|
40 | 40 | false, true, |
|
41 | 41 | ChangesetComment, |
|
42 | 42 | User, |
|
43 | 43 | Notification, |
|
44 | 44 | PullRequest, |
|
45 | 45 | AttributeDict, |
|
46 | 46 | ChangesetCommentHistory, |
|
47 | 47 | ) |
|
48 | 48 | from rhodecode.model.notification import NotificationModel |
|
49 | 49 | from rhodecode.model.meta import Session |
|
50 | 50 | from rhodecode.model.settings import VcsSettingsModel |
|
51 | 51 | from rhodecode.model.notification import EmailNotificationModel |
|
52 | 52 | from rhodecode.model.validation_schema.schemas import comment_schema |
|
53 | 53 | |
|
54 | 54 | |
|
55 | 55 | log = logging.getLogger(__name__) |
|
56 | 56 | |
|
57 | 57 | |
|
58 | 58 | class CommentsModel(BaseModel): |
|
59 | 59 | |
|
60 | 60 | cls = ChangesetComment |
|
61 | 61 | |
|
62 | 62 | DIFF_CONTEXT_BEFORE = 3 |
|
63 | 63 | DIFF_CONTEXT_AFTER = 3 |
|
64 | 64 | |
|
65 | 65 | def __get_commit_comment(self, changeset_comment): |
|
66 | 66 | return self._get_instance(ChangesetComment, changeset_comment) |
|
67 | 67 | |
|
68 | 68 | def __get_pull_request(self, pull_request): |
|
69 | 69 | return self._get_instance(PullRequest, pull_request) |
|
70 | 70 | |
|
71 | 71 | def _extract_mentions(self, s): |
|
72 | 72 | user_objects = [] |
|
73 | 73 | for username in extract_mentioned_users(s): |
|
74 | 74 | user_obj = User.get_by_username(username, case_insensitive=True) |
|
75 | 75 | if user_obj: |
|
76 | 76 | user_objects.append(user_obj) |
|
77 | 77 | return user_objects |
|
78 | 78 | |
|
79 | 79 | def _get_renderer(self, global_renderer='rst', request=None): |
|
80 | 80 | request = request or get_current_request() |
|
81 | 81 | |
|
82 | 82 | try: |
|
83 | 83 | global_renderer = request.call_context.visual.default_renderer |
|
84 | 84 | except AttributeError: |
|
85 | 85 | log.debug("Renderer not set, falling back " |
|
86 | 86 | "to default renderer '%s'", global_renderer) |
|
87 | 87 | except Exception: |
|
88 | 88 | log.error(traceback.format_exc()) |
|
89 | 89 | return global_renderer |
|
90 | 90 | |
|
91 | 91 | def aggregate_comments(self, comments, versions, show_version, inline=False): |
|
92 | 92 | # group by versions, and count until, and display objects |
|
93 | 93 | |
|
94 | 94 | comment_groups = collections.defaultdict(list) |
|
95 | 95 | [comment_groups[_co.pull_request_version_id].append(_co) for _co in comments] |
|
96 | 96 | |
|
97 | 97 | def yield_comments(pos): |
|
98 | 98 | for co in comment_groups[pos]: |
|
99 | 99 | yield co |
|
100 | 100 | |
|
101 | 101 | comment_versions = collections.defaultdict( |
|
102 | 102 | lambda: collections.defaultdict(list)) |
|
103 | 103 | prev_prvid = -1 |
|
104 | 104 | # fake last entry with None, to aggregate on "latest" version which |
|
105 | 105 | # doesn't have a pull_request_version_id |
|
106 | 106 | for ver in versions + [AttributeDict({'pull_request_version_id': None})]: |
|
107 | 107 | prvid = ver.pull_request_version_id |
|
108 | 108 | if prev_prvid == -1: |
|
109 | 109 | prev_prvid = prvid |
|
110 | 110 | |
|
111 | 111 | for co in yield_comments(prvid): |
|
112 | 112 | comment_versions[prvid]['at'].append(co) |
|
113 | 113 | |
|
114 | 114 | # save until |
|
115 | 115 | current = comment_versions[prvid]['at'] |
|
116 | 116 | prev_until = comment_versions[prev_prvid]['until'] |
|
117 | 117 | cur_until = prev_until + current |
|
118 | 118 | comment_versions[prvid]['until'].extend(cur_until) |
|
119 | 119 | |
|
120 | 120 | # save outdated |
|
121 | 121 | if inline: |
|
122 | 122 | outdated = [x for x in cur_until |
|
123 | 123 | if x.outdated_at_version(show_version)] |
|
124 | 124 | else: |
|
125 | 125 | outdated = [x for x in cur_until |
|
126 | 126 | if x.older_than_version(show_version)] |
|
127 | 127 | display = [x for x in cur_until if x not in outdated] |
|
128 | 128 | |
|
129 | 129 | comment_versions[prvid]['outdated'] = outdated |
|
130 | 130 | comment_versions[prvid]['display'] = display |
|
131 | 131 | |
|
132 | 132 | prev_prvid = prvid |
|
133 | 133 | |
|
134 | 134 | return comment_versions |
|
135 | 135 | |
|
136 | 136 | def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None): |
|
137 | 137 | qry = Session().query(ChangesetComment) \ |
|
138 | 138 | .filter(ChangesetComment.repo == repo) |
|
139 | 139 | |
|
140 | 140 | if comment_type and comment_type in ChangesetComment.COMMENT_TYPES: |
|
141 | 141 | qry = qry.filter(ChangesetComment.comment_type == comment_type) |
|
142 | 142 | |
|
143 | 143 | if user: |
|
144 | 144 | user = self._get_user(user) |
|
145 | 145 | if user: |
|
146 | 146 | qry = qry.filter(ChangesetComment.user_id == user.user_id) |
|
147 | 147 | |
|
148 | 148 | if commit_id: |
|
149 | 149 | qry = qry.filter(ChangesetComment.revision == commit_id) |
|
150 | 150 | |
|
151 | 151 | qry = qry.order_by(ChangesetComment.created_on) |
|
152 | 152 | return qry.all() |
|
153 | 153 | |
|
154 | 154 | def get_repository_unresolved_todos(self, repo): |
|
155 | 155 | todos = Session().query(ChangesetComment) \ |
|
156 | 156 | .filter(ChangesetComment.repo == repo) \ |
|
157 | 157 | .filter(ChangesetComment.resolved_by == None) \ |
|
158 | 158 | .filter(ChangesetComment.comment_type |
|
159 | 159 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
160 | 160 | todos = todos.all() |
|
161 | 161 | |
|
162 | 162 | return todos |
|
163 | 163 | |
|
164 | 164 | def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True, include_drafts=True): |
|
165 | 165 | |
|
166 | 166 | todos = Session().query(ChangesetComment) \ |
|
167 | 167 | .filter(ChangesetComment.pull_request == pull_request) \ |
|
168 | 168 | .filter(ChangesetComment.resolved_by == None) \ |
|
169 | 169 | .filter(ChangesetComment.comment_type |
|
170 | 170 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
171 | 171 | |
|
172 | 172 | if not include_drafts: |
|
173 | 173 | todos = todos.filter(ChangesetComment.draft == false()) |
|
174 | 174 | |
|
175 | 175 | if not show_outdated: |
|
176 | 176 | todos = todos.filter( |
|
177 | 177 | coalesce(ChangesetComment.display_state, '') != |
|
178 | 178 | ChangesetComment.COMMENT_OUTDATED) |
|
179 | 179 | |
|
180 | 180 | todos = todos.all() |
|
181 | 181 | |
|
182 | 182 | return todos |
|
183 | 183 | |
|
184 | 184 | def get_pull_request_resolved_todos(self, pull_request, show_outdated=True, include_drafts=True): |
|
185 | 185 | |
|
186 | 186 | todos = Session().query(ChangesetComment) \ |
|
187 | 187 | .filter(ChangesetComment.pull_request == pull_request) \ |
|
188 | 188 | .filter(ChangesetComment.resolved_by != None) \ |
|
189 | 189 | .filter(ChangesetComment.comment_type |
|
190 | 190 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
191 | 191 | |
|
192 | 192 | if not include_drafts: |
|
193 | 193 | todos = todos.filter(ChangesetComment.draft == false()) |
|
194 | 194 | |
|
195 | 195 | if not show_outdated: |
|
196 | 196 | todos = todos.filter( |
|
197 | 197 | coalesce(ChangesetComment.display_state, '') != |
|
198 | 198 | ChangesetComment.COMMENT_OUTDATED) |
|
199 | 199 | |
|
200 | 200 | todos = todos.all() |
|
201 | 201 | |
|
202 | 202 | return todos |
|
203 | 203 | |
|
204 | 204 | def get_pull_request_drafts(self, user_id, pull_request): |
|
205 | 205 | drafts = Session().query(ChangesetComment) \ |
|
206 | 206 | .filter(ChangesetComment.pull_request == pull_request) \ |
|
207 | 207 | .filter(ChangesetComment.user_id == user_id) \ |
|
208 | 208 | .filter(ChangesetComment.draft == true()) |
|
209 | 209 | return drafts.all() |
|
210 | 210 | |
|
211 | 211 | def get_commit_unresolved_todos(self, commit_id, show_outdated=True, include_drafts=True): |
|
212 | 212 | |
|
213 | 213 | todos = Session().query(ChangesetComment) \ |
|
214 | 214 | .filter(ChangesetComment.revision == commit_id) \ |
|
215 | 215 | .filter(ChangesetComment.resolved_by == None) \ |
|
216 | 216 | .filter(ChangesetComment.comment_type |
|
217 | 217 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
218 | 218 | |
|
219 | 219 | if not include_drafts: |
|
220 | 220 | todos = todos.filter(ChangesetComment.draft == false()) |
|
221 | 221 | |
|
222 | 222 | if not show_outdated: |
|
223 | 223 | todos = todos.filter( |
|
224 | 224 | coalesce(ChangesetComment.display_state, '') != |
|
225 | 225 | ChangesetComment.COMMENT_OUTDATED) |
|
226 | 226 | |
|
227 | 227 | todos = todos.all() |
|
228 | 228 | |
|
229 | 229 | return todos |
|
230 | 230 | |
|
231 | 231 | def get_commit_resolved_todos(self, commit_id, show_outdated=True, include_drafts=True): |
|
232 | 232 | |
|
233 | 233 | todos = Session().query(ChangesetComment) \ |
|
234 | 234 | .filter(ChangesetComment.revision == commit_id) \ |
|
235 | 235 | .filter(ChangesetComment.resolved_by != None) \ |
|
236 | 236 | .filter(ChangesetComment.comment_type |
|
237 | 237 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
238 | 238 | |
|
239 | 239 | if not include_drafts: |
|
240 | 240 | todos = todos.filter(ChangesetComment.draft == false()) |
|
241 | 241 | |
|
242 | 242 | if not show_outdated: |
|
243 | 243 | todos = todos.filter( |
|
244 | 244 | coalesce(ChangesetComment.display_state, '') != |
|
245 | 245 | ChangesetComment.COMMENT_OUTDATED) |
|
246 | 246 | |
|
247 | 247 | todos = todos.all() |
|
248 | 248 | |
|
249 | 249 | return todos |
|
250 | 250 | |
|
251 | 251 | def get_commit_inline_comments(self, commit_id, include_drafts=True): |
|
252 | 252 | inline_comments = Session().query(ChangesetComment) \ |
|
253 | 253 | .filter(ChangesetComment.line_no != None) \ |
|
254 | 254 | .filter(ChangesetComment.f_path != None) \ |
|
255 | 255 | .filter(ChangesetComment.revision == commit_id) |
|
256 | 256 | |
|
257 | 257 | if not include_drafts: |
|
258 | 258 | inline_comments = inline_comments.filter(ChangesetComment.draft == false()) |
|
259 | 259 | |
|
260 | 260 | inline_comments = inline_comments.all() |
|
261 | 261 | return inline_comments |
|
262 | 262 | |
|
263 | 263 | def _log_audit_action(self, action, action_data, auth_user, comment): |
|
264 | 264 | audit_logger.store( |
|
265 | 265 | action=action, |
|
266 | 266 | action_data=action_data, |
|
267 | 267 | user=auth_user, |
|
268 | 268 | repo=comment.repo) |
|
269 | 269 | |
|
270 | 270 | def create(self, text, repo, user, commit_id=None, pull_request=None, |
|
271 | 271 | f_path=None, line_no=None, status_change=None, |
|
272 | 272 | status_change_type=None, comment_type=None, is_draft=False, |
|
273 | 273 | resolves_comment_id=None, closing_pr=False, send_email=True, |
|
274 | 274 | renderer=None, auth_user=None, extra_recipients=None): |
|
275 | 275 | """ |
|
276 | 276 | Creates new comment for commit or pull request. |
|
277 | 277 | If status_change is not None, this comment is associated with a |
|
278 | 278 | status change of a commit, or of a commit associated with the pull request |
|
279 | 279 | |
|
280 | 280 | :param text: |
|
281 | 281 | :param repo: |
|
282 | 282 | :param user: |
|
283 | 283 | :param commit_id: |
|
284 | 284 | :param pull_request: |
|
285 | 285 | :param f_path: |
|
286 | 286 | :param line_no: |
|
287 | 287 | :param status_change: Label for status change |
|
288 | 288 | :param comment_type: Type of comment |
|
289 | 289 | :param is_draft: is comment a draft only |
|
290 | 290 | :param resolves_comment_id: id of comment which this one will resolve |
|
291 | 291 | :param status_change_type: type of status change |
|
292 | 292 | :param closing_pr: |
|
293 | 293 | :param send_email: |
|
294 | 294 | :param renderer: pick renderer for this comment |
|
295 | 295 | :param auth_user: current authenticated user calling this method |
|
296 | 296 | :param extra_recipients: list of extra users to be added to recipients |
|
297 | 297 | """ |
|
298 | 298 | |
|
299 | 299 | if not text: |
|
300 | 300 | log.warning('Missing text for comment, skipping...') |
|
301 | 301 | return |
|
302 | 302 | request = get_current_request() |
|
303 | 303 | _ = request.translate |
|
304 | 304 | |
|
305 | 305 | if not renderer: |
|
306 | 306 | renderer = self._get_renderer(request=request) |
|
307 | 307 | |
|
308 | 308 | repo = self._get_repo(repo) |
|
309 | 309 | user = self._get_user(user) |
|
310 | 310 | auth_user = auth_user or user |
|
311 | 311 | |
|
312 | 312 | schema = comment_schema.CommentSchema() |
|
313 | 313 | validated_kwargs = schema.deserialize(dict( |
|
314 | 314 | comment_body=text, |
|
315 | 315 | comment_type=comment_type, |
|
316 | 316 | is_draft=is_draft, |
|
317 | 317 | comment_file=f_path, |
|
318 | 318 | comment_line=line_no, |
|
319 | 319 | renderer_type=renderer, |
|
320 | 320 | status_change=status_change_type, |
|
321 | 321 | resolves_comment_id=resolves_comment_id, |
|
322 | 322 | repo=repo.repo_id, |
|
323 | 323 | user=user.user_id, |
|
324 | 324 | )) |
|
325 | 325 | is_draft = validated_kwargs['is_draft'] |
|
326 | 326 | |
|
327 | 327 | comment = ChangesetComment() |
|
328 | 328 | comment.renderer = validated_kwargs['renderer_type'] |
|
329 | 329 | comment.text = validated_kwargs['comment_body'] |
|
330 | 330 | comment.f_path = validated_kwargs['comment_file'] |
|
331 | 331 | comment.line_no = validated_kwargs['comment_line'] |
|
332 | 332 | comment.comment_type = validated_kwargs['comment_type'] |
|
333 | 333 | comment.draft = is_draft |
|
334 | 334 | |
|
335 | 335 | comment.repo = repo |
|
336 | 336 | comment.author = user |
|
337 | 337 | resolved_comment = self.__get_commit_comment( |
|
338 | 338 | validated_kwargs['resolves_comment_id']) |
|
339 | 339 | |
|
340 | 340 | # check if the comment actually belongs to this PR |
|
341 | 341 | if resolved_comment and resolved_comment.pull_request and \ |
|
342 | 342 | resolved_comment.pull_request != pull_request: |
|
343 | 343 | log.warning('Comment tried to resolve an unrelated todo comment: %s', |
|
344 | 344 | resolved_comment) |
|
345 | 345 | # comment not bound to this pull request, forbid |
|
346 | 346 | resolved_comment = None |
|
347 | 347 | |
|
348 | 348 | elif resolved_comment and resolved_comment.repo and \ |
|
349 | 349 | resolved_comment.repo != repo: |
|
350 | 350 | log.warning('Comment tried to resolve an unrelated todo comment: %s', |
|
351 | 351 | resolved_comment) |
|
352 | 352 | # comment not bound to this repo, forbid |
|
353 | 353 | resolved_comment = None |
|
354 | 354 | |
|
355 | 355 | if resolved_comment and resolved_comment.resolved_by: |
|
356 | 356 | # if this comment is already resolved, don't mark it again! |
|
357 | 357 | resolved_comment = None |
|
358 | 358 | |
|
359 | 359 | comment.resolved_comment = resolved_comment |
|
360 | 360 | |
|
361 | 361 | pull_request_id = pull_request |
|
362 | 362 | |
|
363 | 363 | commit_obj = None |
|
364 | 364 | pull_request_obj = None |
|
365 | 365 | |
|
366 | 366 | if commit_id: |
|
367 | 367 | notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT |
|
368 | 368 | # do a lookup, so we don't pass something bad here |
|
369 | 369 | commit_obj = repo.scm_instance().get_commit(commit_id=commit_id) |
|
370 | 370 | comment.revision = commit_obj.raw_id |
|
371 | 371 | |
|
372 | 372 | elif pull_request_id: |
|
373 | 373 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT |
|
374 | 374 | pull_request_obj = self.__get_pull_request(pull_request_id) |
|
375 | 375 | comment.pull_request = pull_request_obj |
|
376 | 376 | else: |
|
377 | 377 | raise Exception('Please specify commit or pull_request_id') |
|
378 | 378 | |
|
379 | 379 | Session().add(comment) |
|
380 | 380 | Session().flush() |
|
381 | 381 | kwargs = { |
|
382 | 382 | 'user': user, |
|
383 | 383 | 'renderer_type': renderer, |
|
384 | 384 | 'repo_name': repo.repo_name, |
|
385 | 385 | 'status_change': status_change, |
|
386 | 386 | 'status_change_type': status_change_type, |
|
387 | 387 | 'comment_body': text, |
|
388 | 388 | 'comment_file': f_path, |
|
389 | 389 | 'comment_line': line_no, |
|
390 | 390 | 'comment_type': comment_type or 'note', |
|
391 | 391 | 'comment_id': comment.comment_id |
|
392 | 392 | } |
|
393 | 393 | |
|
394 | 394 | if commit_obj: |
|
395 | 395 | recipients = ChangesetComment.get_users( |
|
396 | 396 | revision=commit_obj.raw_id) |
|
397 | 397 | # add commit author if it's in RhodeCode system |
|
398 | 398 | cs_author = User.get_from_cs_author(commit_obj.author) |
|
399 | 399 | if not cs_author: |
|
400 | 400 | # use repo owner if we cannot extract the author correctly |
|
401 | 401 | cs_author = repo.user |
|
402 | 402 | recipients += [cs_author] |
|
403 | 403 | |
|
404 | 404 | commit_comment_url = self.get_url(comment, request=request) |
|
405 | 405 | commit_comment_reply_url = self.get_url( |
|
406 | 406 | comment, request=request, |
|
407 | 407 | anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id)) |
|
408 | 408 | |
|
409 | 409 | target_repo_url = h.link_to( |
|
410 | 410 | repo.repo_name, |
|
411 | 411 | h.route_url('repo_summary', repo_name=repo.repo_name)) |
|
412 | 412 | |
|
413 | 413 | commit_url = h.route_url('repo_commit', repo_name=repo.repo_name, |
|
414 | 414 | commit_id=commit_id) |
|
415 | 415 | |
|
416 | 416 | # commit specifics |
|
417 | 417 | kwargs.update({ |
|
418 | 418 | 'commit': commit_obj, |
|
419 | 419 | 'commit_message': commit_obj.message, |
|
420 | 420 | 'commit_target_repo_url': target_repo_url, |
|
421 | 421 | 'commit_comment_url': commit_comment_url, |
|
422 | 422 | 'commit_comment_reply_url': commit_comment_reply_url, |
|
423 | 423 | 'commit_url': commit_url, |
|
424 | 424 | 'thread_ids': [commit_url, commit_comment_url], |
|
425 | 425 | }) |
|
426 | 426 | |
|
427 | 427 | elif pull_request_obj: |
|
428 | 428 | # get the current participants of this pull request |
|
429 | 429 | recipients = ChangesetComment.get_users( |
|
430 | 430 | pull_request_id=pull_request_obj.pull_request_id) |
|
431 | 431 | # add pull request author |
|
432 | 432 | recipients += [pull_request_obj.author] |
|
433 | 433 | |
|
434 | 434 | # add the reviewers to notification |
|
435 | 435 | recipients += [x.user for x in pull_request_obj.get_pull_request_reviewers()] |
|
436 | 436 | |
|
437 | 437 | pr_target_repo = pull_request_obj.target_repo |
|
438 | 438 | pr_source_repo = pull_request_obj.source_repo |
|
439 | 439 | |
|
440 | 440 | pr_comment_url = self.get_url(comment, request=request) |
|
441 | 441 | pr_comment_reply_url = self.get_url( |
|
442 | 442 | comment, request=request, |
|
443 | 443 | anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id)) |
|
444 | 444 | |
|
445 | 445 | pr_url = h.route_url( |
|
446 | 446 | 'pullrequest_show', |
|
447 | 447 | repo_name=pr_target_repo.repo_name, |
|
448 | 448 | pull_request_id=pull_request_obj.pull_request_id, ) |
|
449 | 449 | |
|
450 | 450 | # set some variables for email notification |
|
451 | 451 | pr_target_repo_url = h.route_url( |
|
452 | 452 | 'repo_summary', repo_name=pr_target_repo.repo_name) |
|
453 | 453 | |
|
454 | 454 | pr_source_repo_url = h.route_url( |
|
455 | 455 | 'repo_summary', repo_name=pr_source_repo.repo_name) |
|
456 | 456 | |
|
457 | 457 | # pull request specifics |
|
458 | 458 | kwargs.update({ |
|
459 | 459 | 'pull_request': pull_request_obj, |
|
460 | 460 | 'pr_id': pull_request_obj.pull_request_id, |
|
461 | 461 | 'pull_request_url': pr_url, |
|
462 | 462 | 'pull_request_target_repo': pr_target_repo, |
|
463 | 463 | 'pull_request_target_repo_url': pr_target_repo_url, |
|
464 | 464 | 'pull_request_source_repo': pr_source_repo, |
|
465 | 465 | 'pull_request_source_repo_url': pr_source_repo_url, |
|
466 | 466 | 'pr_comment_url': pr_comment_url, |
|
467 | 467 | 'pr_comment_reply_url': pr_comment_reply_url, |
|
468 | 468 | 'pr_closing': closing_pr, |
|
469 | 469 | 'thread_ids': [pr_url, pr_comment_url], |
|
470 | 470 | }) |
|
471 | 471 | |
|
472 | 472 | if send_email: |
|
473 | 473 | recipients += [self._get_user(u) for u in (extra_recipients or [])] |
|
474 | 474 | |
|
475 | 475 | mention_recipients = set( |
|
476 | 476 | self._extract_mentions(text)).difference(recipients) |
|
477 | 477 | |
|
478 | 478 | # create notification objects, and emails |
|
479 | 479 | NotificationModel().create( |
|
480 | 480 | created_by=user, |
|
481 | 481 | notification_subject='', # Filled in based on the notification_type |
|
482 | 482 | notification_body='', # Filled in based on the notification_type |
|
483 | 483 | notification_type=notification_type, |
|
484 | 484 | recipients=recipients, |
|
485 | 485 | mention_recipients=mention_recipients, |
|
486 | 486 | email_kwargs=kwargs, |
|
487 | 487 | ) |
|
488 | 488 | |
|
489 | 489 | Session().flush() |
|
490 | 490 | if comment.pull_request: |
|
491 | 491 | action = 'repo.pull_request.comment.create' |
|
492 | 492 | else: |
|
493 | 493 | action = 'repo.commit.comment.create' |
|
494 | 494 | |
|
495 | 495 | if not is_draft: |
|
496 | 496 | comment_data = comment.get_api_data() |
|
497 | 497 | |
|
498 | 498 | self._log_audit_action( |
|
499 | 499 | action, {'data': comment_data}, auth_user, comment) |
|
500 | 500 | |
|
501 | 501 | return comment |
|
502 | 502 | |
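An illustrative usage sketch of create() above (not part of the diff; the repository, username and commit id are hypothetical, and the call assumes a web request context is available — the CommentsModel import path is the same one used later in this diff):

    from rhodecode.model.comment import CommentsModel

    comment = CommentsModel().create(
        text='Looks good, but please add a test.',
        repo='some/repo',
        user='admin',
        commit_id='a' * 40,            # either commit_id or pull_request is required
        comment_type='note',
        send_email=False)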
|
503 | 503 | def edit(self, comment_id, text, auth_user, version): |
|
504 | 504 | """ |
|
505 | 505 | Change existing comment for commit or pull request. |
|
506 | 506 | |
|
507 | 507 | :param comment_id: |
|
508 | 508 | :param text: |
|
509 | 509 | :param auth_user: current authenticated user calling this method |
|
510 | 510 | :param version: last comment version |
|
511 | 511 | """ |
|
512 | 512 | if not text: |
|
513 | 513 | log.warning('Missing text for comment, skipping...') |
|
514 | 514 | return |
|
515 | 515 | |
|
516 | 516 | comment = ChangesetComment.get(comment_id) |
|
517 | 517 | old_comment_text = comment.text |
|
518 | 518 | comment.text = text |
|
519 | 519 | comment.modified_at = datetime.datetime.now() |
|
520 | 520 | version = safe_int(version) |
|
521 | 521 | |
|
522 | 522 | # NOTE(marcink): this returns initial comment + edits, so v2 from ui |
|
523 | 523 | # would return 3 here |
|
524 | 524 | comment_version = ChangesetCommentHistory.get_version(comment_id) |
|
525 | 525 | |
|
526 | 526 | if isinstance(version, int) and (comment_version - version) != 1: |
|
527 | 527 | log.warning( |
|
528 | 528 | 'Version mismatch comment_version {} submitted {}, skipping'.format( |
|
529 | 529 | comment_version-1, # -1 since note above |
|
530 | 530 | version |
|
531 | 531 | ) |
|
532 | 532 | ) |
|
533 | 533 | raise CommentVersionMismatch() |
|
534 | 534 | |
|
535 | 535 | comment_history = ChangesetCommentHistory() |
|
536 | 536 | comment_history.comment_id = comment_id |
|
537 | 537 | comment_history.version = comment_version |
|
538 | 538 | comment_history.created_by_user_id = auth_user.user_id |
|
539 | 539 | comment_history.text = old_comment_text |
|
540 | 540 | # TODO add email notification |
|
541 | 541 | Session().add(comment_history) |
|
542 | 542 | Session().add(comment) |
|
543 | 543 | Session().flush() |
|
544 | 544 | |
|
545 | 545 | if comment.pull_request: |
|
546 | 546 | action = 'repo.pull_request.comment.edit' |
|
547 | 547 | else: |
|
548 | 548 | action = 'repo.commit.comment.edit' |
|
549 | 549 | |
|
550 | 550 | comment_data = comment.get_api_data() |
|
551 | 551 | comment_data['old_comment_text'] = old_comment_text |
|
552 | 552 | self._log_audit_action( |
|
553 | 553 | action, {'data': comment_data}, auth_user, comment) |
|
554 | 554 | |
|
555 | 555 | return comment_history |
|
556 | 556 | |
|
557 | 557 | def delete(self, comment, auth_user): |
|
558 | 558 | """ |
|
559 | 559 | Deletes given comment |
|
560 | 560 | """ |
|
561 | 561 | comment = self.__get_commit_comment(comment) |
|
562 | 562 | old_data = comment.get_api_data() |
|
563 | 563 | Session().delete(comment) |
|
564 | 564 | |
|
565 | 565 | if comment.pull_request: |
|
566 | 566 | action = 'repo.pull_request.comment.delete' |
|
567 | 567 | else: |
|
568 | 568 | action = 'repo.commit.comment.delete' |
|
569 | 569 | |
|
570 | 570 | self._log_audit_action( |
|
571 | 571 | action, {'old_data': old_data}, auth_user, comment) |
|
572 | 572 | |
|
573 | 573 | return comment |
|
574 | 574 | |
|
575 | 575 | def get_all_comments(self, repo_id, revision=None, pull_request=None, |
|
576 | 576 | include_drafts=True, count_only=False): |
|
577 | 577 | q = ChangesetComment.query()\ |
|
578 | 578 | .filter(ChangesetComment.repo_id == repo_id) |
|
579 | 579 | if revision: |
|
580 | 580 | q = q.filter(ChangesetComment.revision == revision) |
|
581 | 581 | elif pull_request: |
|
582 | 582 | pull_request = self.__get_pull_request(pull_request) |
|
583 | 583 | q = q.filter(ChangesetComment.pull_request_id == pull_request.pull_request_id) |
|
584 | 584 | else: |
|
585 | 585 | raise Exception('Please specify commit or pull_request') |
|
586 | 586 | if not include_drafts: |
|
587 | 587 | q = q.filter(ChangesetComment.draft == false()) |
|
588 | 588 | q = q.order_by(ChangesetComment.created_on) |
|
589 | 589 | if count_only: |
|
590 | 590 | return q.count() |
|
591 | 591 | |
|
592 | 592 | return q.all() |
|
593 | 593 | |
|
594 | 594 | def get_url(self, comment, request=None, permalink=False, anchor=None): |
|
595 | 595 | if not request: |
|
596 | 596 | request = get_current_request() |
|
597 | 597 | |
|
598 | 598 | comment = self.__get_commit_comment(comment) |
|
599 | 599 | if anchor is None: |
|
600 | 600 | anchor = 'comment-{}'.format(comment.comment_id) |
|
601 | 601 | |
|
602 | 602 | if comment.pull_request: |
|
603 | 603 | pull_request = comment.pull_request |
|
604 | 604 | if permalink: |
|
605 | 605 | return request.route_url( |
|
606 | 606 | 'pull_requests_global', |
|
607 | 607 | pull_request_id=pull_request.pull_request_id, |
|
608 | 608 | _anchor=anchor) |
|
609 | 609 | else: |
|
610 | 610 | return request.route_url( |
|
611 | 611 | 'pullrequest_show', |
|
612 | 612 | repo_name=safe_str(pull_request.target_repo.repo_name), |
|
613 | 613 | pull_request_id=pull_request.pull_request_id, |
|
614 | 614 | _anchor=anchor) |
|
615 | 615 | |
|
616 | 616 | else: |
|
617 | 617 | repo = comment.repo |
|
618 | 618 | commit_id = comment.revision |
|
619 | 619 | |
|
620 | 620 | if permalink: |
|
621 | 621 | return request.route_url( |
|
622 | 622 | 'repo_commit', repo_name=safe_str(repo.repo_id), |
|
623 | 623 | commit_id=commit_id, |
|
624 | 624 | _anchor=anchor) |
|
625 | 625 | |
|
626 | 626 | else: |
|
627 | 627 | return request.route_url( |
|
628 | 628 | 'repo_commit', repo_name=safe_str(repo.repo_name), |
|
629 | 629 | commit_id=commit_id, |
|
630 | 630 | _anchor=anchor) |
|
631 | 631 | |
|
632 | 632 | def get_comments(self, repo_id, revision=None, pull_request=None): |
|
633 | 633 | """ |
|
634 | 634 | Gets main comments based on revision or pull_request_id |
|
635 | 635 | |
|
636 | 636 | :param repo_id: |
|
637 | 637 | :param revision: |
|
638 | 638 | :param pull_request: |
|
639 | 639 | """ |
|
640 | 640 | |
|
641 | 641 | q = ChangesetComment.query()\ |
|
642 | 642 | .filter(ChangesetComment.repo_id == repo_id)\ |
|
643 | 643 | .filter(ChangesetComment.line_no == None)\ |
|
644 | 644 | .filter(ChangesetComment.f_path == None) |
|
645 | 645 | if revision: |
|
646 | 646 | q = q.filter(ChangesetComment.revision == revision) |
|
647 | 647 | elif pull_request: |
|
648 | 648 | pull_request = self.__get_pull_request(pull_request) |
|
649 | 649 | q = q.filter(ChangesetComment.pull_request == pull_request) |
|
650 | 650 | else: |
|
651 | 651 | raise Exception('Please specify commit or pull_request') |
|
652 | 652 | q = q.order_by(ChangesetComment.created_on) |
|
653 | 653 | return q.all() |
|
654 | 654 | |
|
655 | 655 | def get_inline_comments(self, repo_id, revision=None, pull_request=None): |
|
656 | 656 | q = self._get_inline_comments_query(repo_id, revision, pull_request) |
|
657 | 657 | return self._group_comments_by_path_and_line_number(q) |
|
658 | 658 | |
|
659 | 659 | def get_inline_comments_as_list(self, inline_comments, skip_outdated=True, |
|
660 | 660 | version=None): |
|
661 | 661 | inline_comms = [] |
|
662 | 662 | for fname, per_line_comments in inline_comments.items(): |
|
663 | 663 | for lno, comments in per_line_comments.items(): |
|
664 | 664 | for comm in comments: |
|
665 | 665 | if not comm.outdated_at_version(version) and skip_outdated: |
|
666 | 666 | inline_comms.append(comm) |
|
667 | 667 | |
|
668 | 668 | return inline_comms |
|
669 | 669 | |
|
670 | 670 | def get_outdated_comments(self, repo_id, pull_request): |
|
671 | 671 | # TODO: johbo: Remove `repo_id`, it is not needed to find the comments |
|
672 | 672 | # of a pull request. |
|
673 | 673 | q = self._all_inline_comments_of_pull_request(pull_request) |
|
674 | 674 | q = q.filter( |
|
675 | 675 | ChangesetComment.display_state == |
|
676 | 676 | ChangesetComment.COMMENT_OUTDATED |
|
677 | 677 | ).order_by(ChangesetComment.comment_id.asc()) |
|
678 | 678 | |
|
679 | 679 | return self._group_comments_by_path_and_line_number(q) |
|
680 | 680 | |
|
681 | 681 | def _get_inline_comments_query(self, repo_id, revision, pull_request): |
|
682 | 682 | # TODO: johbo: Split this into two methods: One for PR and one for |
|
683 | 683 | # commit. |
|
684 | 684 | if revision: |
|
685 | 685 | q = Session().query(ChangesetComment).filter( |
|
686 | 686 | ChangesetComment.repo_id == repo_id, |
|
687 | 687 | ChangesetComment.line_no != null(), |
|
688 | 688 | ChangesetComment.f_path != null(), |
|
689 | 689 | ChangesetComment.revision == revision) |
|
690 | 690 | |
|
691 | 691 | elif pull_request: |
|
692 | 692 | pull_request = self.__get_pull_request(pull_request) |
|
693 | 693 | if not CommentsModel.use_outdated_comments(pull_request): |
|
694 | 694 | q = self._visible_inline_comments_of_pull_request(pull_request) |
|
695 | 695 | else: |
|
696 | 696 | q = self._all_inline_comments_of_pull_request(pull_request) |
|
697 | 697 | |
|
698 | 698 | else: |
|
699 | 699 | raise Exception('Please specify commit or pull_request_id') |
|
700 | 700 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
701 | 701 | return q |
|
702 | 702 | |
|
703 | 703 | def _group_comments_by_path_and_line_number(self, q): |
|
704 | 704 | comments = q.all() |
|
705 | 705 | paths = collections.defaultdict(lambda: collections.defaultdict(list)) |
|
706 | 706 | for co in comments: |
|
707 | 707 | paths[co.f_path][co.line_no].append(co) |
|
708 | 708 | return paths |
|
709 | 709 | |
|
710 | 710 | @classmethod |
|
711 | 711 | def needed_extra_diff_context(cls): |
|
712 | 712 | return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER) |
|
713 | 713 | |
|
714 | 714 | def outdate_comments(self, pull_request, old_diff_data, new_diff_data): |
|
715 | 715 | if not CommentsModel.use_outdated_comments(pull_request): |
|
716 | 716 | return |
|
717 | 717 | |
|
718 | 718 | comments = self._visible_inline_comments_of_pull_request(pull_request) |
|
719 | 719 | comments_to_outdate = comments.all() |
|
720 | 720 | |
|
721 | 721 | for comment in comments_to_outdate: |
|
722 | 722 | self._outdate_one_comment(comment, old_diff_data, new_diff_data) |
|
723 | 723 | |
|
724 | 724 | def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc): |
|
725 | 725 | diff_line = _parse_comment_line_number(comment.line_no) |
|
726 | 726 | |
|
727 | 727 | try: |
|
728 | 728 | old_context = old_diff_proc.get_context_of_line( |
|
729 | 729 | path=comment.f_path, diff_line=diff_line) |
|
730 | 730 | new_context = new_diff_proc.get_context_of_line( |
|
731 | 731 | path=comment.f_path, diff_line=diff_line) |
|
732 | 732 | except (diffs.LineNotInDiffException, |
|
733 | 733 | diffs.FileNotInDiffException): |
|
734 | 734 | if not comment.draft: |
|
735 | 735 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
736 | 736 | return |
|
737 | 737 | |
|
738 | 738 | if old_context == new_context: |
|
739 | 739 | return |
|
740 | 740 | |
|
741 | 741 | if self._should_relocate_diff_line(diff_line): |
|
742 | 742 | new_diff_lines = new_diff_proc.find_context( |
|
743 | 743 | path=comment.f_path, context=old_context, |
|
744 | 744 | offset=self.DIFF_CONTEXT_BEFORE) |
|
745 | 745 | if not new_diff_lines and not comment.draft: |
|
746 | 746 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
747 | 747 | else: |
|
748 | 748 | new_diff_line = self._choose_closest_diff_line( |
|
749 | 749 | diff_line, new_diff_lines) |
|
750 | 750 | comment.line_no = _diff_to_comment_line_number(new_diff_line) |
|
751 | 751 | else: |
|
752 | 752 | if not comment.draft: |
|
753 | 753 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
754 | 754 | |
|
755 | 755 | def _should_relocate_diff_line(self, diff_line): |
|
756 | 756 | """ |
|
757 | 757 | Checks if relocation shall be tried for the given `diff_line`. |
|
758 | 758 | |
|
759 | 759 | If a comment points into the first lines, then we can have a situation |
|
760 | 760 | that after an update another line has been added on top. In this case |
|
761 | 761 | we would still find the context and move the comment around. This
|
762 | 762 | would be wrong. |
|
763 | 763 | """ |
|
764 | 764 | should_relocate = ( |
|
765 | 765 | (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or |
|
766 | 766 | (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE)) |
|
767 | 767 | return should_relocate |
|
768 | 768 | |
|
769 | 769 | def _choose_closest_diff_line(self, diff_line, new_diff_lines): |
|
770 | 770 | candidate = new_diff_lines[0] |
|
771 | 771 | best_delta = _diff_line_delta(diff_line, candidate) |
|
772 | 772 | for new_diff_line in new_diff_lines[1:]: |
|
773 | 773 | delta = _diff_line_delta(diff_line, new_diff_line) |
|
774 | 774 | if delta < best_delta: |
|
775 | 775 | candidate = new_diff_line |
|
776 | 776 | best_delta = delta |
|
777 | 777 | return candidate |
|
778 | 778 | |
|
779 | 779 | def _visible_inline_comments_of_pull_request(self, pull_request): |
|
780 | 780 | comments = self._all_inline_comments_of_pull_request(pull_request) |
|
781 | 781 | comments = comments.filter( |
|
782 | 782 | coalesce(ChangesetComment.display_state, '') != |
|
783 | 783 | ChangesetComment.COMMENT_OUTDATED) |
|
784 | 784 | return comments |
|
785 | 785 | |
|
786 | 786 | def _all_inline_comments_of_pull_request(self, pull_request): |
|
787 | 787 | comments = Session().query(ChangesetComment)\ |
|
788 | 788 | .filter(ChangesetComment.line_no != None)\ |
|
789 | 789 | .filter(ChangesetComment.f_path != None)\ |
|
790 | 790 | .filter(ChangesetComment.pull_request == pull_request) |
|
791 | 791 | return comments |
|
792 | 792 | |
|
793 | 793 | def _all_general_comments_of_pull_request(self, pull_request): |
|
794 | 794 | comments = Session().query(ChangesetComment)\ |
|
795 | 795 | .filter(ChangesetComment.line_no == None)\ |
|
796 | 796 | .filter(ChangesetComment.f_path == None)\ |
|
797 | 797 | .filter(ChangesetComment.pull_request == pull_request) |
|
798 | 798 | |
|
799 | 799 | return comments |
|
800 | 800 | |
|
801 | 801 | @staticmethod |
|
802 | 802 | def use_outdated_comments(pull_request): |
|
803 | 803 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
804 | 804 | settings = settings_model.get_general_settings() |
|
805 | 805 | return settings.get('rhodecode_use_outdated_comments', False) |
|
806 | 806 | |
|
807 | 807 | def trigger_commit_comment_hook(self, repo, user, action, data=None): |
|
808 | 808 | repo = self._get_repo(repo) |
|
809 | 809 | target_scm = repo.scm_instance() |
|
810 | 810 | if action == 'create': |
|
811 | 811 | trigger_hook = hooks_utils.trigger_comment_commit_hooks |
|
812 | 812 | elif action == 'edit': |
|
813 | 813 | trigger_hook = hooks_utils.trigger_comment_commit_edit_hooks |
|
814 | 814 | else: |
|
815 | 815 | return |
|
816 | 816 | |
|
817 | 817 | log.debug('Handling repo %s trigger_commit_comment_hook with action %s: %s', |
|
818 | 818 | repo, action, trigger_hook) |
|
819 | 819 | trigger_hook( |
|
820 | 820 | username=user.username, |
|
821 | 821 | repo_name=repo.repo_name, |
|
822 | 822 | repo_type=target_scm.alias, |
|
823 | 823 | repo=repo, |
|
824 | 824 | data=data) |
|
825 | 825 | |
|
826 | 826 | |
|
827 | 827 | def _parse_comment_line_number(line_no): |
|
828 | 828 | """ |
|
829 | 829 | Parses line numbers of the form "(o|n)\d+" and returns them in a tuple. |
|
830 | 830 | """ |
|
831 | 831 | old_line = None |
|
832 | 832 | new_line = None |
|
833 | 833 | if line_no.startswith('o'): |
|
834 | 834 | old_line = int(line_no[1:]) |
|
835 | 835 | elif line_no.startswith('n'): |
|
836 | 836 | new_line = int(line_no[1:]) |
|
837 | 837 | else: |
|
838 | 838 | raise ValueError("Comment lines have to start with either 'o' or 'n'.") |
|
839 | 839 | return diffs.DiffLineNumber(old_line, new_line) |
|
840 | 840 | |
|
841 | 841 | |
|
842 | 842 | def _diff_to_comment_line_number(diff_line): |
|
843 | 843 | if diff_line.new is not None: |
|
844 | 844 | return u'n{}'.format(diff_line.new) |
|
845 | 845 | elif diff_line.old is not None: |
|
846 | 846 | return u'o{}'.format(diff_line.old) |
|
847 | 847 | return u'' |
|
848 | 848 | |
|
849 | 849 | |
|
850 | 850 | def _diff_line_delta(a, b): |
|
851 | 851 | if None not in (a.new, b.new): |
|
852 | 852 | return abs(a.new - b.new) |
|
853 | 853 | elif None not in (a.old, b.old): |
|
854 | 854 | return abs(a.old - b.old) |
|
855 | 855 | else: |
|
856 | 856 | raise ValueError( |
|
857 | 857 | "Cannot compute delta between {} and {}".format(a, b)) |
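
The module-level helpers above translate between the line-number strings stored on comments ('o12' marks line 12 on the old side of the diff, 'n7' line 7 on the new side) and old/new pairs, and measure the distance between two such pairs, which is what _choose_closest_diff_line minimises. A minimal standalone sketch of that round trip, using a namedtuple as a stand-in for diffs.DiffLineNumber (an assumption; the real class lives in rhodecode.lib.diffs):

from collections import namedtuple

# Stand-in for rhodecode.lib.diffs.DiffLineNumber, assumed to be a simple old/new pair.
DiffLineNumber = namedtuple('DiffLineNumber', ['old', 'new'])

def parse_comment_line_number(line_no):
    # 'o<N>' points at the old side of the diff, 'n<N>' at the new side.
    if line_no.startswith('o'):
        return DiffLineNumber(old=int(line_no[1:]), new=None)
    if line_no.startswith('n'):
        return DiffLineNumber(old=None, new=int(line_no[1:]))
    raise ValueError("Comment lines have to start with either 'o' or 'n'.")

def diff_line_delta(a, b):
    # Compare on whichever side both positions define, as in _diff_line_delta above.
    if None not in (a.new, b.new):
        return abs(a.new - b.new)
    if None not in (a.old, b.old):
        return abs(a.old - b.old)
    raise ValueError("Cannot compute delta between {} and {}".format(a, b))

comment_pos = parse_comment_line_number('n12')   # DiffLineNumber(old=None, new=12)
candidates = [DiffLineNumber(None, 10), DiffLineNumber(None, 13)]
closest = min(candidates, key=lambda c: diff_line_delta(comment_pos, c))
print(closest)                                   # DiffLineNumber(old=None, new=13)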
|
@@ -1,641 +1,640 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
|
5 | 4 | # This program is free software: you can redistribute it and/or modify |
|
6 | 5 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 6 | # (only), as published by the Free Software Foundation. |
|
8 | 7 | # |
|
9 | 8 | # This program is distributed in the hope that it will be useful, |
|
10 | 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 11 | # GNU General Public License for more details. |
|
13 | 12 | # |
|
14 | 13 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 15 | # |
|
17 | 16 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 17 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 19 | |
|
21 | 20 | """ |
|
22 | 21 | these are the form validation classes
|
23 | 22 | http://formencode.org/module-formencode.validators.html |
|
24 | 23 | for a list of all available validators
|
25 | 24 | |
|
26 | 25 | we can create our own validators |
|
27 | 26 | |
|
28 | 27 | The table below outlines the options which can be used in a schema in addition to the validators themselves |
|
29 | 28 | pre_validators [] These validators will be applied before the schema |
|
30 | 29 | chained_validators [] These validators will be applied after the schema |
|
31 | 30 | allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present |
|
32 | 31 | filter_extra_fields False If True, then keys that aren't associated with a validator are removed |
|
33 | 32 | if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default value has been specified and therefore missing keys shouldn't take a default value.
|
34 | 33 | ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already |
|
35 | 34 | |
|
36 | 35 | |
|
37 | 36 | <name> = formencode.validators.<name of validator> |
|
38 | 37 | <name> must equal form name |
|
39 | 38 | list=[1,2,3,4,5] |
|
40 | 39 | for SELECT use formencode.All(OneOf(list), Int()) |
|
41 | 40 | |
|
42 | 41 | """ |
|
43 | 42 | |
|
44 | 43 | import deform |
|
45 | 44 | import logging |
|
46 | 45 | import formencode |
|
47 | 46 | |
|
48 | 47 | from pkg_resources import resource_filename |
|
49 | 48 | from formencode import All, Pipe |
|
50 | 49 | |
|
51 | 50 | from pyramid.threadlocal import get_current_request |
|
52 | 51 | |
|
53 | 52 | from rhodecode import BACKENDS |
|
54 | 53 | from rhodecode.lib import helpers |
|
55 | 54 | from rhodecode.model import validators as v |
|
56 | 55 | |
|
57 | 56 | log = logging.getLogger(__name__) |
|
58 | 57 | |
|
59 | 58 | |
|
60 | 59 | deform_templates = resource_filename('deform', 'templates') |
|
61 | 60 | rhodecode_templates = resource_filename('rhodecode', 'templates/forms') |
|
62 | 61 | search_path = (rhodecode_templates, deform_templates) |
|
63 | 62 | |
|
64 | 63 | |
|
65 | 64 | class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory): |
|
66 | 65 | """ Subclass of ZPTRendererFactory to add rhodecode context variables """ |
|
67 | 66 | def __call__(self, template_name, **kw): |
|
68 | 67 | kw['h'] = helpers |
|
69 | 68 | kw['request'] = get_current_request() |
|
70 | 69 | return self.load(template_name)(**kw) |
|
71 | 70 | |
|
72 | 71 | |
|
73 | 72 | form_renderer = RhodecodeFormZPTRendererFactory(search_path) |
|
74 | 73 | deform.Form.set_default_renderer(form_renderer) |
|
75 | 74 | |
|
76 | 75 | |
|
77 | 76 | def LoginForm(localizer): |
|
78 | 77 | _ = localizer |
|
79 | 78 | |
|
80 | 79 | class _LoginForm(formencode.Schema): |
|
81 | 80 | allow_extra_fields = True |
|
82 | 81 | filter_extra_fields = True |
|
83 | 82 | username = v.UnicodeString( |
|
84 | 83 | strip=True, |
|
85 | 84 | min=1, |
|
86 | 85 | not_empty=True, |
|
87 | 86 | messages={ |
|
88 | 87 | 'empty': _(u'Please enter a login'), |
|
89 | 88 | 'tooShort': _(u'Enter a value %(min)i characters long or more') |
|
90 | 89 | } |
|
91 | 90 | ) |
|
92 | 91 | |
|
93 | 92 | password = v.UnicodeString( |
|
94 | 93 | strip=False, |
|
95 | 94 | min=3, |
|
96 | 95 | max=72, |
|
97 | 96 | not_empty=True, |
|
98 | 97 | messages={ |
|
99 | 98 | 'empty': _(u'Please enter a password'), |
|
100 | 99 | 'tooShort': _(u'Enter %(min)i characters or more')} |
|
101 | 100 | ) |
|
102 | 101 | |
|
103 | 102 | remember = v.StringBoolean(if_missing=False) |
|
104 | 103 | |
|
105 | 104 | chained_validators = [v.ValidAuth(localizer)] |
|
106 | 105 | return _LoginForm |
|
107 | 106 | |
|
108 | 107 | |
|
109 | 108 | def UserForm(localizer, edit=False, available_languages=None, old_data=None): |
|
110 | 109 | old_data = old_data or {} |
|
111 | 110 | available_languages = available_languages or [] |
|
112 | 111 | _ = localizer |
|
113 | 112 | |
|
114 | 113 | class _UserForm(formencode.Schema): |
|
115 | 114 | allow_extra_fields = True |
|
116 | 115 | filter_extra_fields = True |
|
117 | 116 | username = All(v.UnicodeString(strip=True, min=1, not_empty=True), |
|
118 | 117 | v.ValidUsername(localizer, edit, old_data)) |
|
119 | 118 | if edit: |
|
120 | 119 | new_password = All( |
|
121 | 120 | v.ValidPassword(localizer), |
|
122 | 121 | v.UnicodeString(strip=False, min=6, max=72, not_empty=False) |
|
123 | 122 | ) |
|
124 | 123 | password_confirmation = All( |
|
125 | 124 | v.ValidPassword(localizer), |
|
126 | 125 | v.UnicodeString(strip=False, min=6, max=72, not_empty=False), |
|
127 | 126 | ) |
|
128 | 127 | admin = v.StringBoolean(if_missing=False) |
|
129 | 128 | else: |
|
130 | 129 | password = All( |
|
131 | 130 | v.ValidPassword(localizer), |
|
132 | 131 | v.UnicodeString(strip=False, min=6, max=72, not_empty=True) |
|
133 | 132 | ) |
|
134 | 133 | password_confirmation = All( |
|
135 | 134 | v.ValidPassword(localizer), |
|
136 | 135 | v.UnicodeString(strip=False, min=6, max=72, not_empty=False) |
|
137 | 136 | ) |
|
138 | 137 | |
|
139 | 138 | password_change = v.StringBoolean(if_missing=False) |
|
140 | 139 | create_repo_group = v.StringBoolean(if_missing=False) |
|
141 | 140 | |
|
142 | 141 | active = v.StringBoolean(if_missing=False) |
|
143 | 142 | firstname = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
144 | 143 | lastname = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
145 | 144 | email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True)) |
|
146 | 145 | description = v.UnicodeString(strip=True, min=1, max=250, not_empty=False, |
|
147 | 146 | if_missing='') |
|
148 | 147 | extern_name = v.UnicodeString(strip=True) |
|
149 | 148 | extern_type = v.UnicodeString(strip=True) |
|
150 | 149 | language = v.OneOf(available_languages, hideList=False, |
|
151 | 150 | testValueList=True, if_missing=None) |
|
152 | 151 | chained_validators = [v.ValidPasswordsMatch(localizer)] |
|
153 | 152 | return _UserForm |
|
154 | 153 | |
|
155 | 154 | |
|
156 | 155 | def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False): |
|
157 | 156 | old_data = old_data or {} |
|
158 | 157 | _ = localizer |
|
159 | 158 | |
|
160 | 159 | class _UserGroupForm(formencode.Schema): |
|
161 | 160 | allow_extra_fields = True |
|
162 | 161 | filter_extra_fields = True |
|
163 | 162 | |
|
164 | 163 | users_group_name = All( |
|
165 | 164 | v.UnicodeString(strip=True, min=1, not_empty=True), |
|
166 | 165 | v.ValidUserGroup(localizer, edit, old_data) |
|
167 | 166 | ) |
|
168 | 167 | user_group_description = v.UnicodeString(strip=True, min=1, |
|
169 | 168 | not_empty=False) |
|
170 | 169 | |
|
171 | 170 | users_group_active = v.StringBoolean(if_missing=False) |
|
172 | 171 | |
|
173 | 172 | if edit: |
|
174 | 173 | # this is user group owner |
|
175 | 174 | user = All( |
|
176 | 175 | v.UnicodeString(not_empty=True), |
|
177 | 176 | v.ValidRepoUser(localizer, allow_disabled)) |
|
178 | 177 | return _UserGroupForm |
|
179 | 178 | |
|
180 | 179 | |
|
181 | 180 | def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None, |
|
182 | 181 | can_create_in_root=False, allow_disabled=False): |
|
183 | 182 | _ = localizer |
|
184 | 183 | old_data = old_data or {} |
|
185 | 184 | available_groups = available_groups or [] |
|
186 | 185 | |
|
187 | 186 | class _RepoGroupForm(formencode.Schema): |
|
188 | 187 | allow_extra_fields = True |
|
189 | 188 | filter_extra_fields = False |
|
190 | 189 | |
|
191 | 190 | group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True), |
|
192 | 191 | v.SlugifyName(localizer),) |
|
193 | 192 | group_description = v.UnicodeString(strip=True, min=1, |
|
194 | 193 | not_empty=False) |
|
195 | 194 | group_copy_permissions = v.StringBoolean(if_missing=False) |
|
196 | 195 | |
|
197 | 196 | group_parent_id = v.OneOf(available_groups, hideList=False, |
|
198 | 197 | testValueList=True, not_empty=True) |
|
199 | 198 | enable_locking = v.StringBoolean(if_missing=False) |
|
200 | 199 | chained_validators = [ |
|
201 | 200 | v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)] |
|
202 | 201 | |
|
203 | 202 | if edit: |
|
204 | 203 | # this is repo group owner |
|
205 | 204 | user = All( |
|
206 | 205 | v.UnicodeString(not_empty=True), |
|
207 | 206 | v.ValidRepoUser(localizer, allow_disabled)) |
|
208 | 207 | return _RepoGroupForm |
|
209 | 208 | |
|
210 | 209 | |
|
211 | 210 | def RegisterForm(localizer, edit=False, old_data=None): |
|
212 | 211 | _ = localizer |
|
213 | 212 | old_data = old_data or {} |
|
214 | 213 | |
|
215 | 214 | class _RegisterForm(formencode.Schema): |
|
216 | 215 | allow_extra_fields = True |
|
217 | 216 | filter_extra_fields = True |
|
218 | 217 | username = All( |
|
219 | 218 | v.ValidUsername(localizer, edit, old_data), |
|
220 | 219 | v.UnicodeString(strip=True, min=1, not_empty=True) |
|
221 | 220 | ) |
|
222 | 221 | password = All( |
|
223 | 222 | v.ValidPassword(localizer), |
|
224 | 223 | v.UnicodeString(strip=False, min=6, max=72, not_empty=True) |
|
225 | 224 | ) |
|
226 | 225 | password_confirmation = All( |
|
227 | 226 | v.ValidPassword(localizer), |
|
228 | 227 | v.UnicodeString(strip=False, min=6, max=72, not_empty=True) |
|
229 | 228 | ) |
|
230 | 229 | active = v.StringBoolean(if_missing=False) |
|
231 | 230 | firstname = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
232 | 231 | lastname = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
233 | 232 | email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True)) |
|
234 | 233 | |
|
235 | 234 | chained_validators = [v.ValidPasswordsMatch(localizer)] |
|
236 | 235 | return _RegisterForm |
|
237 | 236 | |
|
238 | 237 | |
|
239 | 238 | def PasswordResetForm(localizer): |
|
240 | 239 | _ = localizer |
|
241 | 240 | |
|
242 | 241 | class _PasswordResetForm(formencode.Schema): |
|
243 | 242 | allow_extra_fields = True |
|
244 | 243 | filter_extra_fields = True |
|
245 | 244 | email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True)) |
|
246 | 245 | return _PasswordResetForm |
|
247 | 246 | |
|
248 | 247 | |
|
249 | 248 | def RepoForm(localizer, edit=False, old_data=None, repo_groups=None, allow_disabled=False): |
|
250 | 249 | _ = localizer |
|
251 | 250 | old_data = old_data or {} |
|
252 | 251 | repo_groups = repo_groups or [] |
|
253 | 252 | supported_backends = BACKENDS.keys() |
|
254 | 253 | |
|
255 | 254 | class _RepoForm(formencode.Schema): |
|
256 | 255 | allow_extra_fields = True |
|
257 | 256 | filter_extra_fields = False |
|
258 | 257 | repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True), |
|
259 | 258 | v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer)) |
|
260 | 259 | repo_group = All(v.CanWriteGroup(localizer, old_data), |
|
261 | 260 | v.OneOf(repo_groups, hideList=True)) |
|
262 | 261 | repo_type = v.OneOf(supported_backends, required=False, |
|
263 | 262 | if_missing=old_data.get('repo_type')) |
|
264 | 263 | repo_description = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
265 | 264 | repo_private = v.StringBoolean(if_missing=False) |
|
266 | 265 | repo_copy_permissions = v.StringBoolean(if_missing=False) |
|
267 | 266 | clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False)) |
|
268 | 267 | |
|
269 | 268 | repo_enable_statistics = v.StringBoolean(if_missing=False) |
|
270 | 269 | repo_enable_downloads = v.StringBoolean(if_missing=False) |
|
271 | 270 | repo_enable_locking = v.StringBoolean(if_missing=False) |
|
272 | 271 | |
|
273 | 272 | if edit: |
|
274 | 273 | # this is repo owner |
|
275 | 274 | user = All( |
|
276 | 275 | v.UnicodeString(not_empty=True), |
|
277 | 276 | v.ValidRepoUser(localizer, allow_disabled)) |
|
278 | 277 | clone_uri_change = v.UnicodeString( |
|
279 | 278 | not_empty=False, if_missing=v.Missing) |
|
280 | 279 | |
|
281 | 280 | chained_validators = [v.ValidCloneUri(localizer), |
|
282 | 281 | v.ValidRepoName(localizer, edit, old_data)] |
|
283 | 282 | return _RepoForm |
|
284 | 283 | |
|
285 | 284 | |
|
286 | 285 | def RepoPermsForm(localizer): |
|
287 | 286 | _ = localizer |
|
288 | 287 | |
|
289 | 288 | class _RepoPermsForm(formencode.Schema): |
|
290 | 289 | allow_extra_fields = True |
|
291 | 290 | filter_extra_fields = False |
|
292 | 291 | chained_validators = [v.ValidPerms(localizer, type_='repo')] |
|
293 | 292 | return _RepoPermsForm |
|
294 | 293 | |
|
295 | 294 | |
|
296 | 295 | def RepoGroupPermsForm(localizer, valid_recursive_choices): |
|
297 | 296 | _ = localizer |
|
298 | 297 | |
|
299 | 298 | class _RepoGroupPermsForm(formencode.Schema): |
|
300 | 299 | allow_extra_fields = True |
|
301 | 300 | filter_extra_fields = False |
|
302 | 301 | recursive = v.OneOf(valid_recursive_choices) |
|
303 | 302 | chained_validators = [v.ValidPerms(localizer, type_='repo_group')] |
|
304 | 303 | return _RepoGroupPermsForm |
|
305 | 304 | |
|
306 | 305 | |
|
307 | 306 | def UserGroupPermsForm(localizer): |
|
308 | 307 | _ = localizer |
|
309 | 308 | |
|
310 | 309 | class _UserPermsForm(formencode.Schema): |
|
311 | 310 | allow_extra_fields = True |
|
312 | 311 | filter_extra_fields = False |
|
313 | 312 | chained_validators = [v.ValidPerms(localizer, type_='user_group')] |
|
314 | 313 | return _UserPermsForm |
|
315 | 314 | |
|
316 | 315 | |
|
317 | 316 | def RepoFieldForm(localizer): |
|
318 | 317 | _ = localizer |
|
319 | 318 | |
|
320 | 319 | class _RepoFieldForm(formencode.Schema): |
|
321 | 320 | filter_extra_fields = True |
|
322 | 321 | allow_extra_fields = True |
|
323 | 322 | |
|
324 | 323 | new_field_key = All(v.FieldKey(localizer), |
|
325 | 324 | v.UnicodeString(strip=True, min=3, not_empty=True)) |
|
326 | 325 | new_field_value = v.UnicodeString(not_empty=False, if_missing=u'') |
|
327 | 326 | new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'], |
|
328 | 327 | if_missing='str') |
|
329 | 328 | new_field_label = v.UnicodeString(not_empty=False) |
|
330 | 329 | new_field_desc = v.UnicodeString(not_empty=False) |
|
331 | 330 | return _RepoFieldForm |
|
332 | 331 | |
|
333 | 332 | |
|
334 | 333 | def RepoForkForm(localizer, edit=False, old_data=None, |
|
335 | 334 | supported_backends=BACKENDS.keys(), repo_groups=None): |
|
336 | 335 | _ = localizer |
|
337 | 336 | old_data = old_data or {} |
|
338 | 337 | repo_groups = repo_groups or [] |
|
339 | 338 | |
|
340 | 339 | class _RepoForkForm(formencode.Schema): |
|
341 | 340 | allow_extra_fields = True |
|
342 | 341 | filter_extra_fields = False |
|
343 | 342 | repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True), |
|
344 | 343 | v.SlugifyName(localizer)) |
|
345 | 344 | repo_group = All(v.CanWriteGroup(localizer, ), |
|
346 | 345 | v.OneOf(repo_groups, hideList=True)) |
|
347 | 346 | repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends)) |
|
348 | 347 | description = v.UnicodeString(strip=True, min=1, not_empty=True) |
|
349 | 348 | private = v.StringBoolean(if_missing=False) |
|
350 | 349 | copy_permissions = v.StringBoolean(if_missing=False) |
|
351 | 350 | fork_parent_id = v.UnicodeString() |
|
352 | 351 | chained_validators = [v.ValidForkName(localizer, edit, old_data)] |
|
353 | 352 | return _RepoForkForm |
|
354 | 353 | |
|
355 | 354 | |
|
356 | 355 | def ApplicationSettingsForm(localizer): |
|
357 | 356 | _ = localizer |
|
358 | 357 | |
|
359 | 358 | class _ApplicationSettingsForm(formencode.Schema): |
|
360 | 359 | allow_extra_fields = True |
|
361 | 360 | filter_extra_fields = False |
|
362 | 361 | rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False) |
|
363 | 362 | rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True) |
|
364 | 363 | rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
365 | 364 | rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
366 | 365 | rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
367 | 366 | rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
368 | 367 | rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False) |
|
369 | 368 | rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
370 | 369 | return _ApplicationSettingsForm |
|
371 | 370 | |
|
372 | 371 | |
|
373 | 372 | def ApplicationVisualisationForm(localizer): |
|
374 | 373 | from rhodecode.model.db import Repository |
|
375 | 374 | _ = localizer |
|
376 | 375 | |
|
377 | 376 | class _ApplicationVisualisationForm(formencode.Schema): |
|
378 | 377 | allow_extra_fields = True |
|
379 | 378 | filter_extra_fields = False |
|
380 | 379 | rhodecode_show_public_icon = v.StringBoolean(if_missing=False) |
|
381 | 380 | rhodecode_show_private_icon = v.StringBoolean(if_missing=False) |
|
382 | 381 | rhodecode_stylify_metatags = v.StringBoolean(if_missing=False) |
|
383 | 382 | |
|
384 | 383 | rhodecode_repository_fields = v.StringBoolean(if_missing=False) |
|
385 | 384 | rhodecode_lightweight_journal = v.StringBoolean(if_missing=False) |
|
386 | 385 | rhodecode_dashboard_items = v.Int(min=5, not_empty=True) |
|
387 | 386 | rhodecode_admin_grid_items = v.Int(min=5, not_empty=True) |
|
388 | 387 | rhodecode_show_version = v.StringBoolean(if_missing=False) |
|
389 | 388 | rhodecode_use_gravatar = v.StringBoolean(if_missing=False) |
|
390 | 389 | rhodecode_markup_renderer = v.OneOf(['markdown', 'rst']) |
|
391 | 390 | rhodecode_gravatar_url = v.UnicodeString(min=3) |
|
392 | 391 | rhodecode_clone_uri_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI) |
|
393 | 392 | rhodecode_clone_uri_id_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_ID) |
|
394 | 393 | rhodecode_clone_uri_ssh_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_SSH) |
|
395 | 394 | rhodecode_support_url = v.UnicodeString() |
|
396 | 395 | rhodecode_show_revision_number = v.StringBoolean(if_missing=False) |
|
397 | 396 | rhodecode_show_sha_length = v.Int(min=4, not_empty=True) |
|
398 | 397 | return _ApplicationVisualisationForm |
|
399 | 398 | |
|
400 | 399 | |
|
401 | 400 | class _BaseVcsSettingsForm(formencode.Schema): |
|
402 | 401 | |
|
403 | 402 | allow_extra_fields = True |
|
404 | 403 | filter_extra_fields = False |
|
405 | 404 | hooks_changegroup_repo_size = v.StringBoolean(if_missing=False) |
|
406 | 405 | hooks_changegroup_push_logger = v.StringBoolean(if_missing=False) |
|
407 | 406 | hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False) |
|
408 | 407 | |
|
409 | 408 | # PR/Code-review |
|
410 | 409 | rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False) |
|
411 | 410 | rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False) |
|
412 | 411 | |
|
413 | 412 | # hg |
|
414 | 413 | extensions_largefiles = v.StringBoolean(if_missing=False) |
|
415 | 414 | extensions_evolve = v.StringBoolean(if_missing=False) |
|
416 | 415 | phases_publish = v.StringBoolean(if_missing=False) |
|
417 | 416 | |
|
418 | 417 | rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False) |
|
419 | 418 | rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False) |
|
420 | 419 | |
|
421 | 420 | # git |
|
422 | 421 | vcs_git_lfs_enabled = v.StringBoolean(if_missing=False) |
|
423 | 422 | rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False) |
|
424 | 423 | rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False) |
|
425 | 424 | |
|
426 | 425 | # svn |
|
427 | 426 | vcs_svn_proxy_http_requests_enabled = v.StringBoolean(if_missing=False) |
|
428 | 427 | vcs_svn_proxy_http_server_url = v.UnicodeString(strip=True, if_missing=None) |
|
429 | 428 | |
|
430 | 429 | # cache |
|
431 | 430 | rhodecode_diff_cache = v.StringBoolean(if_missing=False) |
|
432 | 431 | |
|
433 | 432 | |
|
434 | 433 | def ApplicationUiSettingsForm(localizer): |
|
435 | 434 | _ = localizer |
|
436 | 435 | |
|
437 | 436 | class _ApplicationUiSettingsForm(_BaseVcsSettingsForm): |
|
438 | 437 | web_push_ssl = v.StringBoolean(if_missing=False) |
|
439 | 438 | paths_root_path = All( |
|
440 | 439 | v.ValidPath(localizer), |
|
441 | 440 | v.UnicodeString(strip=True, min=1, not_empty=True) |
|
442 | 441 | ) |
|
443 | 442 | largefiles_usercache = All( |
|
444 | 443 | v.ValidPath(localizer), |
|
445 | 444 | v.UnicodeString(strip=True, min=2, not_empty=True)) |
|
446 | 445 | vcs_git_lfs_store_location = All( |
|
447 | 446 | v.ValidPath(localizer), |
|
448 | 447 | v.UnicodeString(strip=True, min=2, not_empty=True)) |
|
449 | 448 | extensions_hgsubversion = v.StringBoolean(if_missing=False) |
|
450 | 449 | extensions_hggit = v.StringBoolean(if_missing=False) |
|
451 | 450 | new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch') |
|
452 | 451 | new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag') |
|
453 | 452 | return _ApplicationUiSettingsForm |
|
454 | 453 | |
|
455 | 454 | |
|
456 | 455 | def RepoVcsSettingsForm(localizer, repo_name): |
|
457 | 456 | _ = localizer |
|
458 | 457 | |
|
459 | 458 | class _RepoVcsSettingsForm(_BaseVcsSettingsForm): |
|
460 | 459 | inherit_global_settings = v.StringBoolean(if_missing=False) |
|
461 | 460 | new_svn_branch = v.ValidSvnPattern(localizer, |
|
462 | 461 | section='vcs_svn_branch', repo_name=repo_name) |
|
463 | 462 | new_svn_tag = v.ValidSvnPattern(localizer, |
|
464 | 463 | section='vcs_svn_tag', repo_name=repo_name) |
|
465 | 464 | return _RepoVcsSettingsForm |
|
466 | 465 | |
|
467 | 466 | |
|
468 | 467 | def LabsSettingsForm(localizer): |
|
469 | 468 | _ = localizer |
|
470 | 469 | |
|
471 | 470 | class _LabSettingsForm(formencode.Schema): |
|
472 | 471 | allow_extra_fields = True |
|
473 | 472 | filter_extra_fields = False |
|
474 | 473 | return _LabSettingsForm |
|
475 | 474 | |
|
476 | 475 | |
|
477 | 476 | def ApplicationPermissionsForm( |
|
478 | 477 | localizer, register_choices, password_reset_choices, |
|
479 | 478 | extern_activate_choices): |
|
480 | 479 | _ = localizer |
|
481 | 480 | |
|
482 | 481 | class _DefaultPermissionsForm(formencode.Schema): |
|
483 | 482 | allow_extra_fields = True |
|
484 | 483 | filter_extra_fields = True |
|
485 | 484 | |
|
486 | 485 | anonymous = v.StringBoolean(if_missing=False) |
|
487 | 486 | default_register = v.OneOf(register_choices) |
|
488 | 487 | default_register_message = v.UnicodeString() |
|
489 | 488 | default_password_reset = v.OneOf(password_reset_choices) |
|
490 | 489 | default_extern_activate = v.OneOf(extern_activate_choices) |
|
491 | 490 | return _DefaultPermissionsForm |
|
492 | 491 | |
|
493 | 492 | |
|
494 | 493 | def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices, |
|
495 | 494 | user_group_perms_choices): |
|
496 | 495 | _ = localizer |
|
497 | 496 | |
|
498 | 497 | class _ObjectPermissionsForm(formencode.Schema): |
|
499 | 498 | allow_extra_fields = True |
|
500 | 499 | filter_extra_fields = True |
|
501 | 500 | overwrite_default_repo = v.StringBoolean(if_missing=False) |
|
502 | 501 | overwrite_default_group = v.StringBoolean(if_missing=False) |
|
503 | 502 | overwrite_default_user_group = v.StringBoolean(if_missing=False) |
|
504 | 503 | |
|
505 | 504 | default_repo_perm = v.OneOf(repo_perms_choices) |
|
506 | 505 | default_group_perm = v.OneOf(group_perms_choices) |
|
507 | 506 | default_user_group_perm = v.OneOf(user_group_perms_choices) |
|
508 | 507 | |
|
509 | 508 | return _ObjectPermissionsForm |
|
510 | 509 | |
|
511 | 510 | |
|
512 | 511 | def BranchPermissionsForm(localizer, branch_perms_choices): |
|
513 | 512 | _ = localizer |
|
514 | 513 | |
|
515 | 514 | class _BranchPermissionsForm(formencode.Schema): |
|
516 | 515 | allow_extra_fields = True |
|
517 | 516 | filter_extra_fields = True |
|
518 | 517 | overwrite_default_branch = v.StringBoolean(if_missing=False) |
|
519 | 518 | default_branch_perm = v.OneOf(branch_perms_choices) |
|
520 | 519 | |
|
521 | 520 | return _BranchPermissionsForm |
|
522 | 521 | |
|
523 | 522 | |
|
524 | 523 | def UserPermissionsForm(localizer, create_choices, create_on_write_choices, |
|
525 | 524 | repo_group_create_choices, user_group_create_choices, |
|
526 | 525 | fork_choices, inherit_default_permissions_choices): |
|
527 | 526 | _ = localizer |
|
528 | 527 | |
|
529 | 528 | class _DefaultPermissionsForm(formencode.Schema): |
|
530 | 529 | allow_extra_fields = True |
|
531 | 530 | filter_extra_fields = True |
|
532 | 531 | |
|
533 | 532 | anonymous = v.StringBoolean(if_missing=False) |
|
534 | 533 | |
|
535 | 534 | default_repo_create = v.OneOf(create_choices) |
|
536 | 535 | default_repo_create_on_write = v.OneOf(create_on_write_choices) |
|
537 | 536 | default_user_group_create = v.OneOf(user_group_create_choices) |
|
538 | 537 | default_repo_group_create = v.OneOf(repo_group_create_choices) |
|
539 | 538 | default_fork_create = v.OneOf(fork_choices) |
|
540 | 539 | default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices) |
|
541 | 540 | return _DefaultPermissionsForm |
|
542 | 541 | |
|
543 | 542 | |
|
544 | 543 | def UserIndividualPermissionsForm(localizer): |
|
545 | 544 | _ = localizer |
|
546 | 545 | |
|
547 | 546 | class _DefaultPermissionsForm(formencode.Schema): |
|
548 | 547 | allow_extra_fields = True |
|
549 | 548 | filter_extra_fields = True |
|
550 | 549 | |
|
551 | 550 | inherit_default_permissions = v.StringBoolean(if_missing=False) |
|
552 | 551 | return _DefaultPermissionsForm |
|
553 | 552 | |
|
554 | 553 | |
|
555 | 554 | def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()): |
|
556 | 555 | _ = localizer |
|
557 | 556 | old_data = old_data or {} |
|
558 | 557 | |
|
559 | 558 | class _DefaultsForm(formencode.Schema): |
|
560 | 559 | allow_extra_fields = True |
|
561 | 560 | filter_extra_fields = True |
|
562 | 561 | default_repo_type = v.OneOf(supported_backends) |
|
563 | 562 | default_repo_private = v.StringBoolean(if_missing=False) |
|
564 | 563 | default_repo_enable_statistics = v.StringBoolean(if_missing=False) |
|
565 | 564 | default_repo_enable_downloads = v.StringBoolean(if_missing=False) |
|
566 | 565 | default_repo_enable_locking = v.StringBoolean(if_missing=False) |
|
567 | 566 | return _DefaultsForm |
|
568 | 567 | |
|
569 | 568 | |
|
570 | 569 | def AuthSettingsForm(localizer): |
|
571 | 570 | _ = localizer |
|
572 | 571 | |
|
573 | 572 | class _AuthSettingsForm(formencode.Schema): |
|
574 | 573 | allow_extra_fields = True |
|
575 | 574 | filter_extra_fields = True |
|
576 | 575 | auth_plugins = All(v.ValidAuthPlugins(localizer), |
|
577 | 576 | v.UniqueListFromString(localizer)(not_empty=True)) |
|
578 | 577 | return _AuthSettingsForm |
|
579 | 578 | |
|
580 | 579 | |
|
581 | 580 | def UserExtraEmailForm(localizer): |
|
582 | 581 | _ = localizer |
|
583 | 582 | |
|
584 | 583 | class _UserExtraEmailForm(formencode.Schema): |
|
585 | 584 | email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True)) |
|
586 | 585 | return _UserExtraEmailForm |
|
587 | 586 | |
|
588 | 587 | |
|
589 | 588 | def UserExtraIpForm(localizer): |
|
590 | 589 | _ = localizer |
|
591 | 590 | |
|
592 | 591 | class _UserExtraIpForm(formencode.Schema): |
|
593 | 592 | ip = v.ValidIp(localizer)(not_empty=True) |
|
594 | 593 | return _UserExtraIpForm |
|
595 | 594 | |
|
596 | 595 | |
|
597 | 596 | def PullRequestForm(localizer, repo_id): |
|
598 | 597 | _ = localizer |
|
599 | 598 | |
|
600 | 599 | class ReviewerForm(formencode.Schema): |
|
601 | 600 | user_id = v.Int(not_empty=True) |
|
602 | 601 | reasons = All() |
|
603 | 602 | rules = All(v.UniqueList(localizer, convert=int)()) |
|
604 | 603 | mandatory = v.StringBoolean() |
|
605 | 604 | role = v.String(if_missing='reviewer') |
|
606 | 605 | |
|
607 | 606 | class ObserverForm(formencode.Schema): |
|
608 | 607 | user_id = v.Int(not_empty=True) |
|
609 | 608 | reasons = All() |
|
610 | 609 | rules = All(v.UniqueList(localizer, convert=int)()) |
|
611 | 610 | mandatory = v.StringBoolean() |
|
612 | 611 | role = v.String(if_missing='observer') |
|
613 | 612 | |
|
614 | 613 | class _PullRequestForm(formencode.Schema): |
|
615 | 614 | allow_extra_fields = True |
|
616 | 615 | filter_extra_fields = True |
|
617 | 616 | |
|
618 | 617 | common_ancestor = v.UnicodeString(strip=True, required=True) |
|
619 | 618 | source_repo = v.UnicodeString(strip=True, required=True) |
|
620 | 619 | source_ref = v.UnicodeString(strip=True, required=True) |
|
621 | 620 | target_repo = v.UnicodeString(strip=True, required=True) |
|
622 | 621 | target_ref = v.UnicodeString(strip=True, required=True) |
|
623 | 622 | revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(), |
|
624 | 623 | v.UniqueList(localizer)(not_empty=True)) |
|
625 | 624 | review_members = formencode.ForEach(ReviewerForm()) |
|
626 | 625 | observer_members = formencode.ForEach(ObserverForm()) |
|
627 | 626 | pullrequest_title = v.UnicodeString(strip=True, required=True, min=1, max=255) |
|
628 | 627 | pullrequest_desc = v.UnicodeString(strip=True, required=False) |
|
629 | 628 | description_renderer = v.UnicodeString(strip=True, required=False) |
|
630 | 629 | |
|
631 | 630 | return _PullRequestForm |
|
632 | 631 | |
|
633 | 632 | |
|
634 | 633 | def IssueTrackerPatternsForm(localizer): |
|
635 | 634 | _ = localizer |
|
636 | 635 | |
|
637 | 636 | class _IssueTrackerPatternsForm(formencode.Schema): |
|
638 | 637 | allow_extra_fields = True |
|
639 | 638 | filter_extra_fields = False |
|
640 | 639 | chained_validators = [v.ValidPattern(localizer)] |
|
641 | 640 | return _IssueTrackerPatternsForm |
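
Every factory above follows the same shape: it takes a localizer, defines a formencode.Schema subclass that closes over it, and returns that class for the caller to instantiate and feed the submitted form dict through to_python(). A self-contained sketch of that pattern using only stock formencode validators, since the custom validators in rhodecode.model.validators need application context; ExampleSettingsForm is hypothetical, not one of the forms above:

import formencode
from formencode import validators as fv

def ExampleSettingsForm(localizer):
    _ = localizer  # kept for parity with the factories above; unused in this sketch

    class _ExampleSettingsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True
        title = fv.UnicodeString(strip=True, max=40, not_empty=True)
        items_per_page = fv.Int(min=5, not_empty=True)
        show_version = fv.StringBoolean(if_missing=False)
    return _ExampleSettingsForm

schema = ExampleSettingsForm(lambda s: s)()
try:
    clean = schema.to_python({'title': ' Demo ', 'items_per_page': '25'})
    # -> {'title': 'Demo', 'items_per_page': 25, 'show_version': False}
except formencode.Invalid as e:
    print(e.unpack_errors())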
@@ -1,256 +1,256 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
1 | ||
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2013-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | gist model for RhodeCode |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import os |
|
26 | 26 | import time |
|
27 | 27 | import logging |
|
28 | 28 | import traceback |
|
29 | 29 | import shutil |
|
30 | 30 | |
|
31 | 31 | from pyramid.threadlocal import get_current_request |
|
32 | 32 | |
|
33 | 33 | from rhodecode.lib.utils2 import ( |
|
34 | 34 | safe_unicode, unique_id, safe_int, time_to_datetime, AttributeDict) |
|
35 | 35 | from rhodecode.lib.ext_json import json |
|
36 | 36 | from rhodecode.lib.vcs import VCSError |
|
37 | 37 | from rhodecode.model import BaseModel |
|
38 | 38 | from rhodecode.model.db import Gist |
|
39 | 39 | from rhodecode.model.repo import RepoModel |
|
40 | 40 | from rhodecode.model.scm import ScmModel |
|
41 | 41 | |
|
42 | 42 | log = logging.getLogger(__name__) |
|
43 | 43 | |
|
44 | 44 | GIST_STORE_LOC = '.rc_gist_store' |
|
45 | 45 | GIST_METADATA_FILE = '.rc_gist_metadata' |
|
46 | 46 | |
|
47 | 47 | |
|
48 | 48 | class GistModel(BaseModel): |
|
49 | 49 | cls = Gist |
|
50 | 50 | vcs_backend = 'hg' |
|
51 | 51 | |
|
52 | 52 | def _get_gist(self, gist): |
|
53 | 53 | """ |
|
54 | 54 | Helper method to get gist by ID, or gist_access_id as a fallback |
|
55 | 55 | |
|
56 | 56 | :param gist: GistID, gist_access_id, or Gist instance |
|
57 | 57 | """ |
|
58 | 58 | return self._get_instance(Gist, gist, callback=Gist.get_by_access_id) |
|
59 | 59 | |
|
60 | 60 | def __delete_gist(self, gist): |
|
61 | 61 | """ |
|
62 | 62 | removes gist from filesystem |
|
63 | 63 | |
|
64 | 64 | :param gist: gist object |
|
65 | 65 | """ |
|
66 | 66 | root_path = RepoModel().repos_path |
|
67 | 67 | rm_path = os.path.join(root_path, GIST_STORE_LOC, gist.gist_access_id) |
|
68 | 68 | log.info("Removing %s", rm_path) |
|
69 | 69 | shutil.rmtree(rm_path) |
|
70 | 70 | |
|
71 | 71 | def _store_metadata(self, repo, gist_id, gist_access_id, user_id, username, |
|
72 | 72 | gist_type, gist_expires, gist_acl_level): |
|
73 | 73 | """ |
|
74 | 74 | store metadata inside the gist repo, this can be later used for imports |
|
75 | 75 | or gist identification. Currently we use this inside RhodeCode tools |
|
76 | 76 | to do cleanup of gists that are in storage but not in database. |
|
77 | 77 | """ |
|
78 | 78 | metadata = { |
|
79 | 79 | 'metadata_version': '2', |
|
80 | 80 | 'gist_db_id': gist_id, |
|
81 | 81 | 'gist_access_id': gist_access_id, |
|
82 | 82 | 'gist_owner_id': user_id, |
|
83 | 83 | 'gist_owner_username': username, |
|
84 | 84 | 'gist_type': gist_type, |
|
85 | 85 | 'gist_expires': gist_expires, |
|
86 | 86 | 'gist_updated': time.time(), |
|
87 | 87 | 'gist_acl_level': gist_acl_level, |
|
88 | 88 | } |
|
89 | 89 | metadata_file = os.path.join(repo.path, '.hg', GIST_METADATA_FILE) |
|
90 | 90 | with open(metadata_file, 'wb') as f: |
|
91 | 91 | f.write(json.dumps(metadata)) |
|
92 | 92 | |
|
93 | 93 | def get_gist(self, gist): |
|
94 | 94 | return self._get_gist(gist) |
|
95 | 95 | |
|
96 | 96 | def get_gist_files(self, gist_access_id, revision=None): |
|
97 | 97 | """ |
|
98 | 98 | Get files for given gist |
|
99 | 99 | |
|
100 | 100 | :param gist_access_id: |
|
101 | 101 | """ |
|
102 | 102 | repo = Gist.get_by_access_id(gist_access_id) |
|
103 | 103 | vcs_repo = repo.scm_instance() |
|
104 | 104 | if not vcs_repo: |
|
105 | 105 | raise VCSError('Failed to load gist repository for {}'.format(repo)) |
|
106 | 106 | |
|
107 | 107 | commit = vcs_repo.get_commit(commit_id=revision) |
|
108 | 108 | return commit, [n for n in commit.get_node('/')] |
|
109 | 109 | |
|
110 | 110 | def create(self, description, owner, gist_mapping, |
|
111 | 111 | gist_type=Gist.GIST_PUBLIC, lifetime=-1, gist_id=None, |
|
112 | 112 | gist_acl_level=Gist.ACL_LEVEL_PRIVATE): |
|
113 | 113 | """ |
|
114 | 114 | Create a gist |
|
115 | 115 | |
|
116 | 116 | :param description: description of the gist |
|
117 | 117 | :param owner: user who created this gist |
|
118 | 118 | :param gist_mapping: mapping [{'filename': 'file1.txt', 'content': content}, ...]
|
119 | 119 | :param gist_type: type of gist private/public |
|
120 | 120 | :param lifetime: in minutes, -1 == forever |
|
121 | 121 | :param gist_acl_level: acl level for this gist |
|
122 | 122 | """ |
|
123 | 123 | owner = self._get_user(owner) |
|
124 | 124 | gist_id = safe_unicode(gist_id or unique_id(20)) |
|
125 | 125 | lifetime = safe_int(lifetime, -1) |
|
126 | 126 | gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1 |
|
127 | 127 | expiration = (time_to_datetime(gist_expires) |
|
128 | 128 | if gist_expires != -1 else 'forever') |
|
129 | 129 | log.debug('set GIST expiration date to: %s', expiration) |
|
130 | 130 | # create the Database version |
|
131 | 131 | gist = Gist() |
|
132 | 132 | gist.gist_description = description |
|
133 | 133 | gist.gist_access_id = gist_id |
|
134 | 134 | gist.gist_owner = owner.user_id |
|
135 | 135 | gist.gist_expires = gist_expires |
|
136 | 136 | gist.gist_type = safe_unicode(gist_type) |
|
137 | 137 | gist.acl_level = gist_acl_level |
|
138 | 138 | self.sa.add(gist) |
|
139 | 139 | self.sa.flush() |
|
140 | 140 | if gist_type == Gist.GIST_PUBLIC: |
|
141 | 141 | # use DB ID for easy to use GIST ID |
|
142 | 142 | gist_id = safe_unicode(gist.gist_id) |
|
143 | 143 | gist.gist_access_id = gist_id |
|
144 | 144 | self.sa.add(gist) |
|
145 | 145 | |
|
146 | 146 | gist_repo_path = os.path.join(GIST_STORE_LOC, gist_id) |
|
147 | 147 | log.debug('Creating new %s GIST repo in %s', gist_type, gist_repo_path) |
|
148 | 148 | repo = RepoModel()._create_filesystem_repo( |
|
149 | 149 | repo_name=gist_id, repo_type=self.vcs_backend, repo_group=GIST_STORE_LOC, |
|
150 | 150 | use_global_config=True) |
|
151 | 151 | |
|
152 | 152 | # now create single multifile commit |
|
153 | 153 | message = 'added file' |
|
154 | 154 | message += 's: ' if len(gist_mapping) > 1 else ': ' |
|
155 | 155 | message += ', '.join([x for x in gist_mapping]) |
|
156 | 156 | |
|
157 | 157 | # fake RhodeCode Repository object |
|
158 | 158 | fake_repo = AttributeDict({ |
|
159 | 159 | 'repo_name': gist_repo_path, |
|
160 | 160 | 'scm_instance': lambda *args, **kwargs: repo, |
|
161 | 161 | }) |
|
162 | 162 | |
|
163 | 163 | ScmModel().create_nodes( |
|
164 | 164 | user=owner.user_id, repo=fake_repo, |
|
165 | 165 | message=message, |
|
166 | 166 | nodes=gist_mapping, |
|
167 | 167 | trigger_push_hook=False |
|
168 | 168 | ) |
|
169 | 169 | |
|
170 | 170 | self._store_metadata(repo, gist.gist_id, gist.gist_access_id, |
|
171 | 171 | owner.user_id, owner.username, gist.gist_type, |
|
172 | 172 | gist.gist_expires, gist_acl_level) |
|
173 | 173 | return gist |
|
174 | 174 | |
|
175 | 175 | def delete(self, gist, fs_remove=True): |
|
176 | 176 | gist = self._get_gist(gist) |
|
177 | 177 | try: |
|
178 | 178 | self.sa.delete(gist) |
|
179 | 179 | if fs_remove: |
|
180 | 180 | self.__delete_gist(gist) |
|
181 | 181 | else: |
|
182 | 182 | log.debug('skipping removal from filesystem') |
|
183 | 183 | except Exception: |
|
184 | 184 | log.error(traceback.format_exc()) |
|
185 | 185 | raise |
|
186 | 186 | |
|
187 | 187 | def update(self, gist, description, owner, gist_mapping, lifetime, |
|
188 | 188 | gist_acl_level): |
|
189 | 189 | gist = self._get_gist(gist) |
|
190 | 190 | gist_repo = gist.scm_instance() |
|
191 | 191 | |
|
192 | 192 | if lifetime == 0: # preserve old value |
|
193 | 193 | gist_expires = gist.gist_expires |
|
194 | 194 | else: |
|
195 | 195 | gist_expires = ( |
|
196 | 196 | time.time() + (lifetime * 60) if lifetime != -1 else -1) |
|
197 | 197 | |
|
198 | 198 | # calculate operation type based on given data |
|
199 | 199 | gist_mapping_op = {} |
|
200 | 200 | for k, v in gist_mapping.items(): |
|
201 | 201 | # add, mod, del |
|
202 | 202 | if not v['filename_org'] and v['filename']: |
|
203 | 203 | op = 'add' |
|
204 | 204 | elif v['filename_org'] and not v['filename']: |
|
205 | 205 | op = 'del' |
|
206 | 206 | else: |
|
207 | 207 | op = 'mod' |
|
208 | 208 | |
|
209 | 209 | v['op'] = op |
|
210 | 210 | gist_mapping_op[k] = v |
|
211 | 211 | |
|
212 | 212 | gist.gist_description = description |
|
213 | 213 | gist.gist_expires = gist_expires |
|
214 | 214 | gist.owner = owner |
|
215 | 215 | gist.acl_level = gist_acl_level |
|
216 | 216 | self.sa.add(gist) |
|
217 | 217 | self.sa.flush() |
|
218 | 218 | |
|
219 | 219 | message = 'updated file' |
|
220 | 220 | message += 's: ' if len(gist_mapping) > 1 else ': ' |
|
221 | 221 | message += ', '.join([x for x in gist_mapping]) |
|
222 | 222 | |
|
223 | 223 | # fake RhodeCode Repository object |
|
224 | 224 | fake_repo = AttributeDict({ |
|
225 | 225 | 'repo_name': gist_repo.path, |
|
226 | 226 | 'scm_instance': lambda *args, **kwargs: gist_repo, |
|
227 | 227 | }) |
|
228 | 228 | |
|
229 | 229 | self._store_metadata(gist_repo, gist.gist_id, gist.gist_access_id, |
|
230 | 230 | owner.user_id, owner.username, gist.gist_type, |
|
231 | 231 | gist.gist_expires, gist_acl_level) |
|
232 | 232 | |
|
233 | 233 | # this can throw NodeNotChangedError, if changes we're trying to commit |
|
234 | 234 | # are not actually changes... |
|
235 | 235 | ScmModel().update_nodes( |
|
236 | 236 | user=owner.user_id, |
|
237 | 237 | repo=fake_repo, |
|
238 | 238 | message=message, |
|
239 | 239 | nodes=gist_mapping_op, |
|
240 | 240 | trigger_push_hook=False |
|
241 | 241 | ) |
|
242 | 242 | |
|
243 | 243 | return gist |
|
244 | 244 | |
|
245 | 245 | def get_url(self, gist, request=None): |
|
246 | 246 | import rhodecode |
|
247 | 247 | |
|
248 | 248 | if not request: |
|
249 | 249 | request = get_current_request() |
|
250 | 250 | |
|
251 | 251 | alias_url = rhodecode.CONFIG.get('gist_alias_url') |
|
252 | 252 | if alias_url: |
|
253 | 253 | return alias_url.replace('{gistid}', gist.gist_access_id) |
|
254 | 254 | |
|
255 | 255 | return request.route_url('gist_show', gist_id=gist.gist_access_id) |
|
256 | 256 |
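
Both create() and update() above turn a lifetime given in minutes into an absolute gist_expires timestamp: -1 means the gist never expires, and in update() a lifetime of 0 preserves the existing expiry. A minimal sketch of that arithmetic; compute_gist_expires is a hypothetical helper for illustration, not a method of GistModel:

import time

def compute_gist_expires(lifetime_minutes, current_expires=None):
    # -1 == forever; 0 == keep the existing expiry (only meaningful on update)
    if lifetime_minutes == 0 and current_expires is not None:
        return current_expires
    if lifetime_minutes == -1:
        return -1
    return time.time() + lifetime_minutes * 60

print(compute_gist_expires(-1))        # -1, never expires
print(compute_gist_expires(60))        # current time + 3600 seconds
print(compute_gist_expires(0, 12345))  # 12345, preserved on update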
@@ -1,241 +1,241 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
1 | ||
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | """ |
|
23 | 23 | Model for integrations |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | import logging |
|
28 | 28 | |
|
29 | 29 | from sqlalchemy import or_, and_ |
|
30 | 30 | |
|
31 | 31 | import rhodecode |
|
32 | 32 | from rhodecode import events |
|
33 | 33 | from rhodecode.integrations.types.base import EEIntegration |
|
34 | 34 | from rhodecode.lib.caching_query import FromCache |
|
35 | 35 | from rhodecode.model import BaseModel |
|
36 | 36 | from rhodecode.model.db import Integration, Repository, RepoGroup, true, false, case |
|
37 | 37 | from rhodecode.integrations import integration_type_registry |
|
38 | 38 | |
|
39 | 39 | log = logging.getLogger(__name__) |
|
40 | 40 | |
|
41 | 41 | |
|
42 | 42 | class IntegrationModel(BaseModel): |
|
43 | 43 | |
|
44 | 44 | cls = Integration |
|
45 | 45 | |
|
46 | 46 | def __get_integration(self, integration): |
|
47 | 47 | if isinstance(integration, Integration): |
|
48 | 48 | return integration |
|
49 | 49 | elif isinstance(integration, int): |
|
50 | 50 | return self.sa.query(Integration).get(integration) |
|
51 | 51 | else: |
|
52 | 52 | if integration: |
|
53 | 53 | raise Exception('integration must be int or Instance' |
|
54 | 54 | ' of Integration got %s' % type(integration)) |
|
55 | 55 | |
|
56 | 56 | def create(self, IntegrationType, name, enabled, repo, repo_group, |
|
57 | 57 | child_repos_only, settings): |
|
58 | 58 | """ Create an IntegrationType integration """ |
|
59 | 59 | integration = Integration() |
|
60 | 60 | integration.integration_type = IntegrationType.key |
|
61 | 61 | self.sa.add(integration) |
|
62 | 62 | self.update_integration(integration, name, enabled, repo, repo_group, |
|
63 | 63 | child_repos_only, settings) |
|
64 | 64 | self.sa.commit() |
|
65 | 65 | return integration |
|
66 | 66 | |
|
67 | 67 | def update_integration(self, integration, name, enabled, repo, repo_group, |
|
68 | 68 | child_repos_only, settings): |
|
69 | 69 | integration = self.__get_integration(integration) |
|
70 | 70 | |
|
71 | 71 | integration.repo = repo |
|
72 | 72 | integration.repo_group = repo_group |
|
73 | 73 | integration.child_repos_only = child_repos_only |
|
74 | 74 | integration.name = name |
|
75 | 75 | integration.enabled = enabled |
|
76 | 76 | integration.settings = settings |
|
77 | 77 | |
|
78 | 78 | return integration |
|
79 | 79 | |
|
80 | 80 | def delete(self, integration): |
|
81 | 81 | integration = self.__get_integration(integration) |
|
82 | 82 | if integration: |
|
83 | 83 | self.sa.delete(integration) |
|
84 | 84 | return True |
|
85 | 85 | return False |
|
86 | 86 | |
|
87 | 87 | def get_integration_handler(self, integration): |
|
88 | 88 | TypeClass = integration_type_registry.get(integration.integration_type) |
|
89 | 89 | if not TypeClass: |
|
90 | 90 | log.error('No class could be found for integration type: {}'.format( |
|
91 | 91 | integration.integration_type)) |
|
92 | 92 | return None |
|
93 | 93 | elif isinstance(TypeClass, EEIntegration) or issubclass(TypeClass, EEIntegration): |
|
94 | 94 | log.error('EE integration cannot be ' |
|
95 | 95 | 'executed for integration type: {}'.format( |
|
96 | 96 | integration.integration_type)) |
|
97 | 97 | return None |
|
98 | 98 | |
|
99 | 99 | return TypeClass(integration.settings) |
|
100 | 100 | |
|
101 | 101 | def send_event(self, integration, event): |
|
102 | 102 | """ Send an event to an integration """ |
|
103 | 103 | handler = self.get_integration_handler(integration) |
|
104 | 104 | if handler: |
|
105 | 105 | log.debug( |
|
106 | 106 | 'events: sending event %s on integration %s using handler %s', |
|
107 | 107 | event, integration, handler) |
|
108 | 108 | handler.send_event(event) |
|
109 | 109 | |
|
110 | 110 | def get_integrations(self, scope, IntegrationType=None): |
|
111 | 111 | """ |
|
112 | 112 | Return integrations for a scope, which must be one of: |
|
113 | 113 | |
|
114 | 114 | 'all' - every integration, global/repogroup/repo |
|
115 | 115 | 'global' - global integrations only |
|
116 | 116 | <Repository> instance - integrations for this repo only |
|
117 | 117 | <RepoGroup> instance - integrations for this repogroup only |
|
118 | 118 | """ |
|
119 | 119 | |
|
120 | 120 | if isinstance(scope, Repository): |
|
121 | 121 | query = self.sa.query(Integration).filter( |
|
122 | 122 | Integration.repo == scope) |
|
123 | 123 | elif isinstance(scope, RepoGroup): |
|
124 | 124 | query = self.sa.query(Integration).filter( |
|
125 | 125 | Integration.repo_group == scope) |
|
126 | 126 | elif scope == 'global': |
|
127 | 127 | # global integrations |
|
128 | 128 | query = self.sa.query(Integration).filter( |
|
129 | 129 | and_(Integration.repo_id == None, Integration.repo_group_id == None) |
|
130 | 130 | ) |
|
131 | 131 | elif scope == 'root-repos': |
|
132 | 132 | query = self.sa.query(Integration).filter( |
|
133 | 133 | and_(Integration.repo_id == None, |
|
134 | 134 | Integration.repo_group_id == None, |
|
135 | 135 | Integration.child_repos_only == true()) |
|
136 | 136 | ) |
|
137 | 137 | elif scope == 'all': |
|
138 | 138 | query = self.sa.query(Integration) |
|
139 | 139 | else: |
|
140 | 140 | raise Exception( |
|
141 | 141 | "invalid `scope`, must be one of: " |
|
142 | 142 | "['global', 'all', <Repository>, <RepoGroup>]") |
|
143 | 143 | |
|
144 | 144 | if IntegrationType is not None: |
|
145 | 145 | query = query.filter( |
|
146 | 146 | Integration.integration_type==IntegrationType.key) |
|
147 | 147 | |
|
148 | 148 | result = [] |
|
149 | 149 | for integration in query.all(): |
|
150 | 150 | IntType = integration_type_registry.get(integration.integration_type) |
|
151 | 151 | result.append((IntType, integration)) |
|
152 | 152 | return result |
|
153 | 153 | |
|
154 | 154 | def get_for_event(self, event, cache=False): |
|
155 | 155 | """ |
|
156 | 156 | Get integrations that match an event |
|
157 | 157 | """ |
|
158 | 158 | # base query |
|
159 | 159 | query = self.sa.query( |
|
160 | 160 | Integration |
|
161 | 161 | ).filter( |
|
162 | 162 | Integration.enabled == true() |
|
163 | 163 | ) |
|
164 | 164 | |
|
165 | 165 | global_integrations_filter = and_( |
|
166 | 166 | Integration.repo_id == None, |
|
167 | 167 | Integration.repo_group_id == None, |
|
168 | 168 | Integration.child_repos_only == false(), |
|
169 | 169 | ) |
|
170 | 170 | |
|
171 | 171 | if isinstance(event, events.RepoEvent): |
|
172 | 172 | root_repos_integrations_filter = and_( |
|
173 | 173 | Integration.repo_id == None, |
|
174 | 174 | Integration.repo_group_id == None, |
|
175 | 175 | Integration.child_repos_only == true(), |
|
176 | 176 | ) |
|
177 | 177 | |
|
178 | 178 | clauses = [ |
|
179 | 179 | global_integrations_filter, |
|
180 | 180 | ] |
|
181 | 181 | cases = [ |
|
182 | 182 | (global_integrations_filter, 1), |
|
183 | 183 | (root_repos_integrations_filter, 2), |
|
184 | 184 | ] |
|
185 | 185 | |
|
186 | 186 | # repo group integrations |
|
187 | 187 | if event.repo.group: |
|
188 | 188 | # repo group with only root level repos |
|
189 | 189 | group_child_repos_filter = and_( |
|
190 | 190 | Integration.repo_group_id == event.repo.group.group_id, |
|
191 | 191 | Integration.child_repos_only == true() |
|
192 | 192 | ) |
|
193 | 193 | |
|
194 | 194 | clauses.append(group_child_repos_filter) |
|
195 | 195 | cases.append( |
|
196 | 196 | (group_child_repos_filter, 3), |
|
197 | 197 | ) |
|
198 | 198 | |
|
199 | 199 | # repo group cascade to kids |
|
200 | 200 | group_recursive_repos_filter = and_( |
|
201 | 201 | Integration.repo_group_id.in_( |
|
202 | 202 | [group.group_id for group in event.repo.groups_with_parents] |
|
203 | 203 | ), |
|
204 | 204 | Integration.child_repos_only == false() |
|
205 | 205 | ) |
|
206 | 206 | clauses.append(group_recursive_repos_filter) |
|
207 | 207 | cases.append( |
|
208 | 208 | (group_recursive_repos_filter, 4), |
|
209 | 209 | ) |
|
210 | 210 | |
|
211 | 211 | if not event.repo.group: # root repo |
|
212 | 212 | clauses.append(root_repos_integrations_filter) |
|
213 | 213 | |
|
214 | 214 | # repo integrations |
|
215 | 215 | if event.repo.repo_id: # pre-create events don't have a repo_id yet |
|
216 | 216 | specific_repo_filter = Integration.repo_id == event.repo.repo_id |
|
217 | 217 | clauses.append(specific_repo_filter) |
|
218 | 218 | cases.append( |
|
219 | 219 | (specific_repo_filter, 5), |
|
220 | 220 | ) |
|
221 | 221 | |
|
222 | 222 | order_by_criterion = case(cases) |
|
223 | 223 | |
|
224 | 224 | query = query.filter(or_(*clauses)) |
|
225 | 225 | query = query.order_by(order_by_criterion) |
|
226 | 226 | |
|
227 | 227 | if cache: |
|
228 | 228 | cache_key = "get_enabled_repo_integrations_%i" % event.repo.repo_id |
|
229 | 229 | query = query.options( |
|
230 | 230 | FromCache("sql_cache_short", cache_key)) |
|
231 | 231 | else: # only global integrations |
|
232 | 232 | order_by_criterion = Integration.integration_id |
|
233 | 233 | |
|
234 | 234 | query = query.filter(global_integrations_filter) |
|
235 | 235 | query = query.order_by(order_by_criterion) |
|
236 | 236 | if cache: |
|
237 | 237 | query = query.options( |
|
238 | 238 | FromCache("sql_cache_short", "get_enabled_global_integrations")) |
|
239 | 239 | |
|
240 | 240 | result = query.all() |
|
241 | 241 | return result |
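
A short sketch of driving the integration model above; the module path, the 'webhook' registry key, the repository object and the settings dict are illustrative assumptions::

    from rhodecode.integrations import integration_type_registry
    from rhodecode.model.integration import IntegrationModel  # assumed module path

    model = IntegrationModel()
    webhook_type = integration_type_registry.get('webhook')  # assumed type key

    # a repo-scoped integration: repo set, no repo group, not child-repos-only
    integration = model.create(
        webhook_type, name='notify-ci', enabled=True,
        repo=repo, repo_group=None, child_repos_only=False,
        settings={'url': 'https://ci.example.com/hook'})  # 'repo' is a Repository row

    # scope may be 'all', 'global', 'root-repos', a Repository or a RepoGroup
    for integration_type, integration in model.get_integrations('global'):
        handler = model.get_integration_handler(integration)

get_for_event then orders the matching rows by the case() ranking built above, so global integrations sort first and repo-specific ones last.
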
@@ -1,56 +1,55 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
|
5 | 4 | # This program is free software: you can redistribute it and/or modify |
|
6 | 5 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 6 | # (only), as published by the Free Software Foundation. |
|
8 | 7 | # |
|
9 | 8 | # This program is distributed in the hope that it will be useful, |
|
10 | 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 11 | # GNU General Public License for more details. |
|
13 | 12 | # |
|
14 | 13 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 15 | # |
|
17 | 16 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 17 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 19 | |
|
21 | 20 | """ |
|
22 | 21 | SQLAlchemy Metadata and Session object |
|
23 | 22 | """ |
|
24 | 23 | |
|
25 | 24 | from sqlalchemy.orm import declarative_base |
|
26 | 25 | from sqlalchemy.orm import scoped_session, sessionmaker |
|
27 | 26 | from sqlalchemy.orm import Session as SASession |
|
28 | 27 | from rhodecode.lib.caching_query import ORMCache |
|
29 | 28 | |
|
30 | 29 | |
|
31 | 30 | __all__ = ['Base', 'Session', 'raw_query_executor'] |
|
32 | 31 | |
|
33 | 32 | # scoped_session. Apply our custom CachingQuery class to it, |
|
34 | 33 | # using a callable that will associate the dictionary |
|
35 | 34 | # of regions with the Query. |
|
36 | 35 | # to use the cache, add this option to a query: |
|
37 | 36 | # .options(FromCache("sqlalchemy_cache_type", "cachekey")) |
|
38 | 37 | Session = scoped_session( |
|
39 | 38 | sessionmaker( |
|
40 | 39 | expire_on_commit=True, |
|
41 | 40 | ) |
|
42 | 41 | ) |
|
43 | 42 | |
|
44 | 43 | # pass empty regions so we can fetch it on-demand inside ORMCache |
|
45 | 44 | cache = ORMCache(regions={}) |
|
46 | 45 | cache.listen_on_session(Session) |
|
47 | 46 | |
|
48 | 47 | |
|
49 | 48 | # The declarative Base |
|
50 | 49 | Base = declarative_base() |
|
51 | 50 | |
|
52 | 51 | |
|
53 | 52 | def raw_query_executor(): |
|
54 | 53 | engine = Base.metadata.bind |
|
55 | 54 | session = SASession(engine) |
|
56 | 55 | return session |
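
The FromCache option referenced in the comments above hooks into this scoped session through the ORMCache listener; a minimal sketch, with the region name and cache key taken as examples from the callers elsewhere in this changeset::

    from rhodecode.lib.caching_query import FromCache
    from rhodecode.model.db import Integration, true
    from rhodecode.model.meta import Session

    # plain query through the scoped session
    enabled = Session().query(Integration).filter(Integration.enabled == true()).all()

    # the same query, served through the dogpile region named "sql_cache_short"
    cached = Session().query(Integration).options(
        FromCache("sql_cache_short", "enabled_integrations")).all()
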
@@ -1,453 +1,453 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
1 | ||
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | """ |
|
23 | 23 | Model for notifications |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | import logging |
|
27 | 27 | import traceback |
|
28 | 28 | |
|
29 | 29 | import premailer |
|
30 | 30 | from pyramid.threadlocal import get_current_request |
|
31 | 31 | from sqlalchemy.sql.expression import false, true |
|
32 | 32 | |
|
33 | 33 | import rhodecode |
|
34 | 34 | from rhodecode.lib import helpers as h |
|
35 | 35 | from rhodecode.model import BaseModel |
|
36 | 36 | from rhodecode.model.db import Notification, User, UserNotification |
|
37 | 37 | from rhodecode.model.meta import Session |
|
38 | 38 | from rhodecode.translation import TranslationString |
|
39 | 39 | |
|
40 | 40 | log = logging.getLogger(__name__) |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | class NotificationModel(BaseModel): |
|
44 | 44 | |
|
45 | 45 | cls = Notification |
|
46 | 46 | |
|
47 | 47 | def __get_notification(self, notification): |
|
48 | 48 | if isinstance(notification, Notification): |
|
49 | 49 | return notification |
|
50 | 50 | elif isinstance(notification, int): |
|
51 | 51 | return Notification.get(notification) |
|
52 | 52 | else: |
|
53 | 53 | if notification: |
|
54 | 54 | raise Exception('notification must be an int or an instance' |
|
55 | 55 | ' of Notification, got %s' % type(notification)) |
|
56 | 56 | |
|
57 | 57 | def create( |
|
58 | 58 | self, created_by, notification_subject='', notification_body='', |
|
59 | 59 | notification_type=Notification.TYPE_MESSAGE, recipients=None, |
|
60 | 60 | mention_recipients=None, with_email=True, email_kwargs=None): |
|
61 | 61 | """ |
|
62 | 62 | |
|
63 | 63 | Creates a notification of the given type |
|
64 | 64 | |
|
65 | 65 | :param created_by: int, str or User instance. User who created this |
|
66 | 66 | notification |
|
67 | 67 | :param notification_subject: subject of notification itself, |
|
68 | 68 | it will be generated automatically from notification_type if not specified |
|
69 | 69 | :param notification_body: body of notification text |
|
70 | 70 | it will be generated automatically from notification_type if not specified |
|
71 | 71 | :param notification_type: type of notification, based on that we |
|
72 | 72 | pick templates |
|
73 | 73 | :param recipients: list of int, str or User objects, when None |
|
74 | 74 | is given send to all admins |
|
75 | 75 | :param mention_recipients: list of int, str or User objects, |
|
76 | 76 | that were mentioned |
|
77 | 77 | :param with_email: send email with this notification |
|
78 | 78 | :param email_kwargs: dict with arguments to generate email |
|
79 | 79 | """ |
|
80 | 80 | |
|
81 | 81 | from rhodecode.lib.celerylib import tasks, run_task |
|
82 | 82 | |
|
83 | 83 | if recipients and not getattr(recipients, '__iter__', False): |
|
84 | 84 | raise Exception('recipients must be an iterable object') |
|
85 | 85 | |
|
86 | 86 | if not (notification_subject and notification_body) and not notification_type: |
|
87 | 87 | raise ValueError('notification_subject and notification_body ' |
|
88 | 88 | 'cannot be empty when notification_type is not specified') |
|
89 | 89 | |
|
90 | 90 | created_by_obj = self._get_user(created_by) |
|
91 | 91 | |
|
92 | 92 | if not created_by_obj: |
|
93 | 93 | raise Exception('unknown user %s' % created_by) |
|
94 | 94 | |
|
95 | 95 | # default MAIN body if not given |
|
96 | 96 | email_kwargs = email_kwargs or {'body': notification_body} |
|
97 | 97 | mention_recipients = mention_recipients or set() |
|
98 | 98 | |
|
99 | 99 | if recipients is None: |
|
100 | 100 | # recipients is None means to all admins |
|
101 | 101 | recipients_objs = User.query().filter(User.admin == true()).all() |
|
102 | 102 | log.debug('sending notifications %s to admins: %s', |
|
103 | 103 | notification_type, recipients_objs) |
|
104 | 104 | else: |
|
105 | 105 | recipients_objs = set() |
|
106 | 106 | for u in recipients: |
|
107 | 107 | obj = self._get_user(u) |
|
108 | 108 | if obj: |
|
109 | 109 | recipients_objs.add(obj) |
|
110 | 110 | else: # we didn't find this user, log the error and carry on |
|
111 | 111 | log.error('cannot notify unknown user %r', u) |
|
112 | 112 | |
|
113 | 113 | if not recipients_objs: |
|
114 | 114 | raise Exception('no valid recipients specified') |
|
115 | 115 | |
|
116 | 116 | log.debug('sending notifications %s to %s', |
|
117 | 117 | notification_type, recipients_objs) |
|
118 | 118 | |
|
119 | 119 | # add mentioned users into recipients |
|
120 | 120 | final_recipients = set(recipients_objs).union(mention_recipients) |
|
121 | 121 | |
|
122 | 122 | (subject, email_body, email_body_plaintext) = \ |
|
123 | 123 | EmailNotificationModel().render_email(notification_type, **email_kwargs) |
|
124 | 124 | |
|
125 | 125 | if not notification_subject: |
|
126 | 126 | notification_subject = subject |
|
127 | 127 | |
|
128 | 128 | if not notification_body: |
|
129 | 129 | notification_body = email_body_plaintext |
|
130 | 130 | |
|
131 | 131 | notification = Notification.create( |
|
132 | 132 | created_by=created_by_obj, subject=notification_subject, |
|
133 | 133 | body=notification_body, recipients=final_recipients, |
|
134 | 134 | type_=notification_type |
|
135 | 135 | ) |
|
136 | 136 | |
|
137 | 137 | if not with_email: # skip sending email, and just create notification |
|
138 | 138 | return notification |
|
139 | 139 | |
|
140 | 140 | # don't send email to person who created this comment |
|
141 | 141 | rec_objs = set(recipients_objs).difference({created_by_obj}) |
|
142 | 142 | |
|
143 | 143 | # now notify all recipients in question |
|
144 | 144 | |
|
145 | 145 | for recipient in rec_objs.union(mention_recipients): |
|
146 | 146 | # inject current recipient |
|
147 | 147 | email_kwargs['recipient'] = recipient |
|
148 | 148 | email_kwargs['mention'] = recipient in mention_recipients |
|
149 | 149 | (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email( |
|
150 | 150 | notification_type, **email_kwargs) |
|
151 | 151 | |
|
152 | 152 | extra_headers = None |
|
153 | 153 | if 'thread_ids' in email_kwargs: |
|
154 | 154 | extra_headers = {'thread_ids': email_kwargs.pop('thread_ids')} |
|
155 | 155 | |
|
156 | 156 | log.debug('Creating notification email task for user:`%s`', recipient) |
|
157 | 157 | task = run_task(tasks.send_email, recipient.email, subject, |
|
158 | 158 | email_body_plaintext, email_body, extra_headers=extra_headers) |
|
159 | 159 | log.debug('Created email task: %s', task) |
|
160 | 160 | |
|
161 | 161 | return notification |
|
162 | 162 | |
|
163 | 163 | def delete(self, user, notification): |
|
164 | 164 | # we don't want to remove actual notification just the assignment |
|
165 | 165 | try: |
|
166 | 166 | notification = self.__get_notification(notification) |
|
167 | 167 | user = self._get_user(user) |
|
168 | 168 | if notification and user: |
|
169 | 169 | obj = UserNotification.query()\ |
|
170 | 170 | .filter(UserNotification.user == user)\ |
|
171 | 171 | .filter(UserNotification.notification == notification)\ |
|
172 | 172 | .one() |
|
173 | 173 | Session().delete(obj) |
|
174 | 174 | return True |
|
175 | 175 | except Exception: |
|
176 | 176 | log.error(traceback.format_exc()) |
|
177 | 177 | raise |
|
178 | 178 | |
|
179 | 179 | def get_for_user(self, user, filter_=None): |
|
180 | 180 | """ |
|
181 | 181 | Get mentions for given user, filter them if filter dict is given |
|
182 | 182 | """ |
|
183 | 183 | user = self._get_user(user) |
|
184 | 184 | |
|
185 | 185 | q = UserNotification.query()\ |
|
186 | 186 | .filter(UserNotification.user == user)\ |
|
187 | 187 | .join(( |
|
188 | 188 | Notification, UserNotification.notification_id == |
|
189 | 189 | Notification.notification_id)) |
|
190 | 190 | if filter_ == ['all']: |
|
191 | 191 | q = q # no filter |
|
192 | 192 | elif filter_ == ['unread']: |
|
193 | 193 | q = q.filter(UserNotification.read == false()) |
|
194 | 194 | elif filter_: |
|
195 | 195 | q = q.filter(Notification.type_.in_(filter_)) |
|
196 | 196 | |
|
197 | 197 | return q |
|
198 | 198 | |
|
199 | 199 | def mark_read(self, user, notification): |
|
200 | 200 | try: |
|
201 | 201 | notification = self.__get_notification(notification) |
|
202 | 202 | user = self._get_user(user) |
|
203 | 203 | if notification and user: |
|
204 | 204 | obj = UserNotification.query()\ |
|
205 | 205 | .filter(UserNotification.user == user)\ |
|
206 | 206 | .filter(UserNotification.notification == notification)\ |
|
207 | 207 | .one() |
|
208 | 208 | obj.read = True |
|
209 | 209 | Session().add(obj) |
|
210 | 210 | return True |
|
211 | 211 | except Exception: |
|
212 | 212 | log.error(traceback.format_exc()) |
|
213 | 213 | raise |
|
214 | 214 | |
|
215 | 215 | def mark_all_read_for_user(self, user, filter_=None): |
|
216 | 216 | user = self._get_user(user) |
|
217 | 217 | q = UserNotification.query()\ |
|
218 | 218 | .filter(UserNotification.user == user)\ |
|
219 | 219 | .filter(UserNotification.read == false())\ |
|
220 | 220 | .join(( |
|
221 | 221 | Notification, UserNotification.notification_id == |
|
222 | 222 | Notification.notification_id)) |
|
223 | 223 | if filter_ == ['unread']: |
|
224 | 224 | q = q.filter(UserNotification.read == false()) |
|
225 | 225 | elif filter_: |
|
226 | 226 | q = q.filter(Notification.type_.in_(filter_)) |
|
227 | 227 | |
|
228 | 228 | # this is a little inefficient but sqlalchemy doesn't support |
|
229 | 229 | # update on joined tables :( |
|
230 | 230 | for obj in q.all(): |
|
231 | 231 | obj.read = True |
|
232 | 232 | Session().add(obj) |
|
233 | 233 | |
|
234 | 234 | def get_unread_cnt_for_user(self, user): |
|
235 | 235 | user = self._get_user(user) |
|
236 | 236 | return UserNotification.query()\ |
|
237 | 237 | .filter(UserNotification.read == false())\ |
|
238 | 238 | .filter(UserNotification.user == user).count() |
|
239 | 239 | |
|
240 | 240 | def get_unread_for_user(self, user): |
|
241 | 241 | user = self._get_user(user) |
|
242 | 242 | return [x.notification for x in UserNotification.query() |
|
243 | 243 | .filter(UserNotification.read == false()) |
|
244 | 244 | .filter(UserNotification.user == user).all()] |
|
245 | 245 | |
|
246 | 246 | def get_user_notification(self, user, notification): |
|
247 | 247 | user = self._get_user(user) |
|
248 | 248 | notification = self.__get_notification(notification) |
|
249 | 249 | |
|
250 | 250 | return UserNotification.query()\ |
|
251 | 251 | .filter(UserNotification.notification == notification)\ |
|
252 | 252 | .filter(UserNotification.user == user).scalar() |
|
253 | 253 | |
|
254 | 254 | def make_description(self, notification, translate, show_age=True): |
|
255 | 255 | """ |
|
256 | 256 | Creates a human readable description based on properties |
|
257 | 257 | of notification object |
|
258 | 258 | """ |
|
259 | 259 | _ = translate |
|
260 | 260 | _map = { |
|
261 | 261 | notification.TYPE_CHANGESET_COMMENT: [ |
|
262 | 262 | _('%(user)s commented on commit %(date_or_age)s'), |
|
263 | 263 | _('%(user)s commented on commit at %(date_or_age)s'), |
|
264 | 264 | ], |
|
265 | 265 | notification.TYPE_MESSAGE: [ |
|
266 | 266 | _('%(user)s sent message %(date_or_age)s'), |
|
267 | 267 | _('%(user)s sent message at %(date_or_age)s'), |
|
268 | 268 | ], |
|
269 | 269 | notification.TYPE_MENTION: [ |
|
270 | 270 | _('%(user)s mentioned you %(date_or_age)s'), |
|
271 | 271 | _('%(user)s mentioned you at %(date_or_age)s'), |
|
272 | 272 | ], |
|
273 | 273 | notification.TYPE_REGISTRATION: [ |
|
274 | 274 | _('%(user)s registered in RhodeCode %(date_or_age)s'), |
|
275 | 275 | _('%(user)s registered in RhodeCode at %(date_or_age)s'), |
|
276 | 276 | ], |
|
277 | 277 | notification.TYPE_PULL_REQUEST: [ |
|
278 | 278 | _('%(user)s opened new pull request %(date_or_age)s'), |
|
279 | 279 | _('%(user)s opened new pull request at %(date_or_age)s'), |
|
280 | 280 | ], |
|
281 | 281 | notification.TYPE_PULL_REQUEST_UPDATE: [ |
|
282 | 282 | _('%(user)s updated pull request %(date_or_age)s'), |
|
283 | 283 | _('%(user)s updated pull request at %(date_or_age)s'), |
|
284 | 284 | ], |
|
285 | 285 | notification.TYPE_PULL_REQUEST_COMMENT: [ |
|
286 | 286 | _('%(user)s commented on pull request %(date_or_age)s'), |
|
287 | 287 | _('%(user)s commented on pull request at %(date_or_age)s'), |
|
288 | 288 | ], |
|
289 | 289 | } |
|
290 | 290 | |
|
291 | 291 | templates = _map[notification.type_] |
|
292 | 292 | |
|
293 | 293 | if show_age: |
|
294 | 294 | template = templates[0] |
|
295 | 295 | date_or_age = h.age(notification.created_on) |
|
296 | 296 | if translate: |
|
297 | 297 | date_or_age = translate(date_or_age) |
|
298 | 298 | |
|
299 | 299 | if isinstance(date_or_age, TranslationString): |
|
300 | 300 | date_or_age = date_or_age.interpolate() |
|
301 | 301 | |
|
302 | 302 | else: |
|
303 | 303 | template = templates[1] |
|
304 | 304 | date_or_age = h.format_date(notification.created_on) |
|
305 | 305 | |
|
306 | 306 | return template % { |
|
307 | 307 | 'user': notification.created_by_user.username, |
|
308 | 308 | 'date_or_age': date_or_age, |
|
309 | 309 | } |
|
310 | 310 | |
|
311 | 311 | |
|
312 | 312 | # Templates for Titles, that could be overwritten by rcextensions |
|
313 | 313 | # Title of email for pull-request update |
|
314 | 314 | EMAIL_PR_UPDATE_SUBJECT_TEMPLATE = '' |
|
315 | 315 | # Title of email for request for pull request review |
|
316 | 316 | EMAIL_PR_REVIEW_SUBJECT_TEMPLATE = '' |
|
317 | 317 | |
|
318 | 318 | # Title of email for general comment on pull request |
|
319 | 319 | EMAIL_PR_COMMENT_SUBJECT_TEMPLATE = '' |
|
320 | 320 | # Title of email for general comment which includes status change on pull request |
|
321 | 321 | EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = '' |
|
322 | 322 | # Title of email for inline comment on a file in pull request |
|
323 | 323 | EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE = '' |
|
324 | 324 | |
|
325 | 325 | # Title of email for general comment on commit |
|
326 | 326 | EMAIL_COMMENT_SUBJECT_TEMPLATE = '' |
|
327 | 327 | # Title of email for general comment which includes status change on commit |
|
328 | 328 | EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = '' |
|
329 | 329 | # Title of email for inline comment on a file in commit |
|
330 | 330 | EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE = '' |
|
331 | 331 | |
|
332 | 332 | import cssutils |
|
333 | 333 | # hijack css utils logger and replace with ours |
|
334 | 334 | log = logging.getLogger('rhodecode.cssutils.premailer') |
|
335 | 335 | cssutils.log.setLog(log) |
|
336 | 336 | |
|
337 | 337 | |
|
338 | 338 | class EmailNotificationModel(BaseModel): |
|
339 | 339 | TYPE_COMMIT_COMMENT = Notification.TYPE_CHANGESET_COMMENT |
|
340 | 340 | TYPE_REGISTRATION = Notification.TYPE_REGISTRATION |
|
341 | 341 | TYPE_PULL_REQUEST = Notification.TYPE_PULL_REQUEST |
|
342 | 342 | TYPE_PULL_REQUEST_COMMENT = Notification.TYPE_PULL_REQUEST_COMMENT |
|
343 | 343 | TYPE_PULL_REQUEST_UPDATE = Notification.TYPE_PULL_REQUEST_UPDATE |
|
344 | 344 | TYPE_MAIN = Notification.TYPE_MESSAGE |
|
345 | 345 | |
|
346 | 346 | TYPE_PASSWORD_RESET = 'password_reset' |
|
347 | 347 | TYPE_PASSWORD_RESET_CONFIRMATION = 'password_reset_confirmation' |
|
348 | 348 | TYPE_EMAIL_TEST = 'email_test' |
|
349 | 349 | TYPE_EMAIL_EXCEPTION = 'exception' |
|
350 | 350 | TYPE_UPDATE_AVAILABLE = 'update_available' |
|
351 | 351 | TYPE_TEST = 'test' |
|
352 | 352 | |
|
353 | 353 | email_types = { |
|
354 | 354 | TYPE_MAIN: |
|
355 | 355 | 'rhodecode:templates/email_templates/main.mako', |
|
356 | 356 | TYPE_TEST: |
|
357 | 357 | 'rhodecode:templates/email_templates/test.mako', |
|
358 | 358 | TYPE_EMAIL_EXCEPTION: |
|
359 | 359 | 'rhodecode:templates/email_templates/exception_tracker.mako', |
|
360 | 360 | TYPE_UPDATE_AVAILABLE: |
|
361 | 361 | 'rhodecode:templates/email_templates/update_available.mako', |
|
362 | 362 | TYPE_EMAIL_TEST: |
|
363 | 363 | 'rhodecode:templates/email_templates/email_test.mako', |
|
364 | 364 | TYPE_REGISTRATION: |
|
365 | 365 | 'rhodecode:templates/email_templates/user_registration.mako', |
|
366 | 366 | TYPE_PASSWORD_RESET: |
|
367 | 367 | 'rhodecode:templates/email_templates/password_reset.mako', |
|
368 | 368 | TYPE_PASSWORD_RESET_CONFIRMATION: |
|
369 | 369 | 'rhodecode:templates/email_templates/password_reset_confirmation.mako', |
|
370 | 370 | TYPE_COMMIT_COMMENT: |
|
371 | 371 | 'rhodecode:templates/email_templates/commit_comment.mako', |
|
372 | 372 | TYPE_PULL_REQUEST: |
|
373 | 373 | 'rhodecode:templates/email_templates/pull_request_review.mako', |
|
374 | 374 | TYPE_PULL_REQUEST_COMMENT: |
|
375 | 375 | 'rhodecode:templates/email_templates/pull_request_comment.mako', |
|
376 | 376 | TYPE_PULL_REQUEST_UPDATE: |
|
377 | 377 | 'rhodecode:templates/email_templates/pull_request_update.mako', |
|
378 | 378 | } |
|
379 | 379 | |
|
380 | 380 | premailer_instance = premailer.Premailer() |
|
381 | 381 | |
|
382 | 382 | def __init__(self): |
|
383 | 383 | """ |
|
384 | 384 | Example usage:: |
|
385 | 385 | |
|
386 | 386 | (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email( |
|
387 | 387 | EmailNotificationModel.TYPE_TEST, **email_kwargs) |
|
388 | 388 | |
|
389 | 389 | """ |
|
390 | 390 | super(EmailNotificationModel, self).__init__() |
|
391 | 391 | self.rhodecode_instance_name = rhodecode.CONFIG.get('rhodecode_title') |
|
392 | 392 | |
|
393 | 393 | def _update_kwargs_for_render(self, kwargs): |
|
394 | 394 | """ |
|
395 | 395 | Inject params required for Mako rendering |
|
396 | 396 | |
|
397 | 397 | :param kwargs: |
|
398 | 398 | """ |
|
399 | 399 | |
|
400 | 400 | kwargs['rhodecode_instance_name'] = self.rhodecode_instance_name |
|
401 | 401 | kwargs['rhodecode_version'] = rhodecode.__version__ |
|
402 | 402 | instance_url = h.route_url('home') |
|
403 | 403 | _kwargs = { |
|
404 | 404 | 'instance_url': instance_url, |
|
405 | 405 | 'whitespace_filter': self.whitespace_filter, |
|
406 | 406 | 'email_pr_update_subject_template': EMAIL_PR_UPDATE_SUBJECT_TEMPLATE, |
|
407 | 407 | 'email_pr_review_subject_template': EMAIL_PR_REVIEW_SUBJECT_TEMPLATE, |
|
408 | 408 | 'email_pr_comment_subject_template': EMAIL_PR_COMMENT_SUBJECT_TEMPLATE, |
|
409 | 409 | 'email_pr_comment_status_change_subject_template': EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE, |
|
410 | 410 | 'email_pr_comment_file_subject_template': EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE, |
|
411 | 411 | 'email_comment_subject_template': EMAIL_COMMENT_SUBJECT_TEMPLATE, |
|
412 | 412 | 'email_comment_status_change_subject_template': EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE, |
|
413 | 413 | 'email_comment_file_subject_template': EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE, |
|
414 | 414 | } |
|
415 | 415 | _kwargs.update(kwargs) |
|
416 | 416 | return _kwargs |
|
417 | 417 | |
|
418 | 418 | def whitespace_filter(self, text): |
|
419 | 419 | return text.replace('\n', '').replace('\t', '') |
|
420 | 420 | |
|
421 | 421 | def get_renderer(self, type_, request): |
|
422 | 422 | template_name = self.email_types[type_] |
|
423 | 423 | return request.get_partial_renderer(template_name) |
|
424 | 424 | |
|
425 | 425 | def render_email(self, type_, **kwargs): |
|
426 | 426 | """ |
|
427 | 427 | renders template for email, and returns a tuple of |
|
428 | 428 | (subject, email_html_body, email_plaintext_body) |
|
429 | 429 | """ |
|
430 | 430 | request = get_current_request() |
|
431 | 431 | |
|
432 | 432 | # translator and helpers inject |
|
433 | 433 | _kwargs = self._update_kwargs_for_render(kwargs) |
|
434 | 434 | email_template = self.get_renderer(type_, request=request) |
|
435 | 435 | subject = email_template.render('subject', **_kwargs) |
|
436 | 436 | |
|
437 | 437 | try: |
|
438 | 438 | body_plaintext = email_template.render('body_plaintext', **_kwargs) |
|
439 | 439 | except AttributeError: |
|
440 | 440 | # it's not defined in the template, so we can skip it |
|
441 | 441 | body_plaintext = '' |
|
442 | 442 | |
|
443 | 443 | # render WHOLE template |
|
444 | 444 | body = email_template.render(None, **_kwargs) |
|
445 | 445 | |
|
446 | 446 | try: |
|
447 | 447 | # Inline CSS styles and conversion |
|
448 | 448 | body = self.premailer_instance.transform(body) |
|
449 | 449 | except Exception: |
|
450 | 450 | log.exception('Failed to parse body with premailer') |
|
451 | 451 | pass |
|
452 | 452 | |
|
453 | 453 | return subject, body, body_plaintext |
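
A usage sketch for the two models above; the creator, recipients, subject and body are invented placeholders, and actually sending assumes a working celery/email setup::

    from rhodecode.model.meta import Session
    from rhodecode.model.notification import NotificationModel, EmailNotificationModel  # assumed module path

    # create an in-app message for two users and queue the matching emails
    notification = NotificationModel().create(
        created_by='admin', notification_subject='Maintenance window',
        notification_body='The instance restarts at 22:00 UTC.',
        recipients=['dev1', 'dev2'])
    Session().commit()

    # render only the email parts, without creating a notification
    subject, body, body_plaintext = EmailNotificationModel().render_email(
        EmailNotificationModel.TYPE_MAIN, body='The instance restarts at 22:00 UTC.')
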
@@ -1,600 +1,599 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | 1 |
|
|
3 | 2 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 3 | # |
|
5 | 4 | # This program is free software: you can redistribute it and/or modify |
|
6 | 5 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 6 | # (only), as published by the Free Software Foundation. |
|
8 | 7 | # |
|
9 | 8 | # This program is distributed in the hope that it will be useful, |
|
10 | 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 11 | # GNU General Public License for more details. |
|
13 | 12 | # |
|
14 | 13 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 15 | # |
|
17 | 16 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 17 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 19 | |
|
21 | 20 | """ |
|
22 | 21 | permissions model for RhodeCode |
|
23 | 22 | """ |
|
24 | 23 | import collections |
|
25 | 24 | import logging |
|
26 | 25 | import traceback |
|
27 | 26 | |
|
28 | 27 | from sqlalchemy.exc import DatabaseError |
|
29 | 28 | |
|
30 | 29 | from rhodecode import events |
|
31 | 30 | from rhodecode.model import BaseModel |
|
32 | 31 | from rhodecode.model.db import ( |
|
33 | 32 | User, Permission, UserToPerm, UserRepoToPerm, UserRepoGroupToPerm, |
|
34 | 33 | UserUserGroupToPerm, UserGroup, UserGroupToPerm, UserToRepoBranchPermission) |
|
35 | 34 | from rhodecode.lib.utils2 import str2bool, safe_int |
|
36 | 35 | |
|
37 | 36 | log = logging.getLogger(__name__) |
|
38 | 37 | |
|
39 | 38 | |
|
40 | 39 | class PermissionModel(BaseModel): |
|
41 | 40 | """ |
|
42 | 41 | Permissions model for RhodeCode |
|
43 | 42 | """ |
|
44 | 43 | FORKING_DISABLED = 'hg.fork.none' |
|
45 | 44 | FORKING_ENABLED = 'hg.fork.repository' |
|
46 | 45 | |
|
47 | 46 | cls = Permission |
|
48 | 47 | global_perms = { |
|
49 | 48 | 'default_repo_create': None, |
|
50 | 49 | # special case for create repos on write access to group |
|
51 | 50 | 'default_repo_create_on_write': None, |
|
52 | 51 | 'default_repo_group_create': None, |
|
53 | 52 | 'default_user_group_create': None, |
|
54 | 53 | 'default_fork_create': None, |
|
55 | 54 | 'default_inherit_default_permissions': None, |
|
56 | 55 | 'default_register': None, |
|
57 | 56 | 'default_password_reset': None, |
|
58 | 57 | 'default_extern_activate': None, |
|
59 | 58 | |
|
60 | 59 | # object permissions below |
|
61 | 60 | 'default_repo_perm': None, |
|
62 | 61 | 'default_group_perm': None, |
|
63 | 62 | 'default_user_group_perm': None, |
|
64 | 63 | |
|
65 | 64 | # branch |
|
66 | 65 | 'default_branch_perm': None, |
|
67 | 66 | } |
|
68 | 67 | |
|
69 | 68 | def set_global_permission_choices(self, c_obj, gettext_translator): |
|
70 | 69 | _ = gettext_translator |
|
71 | 70 | |
|
72 | 71 | c_obj.repo_perms_choices = [ |
|
73 | 72 | ('repository.none', _('None'),), |
|
74 | 73 | ('repository.read', _('Read'),), |
|
75 | 74 | ('repository.write', _('Write'),), |
|
76 | 75 | ('repository.admin', _('Admin'),)] |
|
77 | 76 | |
|
78 | 77 | c_obj.group_perms_choices = [ |
|
79 | 78 | ('group.none', _('None'),), |
|
80 | 79 | ('group.read', _('Read'),), |
|
81 | 80 | ('group.write', _('Write'),), |
|
82 | 81 | ('group.admin', _('Admin'),)] |
|
83 | 82 | |
|
84 | 83 | c_obj.user_group_perms_choices = [ |
|
85 | 84 | ('usergroup.none', _('None'),), |
|
86 | 85 | ('usergroup.read', _('Read'),), |
|
87 | 86 | ('usergroup.write', _('Write'),), |
|
88 | 87 | ('usergroup.admin', _('Admin'),)] |
|
89 | 88 | |
|
90 | 89 | c_obj.branch_perms_choices = [ |
|
91 | 90 | ('branch.none', _('Protected/No Access'),), |
|
92 | 91 | ('branch.merge', _('Web merge'),), |
|
93 | 92 | ('branch.push', _('Push'),), |
|
94 | 93 | ('branch.push_force', _('Force Push'),)] |
|
95 | 94 | |
|
96 | 95 | c_obj.register_choices = [ |
|
97 | 96 | ('hg.register.none', _('Disabled')), |
|
98 | 97 | ('hg.register.manual_activate', _('Allowed with manual account activation')), |
|
99 | 98 | ('hg.register.auto_activate', _('Allowed with automatic account activation'))] |
|
100 | 99 | |
|
101 | 100 | c_obj.password_reset_choices = [ |
|
102 | 101 | ('hg.password_reset.enabled', _('Allow password recovery')), |
|
103 | 102 | ('hg.password_reset.hidden', _('Hide password recovery link')), |
|
104 | 103 | ('hg.password_reset.disabled', _('Disable password recovery'))] |
|
105 | 104 | |
|
106 | 105 | c_obj.extern_activate_choices = [ |
|
107 | 106 | ('hg.extern_activate.manual', _('Manual activation of external account')), |
|
108 | 107 | ('hg.extern_activate.auto', _('Automatic activation of external account'))] |
|
109 | 108 | |
|
110 | 109 | c_obj.repo_create_choices = [ |
|
111 | 110 | ('hg.create.none', _('Disabled')), |
|
112 | 111 | ('hg.create.repository', _('Enabled'))] |
|
113 | 112 | |
|
114 | 113 | c_obj.repo_create_on_write_choices = [ |
|
115 | 114 | ('hg.create.write_on_repogroup.false', _('Disabled')), |
|
116 | 115 | ('hg.create.write_on_repogroup.true', _('Enabled'))] |
|
117 | 116 | |
|
118 | 117 | c_obj.user_group_create_choices = [ |
|
119 | 118 | ('hg.usergroup.create.false', _('Disabled')), |
|
120 | 119 | ('hg.usergroup.create.true', _('Enabled'))] |
|
121 | 120 | |
|
122 | 121 | c_obj.repo_group_create_choices = [ |
|
123 | 122 | ('hg.repogroup.create.false', _('Disabled')), |
|
124 | 123 | ('hg.repogroup.create.true', _('Enabled'))] |
|
125 | 124 | |
|
126 | 125 | c_obj.fork_choices = [ |
|
127 | 126 | (self.FORKING_DISABLED, _('Disabled')), |
|
128 | 127 | (self.FORKING_ENABLED, _('Enabled'))] |
|
129 | 128 | |
|
130 | 129 | c_obj.inherit_default_permission_choices = [ |
|
131 | 130 | ('hg.inherit_default_perms.false', _('Disabled')), |
|
132 | 131 | ('hg.inherit_default_perms.true', _('Enabled'))] |
|
133 | 132 | |
|
134 | 133 | def get_default_perms(self, object_perms, suffix): |
|
135 | 134 | defaults = {} |
|
136 | 135 | for perm in object_perms: |
|
137 | 136 | # perms |
|
138 | 137 | if perm.permission.permission_name.startswith('repository.'): |
|
139 | 138 | defaults['default_repo_perm' + suffix] = perm.permission.permission_name |
|
140 | 139 | |
|
141 | 140 | if perm.permission.permission_name.startswith('group.'): |
|
142 | 141 | defaults['default_group_perm' + suffix] = perm.permission.permission_name |
|
143 | 142 | |
|
144 | 143 | if perm.permission.permission_name.startswith('usergroup.'): |
|
145 | 144 | defaults['default_user_group_perm' + suffix] = perm.permission.permission_name |
|
146 | 145 | |
|
147 | 146 | # branch |
|
148 | 147 | if perm.permission.permission_name.startswith('branch.'): |
|
149 | 148 | defaults['default_branch_perm' + suffix] = perm.permission.permission_name |
|
150 | 149 | |
|
151 | 150 | # creation of objects |
|
152 | 151 | if perm.permission.permission_name.startswith('hg.create.write_on_repogroup'): |
|
153 | 152 | defaults['default_repo_create_on_write' + suffix] = perm.permission.permission_name |
|
154 | 153 | |
|
155 | 154 | elif perm.permission.permission_name.startswith('hg.create.'): |
|
156 | 155 | defaults['default_repo_create' + suffix] = perm.permission.permission_name |
|
157 | 156 | |
|
158 | 157 | if perm.permission.permission_name.startswith('hg.fork.'): |
|
159 | 158 | defaults['default_fork_create' + suffix] = perm.permission.permission_name |
|
160 | 159 | |
|
161 | 160 | if perm.permission.permission_name.startswith('hg.inherit_default_perms.'): |
|
162 | 161 | defaults['default_inherit_default_permissions' + suffix] = perm.permission.permission_name |
|
163 | 162 | |
|
164 | 163 | if perm.permission.permission_name.startswith('hg.repogroup.'): |
|
165 | 164 | defaults['default_repo_group_create' + suffix] = perm.permission.permission_name |
|
166 | 165 | |
|
167 | 166 | if perm.permission.permission_name.startswith('hg.usergroup.'): |
|
168 | 167 | defaults['default_user_group_create' + suffix] = perm.permission.permission_name |
|
169 | 168 | |
|
170 | 169 | # registration and external account activation |
|
171 | 170 | if perm.permission.permission_name.startswith('hg.register.'): |
|
172 | 171 | defaults['default_register' + suffix] = perm.permission.permission_name |
|
173 | 172 | |
|
174 | 173 | if perm.permission.permission_name.startswith('hg.password_reset.'): |
|
175 | 174 | defaults['default_password_reset' + suffix] = perm.permission.permission_name |
|
176 | 175 | |
|
177 | 176 | if perm.permission.permission_name.startswith('hg.extern_activate.'): |
|
178 | 177 | defaults['default_extern_activate' + suffix] = perm.permission.permission_name |
|
179 | 178 | |
|
180 | 179 | return defaults |
|
181 | 180 | |
|
182 | 181 | def _make_new_user_perm(self, user, perm_name): |
|
183 | 182 | log.debug('Creating new user permission:%s', perm_name) |
|
184 | 183 | new = UserToPerm() |
|
185 | 184 | new.user = user |
|
186 | 185 | new.permission = Permission.get_by_key(perm_name) |
|
187 | 186 | return new |
|
188 | 187 | |
|
189 | 188 | def _make_new_user_group_perm(self, user_group, perm_name): |
|
190 | 189 | log.debug('Creating new user group permission:%s', perm_name) |
|
191 | 190 | new = UserGroupToPerm() |
|
192 | 191 | new.users_group = user_group |
|
193 | 192 | new.permission = Permission.get_by_key(perm_name) |
|
194 | 193 | return new |
|
195 | 194 | |
|
196 | 195 | def _keep_perm(self, perm_name, keep_fields): |
|
197 | 196 | def get_pat(field_name): |
|
198 | 197 | return { |
|
199 | 198 | # global perms |
|
200 | 199 | 'default_repo_create': 'hg.create.', |
|
201 | 200 | # special case for create repos on write access to group |
|
202 | 201 | 'default_repo_create_on_write': 'hg.create.write_on_repogroup.', |
|
203 | 202 | 'default_repo_group_create': 'hg.repogroup.create.', |
|
204 | 203 | 'default_user_group_create': 'hg.usergroup.create.', |
|
205 | 204 | 'default_fork_create': 'hg.fork.', |
|
206 | 205 | 'default_inherit_default_permissions': 'hg.inherit_default_perms.', |
|
207 | 206 | |
|
208 | 207 | # application perms |
|
209 | 208 | 'default_register': 'hg.register.', |
|
210 | 209 | 'default_password_reset': 'hg.password_reset.', |
|
211 | 210 | 'default_extern_activate': 'hg.extern_activate.', |
|
212 | 211 | |
|
213 | 212 | # object permissions below |
|
214 | 213 | 'default_repo_perm': 'repository.', |
|
215 | 214 | 'default_group_perm': 'group.', |
|
216 | 215 | 'default_user_group_perm': 'usergroup.', |
|
217 | 216 | # branch |
|
218 | 217 | 'default_branch_perm': 'branch.', |
|
219 | 218 | |
|
220 | 219 | }[field_name] |
|
221 | 220 | for field in keep_fields: |
|
222 | 221 | pat = get_pat(field) |
|
223 | 222 | if perm_name.startswith(pat): |
|
224 | 223 | return True |
|
225 | 224 | return False |
|
226 | 225 | |
|
227 | 226 | def _clear_object_perm(self, object_perms, preserve=None): |
|
228 | 227 | preserve = preserve or [] |
|
229 | 228 | _deleted = [] |
|
230 | 229 | for perm in object_perms: |
|
231 | 230 | perm_name = perm.permission.permission_name |
|
232 | 231 | if not self._keep_perm(perm_name, keep_fields=preserve): |
|
233 | 232 | _deleted.append(perm_name) |
|
234 | 233 | self.sa.delete(perm) |
|
235 | 234 | return _deleted |
|
236 | 235 | |
|
237 | 236 | def _clear_user_perms(self, user_id, preserve=None): |
|
238 | 237 | perms = self.sa.query(UserToPerm)\ |
|
239 | 238 | .filter(UserToPerm.user_id == user_id)\ |
|
240 | 239 | .all() |
|
241 | 240 | return self._clear_object_perm(perms, preserve=preserve) |
|
242 | 241 | |
|
243 | 242 | def _clear_user_group_perms(self, user_group_id, preserve=None): |
|
244 | 243 | perms = self.sa.query(UserGroupToPerm)\ |
|
245 | 244 | .filter(UserGroupToPerm.users_group_id == user_group_id)\ |
|
246 | 245 | .all() |
|
247 | 246 | return self._clear_object_perm(perms, preserve=preserve) |
|
248 | 247 | |
|
249 | 248 | def _set_new_object_perms(self, obj_type, to_object, form_result, preserve=None): |
|
250 | 249 | # clear current entries, to make this function idempotent |
|
251 | 250 | # this also repairs things if more permissions get defined later, or if some |
|
252 | 251 | # permissions are somehow missing |
|
253 | 252 | preserve = preserve or [] |
|
254 | 253 | _global_perms = self.global_perms.copy() |
|
255 | 254 | if obj_type not in ['user', 'user_group']: |
|
256 | 255 | raise ValueError("obj_type must be on of 'user' or 'user_group'") |
|
257 | 256 | global_perms = len(_global_perms) |
|
258 | 257 | default_user_perms = len(Permission.DEFAULT_USER_PERMISSIONS) |
|
259 | 258 | if global_perms != default_user_perms: |
|
260 | 259 | raise Exception( |
|
261 | 260 | 'Inconsistent permissions definition. Got {} vs {}'.format( |
|
262 | 261 | global_perms, default_user_perms)) |
|
263 | 262 | |
|
264 | 263 | if obj_type == 'user': |
|
265 | 264 | self._clear_user_perms(to_object.user_id, preserve) |
|
266 | 265 | if obj_type == 'user_group': |
|
267 | 266 | self._clear_user_group_perms(to_object.users_group_id, preserve) |
|
268 | 267 | |
|
269 | 268 | # now drop the preserved keys, so the form data does not overwrite them. |
|
270 | 269 | for key in preserve: |
|
271 | 270 | del _global_perms[key] |
|
272 | 271 | |
|
273 | 272 | for k in _global_perms.copy(): |
|
274 | 273 | _global_perms[k] = form_result[k] |
|
275 | 274 | |
|
276 | 275 | # at that stage we validate all are passed inside form_result |
|
277 | 276 | for _perm_key, perm_value in _global_perms.items(): |
|
278 | 277 | if perm_value is None: |
|
279 | 278 | raise ValueError('Missing permission for %s' % (_perm_key,)) |
|
280 | 279 | |
|
281 | 280 | if obj_type == 'user': |
|
282 | 281 | p = self._make_new_user_perm(to_object, perm_value) |
|
283 | 282 | self.sa.add(p) |
|
284 | 283 | if obj_type == 'user_group': |
|
285 | 284 | p = self._make_new_user_group_perm(to_object, perm_value) |
|
286 | 285 | self.sa.add(p) |
|
287 | 286 | |
|
288 | 287 | def _set_new_user_perms(self, user, form_result, preserve=None): |
|
289 | 288 | return self._set_new_object_perms( |
|
290 | 289 | 'user', user, form_result, preserve) |
|
291 | 290 | |
|
292 | 291 | def _set_new_user_group_perms(self, user_group, form_result, preserve=None): |
|
293 | 292 | return self._set_new_object_perms( |
|
294 | 293 | 'user_group', user_group, form_result, preserve) |
|
295 | 294 | |
|
296 | 295 | def set_new_user_perms(self, user, form_result): |
|
297 | 296 | # calculate what to preserve from what is given in form_result |
|
298 | 297 | preserve = set(self.global_perms.keys()).difference(set(form_result.keys())) |
|
299 | 298 | return self._set_new_user_perms(user, form_result, preserve) |
|
300 | 299 | |
|
301 | 300 | def set_new_user_group_perms(self, user_group, form_result): |
|
302 | 301 | # calculate what to preserve from what is given in form_result |
|
303 | 302 | preserve = set(self.global_perms.keys()).difference(set(form_result.keys())) |
|
304 | 303 | return self._set_new_user_group_perms(user_group, form_result, preserve) |
|
305 | 304 | |
|
306 | 305 | def create_permissions(self): |
|
307 | 306 | """ |
|
308 | 307 | Create permissions for whole system |
|
309 | 308 | """ |
|
310 | 309 | for p in Permission.PERMS: |
|
311 | 310 | if not Permission.get_by_key(p[0]): |
|
312 | 311 | new_perm = Permission() |
|
313 | 312 | new_perm.permission_name = p[0] |
|
314 | 313 | new_perm.permission_longname = p[0] # translation err with p[1] |
|
315 | 314 | self.sa.add(new_perm) |
|
316 | 315 | |
|
317 | 316 | def _create_default_object_permission(self, obj_type, obj, obj_perms, |
|
318 | 317 | force=False): |
|
319 | 318 | if obj_type not in ['user', 'user_group']: |
|
320 | 319 | raise ValueError("obj_type must be on of 'user' or 'user_group'") |
|
321 | 320 | |
|
322 | 321 | def _get_group(perm_name): |
|
323 | 322 | return '.'.join(perm_name.split('.')[:1]) |
|
324 | 323 | |
|
325 | 324 | defined_perms_groups = list(map( |
|
326 | 325 | _get_group, (x.permission.permission_name for x in obj_perms))) |
|
327 | 326 | log.debug('GOT ALREADY DEFINED:%s', obj_perms) |
|
328 | 327 | |
|
329 | 328 | if force: |
|
330 | 329 | self._clear_object_perm(obj_perms) |
|
331 | 330 | self.sa.commit() |
|
332 | 331 | defined_perms_groups = [] |
|
333 | 332 | # for every default permission that needs to be created, we check if |
|
334 | 333 | # its group is already defined; if it's not, we create a default perm |
|
335 | 334 | for perm_name in Permission.DEFAULT_USER_PERMISSIONS: |
|
336 | 335 | gr = _get_group(perm_name) |
|
337 | 336 | if gr not in defined_perms_groups: |
|
338 | 337 | log.debug('GR:%s not found, creating permission %s', |
|
339 | 338 | gr, perm_name) |
|
340 | 339 | if obj_type == 'user': |
|
341 | 340 | new_perm = self._make_new_user_perm(obj, perm_name) |
|
342 | 341 | self.sa.add(new_perm) |
|
343 | 342 | if obj_type == 'user_group': |
|
344 | 343 | new_perm = self._make_new_user_group_perm(obj, perm_name) |
|
345 | 344 | self.sa.add(new_perm) |
|
346 | 345 | |
|
347 | 346 | def create_default_user_permissions(self, user, force=False): |
|
348 | 347 | """ |
|
349 | 348 | Creates only missing default permissions for user, if force is set it |
|
350 | 349 | resets the default permissions for that user |
|
351 | 350 | |
|
352 | 351 | :param user: |
|
353 | 352 | :param force: |
|
354 | 353 | """ |
|
355 | 354 | user = self._get_user(user) |
|
356 | 355 | obj_perms = UserToPerm.query().filter(UserToPerm.user == user).all() |
|
357 | 356 | return self._create_default_object_permission( |
|
358 | 357 | 'user', user, obj_perms, force) |
|
359 | 358 | |
|
360 | 359 | def create_default_user_group_permissions(self, user_group, force=False): |
|
361 | 360 | """ |
|
362 | 361 | Creates only missing default permissions for user group, if force is |
|
363 | 362 | set it resets the default permissions for that user group |
|
364 | 363 | |
|
365 | 364 | :param user_group: |
|
366 | 365 | :param force: |
|
367 | 366 | """ |
|
368 | 367 | user_group = self._get_user_group(user_group) |
|
369 | 368 | obj_perms = UserGroupToPerm.query().filter(UserGroupToPerm.users_group == user_group).all() |
|
370 | 369 | return self._create_default_object_permission( |
|
371 | 370 | 'user_group', user_group, obj_perms, force) |
|
372 | 371 | |
|
373 | 372 | def update_application_permissions(self, form_result): |
|
374 | 373 | if 'perm_user_id' in form_result: |
|
375 | 374 | perm_user = User.get(safe_int(form_result['perm_user_id'])) |
|
376 | 375 | else: |
|
377 | 376 | # used mostly to do lookup for default user |
|
378 | 377 | perm_user = User.get_by_username(form_result['perm_user_name']) |
|
379 | 378 | |
|
380 | 379 | try: |
|
381 | 380 | # stage 1 set anonymous access |
|
382 | 381 | if perm_user.username == User.DEFAULT_USER: |
|
383 | 382 | perm_user.active = str2bool(form_result['anonymous']) |
|
384 | 383 | self.sa.add(perm_user) |
|
385 | 384 | |
|
386 | 385 | # stage 2 reset defaults and set them from form data |
|
387 | 386 | self._set_new_user_perms(perm_user, form_result, preserve=[ |
|
388 | 387 | 'default_repo_perm', |
|
389 | 388 | 'default_group_perm', |
|
390 | 389 | 'default_user_group_perm', |
|
391 | 390 | 'default_branch_perm', |
|
392 | 391 | |
|
393 | 392 | 'default_repo_group_create', |
|
394 | 393 | 'default_user_group_create', |
|
395 | 394 | 'default_repo_create_on_write', |
|
396 | 395 | 'default_repo_create', |
|
397 | 396 | 'default_fork_create', |
|
398 | 397 | 'default_inherit_default_permissions']) |
|
399 | 398 | |
|
400 | 399 | self.sa.commit() |
|
401 | 400 | except (DatabaseError,): |
|
402 | 401 | log.error(traceback.format_exc()) |
|
403 | 402 | self.sa.rollback() |
|
404 | 403 | raise |
|
405 | 404 | |
|
406 | 405 | def update_user_permissions(self, form_result): |
|
407 | 406 | if 'perm_user_id' in form_result: |
|
408 | 407 | perm_user = User.get(safe_int(form_result['perm_user_id'])) |
|
409 | 408 | else: |
|
410 | 409 | # used mostly to do lookup for default user |
|
411 | 410 | perm_user = User.get_by_username(form_result['perm_user_name']) |
|
412 | 411 | try: |
|
413 | 412 | # stage 2 reset defaults and set them from form data |
|
414 | 413 | self._set_new_user_perms(perm_user, form_result, preserve=[ |
|
415 | 414 | 'default_repo_perm', |
|
416 | 415 | 'default_group_perm', |
|
417 | 416 | 'default_user_group_perm', |
|
418 | 417 | 'default_branch_perm', |
|
419 | 418 | |
|
420 | 419 | 'default_register', |
|
421 | 420 | 'default_password_reset', |
|
422 | 421 | 'default_extern_activate']) |
|
423 | 422 | self.sa.commit() |
|
424 | 423 | except (DatabaseError,): |
|
425 | 424 | log.error(traceback.format_exc()) |
|
426 | 425 | self.sa.rollback() |
|
427 | 426 | raise |
|
428 | 427 | |
|
429 | 428 | def update_user_group_permissions(self, form_result): |
|
430 | 429 | if 'perm_user_group_id' in form_result: |
|
431 | 430 | perm_user_group = UserGroup.get(safe_int(form_result['perm_user_group_id'])) |
|
432 | 431 | else: |
|
433 | 432 | # used mostly to do lookup for default user |
|
434 | 433 | perm_user_group = UserGroup.get_by_group_name(form_result['perm_user_group_name']) |
|
435 | 434 | try: |
|
436 | 435 | # stage 2 reset defaults and set them from form data |
|
437 | 436 | self._set_new_user_group_perms(perm_user_group, form_result, preserve=[ |
|
438 | 437 | 'default_repo_perm', |
|
439 | 438 | 'default_group_perm', |
|
440 | 439 | 'default_user_group_perm', |
|
441 | 440 | 'default_branch_perm', |
|
442 | 441 | |
|
443 | 442 | 'default_register', |
|
444 | 443 | 'default_password_reset', |
|
445 | 444 | 'default_extern_activate']) |
|
446 | 445 | self.sa.commit() |
|
447 | 446 | except (DatabaseError,): |
|
448 | 447 | log.error(traceback.format_exc()) |
|
449 | 448 | self.sa.rollback() |
|
450 | 449 | raise |
|
451 | 450 | |
|
452 | 451 | def update_object_permissions(self, form_result): |
|
453 | 452 | if 'perm_user_id' in form_result: |
|
454 | 453 | perm_user = User.get(safe_int(form_result['perm_user_id'])) |
|
455 | 454 | else: |
|
456 | 455 | # used mostly to do lookup for default user |
|
457 | 456 | perm_user = User.get_by_username(form_result['perm_user_name']) |
|
458 | 457 | try: |
|
459 | 458 | |
|
460 | 459 | # stage 2 reset defaults and set them from form data |
|
461 | 460 | self._set_new_user_perms(perm_user, form_result, preserve=[ |
|
462 | 461 | 'default_repo_group_create', |
|
463 | 462 | 'default_user_group_create', |
|
464 | 463 | 'default_repo_create_on_write', |
|
465 | 464 | 'default_repo_create', |
|
466 | 465 | 'default_fork_create', |
|
467 | 466 | 'default_inherit_default_permissions', |
|
468 | 467 | 'default_branch_perm', |
|
469 | 468 | |
|
470 | 469 | 'default_register', |
|
471 | 470 | 'default_password_reset', |
|
472 | 471 | 'default_extern_activate']) |
|
473 | 472 | |
|
474 | 473 | # overwrite default repo permissions |
|
475 | 474 | if form_result['overwrite_default_repo']: |
|
476 | 475 | _def_name = form_result['default_repo_perm'].split('repository.')[-1] |
|
477 | 476 | _def = Permission.get_by_key('repository.' + _def_name) |
|
478 | 477 | for r2p in self.sa.query(UserRepoToPerm)\ |
|
479 | 478 | .filter(UserRepoToPerm.user == perm_user)\ |
|
480 | 479 | .all(): |
|
481 | 480 | # don't reset PRIVATE repositories |
|
482 | 481 | if not r2p.repository.private: |
|
483 | 482 | r2p.permission = _def |
|
484 | 483 | self.sa.add(r2p) |
|
485 | 484 | |
|
486 | 485 | # overwrite default repo group permissions |
|
487 | 486 | if form_result['overwrite_default_group']: |
|
488 | 487 | _def_name = form_result['default_group_perm'].split('group.')[-1] |
|
489 | 488 | _def = Permission.get_by_key('group.' + _def_name) |
|
490 | 489 | for g2p in self.sa.query(UserRepoGroupToPerm)\ |
|
491 | 490 | .filter(UserRepoGroupToPerm.user == perm_user)\ |
|
492 | 491 | .all(): |
|
493 | 492 | g2p.permission = _def |
|
494 | 493 | self.sa.add(g2p) |
|
495 | 494 | |
|
496 | 495 | # overwrite default user group permissions |
|
497 | 496 | if form_result['overwrite_default_user_group']: |
|
498 | 497 | _def_name = form_result['default_user_group_perm'].split('usergroup.')[-1] |
|
499 | 498 | # user groups |
|
500 | 499 | _def = Permission.get_by_key('usergroup.' + _def_name) |
|
501 | 500 | for g2p in self.sa.query(UserUserGroupToPerm)\ |
|
502 | 501 | .filter(UserUserGroupToPerm.user == perm_user)\ |
|
503 | 502 | .all(): |
|
504 | 503 | g2p.permission = _def |
|
505 | 504 | self.sa.add(g2p) |
|
506 | 505 | |
|
507 | 506 | # COMMIT |
|
508 | 507 | self.sa.commit() |
|
509 | 508 | except (DatabaseError,): |
|
510 | 509 | log.exception('Failed to set default object permissions') |
|
511 | 510 | self.sa.rollback() |
|
512 | 511 | raise |
|
513 | 512 | |
|
514 | 513 | def update_branch_permissions(self, form_result): |
|
515 | 514 | if 'perm_user_id' in form_result: |
|
516 | 515 | perm_user = User.get(safe_int(form_result['perm_user_id'])) |
|
517 | 516 | else: |
|
518 | 517 | # used mostly to do lookup for default user |
|
519 | 518 | perm_user = User.get_by_username(form_result['perm_user_name']) |
|
520 | 519 | try: |
|
521 | 520 | |
|
522 | 521 | # stage 2 reset defaults and set them from form data |
|
523 | 522 | self._set_new_user_perms(perm_user, form_result, preserve=[ |
|
524 | 523 | 'default_repo_perm', |
|
525 | 524 | 'default_group_perm', |
|
526 | 525 | 'default_user_group_perm', |
|
527 | 526 | |
|
528 | 527 | 'default_repo_group_create', |
|
529 | 528 | 'default_user_group_create', |
|
530 | 529 | 'default_repo_create_on_write', |
|
531 | 530 | 'default_repo_create', |
|
532 | 531 | 'default_fork_create', |
|
533 | 532 | 'default_inherit_default_permissions', |
|
534 | 533 | |
|
535 | 534 | 'default_register', |
|
536 | 535 | 'default_password_reset', |
|
537 | 536 | 'default_extern_activate']) |
|
538 | 537 | |
|
539 | 538 | # overwrite default branch permissions |
|
540 | 539 | if form_result['overwrite_default_branch']: |
|
541 | 540 | _def_name = \ |
|
542 | 541 | form_result['default_branch_perm'].split('branch.')[-1] |
|
543 | 542 | |
|
544 | 543 | _def = Permission.get_by_key('branch.' + _def_name) |
|
545 | 544 | |
|
546 | 545 | user_perms = UserToRepoBranchPermission.query()\ |
|
547 | 546 | .join(UserToRepoBranchPermission.user_repo_to_perm)\ |
|
548 | 547 | .filter(UserRepoToPerm.user == perm_user).all() |
|
549 | 548 | |
|
550 | 549 | for g2p in user_perms: |
|
551 | 550 | g2p.permission = _def |
|
552 | 551 | self.sa.add(g2p) |
|
553 | 552 | |
|
554 | 553 | # COMMIT |
|
555 | 554 | self.sa.commit() |
|
556 | 555 | except (DatabaseError,): |
|
557 | 556 | log.exception('Failed to set default branch permissions') |
|
558 | 557 | self.sa.rollback() |
|
559 | 558 | raise |
|
560 | 559 | |
|
561 | 560 | def get_users_with_repo_write(self, db_repo): |
|
562 | 561 | write_plus = ['repository.write', 'repository.admin'] |
|
563 | 562 | default_user_id = User.get_default_user_id() |
|
564 | 563 | user_write_permissions = collections.OrderedDict() |
|
565 | 564 | |
|
566 | 565 | # write or higher and DEFAULT user for inheritance |
|
567 | 566 | for perm in db_repo.permissions(): |
|
568 | 567 | if perm.permission in write_plus or perm.user_id == default_user_id: |
|
569 | 568 | user_write_permissions[perm.user_id] = perm |
|
570 | 569 | return user_write_permissions |
|
571 | 570 | |
|
572 | 571 | def get_user_groups_with_repo_write(self, db_repo): |
|
573 | 572 | write_plus = ['repository.write', 'repository.admin'] |
|
574 | 573 | user_group_write_permissions = collections.OrderedDict() |
|
575 | 574 | |
|
576 | 575 | # write or higher and DEFAULT user for inheritance |
|
577 | 576 | for p in db_repo.permission_user_groups(): |
|
578 | 577 | if p.permission in write_plus: |
|
579 | 578 | user_group_write_permissions[p.users_group_id] = p |
|
580 | 579 | return user_group_write_permissions |
|
581 | 580 | |
|
582 | 581 | def trigger_permission_flush(self, affected_user_ids=None): |
|
583 | 582 | affected_user_ids = affected_user_ids or User.get_all_user_ids() |
|
584 | 583 | events.trigger(events.UserPermissionsChange(affected_user_ids)) |
|
585 | 584 | |
|
586 | 585 | def flush_user_permission_caches(self, changes, affected_user_ids=None): |
|
587 | 586 | affected_user_ids = affected_user_ids or [] |
|
588 | 587 | |
|
589 | 588 | for change in changes['added'] + changes['updated'] + changes['deleted']: |
|
590 | 589 | if change['type'] == 'user': |
|
591 | 590 | affected_user_ids.append(change['id']) |
|
592 | 591 | if change['type'] == 'user_group': |
|
593 | 592 | user_group = UserGroup.get(safe_int(change['id'])) |
|
594 | 593 | if user_group: |
|
595 | 594 | group_members_ids = [x.user_id for x in user_group.members] |
|
596 | 595 | affected_user_ids.extend(group_members_ids) |
|
597 | 596 | |
|
598 | 597 | self.trigger_permission_flush(affected_user_ids) |
|
599 | 598 | |
|
600 | 599 | return affected_user_ids |
@@ -1,2380 +1,2380 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
1 | ||
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | """ |
|
23 | 23 | pull request model for RhodeCode |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | import json |
|
28 | 28 | import logging |
|
29 | 29 | import os |
|
30 | 30 | |
|
31 | 31 | import datetime |
|
32 | 32 | import urllib.request, urllib.parse, urllib.error |
|
33 | 33 | import collections |
|
34 | 34 | |
|
35 | 35 | from pyramid.threadlocal import get_current_request |
|
36 | 36 | |
|
37 | 37 | from rhodecode.lib.vcs.nodes import FileNode |
|
38 | 38 | from rhodecode.translation import lazy_ugettext |
|
39 | 39 | from rhodecode.lib import helpers as h, hooks_utils, diffs |
|
40 | 40 | from rhodecode.lib import audit_logger |
|
41 | 41 | from collections import OrderedDict |
|
42 | 42 | from rhodecode.lib.hooks_daemon import prepare_callback_daemon |
|
43 | 43 | from rhodecode.lib.markup_renderer import ( |
|
44 | 44 | DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer) |
|
45 | 45 | from rhodecode.lib.utils2 import ( |
|
46 | 46 | safe_unicode, safe_str, md5_safe, AttributeDict, safe_int, |
|
47 | 47 | get_current_rhodecode_user) |
|
48 | 48 | from rhodecode.lib.vcs.backends.base import ( |
|
49 | 49 | Reference, MergeResponse, MergeFailureReason, UpdateFailureReason, |
|
50 | 50 | TargetRefMissing, SourceRefMissing) |
|
51 | 51 | from rhodecode.lib.vcs.conf import settings as vcs_settings |
|
52 | 52 | from rhodecode.lib.vcs.exceptions import ( |
|
53 | 53 | CommitDoesNotExistError, EmptyRepositoryError) |
|
54 | 54 | from rhodecode.model import BaseModel |
|
55 | 55 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
56 | 56 | from rhodecode.model.comment import CommentsModel |
|
57 | 57 | from rhodecode.model.db import ( |
|
58 | 58 | aliased, null, lazyload, and_, or_, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus, |
|
59 | 59 | PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User) |
|
60 | 60 | from rhodecode.model.meta import Session |
|
61 | 61 | from rhodecode.model.notification import NotificationModel, \ |
|
62 | 62 | EmailNotificationModel |
|
63 | 63 | from rhodecode.model.scm import ScmModel |
|
64 | 64 | from rhodecode.model.settings import VcsSettingsModel |
|
65 | 65 | |
|
66 | 66 | |
|
67 | 67 | log = logging.getLogger(__name__) |
|
68 | 68 | |
|
69 | 69 | |
|
70 | 70 | # Data structure to hold the response data when updating commits during a pull |
|
71 | 71 | # request update. |
|
72 | 72 | class UpdateResponse(object): |
|
73 | 73 | |
|
74 | 74 | def __init__(self, executed, reason, new, old, common_ancestor_id, |
|
75 | 75 | commit_changes, source_changed, target_changed): |
|
76 | 76 | |
|
77 | 77 | self.executed = executed |
|
78 | 78 | self.reason = reason |
|
79 | 79 | self.new = new |
|
80 | 80 | self.old = old |
|
81 | 81 | self.common_ancestor_id = common_ancestor_id |
|
82 | 82 | self.changes = commit_changes |
|
83 | 83 | self.source_changed = source_changed |
|
84 | 84 | self.target_changed = target_changed |
|
85 | 85 | |
|
86 | 86 | |
|
87 | 87 | def get_diff_info( |
|
88 | 88 | source_repo, source_ref, target_repo, target_ref, get_authors=False, |
|
89 | 89 | get_commit_authors=True): |
|
90 | 90 | """ |
|
91 | 91 | Calculates detailed diff information for use in the preview of pull-request creation. |
|
92 | 92 | This is also used for the default reviewers logic |
|
93 | 93 | """ |
|
94 | 94 | |
|
95 | 95 | source_scm = source_repo.scm_instance() |
|
96 | 96 | target_scm = target_repo.scm_instance() |
|
97 | 97 | |
|
98 | 98 | ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm) |
|
99 | 99 | if not ancestor_id: |
|
100 | 100 | raise ValueError( |
|
101 | 101 | 'cannot calculate diff info without a common ancestor. ' |
|
102 | 102 | 'Make sure both repositories are related, and have a common forking commit.') |
|
103 | 103 | |
|
104 | 104 | # the case here is that we want a simple diff without incoming commits, |
|
105 | 105 | # previewing what will be merged based only on commits in the source. |
|
106 | 106 | log.debug('Using ancestor %s as source_ref instead of %s', |
|
107 | 107 | ancestor_id, source_ref) |
|
108 | 108 | |
|
109 | 109 | # source of changes now is the common ancestor |
|
110 | 110 | source_commit = source_scm.get_commit(commit_id=ancestor_id) |
|
111 | 111 | # the target commit becomes the source ref, as it is the last commit; |
|
112 | 112 | # for diff generation this logic gives the proper diff |
|
113 | 113 | target_commit = source_scm.get_commit(commit_id=source_ref) |
|
114 | 114 | |
|
115 | 115 | vcs_diff = \ |
|
116 | 116 | source_scm.get_diff(commit1=source_commit, commit2=target_commit, |
|
117 | 117 | ignore_whitespace=False, context=3) |
|
118 | 118 | |
|
119 | 119 | diff_processor = diffs.DiffProcessor( |
|
120 | 120 | vcs_diff, format='newdiff', diff_limit=None, |
|
121 | 121 | file_limit=None, show_full_diff=True) |
|
122 | 122 | |
|
123 | 123 | _parsed = diff_processor.prepare() |
|
124 | 124 | |
|
125 | 125 | all_files = [] |
|
126 | 126 | all_files_changes = [] |
|
127 | 127 | changed_lines = {} |
|
128 | 128 | stats = [0, 0] |
|
129 | 129 | for f in _parsed: |
|
130 | 130 | all_files.append(f['filename']) |
|
131 | 131 | all_files_changes.append({ |
|
132 | 132 | 'filename': f['filename'], |
|
133 | 133 | 'stats': f['stats'] |
|
134 | 134 | }) |
|
135 | 135 | stats[0] += f['stats']['added'] |
|
136 | 136 | stats[1] += f['stats']['deleted'] |
|
137 | 137 | |
|
138 | 138 | changed_lines[f['filename']] = [] |
|
139 | 139 | if len(f['chunks']) < 2: |
|
140 | 140 | continue |
|
141 | 141 | # first line is "context" information |
|
142 | 142 | for chunks in f['chunks'][1:]: |
|
143 | 143 | for chunk in chunks['lines']: |
|
144 | 144 | if chunk['action'] not in ('del', 'mod'): |
|
145 | 145 | continue |
|
146 | 146 | changed_lines[f['filename']].append(chunk['old_lineno']) |
|
147 | 147 | |
|
148 | 148 | commit_authors = [] |
|
149 | 149 | user_counts = {} |
|
150 | 150 | email_counts = {} |
|
151 | 151 | author_counts = {} |
|
152 | 152 | _commit_cache = {} |
|
153 | 153 | |
|
154 | 154 | commits = [] |
|
155 | 155 | if get_commit_authors: |
|
156 | 156 | log.debug('Obtaining commit authors from set of commits') |
|
157 | 157 | _compare_data = target_scm.compare( |
|
158 | 158 | target_ref, source_ref, source_scm, merge=True, |
|
159 | 159 | pre_load=["author", "date", "message"] |
|
160 | 160 | ) |
|
161 | 161 | |
|
162 | 162 | for commit in _compare_data: |
|
163 | 163 | # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned |
|
164 | 164 | # by this function, which is later passed through JSON serialization |
|
165 | 165 | serialized_commit = dict( |
|
166 | 166 | author=commit.author, |
|
167 | 167 | date=commit.date, |
|
168 | 168 | message=commit.message, |
|
169 | 169 | commit_id=commit.raw_id, |
|
170 | 170 | raw_id=commit.raw_id |
|
171 | 171 | ) |
|
172 | 172 | commits.append(serialized_commit) |
|
173 | 173 | user = User.get_from_cs_author(serialized_commit['author']) |
|
174 | 174 | if user and user not in commit_authors: |
|
175 | 175 | commit_authors.append(user) |
|
176 | 176 | |
|
177 | 177 | # lines |
|
178 | 178 | if get_authors: |
|
179 | 179 | log.debug('Calculating authors of changed files') |
|
180 | 180 | target_commit = source_repo.get_commit(ancestor_id) |
|
181 | 181 | |
|
182 | 182 | for fname, lines in changed_lines.items(): |
|
183 | 183 | |
|
184 | 184 | try: |
|
185 | 185 | node = target_commit.get_node(fname, pre_load=["is_binary"]) |
|
186 | 186 | except Exception: |
|
187 | 187 | log.exception("Failed to load node with path %s", fname) |
|
188 | 188 | continue |
|
189 | 189 | |
|
190 | 190 | if not isinstance(node, FileNode): |
|
191 | 191 | continue |
|
192 | 192 | |
|
193 | 193 | # NOTE(marcink): for binary node we don't do annotation, just use last author |
|
194 | 194 | if node.is_binary: |
|
195 | 195 | author = node.last_commit.author |
|
196 | 196 | email = node.last_commit.author_email |
|
197 | 197 | |
|
198 | 198 | user = User.get_from_cs_author(author) |
|
199 | 199 | if user: |
|
200 | 200 | user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1 |
|
201 | 201 | author_counts[author] = author_counts.get(author, 0) + 1 |
|
202 | 202 | email_counts[email] = email_counts.get(email, 0) + 1 |
|
203 | 203 | |
|
204 | 204 | continue |
|
205 | 205 | |
|
206 | 206 | for annotation in node.annotate: |
|
207 | 207 | line_no, commit_id, get_commit_func, line_text = annotation |
|
208 | 208 | if line_no in lines: |
|
209 | 209 | if commit_id not in _commit_cache: |
|
210 | 210 | _commit_cache[commit_id] = get_commit_func() |
|
211 | 211 | commit = _commit_cache[commit_id] |
|
212 | 212 | author = commit.author |
|
213 | 213 | email = commit.author_email |
|
214 | 214 | user = User.get_from_cs_author(author) |
|
215 | 215 | if user: |
|
216 | 216 | user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1 |
|
217 | 217 | author_counts[author] = author_counts.get(author, 0) + 1 |
|
218 | 218 | email_counts[email] = email_counts.get(email, 0) + 1 |
|
219 | 219 | |
|
220 | 220 | log.debug('Default reviewers processing finished') |
|
221 | 221 | |
|
222 | 222 | return { |
|
223 | 223 | 'commits': commits, |
|
224 | 224 | 'files': all_files_changes, |
|
225 | 225 | 'stats': stats, |
|
226 | 226 | 'ancestor': ancestor_id, |
|
227 | 227 | # original authors of modified files |
|
228 | 228 | 'original_authors': { |
|
229 | 229 | 'users': user_counts, |
|
230 | 230 | 'authors': author_counts, |
|
231 | 231 | 'emails': email_counts, |
|
232 | 232 | }, |
|
233 | 233 | 'commit_authors': commit_authors |
|
234 | 234 | } |
|
235 | 235 | |
|
236 | 236 | |
|
237 | 237 | class PullRequestModel(BaseModel): |
|
238 | 238 | |
|
239 | 239 | cls = PullRequest |
|
240 | 240 | |
|
241 | 241 | DIFF_CONTEXT = diffs.DEFAULT_CONTEXT |
|
242 | 242 | |
|
243 | 243 | UPDATE_STATUS_MESSAGES = { |
|
244 | 244 | UpdateFailureReason.NONE: lazy_ugettext( |
|
245 | 245 | 'Pull request update successful.'), |
|
246 | 246 | UpdateFailureReason.UNKNOWN: lazy_ugettext( |
|
247 | 247 | 'Pull request update failed because of an unknown error.'), |
|
248 | 248 | UpdateFailureReason.NO_CHANGE: lazy_ugettext( |
|
249 | 249 | 'No update needed because the source and target have not changed.'), |
|
250 | 250 | UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext( |
|
251 | 251 | 'Pull request cannot be updated because the reference type is ' |
|
252 | 252 | 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'), |
|
253 | 253 | UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext( |
|
254 | 254 | 'This pull request cannot be updated because the target ' |
|
255 | 255 | 'reference is missing.'), |
|
256 | 256 | UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext( |
|
257 | 257 | 'This pull request cannot be updated because the source ' |
|
258 | 258 | 'reference is missing.'), |
|
259 | 259 | } |
|
260 | 260 | REF_TYPES = ['bookmark', 'book', 'tag', 'branch'] |
|
261 | 261 | UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch'] |
|
262 | 262 | |
|
263 | 263 | def __get_pull_request(self, pull_request): |
|
264 | 264 | return self._get_instance(( |
|
265 | 265 | PullRequest, PullRequestVersion), pull_request) |
|
266 | 266 | |
|
267 | 267 | def _check_perms(self, perms, pull_request, user, api=False): |
|
268 | 268 | if not api: |
|
269 | 269 | return h.HasRepoPermissionAny(*perms)( |
|
270 | 270 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
271 | 271 | else: |
|
272 | 272 | return h.HasRepoPermissionAnyApi(*perms)( |
|
273 | 273 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
274 | 274 | |
|
275 | 275 | def check_user_read(self, pull_request, user, api=False): |
|
276 | 276 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
277 | 277 | return self._check_perms(_perms, pull_request, user, api) |
|
278 | 278 | |
|
279 | 279 | def check_user_merge(self, pull_request, user, api=False): |
|
280 | 280 | _perms = ('repository.admin', 'repository.write', 'hg.admin',) |
|
281 | 281 | return self._check_perms(_perms, pull_request, user, api) |
|
282 | 282 | |
|
283 | 283 | def check_user_update(self, pull_request, user, api=False): |
|
284 | 284 | owner = user.user_id == pull_request.user_id |
|
285 | 285 | return self.check_user_merge(pull_request, user, api) or owner |
|
286 | 286 | |
|
287 | 287 | def check_user_delete(self, pull_request, user): |
|
288 | 288 | owner = user.user_id == pull_request.user_id |
|
289 | 289 | _perms = ('repository.admin',) |
|
290 | 290 | return self._check_perms(_perms, pull_request, user) or owner |
|
291 | 291 | |
|
292 | 292 | def is_user_reviewer(self, pull_request, user): |
|
293 | 293 | return user.user_id in [ |
|
294 | 294 | x.user_id for x in |
|
295 | 295 | pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER) |
|
296 | 296 | if x.user |
|
297 | 297 | ] |
|
298 | 298 | |
|
299 | 299 | def check_user_change_status(self, pull_request, user, api=False): |
|
300 | 300 | return self.check_user_update(pull_request, user, api) \ |
|
301 | 301 | or self.is_user_reviewer(pull_request, user) |
|
302 | 302 | |
|
303 | 303 | def check_user_comment(self, pull_request, user): |
|
304 | 304 | owner = user.user_id == pull_request.user_id |
|
305 | 305 | return self.check_user_read(pull_request, user) or owner |
|
306 | 306 | |
|
307 | 307 | def get(self, pull_request): |
|
308 | 308 | return self.__get_pull_request(pull_request) |
|
309 | 309 | |
|
310 | 310 | def _prepare_get_all_query(self, repo_name, search_q=None, source=False, |
|
311 | 311 | statuses=None, opened_by=None, order_by=None, |
|
312 | 312 | order_dir='desc', only_created=False): |
|
313 | 313 | repo = None |
|
314 | 314 | if repo_name: |
|
315 | 315 | repo = self._get_repo(repo_name) |
|
316 | 316 | |
|
317 | 317 | q = PullRequest.query() |
|
318 | 318 | |
|
319 | 319 | if search_q: |
|
320 | 320 | like_expression = u'%{}%'.format(safe_unicode(search_q)) |
|
321 | 321 | q = q.join(User, User.user_id == PullRequest.user_id) |
|
322 | 322 | q = q.filter(or_( |
|
323 | 323 | cast(PullRequest.pull_request_id, String).ilike(like_expression), |
|
324 | 324 | User.username.ilike(like_expression), |
|
325 | 325 | PullRequest.title.ilike(like_expression), |
|
326 | 326 | PullRequest.description.ilike(like_expression), |
|
327 | 327 | )) |
|
328 | 328 | |
|
329 | 329 | # source or target |
|
330 | 330 | if repo and source: |
|
331 | 331 | q = q.filter(PullRequest.source_repo == repo) |
|
332 | 332 | elif repo: |
|
333 | 333 | q = q.filter(PullRequest.target_repo == repo) |
|
334 | 334 | |
|
335 | 335 | # closed,opened |
|
336 | 336 | if statuses: |
|
337 | 337 | q = q.filter(PullRequest.status.in_(statuses)) |
|
338 | 338 | |
|
339 | 339 | # opened by filter |
|
340 | 340 | if opened_by: |
|
341 | 341 | q = q.filter(PullRequest.user_id.in_(opened_by)) |
|
342 | 342 | |
|
343 | 343 | # only get those that are in "created" state |
|
344 | 344 | if only_created: |
|
345 | 345 | q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED) |
|
346 | 346 | |
|
347 | 347 | order_map = { |
|
348 | 348 | 'name_raw': PullRequest.pull_request_id, |
|
349 | 349 | 'id': PullRequest.pull_request_id, |
|
350 | 350 | 'title': PullRequest.title, |
|
351 | 351 | 'updated_on_raw': PullRequest.updated_on, |
|
352 | 352 | 'target_repo': PullRequest.target_repo_id |
|
353 | 353 | } |
|
354 | 354 | if order_by and order_by in order_map: |
|
355 | 355 | if order_dir == 'asc': |
|
356 | 356 | q = q.order_by(order_map[order_by].asc()) |
|
357 | 357 | else: |
|
358 | 358 | q = q.order_by(order_map[order_by].desc()) |
|
359 | 359 | |
|
360 | 360 | return q |
|
361 | 361 | |
|
362 | 362 | def count_all(self, repo_name, search_q=None, source=False, statuses=None, |
|
363 | 363 | opened_by=None): |
|
364 | 364 | """ |
|
365 | 365 | Count the number of pull requests for a specific repository. |
|
366 | 366 | |
|
367 | 367 | :param repo_name: target or source repo |
|
368 | 368 | :param search_q: filter by text |
|
369 | 369 | :param source: boolean flag to specify if repo_name refers to source |
|
370 | 370 | :param statuses: list of pull request statuses |
|
371 | 371 | :param opened_by: author user of the pull request |
|
372 | 372 | :returns: int number of pull requests |
|
373 | 373 | """ |
|
374 | 374 | q = self._prepare_get_all_query( |
|
375 | 375 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
376 | 376 | opened_by=opened_by) |
|
377 | 377 | |
|
378 | 378 | return q.count() |
|
379 | 379 | |
|
380 | 380 | def get_all(self, repo_name, search_q=None, source=False, statuses=None, |
|
381 | 381 | opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'): |
|
382 | 382 | """ |
|
383 | 383 | Get all pull requests for a specific repository. |
|
384 | 384 | |
|
385 | 385 | :param repo_name: target or source repo |
|
386 | 386 | :param search_q: filter by text |
|
387 | 387 | :param source: boolean flag to specify if repo_name refers to source |
|
388 | 388 | :param statuses: list of pull request statuses |
|
389 | 389 | :param opened_by: author user of the pull request |
|
390 | 390 | :param offset: pagination offset |
|
391 | 391 | :param length: length of returned list |
|
392 | 392 | :param order_by: order of the returned list |
|
393 | 393 | :param order_dir: 'asc' or 'desc' ordering direction |
|
394 | 394 | :returns: list of pull requests |
|
395 | 395 | """ |
|
396 | 396 | q = self._prepare_get_all_query( |
|
397 | 397 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
398 | 398 | opened_by=opened_by, order_by=order_by, order_dir=order_dir) |
|
399 | 399 | |
|
400 | 400 | if length: |
|
401 | 401 | pull_requests = q.limit(length).offset(offset).all() |
|
402 | 402 | else: |
|
403 | 403 | pull_requests = q.all() |
|
404 | 404 | |
|
405 | 405 | return pull_requests |
|
406 | 406 | |
|
407 | 407 | def count_awaiting_review(self, repo_name, search_q=None, statuses=None): |
|
408 | 408 | """ |
|
409 | 409 | Count the number of pull requests for a specific repository that are |
|
410 | 410 | awaiting review. |
|
411 | 411 | |
|
412 | 412 | :param repo_name: target or source repo |
|
413 | 413 | :param search_q: filter by text |
|
414 | 414 | :param statuses: list of pull request statuses |
|
415 | 415 | :returns: int number of pull requests |
|
416 | 416 | """ |
|
417 | 417 | pull_requests = self.get_awaiting_review( |
|
418 | 418 | repo_name, search_q=search_q, statuses=statuses) |
|
419 | 419 | |
|
420 | 420 | return len(pull_requests) |
|
421 | 421 | |
|
422 | 422 | def get_awaiting_review(self, repo_name, search_q=None, statuses=None, |
|
423 | 423 | offset=0, length=None, order_by=None, order_dir='desc'): |
|
424 | 424 | """ |
|
425 | 425 | Get all pull requests for a specific repository that are awaiting |
|
426 | 426 | review. |
|
427 | 427 | |
|
428 | 428 | :param repo_name: target or source repo |
|
429 | 429 | :param search_q: filter by text |
|
430 | 430 | :param statuses: list of pull request statuses |
|
431 | 431 | :param offset: pagination offset |
|
432 | 432 | :param length: length of returned list |
|
433 | 433 | :param order_by: order of the returned list |
|
434 | 434 | :param order_dir: 'asc' or 'desc' ordering direction |
|
435 | 435 | :returns: list of pull requests |
|
436 | 436 | """ |
|
437 | 437 | pull_requests = self.get_all( |
|
438 | 438 | repo_name, search_q=search_q, statuses=statuses, |
|
439 | 439 | order_by=order_by, order_dir=order_dir) |
|
440 | 440 | |
|
441 | 441 | _filtered_pull_requests = [] |
|
442 | 442 | for pr in pull_requests: |
|
443 | 443 | status = pr.calculated_review_status() |
|
444 | 444 | if status in [ChangesetStatus.STATUS_NOT_REVIEWED, |
|
445 | 445 | ChangesetStatus.STATUS_UNDER_REVIEW]: |
|
446 | 446 | _filtered_pull_requests.append(pr) |
|
447 | 447 | if length: |
|
448 | 448 | return _filtered_pull_requests[offset:offset+length] |
|
449 | 449 | else: |
|
450 | 450 | return _filtered_pull_requests |
|
451 | 451 | |
|
452 | 452 | def _prepare_awaiting_my_review_review_query( |
|
453 | 453 | self, repo_name, user_id, search_q=None, statuses=None, |
|
454 | 454 | order_by=None, order_dir='desc'): |
|
455 | 455 | |
|
456 | 456 | for_review_statuses = [ |
|
457 | 457 | ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED |
|
458 | 458 | ] |
|
459 | 459 | |
|
460 | 460 | pull_request_alias = aliased(PullRequest) |
|
461 | 461 | status_alias = aliased(ChangesetStatus) |
|
462 | 462 | reviewers_alias = aliased(PullRequestReviewers) |
|
463 | 463 | repo_alias = aliased(Repository) |
|
464 | 464 | |
|
465 | 465 | last_ver_subq = Session()\ |
|
466 | 466 | .query(func.min(ChangesetStatus.version)) \ |
|
467 | 467 | .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\ |
|
468 | 468 | .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \ |
|
469 | 469 | .subquery() |
|
470 | 470 | |
|
471 | 471 | q = Session().query(pull_request_alias) \ |
|
472 | 472 | .options(lazyload(pull_request_alias.author)) \ |
|
473 | 473 | .join(reviewers_alias, |
|
474 | 474 | reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \ |
|
475 | 475 | .join(repo_alias, |
|
476 | 476 | repo_alias.repo_id == pull_request_alias.target_repo_id) \ |
|
477 | 477 | .outerjoin(status_alias, |
|
478 | 478 | and_(status_alias.user_id == reviewers_alias.user_id, |
|
479 | 479 | status_alias.pull_request_id == reviewers_alias.pull_request_id)) \ |
|
480 | 480 | .filter(or_(status_alias.version == null(), |
|
481 | 481 | status_alias.version == last_ver_subq)) \ |
|
482 | 482 | .filter(reviewers_alias.user_id == user_id) \ |
|
483 | 483 | .filter(repo_alias.repo_name == repo_name) \ |
|
484 | 484 | .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \ |
|
485 | 485 | .group_by(pull_request_alias) |
|
486 | 486 | |
|
487 | 487 | # closed,opened |
|
488 | 488 | if statuses: |
|
489 | 489 | q = q.filter(pull_request_alias.status.in_(statuses)) |
|
490 | 490 | |
|
491 | 491 | if search_q: |
|
492 | 492 | like_expression = u'%{}%'.format(safe_unicode(search_q)) |
|
493 | 493 | q = q.join(User, User.user_id == pull_request_alias.user_id) |
|
494 | 494 | q = q.filter(or_( |
|
495 | 495 | cast(pull_request_alias.pull_request_id, String).ilike(like_expression), |
|
496 | 496 | User.username.ilike(like_expression), |
|
497 | 497 | pull_request_alias.title.ilike(like_expression), |
|
498 | 498 | pull_request_alias.description.ilike(like_expression), |
|
499 | 499 | )) |
|
500 | 500 | |
|
501 | 501 | order_map = { |
|
502 | 502 | 'name_raw': pull_request_alias.pull_request_id, |
|
503 | 503 | 'title': pull_request_alias.title, |
|
504 | 504 | 'updated_on_raw': pull_request_alias.updated_on, |
|
505 | 505 | 'target_repo': pull_request_alias.target_repo_id |
|
506 | 506 | } |
|
507 | 507 | if order_by and order_by in order_map: |
|
508 | 508 | if order_dir == 'asc': |
|
509 | 509 | q = q.order_by(order_map[order_by].asc()) |
|
510 | 510 | else: |
|
511 | 511 | q = q.order_by(order_map[order_by].desc()) |
|
512 | 512 | |
|
513 | 513 | return q |
|
514 | 514 | |
|
515 | 515 | def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None): |
|
516 | 516 | """ |
|
517 | 517 | Count the number of pull requests for a specific repository that are |
|
518 | 518 | awaiting review from a specific user. |
|
519 | 519 | |
|
520 | 520 | :param repo_name: target or source repo |
|
521 | 521 | :param user_id: reviewer user of the pull request |
|
522 | 522 | :param search_q: filter by text |
|
523 | 523 | :param statuses: list of pull request statuses |
|
524 | 524 | :returns: int number of pull requests |
|
525 | 525 | """ |
|
526 | 526 | q = self._prepare_awaiting_my_review_review_query( |
|
527 | 527 | repo_name, user_id, search_q=search_q, statuses=statuses) |
|
528 | 528 | return q.count() |
|
529 | 529 | |
|
530 | 530 | def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None, |
|
531 | 531 | offset=0, length=None, order_by=None, order_dir='desc'): |
|
532 | 532 | """ |
|
533 | 533 | Get all pull requests for a specific repository that are awaiting |
|
534 | 534 | review from a specific user. |
|
535 | 535 | |
|
536 | 536 | :param repo_name: target or source repo |
|
537 | 537 | :param user_id: reviewer user of the pull request |
|
538 | 538 | :param search_q: filter by text |
|
539 | 539 | :param statuses: list of pull request statuses |
|
540 | 540 | :param offset: pagination offset |
|
541 | 541 | :param length: length of returned list |
|
542 | 542 | :param order_by: order of the returned list |
|
543 | 543 | :param order_dir: 'asc' or 'desc' ordering direction |
|
544 | 544 | :returns: list of pull requests |
|
545 | 545 | """ |
|
546 | 546 | |
|
547 | 547 | q = self._prepare_awaiting_my_review_review_query( |
|
548 | 548 | repo_name, user_id, search_q=search_q, statuses=statuses, |
|
549 | 549 | order_by=order_by, order_dir=order_dir) |
|
550 | 550 | |
|
551 | 551 | if length: |
|
552 | 552 | pull_requests = q.limit(length).offset(offset).all() |
|
553 | 553 | else: |
|
554 | 554 | pull_requests = q.all() |
|
555 | 555 | |
|
556 | 556 | return pull_requests |
|
557 | 557 | |
|
558 | 558 | def _prepare_im_participating_query(self, user_id=None, statuses=None, query='', |
|
559 | 559 | order_by=None, order_dir='desc'): |
|
560 | 560 | """ |
|
561 | 561 | return a query of pull-requests where the user is the creator, or is added as a reviewer |
|
562 | 562 | """ |
|
563 | 563 | q = PullRequest.query() |
|
564 | 564 | if user_id: |
|
565 | 565 | reviewers_subquery = Session().query( |
|
566 | 566 | PullRequestReviewers.pull_request_id).filter( |
|
567 | 567 | PullRequestReviewers.user_id == user_id).subquery() |
|
568 | 568 | user_filter = or_( |
|
569 | 569 | PullRequest.user_id == user_id, |
|
570 | 570 | PullRequest.pull_request_id.in_(reviewers_subquery) |
|
571 | 571 | ) |
|
572 | 572 | q = PullRequest.query().filter(user_filter) |
|
573 | 573 | |
|
574 | 574 | # closed,opened |
|
575 | 575 | if statuses: |
|
576 | 576 | q = q.filter(PullRequest.status.in_(statuses)) |
|
577 | 577 | |
|
578 | 578 | if query: |
|
579 | 579 | like_expression = u'%{}%'.format(safe_unicode(query)) |
|
580 | 580 | q = q.join(User, User.user_id == PullRequest.user_id) |
|
581 | 581 | q = q.filter(or_( |
|
582 | 582 | cast(PullRequest.pull_request_id, String).ilike(like_expression), |
|
583 | 583 | User.username.ilike(like_expression), |
|
584 | 584 | PullRequest.title.ilike(like_expression), |
|
585 | 585 | PullRequest.description.ilike(like_expression), |
|
586 | 586 | )) |
|
587 | 587 | |
|
588 | 588 | order_map = { |
|
589 | 589 | 'name_raw': PullRequest.pull_request_id, |
|
590 | 590 | 'title': PullRequest.title, |
|
591 | 591 | 'updated_on_raw': PullRequest.updated_on, |
|
592 | 592 | 'target_repo': PullRequest.target_repo_id |
|
593 | 593 | } |
|
594 | 594 | if order_by and order_by in order_map: |
|
595 | 595 | if order_dir == 'asc': |
|
596 | 596 | q = q.order_by(order_map[order_by].asc()) |
|
597 | 597 | else: |
|
598 | 598 | q = q.order_by(order_map[order_by].desc()) |
|
599 | 599 | |
|
600 | 600 | return q |
|
601 | 601 | |
|
602 | 602 | def count_im_participating_in(self, user_id=None, statuses=None, query=''): |
|
603 | 603 | q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query) |
|
604 | 604 | return q.count() |
|
605 | 605 | |
|
606 | 606 | def get_im_participating_in( |
|
607 | 607 | self, user_id=None, statuses=None, query='', offset=0, |
|
608 | 608 | length=None, order_by=None, order_dir='desc'): |
|
609 | 609 | """ |
|
610 | 610 | Get all pull requests that I'm participating in as a reviewer, or that I have opened |
|
611 | 611 | """ |
|
612 | 612 | |
|
613 | 613 | q = self._prepare_im_participating_query( |
|
614 | 614 | user_id, statuses=statuses, query=query, order_by=order_by, |
|
615 | 615 | order_dir=order_dir) |
|
616 | 616 | |
|
617 | 617 | if length: |
|
618 | 618 | pull_requests = q.limit(length).offset(offset).all() |
|
619 | 619 | else: |
|
620 | 620 | pull_requests = q.all() |
|
621 | 621 | |
|
622 | 622 | return pull_requests |
|
623 | 623 | |
|
624 | 624 | def _prepare_participating_in_for_review_query( |
|
625 | 625 | self, user_id, statuses=None, query='', order_by=None, order_dir='desc'): |
|
626 | 626 | |
|
627 | 627 | for_review_statuses = [ |
|
628 | 628 | ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED |
|
629 | 629 | ] |
|
630 | 630 | |
|
631 | 631 | pull_request_alias = aliased(PullRequest) |
|
632 | 632 | status_alias = aliased(ChangesetStatus) |
|
633 | 633 | reviewers_alias = aliased(PullRequestReviewers) |
|
634 | 634 | |
|
635 | 635 | last_ver_subq = Session()\ |
|
636 | 636 | .query(func.min(ChangesetStatus.version)) \ |
|
637 | 637 | .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\ |
|
638 | 638 | .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \ |
|
639 | 639 | .subquery() |
|
640 | 640 | |
|
641 | 641 | q = Session().query(pull_request_alias) \ |
|
642 | 642 | .options(lazyload(pull_request_alias.author)) \ |
|
643 | 643 | .join(reviewers_alias, |
|
644 | 644 | reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \ |
|
645 | 645 | .outerjoin(status_alias, |
|
646 | 646 | and_(status_alias.user_id == reviewers_alias.user_id, |
|
647 | 647 | status_alias.pull_request_id == reviewers_alias.pull_request_id)) \ |
|
648 | 648 | .filter(or_(status_alias.version == null(), |
|
649 | 649 | status_alias.version == last_ver_subq)) \ |
|
650 | 650 | .filter(reviewers_alias.user_id == user_id) \ |
|
651 | 651 | .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \ |
|
652 | 652 | .group_by(pull_request_alias) |
|
653 | 653 | |
|
654 | 654 | # closed,opened |
|
655 | 655 | if statuses: |
|
656 | 656 | q = q.filter(pull_request_alias.status.in_(statuses)) |
|
657 | 657 | |
|
658 | 658 | if query: |
|
659 | 659 | like_expression = u'%{}%'.format(safe_unicode(query)) |
|
660 | 660 | q = q.join(User, User.user_id == pull_request_alias.user_id) |
|
661 | 661 | q = q.filter(or_( |
|
662 | 662 | cast(pull_request_alias.pull_request_id, String).ilike(like_expression), |
|
663 | 663 | User.username.ilike(like_expression), |
|
664 | 664 | pull_request_alias.title.ilike(like_expression), |
|
665 | 665 | pull_request_alias.description.ilike(like_expression), |
|
666 | 666 | )) |
|
667 | 667 | |
|
668 | 668 | order_map = { |
|
669 | 669 | 'name_raw': pull_request_alias.pull_request_id, |
|
670 | 670 | 'title': pull_request_alias.title, |
|
671 | 671 | 'updated_on_raw': pull_request_alias.updated_on, |
|
672 | 672 | 'target_repo': pull_request_alias.target_repo_id |
|
673 | 673 | } |
|
674 | 674 | if order_by and order_by in order_map: |
|
675 | 675 | if order_dir == 'asc': |
|
676 | 676 | q = q.order_by(order_map[order_by].asc()) |
|
677 | 677 | else: |
|
678 | 678 | q = q.order_by(order_map[order_by].desc()) |
|
679 | 679 | |
|
680 | 680 | return q |
|
681 | 681 | |
|
682 | 682 | def count_im_participating_in_for_review(self, user_id, statuses=None, query=''): |
|
683 | 683 | q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query) |
|
684 | 684 | return q.count() |
|
685 | 685 | |
|
686 | 686 | def get_im_participating_in_for_review( |
|
687 | 687 | self, user_id, statuses=None, query='', offset=0, |
|
688 | 688 | length=None, order_by=None, order_dir='desc'): |
|
689 | 689 | """ |
|
690 | 690 | Get all pull requests that need user approval or rejection |
|
691 | 691 | """ |
|
692 | 692 | |
|
693 | 693 | q = self._prepare_participating_in_for_review_query( |
|
694 | 694 | user_id, statuses=statuses, query=query, order_by=order_by, |
|
695 | 695 | order_dir=order_dir) |
|
696 | 696 | |
|
697 | 697 | if length: |
|
698 | 698 | pull_requests = q.limit(length).offset(offset).all() |
|
699 | 699 | else: |
|
700 | 700 | pull_requests = q.all() |
|
701 | 701 | |
|
702 | 702 | return pull_requests |
|
703 | 703 | |
|
704 | 704 | def get_versions(self, pull_request): |
|
705 | 705 | """ |
|
706 | 706 | returns versions of the pull request sorted by ID ascending |
|
707 | 707 | """ |
|
708 | 708 | return PullRequestVersion.query()\ |
|
709 | 709 | .filter(PullRequestVersion.pull_request == pull_request)\ |
|
710 | 710 | .order_by(PullRequestVersion.pull_request_version_id.asc())\ |
|
711 | 711 | .all() |
|
712 | 712 | |
|
713 | 713 | def get_pr_version(self, pull_request_id, version=None): |
|
714 | 714 | at_version = None |
|
715 | 715 | |
|
716 | 716 | if version and version == 'latest': |
|
717 | 717 | pull_request_ver = PullRequest.get(pull_request_id) |
|
718 | 718 | pull_request_obj = pull_request_ver |
|
719 | 719 | _org_pull_request_obj = pull_request_obj |
|
720 | 720 | at_version = 'latest' |
|
721 | 721 | elif version: |
|
722 | 722 | pull_request_ver = PullRequestVersion.get_or_404(version) |
|
723 | 723 | pull_request_obj = pull_request_ver |
|
724 | 724 | _org_pull_request_obj = pull_request_ver.pull_request |
|
725 | 725 | at_version = pull_request_ver.pull_request_version_id |
|
726 | 726 | else: |
|
727 | 727 | _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404( |
|
728 | 728 | pull_request_id) |
|
729 | 729 | |
|
730 | 730 | pull_request_display_obj = PullRequest.get_pr_display_object( |
|
731 | 731 | pull_request_obj, _org_pull_request_obj) |
|
732 | 732 | |
|
733 | 733 | return _org_pull_request_obj, pull_request_obj, \ |
|
734 | 734 | pull_request_display_obj, at_version |
|
735 | 735 | |
|
736 | 736 | def pr_commits_versions(self, versions): |
|
737 | 737 | """ |
|
738 | 738 | Maps the pull-request commits into all known PR versions. This way we can obtain |
|
739 | 739 | the PR version(s) each commit was introduced in. |
|
740 | 740 | """ |
|
741 | 741 | commit_versions = collections.defaultdict(list) |
|
742 | 742 | num_versions = [x.pull_request_version_id for x in versions] |
|
743 | 743 | for ver in versions: |
|
744 | 744 | for commit_id in ver.revisions: |
|
745 | 745 | ver_idx = ChangesetComment.get_index_from_version( |
|
746 | 746 | ver.pull_request_version_id, num_versions=num_versions) |
|
747 | 747 | commit_versions[commit_id].append(ver_idx) |
|
748 | 748 | return commit_versions |
|
749 | 749 | |
|
750 | 750 | def create(self, created_by, source_repo, source_ref, target_repo, |
|
751 | 751 | target_ref, revisions, reviewers, observers, title, description=None, |
|
752 | 752 | common_ancestor_id=None, |
|
753 | 753 | description_renderer=None, |
|
754 | 754 | reviewer_data=None, translator=None, auth_user=None): |
|
755 | 755 | translator = translator or get_current_request().translate |
|
756 | 756 | |
|
757 | 757 | created_by_user = self._get_user(created_by) |
|
758 | 758 | auth_user = auth_user or created_by_user.AuthUser() |
|
759 | 759 | source_repo = self._get_repo(source_repo) |
|
760 | 760 | target_repo = self._get_repo(target_repo) |
|
761 | 761 | |
|
762 | 762 | pull_request = PullRequest() |
|
763 | 763 | pull_request.source_repo = source_repo |
|
764 | 764 | pull_request.source_ref = source_ref |
|
765 | 765 | pull_request.target_repo = target_repo |
|
766 | 766 | pull_request.target_ref = target_ref |
|
767 | 767 | pull_request.revisions = revisions |
|
768 | 768 | pull_request.title = title |
|
769 | 769 | pull_request.description = description |
|
770 | 770 | pull_request.description_renderer = description_renderer |
|
771 | 771 | pull_request.author = created_by_user |
|
772 | 772 | pull_request.reviewer_data = reviewer_data |
|
773 | 773 | pull_request.pull_request_state = pull_request.STATE_CREATING |
|
774 | 774 | pull_request.common_ancestor_id = common_ancestor_id |
|
775 | 775 | |
|
776 | 776 | Session().add(pull_request) |
|
777 | 777 | Session().flush() |
|
778 | 778 | |
|
779 | 779 | reviewer_ids = set() |
|
780 | 780 | # members / reviewers |
|
781 | 781 | for reviewer_object in reviewers: |
|
782 | 782 | user_id, reasons, mandatory, role, rules = reviewer_object |
|
783 | 783 | user = self._get_user(user_id) |
|
784 | 784 | |
|
785 | 785 | # skip duplicates |
|
786 | 786 | if user.user_id in reviewer_ids: |
|
787 | 787 | continue |
|
788 | 788 | |
|
789 | 789 | reviewer_ids.add(user.user_id) |
|
790 | 790 | |
|
791 | 791 | reviewer = PullRequestReviewers() |
|
792 | 792 | reviewer.user = user |
|
793 | 793 | reviewer.pull_request = pull_request |
|
794 | 794 | reviewer.reasons = reasons |
|
795 | 795 | reviewer.mandatory = mandatory |
|
796 | 796 | reviewer.role = role |
|
797 | 797 | |
|
798 | 798 | # NOTE(marcink): pick only first rule for now |
|
799 | 799 | rule_id = list(rules)[0] if rules else None |
|
800 | 800 | rule = RepoReviewRule.get(rule_id) if rule_id else None |
|
801 | 801 | if rule: |
|
802 | 802 | review_group = rule.user_group_vote_rule(user_id) |
|
803 | 803 | # we check if this particular reviewer is a member of a voting group |
|
804 | 804 | if review_group: |
|
805 | 805 | # NOTE(marcink): |
|
806 | 806 | # it can be that the user is a member of more groups, but we pick the first one, |
|
807 | 807 | # same as the default reviewers algo |
|
808 | 808 | review_group = review_group[0] |
|
809 | 809 | |
|
810 | 810 | rule_data = { |
|
811 | 811 | 'rule_name': |
|
812 | 812 | rule.review_rule_name, |
|
813 | 813 | 'rule_user_group_entry_id': |
|
814 | 814 | review_group.repo_review_rule_users_group_id, |
|
815 | 815 | 'rule_user_group_name': |
|
816 | 816 | review_group.users_group.users_group_name, |
|
817 | 817 | 'rule_user_group_members': |
|
818 | 818 | [x.user.username for x in review_group.users_group.members], |
|
819 | 819 | 'rule_user_group_members_id': |
|
820 | 820 | [x.user.user_id for x in review_group.users_group.members], |
|
821 | 821 | } |
|
822 | 822 | # e.g {'vote_rule': -1, 'mandatory': True} |
|
823 | 823 | rule_data.update(review_group.rule_data()) |
|
824 | 824 | |
|
825 | 825 | reviewer.rule_data = rule_data |
|
826 | 826 | |
|
827 | 827 | Session().add(reviewer) |
|
828 | 828 | Session().flush() |
|
829 | 829 | |
|
830 | 830 | for observer_object in observers: |
|
831 | 831 | user_id, reasons, mandatory, role, rules = observer_object |
|
832 | 832 | user = self._get_user(user_id) |
|
833 | 833 | |
|
834 | 834 | # skip duplicates from reviewers |
|
835 | 835 | if user.user_id in reviewer_ids: |
|
836 | 836 | continue |
|
837 | 837 | |
|
838 | 838 | #reviewer_ids.add(user.user_id) |
|
839 | 839 | |
|
840 | 840 | observer = PullRequestReviewers() |
|
841 | 841 | observer.user = user |
|
842 | 842 | observer.pull_request = pull_request |
|
843 | 843 | observer.reasons = reasons |
|
844 | 844 | observer.mandatory = mandatory |
|
845 | 845 | observer.role = role |
|
846 | 846 | |
|
847 | 847 | # NOTE(marcink): pick only first rule for now |
|
848 | 848 | rule_id = list(rules)[0] if rules else None |
|
849 | 849 | rule = RepoReviewRule.get(rule_id) if rule_id else None |
|
850 | 850 | if rule: |
|
851 | 851 | # TODO(marcink): do we need this for observers ?? |
|
852 | 852 | pass |
|
853 | 853 | |
|
854 | 854 | Session().add(observer) |
|
855 | 855 | Session().flush() |
|
856 | 856 | |
|
857 | 857 | # Set approval status to "Under Review" for all commits which are |
|
858 | 858 | # part of this pull request. |
|
859 | 859 | ChangesetStatusModel().set_status( |
|
860 | 860 | repo=target_repo, |
|
861 | 861 | status=ChangesetStatus.STATUS_UNDER_REVIEW, |
|
862 | 862 | user=created_by_user, |
|
863 | 863 | pull_request=pull_request |
|
864 | 864 | ) |
|
865 | 865 | # we commit early at this point. This has to do with the fact |
|
866 | 866 | # that the queries above do some row-locking. And because of that |
|
867 | 867 | # we need to commit and finish the transaction before the validate call below, |
|
868 | 868 | # which for large repos could take long, resulting in long row locks |
|
869 | 869 | Session().commit() |
|
870 | 870 | |
|
871 | 871 | # prepare workspace, and run initial merge simulation. Set state during that |
|
872 | 872 | # operation |
|
873 | 873 | pull_request = PullRequest.get(pull_request.pull_request_id) |
|
874 | 874 | |
|
875 | 875 | # set state to merging for the merge simulation, and when finished to created, so we mark |
|
876 | 876 | # that the simulation is working fine |
|
877 | 877 | with pull_request.set_state(PullRequest.STATE_MERGING, |
|
878 | 878 | final_state=PullRequest.STATE_CREATED) as state_obj: |
|
879 | 879 | MergeCheck.validate( |
|
880 | 880 | pull_request, auth_user=auth_user, translator=translator) |
|
881 | 881 | |
|
882 | 882 | self.notify_reviewers(pull_request, reviewer_ids, created_by_user) |
|
883 | 883 | self.trigger_pull_request_hook(pull_request, created_by_user, 'create') |
|
884 | 884 | |
|
885 | 885 | creation_data = pull_request.get_api_data(with_merge_state=False) |
|
886 | 886 | self._log_audit_action( |
|
887 | 887 | 'repo.pull_request.create', {'data': creation_data}, |
|
888 | 888 | auth_user, pull_request) |
|
889 | 889 | |
|
890 | 890 | return pull_request |
|
891 | 891 | |
|
892 | 892 | def trigger_pull_request_hook(self, pull_request, user, action, data=None): |
|
893 | 893 | pull_request = self.__get_pull_request(pull_request) |
|
894 | 894 | target_scm = pull_request.target_repo.scm_instance() |
|
895 | 895 | if action == 'create': |
|
896 | 896 | trigger_hook = hooks_utils.trigger_create_pull_request_hook |
|
897 | 897 | elif action == 'merge': |
|
898 | 898 | trigger_hook = hooks_utils.trigger_merge_pull_request_hook |
|
899 | 899 | elif action == 'close': |
|
900 | 900 | trigger_hook = hooks_utils.trigger_close_pull_request_hook |
|
901 | 901 | elif action == 'review_status_change': |
|
902 | 902 | trigger_hook = hooks_utils.trigger_review_pull_request_hook |
|
903 | 903 | elif action == 'update': |
|
904 | 904 | trigger_hook = hooks_utils.trigger_update_pull_request_hook |
|
905 | 905 | elif action == 'comment': |
|
906 | 906 | trigger_hook = hooks_utils.trigger_comment_pull_request_hook |
|
907 | 907 | elif action == 'comment_edit': |
|
908 | 908 | trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook |
|
909 | 909 | else: |
|
910 | 910 | return |
|
911 | 911 | |
|
912 | 912 | log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s', |
|
913 | 913 | pull_request, action, trigger_hook) |
|
914 | 914 | trigger_hook( |
|
915 | 915 | username=user.username, |
|
916 | 916 | repo_name=pull_request.target_repo.repo_name, |
|
917 | 917 | repo_type=target_scm.alias, |
|
918 | 918 | pull_request=pull_request, |
|
919 | 919 | data=data) |
|
920 | 920 | |
|
921 | 921 | def _get_commit_ids(self, pull_request): |
|
922 | 922 | """ |
|
923 | 923 | Return the commit ids of the merged pull request. |
|
924 | 924 | |
|
925 | 925 | This method does not yet deal correctly with the lack of autoupdates |
|
926 | 926 | nor with implicit target updates. |
|
927 | 927 | For example: if a commit in the source repo is already in the target, it |
|
928 | 928 | will be reported anyway. |
|
929 | 929 | """ |
|
930 | 930 | merge_rev = pull_request.merge_rev |
|
931 | 931 | if merge_rev is None: |
|
932 | 932 | raise ValueError('This pull request was not merged yet') |
|
933 | 933 | |
|
934 | 934 | commit_ids = list(pull_request.revisions) |
|
935 | 935 | if merge_rev not in commit_ids: |
|
936 | 936 | commit_ids.append(merge_rev) |
|
937 | 937 | |
|
938 | 938 | return commit_ids |
|
939 | 939 | |
|
940 | 940 | def merge_repo(self, pull_request, user, extras): |
|
941 | 941 | repo_type = pull_request.source_repo.repo_type |
|
942 | 942 | log.debug("Merging pull request %s", pull_request.pull_request_id) |
|
943 | 943 | extras['user_agent'] = '{}/internal-merge'.format(repo_type) |
|
944 | 944 | merge_state = self._merge_pull_request(pull_request, user, extras) |
|
945 | 945 | if merge_state.executed: |
|
946 | 946 | log.debug("Merge was successful, updating the pull request comments.") |
|
947 | 947 | self._comment_and_close_pr(pull_request, user, merge_state) |
|
948 | 948 | |
|
949 | 949 | self._log_audit_action( |
|
950 | 950 | 'repo.pull_request.merge', |
|
951 | 951 | {'merge_state': merge_state.__dict__}, |
|
952 | 952 | user, pull_request) |
|
953 | 953 | |
|
954 | 954 | else: |
|
955 | 955 | log.warn("Merge failed, not updating the pull request.") |
|
956 | 956 | return merge_state |
|
957 | 957 | |
|
958 | 958 | def _merge_pull_request(self, pull_request, user, extras, merge_msg=None): |
|
959 | 959 | target_vcs = pull_request.target_repo.scm_instance() |
|
960 | 960 | source_vcs = pull_request.source_repo.scm_instance() |
|
961 | 961 | |
|
962 | 962 | message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format( |
|
963 | 963 | pr_id=pull_request.pull_request_id, |
|
964 | 964 | pr_title=pull_request.title, |
|
965 | 965 | pr_desc=pull_request.description, |
|
966 | 966 | source_repo=source_vcs.name, |
|
967 | 967 | source_ref_name=pull_request.source_ref_parts.name, |
|
968 | 968 | target_repo=target_vcs.name, |
|
969 | 969 | target_ref_name=pull_request.target_ref_parts.name, |
|
970 | 970 | ) |
|
971 | 971 | |
|
972 | 972 | workspace_id = self._workspace_id(pull_request) |
|
973 | 973 | repo_id = pull_request.target_repo.repo_id |
|
974 | 974 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
975 | 975 | close_branch = self._close_branch_before_merging(pull_request) |
|
976 | 976 | user_name = self._user_name_for_merging(pull_request, user) |
|
977 | 977 | |
|
978 | 978 | target_ref = self._refresh_reference( |
|
979 | 979 | pull_request.target_ref_parts, target_vcs) |
|
980 | 980 | |
|
981 | 981 | callback_daemon, extras = prepare_callback_daemon( |
|
982 | 982 | extras, protocol=vcs_settings.HOOKS_PROTOCOL, |
|
983 | 983 | host=vcs_settings.HOOKS_HOST, |
|
984 | 984 | use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS) |
|
985 | 985 | |
|
986 | 986 | with callback_daemon: |
|
987 | 987 | # TODO: johbo: Implement a clean way to run a config_override |
|
988 | 988 | # for a single call. |
|
989 | 989 | target_vcs.config.set( |
|
990 | 990 | 'rhodecode', 'RC_SCM_DATA', json.dumps(extras)) |
|
991 | 991 | |
|
992 | 992 | merge_state = target_vcs.merge( |
|
993 | 993 | repo_id, workspace_id, target_ref, source_vcs, |
|
994 | 994 | pull_request.source_ref_parts, |
|
995 | 995 | user_name=user_name, user_email=user.email, |
|
996 | 996 | message=message, use_rebase=use_rebase, |
|
997 | 997 | close_branch=close_branch) |
|
998 | 998 | return merge_state |
|
999 | 999 | |
|
1000 | 1000 | def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None): |
|
1001 | 1001 | pull_request.merge_rev = merge_state.merge_ref.commit_id |
|
1002 | 1002 | pull_request.updated_on = datetime.datetime.now() |
|
1003 | 1003 | close_msg = close_msg or 'Pull request merged and closed' |
|
1004 | 1004 | |
|
1005 | 1005 | CommentsModel().create( |
|
1006 | 1006 | text=safe_unicode(close_msg), |
|
1007 | 1007 | repo=pull_request.target_repo.repo_id, |
|
1008 | 1008 | user=user.user_id, |
|
1009 | 1009 | pull_request=pull_request.pull_request_id, |
|
1010 | 1010 | f_path=None, |
|
1011 | 1011 | line_no=None, |
|
1012 | 1012 | closing_pr=True |
|
1013 | 1013 | ) |
|
1014 | 1014 | |
|
1015 | 1015 | Session().add(pull_request) |
|
1016 | 1016 | Session().flush() |
|
1017 | 1017 | # TODO: paris: replace invalidation with less radical solution |
|
1018 | 1018 | ScmModel().mark_for_invalidation( |
|
1019 | 1019 | pull_request.target_repo.repo_name) |
|
1020 | 1020 | self.trigger_pull_request_hook(pull_request, user, 'merge') |
|
1021 | 1021 | |
|
1022 | 1022 | def has_valid_update_type(self, pull_request): |
|
1023 | 1023 | source_ref_type = pull_request.source_ref_parts.type |
|
1024 | 1024 | return source_ref_type in self.REF_TYPES |
|
1025 | 1025 | |
|
1026 | 1026 | def get_flow_commits(self, pull_request): |
|
1027 | 1027 | |
|
1028 | 1028 | # source repo |
|
1029 | 1029 | source_ref_name = pull_request.source_ref_parts.name |
|
1030 | 1030 | source_ref_type = pull_request.source_ref_parts.type |
|
1031 | 1031 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
1032 | 1032 | source_repo = pull_request.source_repo.scm_instance() |
|
1033 | 1033 | |
|
1034 | 1034 | try: |
|
1035 | 1035 | if source_ref_type in self.REF_TYPES: |
|
1036 | 1036 | source_commit = source_repo.get_commit( |
|
1037 | 1037 | source_ref_name, reference_obj=pull_request.source_ref_parts) |
|
1038 | 1038 | else: |
|
1039 | 1039 | source_commit = source_repo.get_commit(source_ref_id) |
|
1040 | 1040 | except CommitDoesNotExistError: |
|
1041 | 1041 | raise SourceRefMissing() |
|
1042 | 1042 | |
|
1043 | 1043 | # target repo |
|
1044 | 1044 | target_ref_name = pull_request.target_ref_parts.name |
|
1045 | 1045 | target_ref_type = pull_request.target_ref_parts.type |
|
1046 | 1046 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
1047 | 1047 | target_repo = pull_request.target_repo.scm_instance() |
|
1048 | 1048 | |
|
1049 | 1049 | try: |
|
1050 | 1050 | if target_ref_type in self.REF_TYPES: |
|
1051 | 1051 | target_commit = target_repo.get_commit( |
|
1052 | 1052 | target_ref_name, reference_obj=pull_request.target_ref_parts) |
|
1053 | 1053 | else: |
|
1054 | 1054 | target_commit = target_repo.get_commit(target_ref_id) |
|
1055 | 1055 | except CommitDoesNotExistError: |
|
1056 | 1056 | raise TargetRefMissing() |
|
1057 | 1057 | |
|
1058 | 1058 | return source_commit, target_commit |
|
1059 | 1059 | |
|
1060 | 1060 | def update_commits(self, pull_request, updating_user): |
|
1061 | 1061 | """ |
|
1062 | 1062 | Get the updated list of commits for the pull request |
|
1063 | 1063 | and return the new pull request version and the list |
|
1064 | 1064 | of commits processed by this update action |
|
1065 | 1065 | |
|
1066 | 1066 | updating_user is the user_object who triggered the update |
|
1067 | 1067 | """ |
|
1068 | 1068 | pull_request = self.__get_pull_request(pull_request) |
|
1069 | 1069 | source_ref_type = pull_request.source_ref_parts.type |
|
1070 | 1070 | source_ref_name = pull_request.source_ref_parts.name |
|
1071 | 1071 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
1072 | 1072 | |
|
1073 | 1073 | target_ref_type = pull_request.target_ref_parts.type |
|
1074 | 1074 | target_ref_name = pull_request.target_ref_parts.name |
|
1075 | 1075 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
1076 | 1076 | |
|
1077 | 1077 | if not self.has_valid_update_type(pull_request): |
|
1078 | 1078 | log.debug("Skipping update of pull request %s due to ref type: %s", |
|
1079 | 1079 | pull_request, source_ref_type) |
|
1080 | 1080 | return UpdateResponse( |
|
1081 | 1081 | executed=False, |
|
1082 | 1082 | reason=UpdateFailureReason.WRONG_REF_TYPE, |
|
1083 | 1083 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, |
|
1084 | 1084 | source_changed=False, target_changed=False) |
|
1085 | 1085 | |
|
1086 | 1086 | try: |
|
1087 | 1087 | source_commit, target_commit = self.get_flow_commits(pull_request) |
|
1088 | 1088 | except SourceRefMissing: |
|
1089 | 1089 | return UpdateResponse( |
|
1090 | 1090 | executed=False, |
|
1091 | 1091 | reason=UpdateFailureReason.MISSING_SOURCE_REF, |
|
1092 | 1092 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, |
|
1093 | 1093 | source_changed=False, target_changed=False) |
|
1094 | 1094 | except TargetRefMissing: |
|
1095 | 1095 | return UpdateResponse( |
|
1096 | 1096 | executed=False, |
|
1097 | 1097 | reason=UpdateFailureReason.MISSING_TARGET_REF, |
|
1098 | 1098 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, |
|
1099 | 1099 | source_changed=False, target_changed=False) |
|
1100 | 1100 | |
|
1101 | 1101 | source_changed = source_ref_id != source_commit.raw_id |
|
1102 | 1102 | target_changed = target_ref_id != target_commit.raw_id |
|
1103 | 1103 | |
|
1104 | 1104 | if not (source_changed or target_changed): |
|
1105 | 1105 | log.debug("Nothing changed in pull request %s", pull_request) |
|
1106 | 1106 | return UpdateResponse( |
|
1107 | 1107 | executed=False, |
|
1108 | 1108 | reason=UpdateFailureReason.NO_CHANGE, |
|
1109 | 1109 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, |
|
1110 | 1110 | source_changed=source_changed, target_changed=target_changed)
|
1111 | 1111 | |
|
1112 | 1112 | change_in_found = 'target repo' if target_changed else 'source repo' |
|
1113 | 1113 | log.debug('Updating pull request because of change in %s detected', |
|
1114 | 1114 | change_in_found) |
|
1115 | 1115 | |
|
1116 | 1116 | # Finally an update is needed: in case of a source change

1117 | 1117 | # we create a new version, otherwise we just update in place
|
1118 | 1118 | if source_changed: |
|
1119 | 1119 | pull_request_version = self._create_version_from_snapshot(pull_request) |
|
1120 | 1120 | self._link_comments_to_version(pull_request_version) |
|
1121 | 1121 | else: |
|
1122 | 1122 | try: |
|
1123 | 1123 | ver = pull_request.versions[-1] |
|
1124 | 1124 | except IndexError: |
|
1125 | 1125 | ver = None |
|
1126 | 1126 | |
|
1127 | 1127 | pull_request.pull_request_version_id = \ |
|
1128 | 1128 | ver.pull_request_version_id if ver else None |
|
1129 | 1129 | pull_request_version = pull_request |
|
1130 | 1130 | |
|
1131 | 1131 | source_repo = pull_request.source_repo.scm_instance() |
|
1132 | 1132 | target_repo = pull_request.target_repo.scm_instance() |
|
1133 | 1133 | |
|
1134 | 1134 | # re-compute commit ids |
|
1135 | 1135 | old_commit_ids = pull_request.revisions |
|
1136 | 1136 | pre_load = ["author", "date", "message", "branch"] |
|
1137 | 1137 | commit_ranges = target_repo.compare( |
|
1138 | 1138 | target_commit.raw_id, source_commit.raw_id, source_repo, merge=True, |
|
1139 | 1139 | pre_load=pre_load) |
|
1140 | 1140 | |
|
1141 | 1141 | target_ref = target_commit.raw_id |
|
1142 | 1142 | source_ref = source_commit.raw_id |
|
1143 | 1143 | ancestor_commit_id = target_repo.get_common_ancestor( |
|
1144 | 1144 | target_ref, source_ref, source_repo) |
|
1145 | 1145 | |
|
1146 | 1146 | if not ancestor_commit_id: |
|
1147 | 1147 | raise ValueError( |
|
1148 | 1148 | 'cannot calculate diff info without a common ancestor. ' |
|
1149 | 1149 | 'Make sure both repositories are related, and have a common forking commit.') |
|
1150 | 1150 | |
|
1151 | 1151 | pull_request.common_ancestor_id = ancestor_commit_id |
|
1152 | 1152 | |
|
1153 | 1153 | pull_request.source_ref = '%s:%s:%s' % ( |
|
1154 | 1154 | source_ref_type, source_ref_name, source_commit.raw_id) |
|
1155 | 1155 | pull_request.target_ref = '%s:%s:%s' % ( |
|
1156 | 1156 | target_ref_type, target_ref_name, ancestor_commit_id) |
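# Both refs are stored in the '<type>:<name>:<commit_id>' format, e.g.
# (hypothetical values) 'branch:feature-x:1a2b3c4d'; note that target_ref is
# pinned to the common ancestor commit rather than the current target tip.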
|
1157 | 1157 | |
|
1158 | 1158 | pull_request.revisions = [ |
|
1159 | 1159 | commit.raw_id for commit in reversed(commit_ranges)] |
|
1160 | 1160 | pull_request.updated_on = datetime.datetime.now() |
|
1161 | 1161 | Session().add(pull_request) |
|
1162 | 1162 | new_commit_ids = pull_request.revisions |
|
1163 | 1163 | |
|
1164 | 1164 | old_diff_data, new_diff_data = self._generate_update_diffs( |
|
1165 | 1165 | pull_request, pull_request_version) |
|
1166 | 1166 | |
|
1167 | 1167 | # calculate commit and file changes |
|
1168 | 1168 | commit_changes = self._calculate_commit_id_changes( |
|
1169 | 1169 | old_commit_ids, new_commit_ids) |
|
1170 | 1170 | file_changes = self._calculate_file_changes( |
|
1171 | 1171 | old_diff_data, new_diff_data) |
|
1172 | 1172 | |
|
1173 | 1173 | # set comments as outdated if DIFFS changed |
|
1174 | 1174 | CommentsModel().outdate_comments( |
|
1175 | 1175 | pull_request, old_diff_data=old_diff_data, |
|
1176 | 1176 | new_diff_data=new_diff_data) |
|
1177 | 1177 | |
|
1178 | 1178 | valid_commit_changes = (commit_changes.added or commit_changes.removed) |
|
1179 | 1179 | file_node_changes = ( |
|
1180 | 1180 | file_changes.added or file_changes.modified or file_changes.removed) |
|
1181 | 1181 | pr_has_changes = valid_commit_changes or file_node_changes |
|
1182 | 1182 | |
|
1183 | 1183 | # Add an automatic comment to the pull request, in case |
|
1184 | 1184 | # anything has changed |
|
1185 | 1185 | if pr_has_changes: |
|
1186 | 1186 | update_comment = CommentsModel().create( |
|
1187 | 1187 | text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes), |
|
1188 | 1188 | repo=pull_request.target_repo, |
|
1189 | 1189 | user=pull_request.author, |
|
1190 | 1190 | pull_request=pull_request, |
|
1191 | 1191 | send_email=False, renderer=DEFAULT_COMMENTS_RENDERER) |
|
1192 | 1192 | |
|
1193 | 1193 | # Update status to "Under Review" for added commits |
|
1194 | 1194 | for commit_id in commit_changes.added: |
|
1195 | 1195 | ChangesetStatusModel().set_status( |
|
1196 | 1196 | repo=pull_request.source_repo, |
|
1197 | 1197 | status=ChangesetStatus.STATUS_UNDER_REVIEW, |
|
1198 | 1198 | comment=update_comment, |
|
1199 | 1199 | user=pull_request.author, |
|
1200 | 1200 | pull_request=pull_request, |
|
1201 | 1201 | revision=commit_id) |
|
1202 | 1202 | |
|
1203 | 1203 | # initial DB commit of the update, before sending notifications
|
1204 | 1204 | Session().commit() |
|
1205 | 1205 | |
|
1206 | 1206 | if pr_has_changes: |
|
1207 | 1207 | # send update email to users |
|
1208 | 1208 | try: |
|
1209 | 1209 | self.notify_users(pull_request=pull_request, updating_user=updating_user, |
|
1210 | 1210 | ancestor_commit_id=ancestor_commit_id, |
|
1211 | 1211 | commit_changes=commit_changes, |
|
1212 | 1212 | file_changes=file_changes) |
|
1213 | 1213 | Session().commit() |
|
1214 | 1214 | except Exception: |
|
1215 | 1215 | log.exception('Failed to send email notification to users') |
|
1216 | 1216 | Session().rollback() |
|
1217 | 1217 | |
|
1218 | 1218 | log.debug( |
|
1219 | 1219 | 'Updated pull request %s, added_ids: %s, common_ids: %s, ' |
|
1220 | 1220 | 'removed_ids: %s', pull_request.pull_request_id, |
|
1221 | 1221 | commit_changes.added, commit_changes.common, commit_changes.removed) |
|
1222 | 1222 | log.debug( |
|
1223 | 1223 | 'Updated pull request with the following file changes: %s', |
|
1224 | 1224 | file_changes) |
|
1225 | 1225 | |
|
1226 | 1226 | log.info( |
|
1227 | 1227 | "Updated pull request %s from commit %s to commit %s, " |
|
1228 | 1228 | "stored new version %s of this pull request.", |
|
1229 | 1229 | pull_request.pull_request_id, source_ref_id, |
|
1230 | 1230 | pull_request.source_ref_parts.commit_id, |
|
1231 | 1231 | pull_request_version.pull_request_version_id) |
|
1232 | 1232 | |
|
1233 | 1233 | self.trigger_pull_request_hook(pull_request, pull_request.author, 'update') |
|
1234 | 1234 | |
|
1235 | 1235 | return UpdateResponse( |
|
1236 | 1236 | executed=True, reason=UpdateFailureReason.NONE, |
|
1237 | 1237 | old=pull_request, new=pull_request_version, |
|
1238 | 1238 | common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes, |
|
1239 | 1239 | source_changed=source_changed, target_changed=target_changed) |
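# Illustrative caller-side sketch (hypothetical variables): the returned
# UpdateResponse tells a real update apart from a skipped one, e.g.
#   response = PullRequestModel().update_commits(pull_request, updating_user)
#   if not response.executed:
#       log.debug('update skipped, reason: %s', response.reason)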
|
1240 | 1240 | |
|
1241 | 1241 | def _create_version_from_snapshot(self, pull_request): |
|
1242 | 1242 | version = PullRequestVersion() |
|
1243 | 1243 | version.title = pull_request.title |
|
1244 | 1244 | version.description = pull_request.description |
|
1245 | 1245 | version.status = pull_request.status |
|
1246 | 1246 | version.pull_request_state = pull_request.pull_request_state |
|
1247 | 1247 | version.created_on = datetime.datetime.now() |
|
1248 | 1248 | version.updated_on = pull_request.updated_on |
|
1249 | 1249 | version.user_id = pull_request.user_id |
|
1250 | 1250 | version.source_repo = pull_request.source_repo |
|
1251 | 1251 | version.source_ref = pull_request.source_ref |
|
1252 | 1252 | version.target_repo = pull_request.target_repo |
|
1253 | 1253 | version.target_ref = pull_request.target_ref |
|
1254 | 1254 | |
|
1255 | 1255 | version._last_merge_source_rev = pull_request._last_merge_source_rev |
|
1256 | 1256 | version._last_merge_target_rev = pull_request._last_merge_target_rev |
|
1257 | 1257 | version.last_merge_status = pull_request.last_merge_status |
|
1258 | 1258 | version.last_merge_metadata = pull_request.last_merge_metadata |
|
1259 | 1259 | version.shadow_merge_ref = pull_request.shadow_merge_ref |
|
1260 | 1260 | version.merge_rev = pull_request.merge_rev |
|
1261 | 1261 | version.reviewer_data = pull_request.reviewer_data |
|
1262 | 1262 | |
|
1263 | 1263 | version.revisions = pull_request.revisions |
|
1264 | 1264 | version.common_ancestor_id = pull_request.common_ancestor_id |
|
1265 | 1265 | version.pull_request = pull_request |
|
1266 | 1266 | Session().add(version) |
|
1267 | 1267 | Session().flush() |
|
1268 | 1268 | |
|
1269 | 1269 | return version |
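# The flush above assigns the primary key, so callers can immediately read
# version.pull_request_version_id (e.g. to link comments to this version).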
|
1270 | 1270 | |
|
1271 | 1271 | def _generate_update_diffs(self, pull_request, pull_request_version): |
|
1272 | 1272 | |
|
1273 | 1273 | diff_context = ( |
|
1274 | 1274 | self.DIFF_CONTEXT + |
|
1275 | 1275 | CommentsModel.needed_extra_diff_context()) |
|
1276 | 1276 | hide_whitespace_changes = False |
|
1277 | 1277 | source_repo = pull_request_version.source_repo |
|
1278 | 1278 | source_ref_id = pull_request_version.source_ref_parts.commit_id |
|
1279 | 1279 | target_ref_id = pull_request_version.target_ref_parts.commit_id |
|
1280 | 1280 | old_diff = self._get_diff_from_pr_or_version( |
|
1281 | 1281 | source_repo, source_ref_id, target_ref_id, |
|
1282 | 1282 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) |
|
1283 | 1283 | |
|
1284 | 1284 | source_repo = pull_request.source_repo |
|
1285 | 1285 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
1286 | 1286 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
1287 | 1287 | |
|
1288 | 1288 | new_diff = self._get_diff_from_pr_or_version( |
|
1289 | 1289 | source_repo, source_ref_id, target_ref_id, |
|
1290 | 1290 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) |
|
1291 | 1291 | |
|
1292 | 1292 | old_diff_data = diffs.DiffProcessor(old_diff) |
|
1293 | 1293 | old_diff_data.prepare() |
|
1294 | 1294 | new_diff_data = diffs.DiffProcessor(new_diff) |
|
1295 | 1295 | new_diff_data.prepare() |
|
1296 | 1296 | |
|
1297 | 1297 | return old_diff_data, new_diff_data |
|
1298 | 1298 | |
|
1299 | 1299 | def _link_comments_to_version(self, pull_request_version): |
|
1300 | 1300 | """ |
|
1301 | 1301 | Link all unlinked comments of this pull request to the given version. |
|
1302 | 1302 | |
|
1303 | 1303 | :param pull_request_version: The `PullRequestVersion` to which |
|
1304 | 1304 | the comments shall be linked. |
|
1305 | 1305 | |
|
1306 | 1306 | """ |
|
1307 | 1307 | pull_request = pull_request_version.pull_request |
|
1308 | 1308 | comments = ChangesetComment.query()\ |
|
1309 | 1309 | .filter( |
|
1310 | 1310 | # TODO: johbo: Should we query for the repo at all here? |
|
1311 | 1311 | # Pending decision on how comments of PRs are to be related |
|
1312 | 1312 | # to either the source repo, the target repo or no repo at all. |
|
1313 | 1313 | ChangesetComment.repo_id == pull_request.target_repo.repo_id, |
|
1314 | 1314 | ChangesetComment.pull_request == pull_request, |
|
1315 | 1315 | ChangesetComment.pull_request_version == None)\ |
|
1316 | 1316 | .order_by(ChangesetComment.comment_id.asc()) |
|
1317 | 1317 | |
|
1318 | 1318 | # TODO: johbo: Find out why this breaks if it is done in a bulk |
|
1319 | 1319 | # operation. |
|
1320 | 1320 | for comment in comments: |
|
1321 | 1321 | comment.pull_request_version_id = ( |
|
1322 | 1322 | pull_request_version.pull_request_version_id) |
|
1323 | 1323 | Session().add(comment) |
|
1324 | 1324 | |
|
1325 | 1325 | def _calculate_commit_id_changes(self, old_ids, new_ids): |
|
1326 | 1326 | added = [x for x in new_ids if x not in old_ids] |
|
1327 | 1327 | common = [x for x in new_ids if x in old_ids] |
|
1328 | 1328 | removed = [x for x in old_ids if x not in new_ids] |
|
1329 | 1329 | total = new_ids |
|
1330 | 1330 | return ChangeTuple(added, common, removed, total) |
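# e.g. (hypothetical ids) old_ids=['a', 'b'], new_ids=['b', 'c'] yields
# ChangeTuple(added=['c'], common=['b'], removed=['a'], total=['b', 'c'])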
|
1331 | 1331 | |
|
1332 | 1332 | def _calculate_file_changes(self, old_diff_data, new_diff_data): |
|
1333 | 1333 | |
|
1334 | 1334 | old_files = OrderedDict() |
|
1335 | 1335 | for diff_data in old_diff_data.parsed_diff: |
|
1336 | 1336 | old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff']) |
|
1337 | 1337 | |
|
1338 | 1338 | added_files = [] |
|
1339 | 1339 | modified_files = [] |
|
1340 | 1340 | removed_files = [] |
|
1341 | 1341 | for diff_data in new_diff_data.parsed_diff: |
|
1342 | 1342 | new_filename = diff_data['filename'] |
|
1343 | 1343 | new_hash = md5_safe(diff_data['raw_diff']) |
|
1344 | 1344 | |
|
1345 | 1345 | old_hash = old_files.get(new_filename) |
|
1346 | 1346 | if not old_hash: |
|
1347 | 1347 | # file is not present in the old diff, we have to figure out the

1348 | 1348 | # operation (ADD/REMOVE) from the parsed diff
|
1349 | 1349 | operations_dict = diff_data['stats']['ops'] |
|
1350 | 1350 | if diffs.DEL_FILENODE in operations_dict: |
|
1351 | 1351 | removed_files.append(new_filename) |
|
1352 | 1352 | else: |
|
1353 | 1353 | added_files.append(new_filename) |
|
1354 | 1354 | else: |
|
1355 | 1355 | if new_hash != old_hash: |
|
1356 | 1356 | modified_files.append(new_filename) |
|
1357 | 1357 | # now remove a file from old, since we have seen it already |
|
1358 | 1358 | del old_files[new_filename] |
|
1359 | 1359 | |
|
1360 | 1360 | # removed files are those present in the old diff but not in the NEW one;

1361 | 1361 | # since we remove old files that are present in the new diff, any

1362 | 1362 | # left-overs should be the removed files
|
1363 | 1363 | removed_files.extend(old_files.keys()) |
|
1364 | 1364 | |
|
1365 | 1365 | return FileChangeTuple(added_files, modified_files, removed_files) |
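# Illustrative outcome (hypothetical filenames): if the old diff touched a.py
# and b.py while the new diff modifies b.py differently and adds c.py, this
# returns FileChangeTuple(added=['c.py'], modified=['b.py'], removed=['a.py'])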
|
1366 | 1366 | |
|
1367 | 1367 | def _render_update_message(self, ancestor_commit_id, changes, file_changes): |
|
1368 | 1368 | """ |
|
1369 | 1369 | render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),

1370 | 1370 | so it always looks the same regardless of which default

1371 | 1371 | renderer the system is using.
|
1372 | 1372 | |
|
1373 | 1373 | :param ancestor_commit_id: ancestor raw_id |
|
1374 | 1374 | :param changes: changes named tuple |
|
1375 | 1375 | :param file_changes: file changes named tuple |
|
1376 | 1376 | |
|
1377 | 1377 | """ |
|
1378 | 1378 | new_status = ChangesetStatus.get_status_lbl( |
|
1379 | 1379 | ChangesetStatus.STATUS_UNDER_REVIEW) |
|
1380 | 1380 | |
|
1381 | 1381 | changed_files = ( |
|
1382 | 1382 | file_changes.added + file_changes.modified + file_changes.removed) |
|
1383 | 1383 | |
|
1384 | 1384 | params = { |
|
1385 | 1385 | 'under_review_label': new_status, |
|
1386 | 1386 | 'added_commits': changes.added, |
|
1387 | 1387 | 'removed_commits': changes.removed, |
|
1388 | 1388 | 'changed_files': changed_files, |
|
1389 | 1389 | 'added_files': file_changes.added, |
|
1390 | 1390 | 'modified_files': file_changes.modified, |
|
1391 | 1391 | 'removed_files': file_changes.removed, |
|
1392 | 1392 | 'ancestor_commit_id': ancestor_commit_id |
|
1393 | 1393 | } |
|
1394 | 1394 | renderer = RstTemplateRenderer() |
|
1395 | 1395 | return renderer.render('pull_request_update.mako', **params) |
|
1396 | 1396 | |
|
1397 | 1397 | def edit(self, pull_request, title, description, description_renderer, user): |
|
1398 | 1398 | pull_request = self.__get_pull_request(pull_request) |
|
1399 | 1399 | old_data = pull_request.get_api_data(with_merge_state=False) |
|
1400 | 1400 | if pull_request.is_closed(): |
|
1401 | 1401 | raise ValueError('This pull request is closed') |
|
1402 | 1402 | if title: |
|
1403 | 1403 | pull_request.title = title |
|
1404 | 1404 | pull_request.description = description |
|
1405 | 1405 | pull_request.updated_on = datetime.datetime.now() |
|
1406 | 1406 | pull_request.description_renderer = description_renderer |
|
1407 | 1407 | Session().add(pull_request) |
|
1408 | 1408 | self._log_audit_action( |
|
1409 | 1409 | 'repo.pull_request.edit', {'old_data': old_data}, |
|
1410 | 1410 | user, pull_request) |
|
1411 | 1411 | |
|
1412 | 1412 | def update_reviewers(self, pull_request, reviewer_data, user): |
|
1413 | 1413 | """ |
|
1414 | 1414 | Update the reviewers in the pull request |
|
1415 | 1415 | |
|
1416 | 1416 | :param pull_request: the pr to update |
|
1417 | 1417 | :param reviewer_data: list of tuples |
|
1418 | 1418 | [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])] |
|
1419 | 1419 | :param user: current user who triggers this action
|
1420 | 1420 | """ |
|
1421 | 1421 | |
|
1422 | 1422 | pull_request = self.__get_pull_request(pull_request) |
|
1423 | 1423 | if pull_request.is_closed(): |
|
1424 | 1424 | raise ValueError('This pull request is closed') |
|
1425 | 1425 | |
|
1426 | 1426 | reviewers = {} |
|
1427 | 1427 | for user_id, reasons, mandatory, role, rules in reviewer_data: |
|
1428 | 1428 | if isinstance(user_id, (int, str)): |
|
1429 | 1429 | user_id = self._get_user(user_id).user_id |
|
1430 | 1430 | reviewers[user_id] = { |
|
1431 | 1431 | 'reasons': reasons, 'mandatory': mandatory, 'role': role} |
|
1432 | 1432 | |
|
1433 | 1433 | reviewers_ids = set(reviewers.keys()) |
|
1434 | 1434 | current_reviewers = PullRequestReviewers.get_pull_request_reviewers( |
|
1435 | 1435 | pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER) |
|
1436 | 1436 | |
|
1437 | 1437 | current_reviewers_ids = set([x.user.user_id for x in current_reviewers]) |
|
1438 | 1438 | |
|
1439 | 1439 | ids_to_add = reviewers_ids.difference(current_reviewers_ids) |
|
1440 | 1440 | ids_to_remove = current_reviewers_ids.difference(reviewers_ids) |
|
1441 | 1441 | |
|
1442 | 1442 | log.debug("Adding %s reviewers", ids_to_add) |
|
1443 | 1443 | log.debug("Removing %s reviewers", ids_to_remove) |
|
1444 | 1444 | changed = False |
|
1445 | 1445 | added_audit_reviewers = [] |
|
1446 | 1446 | removed_audit_reviewers = [] |
|
1447 | 1447 | |
|
1448 | 1448 | for uid in ids_to_add: |
|
1449 | 1449 | changed = True |
|
1450 | 1450 | _usr = self._get_user(uid) |
|
1451 | 1451 | reviewer = PullRequestReviewers() |
|
1452 | 1452 | reviewer.user = _usr |
|
1453 | 1453 | reviewer.pull_request = pull_request |
|
1454 | 1454 | reviewer.reasons = reviewers[uid]['reasons'] |
|
1455 | 1455 | # NOTE(marcink): mandatory shouldn't be changed now |
|
1456 | 1456 | # reviewer.mandatory = reviewers[uid]['reasons'] |
|
1457 | 1457 | # NOTE(marcink): role should be hardcoded, so we won't edit it. |
|
1458 | 1458 | reviewer.role = PullRequestReviewers.ROLE_REVIEWER |
|
1459 | 1459 | Session().add(reviewer) |
|
1460 | 1460 | added_audit_reviewers.append(reviewer.get_dict()) |
|
1461 | 1461 | |
|
1462 | 1462 | for uid in ids_to_remove: |
|
1463 | 1463 | changed = True |
|
1464 | 1464 | # NOTE(marcink): we fetch "ALL" reviewer objects using .all().

1465 | 1465 | # This is an edge case that handles a previous state of having the same reviewer twice.

1466 | 1466 | # This CAN happen due to the lack of DB checks.
|
1467 | 1467 | reviewers = PullRequestReviewers.query()\ |
|
1468 | 1468 | .filter(PullRequestReviewers.user_id == uid, |
|
1469 | 1469 | PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER, |
|
1470 | 1470 | PullRequestReviewers.pull_request == pull_request)\ |
|
1471 | 1471 | .all() |
|
1472 | 1472 | |
|
1473 | 1473 | for obj in reviewers: |
|
1474 | 1474 | removed_audit_reviewers.append(obj.get_dict())
|
1475 | 1475 | Session().delete(obj) |
|
1476 | 1476 | |
|
1477 | 1477 | if changed: |
|
1478 | 1478 | Session().expire_all() |
|
1479 | 1479 | pull_request.updated_on = datetime.datetime.now() |
|
1480 | 1480 | Session().add(pull_request) |
|
1481 | 1481 | |
|
1482 | 1482 | # finally store audit logs |
|
1483 | 1483 | for user_data in added_audit_reviewers: |
|
1484 | 1484 | self._log_audit_action( |
|
1485 | 1485 | 'repo.pull_request.reviewer.add', {'data': user_data}, |
|
1486 | 1486 | user, pull_request) |
|
1487 | 1487 | for user_data in removed_audit_reviewers: |
|
1488 | 1488 | self._log_audit_action( |
|
1489 | 1489 | 'repo.pull_request.reviewer.delete', {'old_data': user_data}, |
|
1490 | 1490 | user, pull_request) |
|
1491 | 1491 | |
|
1492 | 1492 | self.notify_reviewers(pull_request, ids_to_add, user) |
|
1493 | 1493 | return ids_to_add, ids_to_remove |
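# Illustrative call (hypothetical values), following the tuple layout from the
# docstring above:
#   reviewer_data = [(2, ['Default reviewer'], False, 'reviewer', [])]
#   PullRequestModel().update_reviewers(pull_request, reviewer_data, cur_user)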
|
1494 | 1494 | |
|
1495 | 1495 | def update_observers(self, pull_request, observer_data, user): |
|
1496 | 1496 | """ |
|
1497 | 1497 | Update the observers in the pull request |
|
1498 | 1498 | |
|
1499 | 1499 | :param pull_request: the pr to update |
|
1500 | 1500 | :param observer_data: list of tuples |
|
1501 | 1501 | [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])] |
|
1502 | 1502 | :param user: current user who triggers this action
|
1503 | 1503 | """ |
|
1504 | 1504 | pull_request = self.__get_pull_request(pull_request) |
|
1505 | 1505 | if pull_request.is_closed(): |
|
1506 | 1506 | raise ValueError('This pull request is closed') |
|
1507 | 1507 | |
|
1508 | 1508 | observers = {} |
|
1509 | 1509 | for user_id, reasons, mandatory, role, rules in observer_data: |
|
1510 | 1510 | if isinstance(user_id, (int, str)): |
|
1511 | 1511 | user_id = self._get_user(user_id).user_id |
|
1512 | 1512 | observers[user_id] = { |
|
1513 | 1513 | 'reasons': reasons, 'mandatory': mandatory, 'role': role}
|
1514 | 1514 | |
|
1515 | 1515 | observers_ids = set(observers.keys()) |
|
1516 | 1516 | current_observers = PullRequestReviewers.get_pull_request_reviewers( |
|
1517 | 1517 | pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER) |
|
1518 | 1518 | |
|
1519 | 1519 | current_observers_ids = set([x.user.user_id for x in current_observers]) |
|
1520 | 1520 | |
|
1521 | 1521 | ids_to_add = observers_ids.difference(current_observers_ids) |
|
1522 | 1522 | ids_to_remove = current_observers_ids.difference(observers_ids) |
|
1523 | 1523 | |
|
1524 | 1524 | log.debug("Adding %s observer", ids_to_add) |
|
1525 | 1525 | log.debug("Removing %s observer", ids_to_remove) |
|
1526 | 1526 | changed = False |
|
1527 | 1527 | added_audit_observers = [] |
|
1528 | 1528 | removed_audit_observers = [] |
|
1529 | 1529 | |
|
1530 | 1530 | for uid in ids_to_add: |
|
1531 | 1531 | changed = True |
|
1532 | 1532 | _usr = self._get_user(uid) |
|
1533 | 1533 | observer = PullRequestReviewers() |
|
1534 | 1534 | observer.user = _usr |
|
1535 | 1535 | observer.pull_request = pull_request |
|
1536 | 1536 | observer.reasons = observers[uid]['reasons'] |
|
1537 | 1537 | # NOTE(marcink): mandatory shouldn't be changed now |
|
1538 | 1538 | # observer.mandatory = observer[uid]['reasons'] |
|
1539 | 1539 | |
|
1540 | 1540 | # NOTE(marcink): role should be hardcoded, so we won't edit it. |
|
1541 | 1541 | observer.role = PullRequestReviewers.ROLE_OBSERVER |
|
1542 | 1542 | Session().add(observer) |
|
1543 | 1543 | added_audit_observers.append(observer.get_dict()) |
|
1544 | 1544 | |
|
1545 | 1545 | for uid in ids_to_remove: |
|
1546 | 1546 | changed = True |
|
1547 | 1547 | # NOTE(marcink): we fetch "ALL" observer objects using .all().

1548 | 1548 | # This is an edge case that handles a previous state of having the same observer twice.

1549 | 1549 | # This CAN happen due to the lack of DB checks.
|
1550 | 1550 | observers = PullRequestReviewers.query()\ |
|
1551 | 1551 | .filter(PullRequestReviewers.user_id == uid, |
|
1552 | 1552 | PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER, |
|
1553 | 1553 | PullRequestReviewers.pull_request == pull_request)\ |
|
1554 | 1554 | .all() |
|
1555 | 1555 | |
|
1556 | 1556 | for obj in observers: |
|
1557 | 1557 | removed_audit_observers.append(obj.get_dict())
|
1558 | 1558 | Session().delete(obj) |
|
1559 | 1559 | |
|
1560 | 1560 | if changed: |
|
1561 | 1561 | Session().expire_all() |
|
1562 | 1562 | pull_request.updated_on = datetime.datetime.now() |
|
1563 | 1563 | Session().add(pull_request) |
|
1564 | 1564 | |
|
1565 | 1565 | # finally store audit logs |
|
1566 | 1566 | for user_data in added_audit_observers: |
|
1567 | 1567 | self._log_audit_action( |
|
1568 | 1568 | 'repo.pull_request.observer.add', {'data': user_data}, |
|
1569 | 1569 | user, pull_request) |
|
1570 | 1570 | for user_data in removed_audit_observers: |
|
1571 | 1571 | self._log_audit_action( |
|
1572 | 1572 | 'repo.pull_request.observer.delete', {'old_data': user_data}, |
|
1573 | 1573 | user, pull_request) |
|
1574 | 1574 | |
|
1575 | 1575 | self.notify_observers(pull_request, ids_to_add, user) |
|
1576 | 1576 | return ids_to_add, ids_to_remove |
|
1577 | 1577 | |
|
1578 | 1578 | def get_url(self, pull_request, request=None, permalink=False): |
|
1579 | 1579 | if not request: |
|
1580 | 1580 | request = get_current_request() |
|
1581 | 1581 | |
|
1582 | 1582 | if permalink: |
|
1583 | 1583 | return request.route_url( |
|
1584 | 1584 | 'pull_requests_global', |
|
1585 | 1585 | pull_request_id=pull_request.pull_request_id,) |
|
1586 | 1586 | else: |
|
1587 | 1587 | return request.route_url('pullrequest_show', |
|
1588 | 1588 | repo_name=safe_str(pull_request.target_repo.repo_name), |
|
1589 | 1589 | pull_request_id=pull_request.pull_request_id,) |
|
1590 | 1590 | |
|
1591 | 1591 | def get_shadow_clone_url(self, pull_request, request=None): |
|
1592 | 1592 | """ |
|
1593 | 1593 | Returns qualified url pointing to the shadow repository. If this pull |
|
1594 | 1594 | request is closed there is no shadow repository and ``None`` will be |
|
1595 | 1595 | returned. |
|
1596 | 1596 | """ |
|
1597 | 1597 | if pull_request.is_closed(): |
|
1598 | 1598 | return None |
|
1599 | 1599 | else: |
|
1600 | 1600 | pr_url = urllib.parse.unquote(self.get_url(pull_request, request=request)) |
|
1601 | 1601 | return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url)) |
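# The shadow clone URL is just the pull request URL with '/repository'
# appended, e.g. (hypothetical) https://code.example.com/repo/pull-request/1/repository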
|
1602 | 1602 | |
|
1603 | 1603 | def _notify_reviewers(self, pull_request, user_ids, role, user): |
|
1604 | 1604 | # notification to reviewers/observers |
|
1605 | 1605 | if not user_ids: |
|
1606 | 1606 | return |
|
1607 | 1607 | |
|
1608 | 1608 | log.debug('Notify following %s users about pull-request %s', role, user_ids) |
|
1609 | 1609 | |
|
1610 | 1610 | pull_request_obj = pull_request |
|
1611 | 1611 | # get the current participants of this pull request |
|
1612 | 1612 | recipients = user_ids |
|
1613 | 1613 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST |
|
1614 | 1614 | |
|
1615 | 1615 | pr_source_repo = pull_request_obj.source_repo |
|
1616 | 1616 | pr_target_repo = pull_request_obj.target_repo |
|
1617 | 1617 | |
|
1618 | 1618 | pr_url = h.route_url('pullrequest_show', |
|
1619 | 1619 | repo_name=pr_target_repo.repo_name, |
|
1620 | 1620 | pull_request_id=pull_request_obj.pull_request_id,) |
|
1621 | 1621 | |
|
1622 | 1622 | # set some variables for email notification |
|
1623 | 1623 | pr_target_repo_url = h.route_url( |
|
1624 | 1624 | 'repo_summary', repo_name=pr_target_repo.repo_name) |
|
1625 | 1625 | |
|
1626 | 1626 | pr_source_repo_url = h.route_url( |
|
1627 | 1627 | 'repo_summary', repo_name=pr_source_repo.repo_name) |
|
1628 | 1628 | |
|
1629 | 1629 | # pull request specifics |
|
1630 | 1630 | pull_request_commits = [ |
|
1631 | 1631 | (x.raw_id, x.message) |
|
1632 | 1632 | for x in map(pr_source_repo.get_commit, pull_request.revisions)] |
|
1633 | 1633 | |
|
1634 | 1634 | current_rhodecode_user = user |
|
1635 | 1635 | kwargs = { |
|
1636 | 1636 | 'user': current_rhodecode_user, |
|
1637 | 1637 | 'pull_request_author': pull_request.author, |
|
1638 | 1638 | 'pull_request': pull_request_obj, |
|
1639 | 1639 | 'pull_request_commits': pull_request_commits, |
|
1640 | 1640 | |
|
1641 | 1641 | 'pull_request_target_repo': pr_target_repo, |
|
1642 | 1642 | 'pull_request_target_repo_url': pr_target_repo_url, |
|
1643 | 1643 | |
|
1644 | 1644 | 'pull_request_source_repo': pr_source_repo, |
|
1645 | 1645 | 'pull_request_source_repo_url': pr_source_repo_url, |
|
1646 | 1646 | |
|
1647 | 1647 | 'pull_request_url': pr_url, |
|
1648 | 1648 | 'thread_ids': [pr_url], |
|
1649 | 1649 | 'user_role': role |
|
1650 | 1650 | } |
|
1651 | 1651 | |
|
1652 | 1652 | # create notification objects, and emails |
|
1653 | 1653 | NotificationModel().create( |
|
1654 | 1654 | created_by=current_rhodecode_user, |
|
1655 | 1655 | notification_subject='', # Filled in based on the notification_type |
|
1656 | 1656 | notification_body='', # Filled in based on the notification_type |
|
1657 | 1657 | notification_type=notification_type, |
|
1658 | 1658 | recipients=recipients, |
|
1659 | 1659 | email_kwargs=kwargs, |
|
1660 | 1660 | ) |
|
1661 | 1661 | |
|
1662 | 1662 | def notify_reviewers(self, pull_request, reviewers_ids, user): |
|
1663 | 1663 | return self._notify_reviewers(pull_request, reviewers_ids, |
|
1664 | 1664 | PullRequestReviewers.ROLE_REVIEWER, user) |
|
1665 | 1665 | |
|
1666 | 1666 | def notify_observers(self, pull_request, observers_ids, user): |
|
1667 | 1667 | return self._notify_reviewers(pull_request, observers_ids, |
|
1668 | 1668 | PullRequestReviewers.ROLE_OBSERVER, user) |
|
1669 | 1669 | |
|
1670 | 1670 | def notify_users(self, pull_request, updating_user, ancestor_commit_id, |
|
1671 | 1671 | commit_changes, file_changes): |
|
1672 | 1672 | |
|
1673 | 1673 | updating_user_id = updating_user.user_id |
|
1674 | 1674 | reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()]) |
|
1675 | 1675 | # NOTE(marcink): send notification to all other users except to |
|
1676 | 1676 | # person who updated the PR |
|
1677 | 1677 | recipients = reviewers.difference(set([updating_user_id])) |
|
1678 | 1678 | |
|
1679 | 1679 | log.debug('Notify following recipients about pull-request update %s', recipients) |
|
1680 | 1680 | |
|
1681 | 1681 | pull_request_obj = pull_request |
|
1682 | 1682 | |
|
1683 | 1683 | # send email about the update |
|
1684 | 1684 | changed_files = ( |
|
1685 | 1685 | file_changes.added + file_changes.modified + file_changes.removed) |
|
1686 | 1686 | |
|
1687 | 1687 | pr_source_repo = pull_request_obj.source_repo |
|
1688 | 1688 | pr_target_repo = pull_request_obj.target_repo |
|
1689 | 1689 | |
|
1690 | 1690 | pr_url = h.route_url('pullrequest_show', |
|
1691 | 1691 | repo_name=pr_target_repo.repo_name, |
|
1692 | 1692 | pull_request_id=pull_request_obj.pull_request_id,) |
|
1693 | 1693 | |
|
1694 | 1694 | # set some variables for email notification |
|
1695 | 1695 | pr_target_repo_url = h.route_url( |
|
1696 | 1696 | 'repo_summary', repo_name=pr_target_repo.repo_name) |
|
1697 | 1697 | |
|
1698 | 1698 | pr_source_repo_url = h.route_url( |
|
1699 | 1699 | 'repo_summary', repo_name=pr_source_repo.repo_name) |
|
1700 | 1700 | |
|
1701 | 1701 | email_kwargs = { |
|
1702 | 1702 | 'date': datetime.datetime.now(), |
|
1703 | 1703 | 'updating_user': updating_user, |
|
1704 | 1704 | |
|
1705 | 1705 | 'pull_request': pull_request_obj, |
|
1706 | 1706 | |
|
1707 | 1707 | 'pull_request_target_repo': pr_target_repo, |
|
1708 | 1708 | 'pull_request_target_repo_url': pr_target_repo_url, |
|
1709 | 1709 | |
|
1710 | 1710 | 'pull_request_source_repo': pr_source_repo, |
|
1711 | 1711 | 'pull_request_source_repo_url': pr_source_repo_url, |
|
1712 | 1712 | |
|
1713 | 1713 | 'pull_request_url': pr_url, |
|
1714 | 1714 | |
|
1715 | 1715 | 'ancestor_commit_id': ancestor_commit_id, |
|
1716 | 1716 | 'added_commits': commit_changes.added, |
|
1717 | 1717 | 'removed_commits': commit_changes.removed, |
|
1718 | 1718 | 'changed_files': changed_files, |
|
1719 | 1719 | 'added_files': file_changes.added, |
|
1720 | 1720 | 'modified_files': file_changes.modified, |
|
1721 | 1721 | 'removed_files': file_changes.removed, |
|
1722 | 1722 | 'thread_ids': [pr_url], |
|
1723 | 1723 | } |
|
1724 | 1724 | |
|
1725 | 1725 | # create notification objects, and emails |
|
1726 | 1726 | NotificationModel().create( |
|
1727 | 1727 | created_by=updating_user, |
|
1728 | 1728 | notification_subject='', # Filled in based on the notification_type |
|
1729 | 1729 | notification_body='', # Filled in based on the notification_type |
|
1730 | 1730 | notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, |
|
1731 | 1731 | recipients=recipients, |
|
1732 | 1732 | email_kwargs=email_kwargs, |
|
1733 | 1733 | ) |
|
1734 | 1734 | |
|
1735 | 1735 | def delete(self, pull_request, user=None): |
|
1736 | 1736 | if not user: |
|
1737 | 1737 | user = getattr(get_current_rhodecode_user(), 'username', None) |
|
1738 | 1738 | |
|
1739 | 1739 | pull_request = self.__get_pull_request(pull_request) |
|
1740 | 1740 | old_data = pull_request.get_api_data(with_merge_state=False) |
|
1741 | 1741 | self._cleanup_merge_workspace(pull_request) |
|
1742 | 1742 | self._log_audit_action( |
|
1743 | 1743 | 'repo.pull_request.delete', {'old_data': old_data}, |
|
1744 | 1744 | user, pull_request) |
|
1745 | 1745 | Session().delete(pull_request) |
|
1746 | 1746 | |
|
1747 | 1747 | def close_pull_request(self, pull_request, user): |
|
1748 | 1748 | pull_request = self.__get_pull_request(pull_request) |
|
1749 | 1749 | self._cleanup_merge_workspace(pull_request) |
|
1750 | 1750 | pull_request.status = PullRequest.STATUS_CLOSED |
|
1751 | 1751 | pull_request.updated_on = datetime.datetime.now() |
|
1752 | 1752 | Session().add(pull_request) |
|
1753 | 1753 | self.trigger_pull_request_hook(pull_request, pull_request.author, 'close') |
|
1754 | 1754 | |
|
1755 | 1755 | pr_data = pull_request.get_api_data(with_merge_state=False) |
|
1756 | 1756 | self._log_audit_action( |
|
1757 | 1757 | 'repo.pull_request.close', {'data': pr_data}, user, pull_request) |
|
1758 | 1758 | |
|
1759 | 1759 | def close_pull_request_with_comment( |
|
1760 | 1760 | self, pull_request, user, repo, message=None, auth_user=None): |
|
1761 | 1761 | |
|
1762 | 1762 | pull_request_review_status = pull_request.calculated_review_status() |
|
1763 | 1763 | |
|
1764 | 1764 | if pull_request_review_status == ChangesetStatus.STATUS_APPROVED: |
|
1765 | 1765 | # approved only if we have voting consent |
|
1766 | 1766 | status = ChangesetStatus.STATUS_APPROVED |
|
1767 | 1767 | else: |
|
1768 | 1768 | status = ChangesetStatus.STATUS_REJECTED |
|
1769 | 1769 | status_lbl = ChangesetStatus.get_status_lbl(status) |
|
1770 | 1770 | |
|
1771 | 1771 | default_message = ( |
|
1772 | 1772 | 'Closing with status change {transition_icon} {status}.' |
|
1773 | 1773 | ).format(transition_icon='>', status=status_lbl) |
|
1774 | 1774 | text = message or default_message |
|
1775 | 1775 | |
|
1776 | 1776 | # create a comment, and link it to new status |
|
1777 | 1777 | comment = CommentsModel().create( |
|
1778 | 1778 | text=text, |
|
1779 | 1779 | repo=repo.repo_id, |
|
1780 | 1780 | user=user.user_id, |
|
1781 | 1781 | pull_request=pull_request.pull_request_id, |
|
1782 | 1782 | status_change=status_lbl, |
|
1783 | 1783 | status_change_type=status, |
|
1784 | 1784 | closing_pr=True, |
|
1785 | 1785 | auth_user=auth_user, |
|
1786 | 1786 | ) |
|
1787 | 1787 | |
|
1788 | 1788 | # calculate old status before we change it |
|
1789 | 1789 | old_calculated_status = pull_request.calculated_review_status() |
|
1790 | 1790 | ChangesetStatusModel().set_status( |
|
1791 | 1791 | repo.repo_id, |
|
1792 | 1792 | status, |
|
1793 | 1793 | user.user_id, |
|
1794 | 1794 | comment=comment, |
|
1795 | 1795 | pull_request=pull_request.pull_request_id |
|
1796 | 1796 | ) |
|
1797 | 1797 | |
|
1798 | 1798 | Session().flush() |
|
1799 | 1799 | |
|
1800 | 1800 | self.trigger_pull_request_hook(pull_request, user, 'comment', |
|
1801 | 1801 | data={'comment': comment}) |
|
1802 | 1802 | |
|
1803 | 1803 | # we now calculate the status of the pull request again, and based on that

1804 | 1804 | # calculation trigger a status change. This might happen in cases

1805 | 1805 | # where a non-reviewer admin closes a pr, which means their vote doesn't

1806 | 1806 | # change the status, while if they are a reviewer it might change it.
|
1807 | 1807 | calculated_status = pull_request.calculated_review_status() |
|
1808 | 1808 | if old_calculated_status != calculated_status: |
|
1809 | 1809 | self.trigger_pull_request_hook(pull_request, user, 'review_status_change', |
|
1810 | 1810 | data={'status': calculated_status}) |
|
1811 | 1811 | |
|
1812 | 1812 | # finally close the PR |
|
1813 | 1813 | PullRequestModel().close_pull_request(pull_request.pull_request_id, user) |
|
1814 | 1814 | |
|
1815 | 1815 | return comment, status |
|
1816 | 1816 | |
|
1817 | 1817 | def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False): |
|
1818 | 1818 | _ = translator or get_current_request().translate |
|
1819 | 1819 | |
|
1820 | 1820 | if not self._is_merge_enabled(pull_request): |
|
1821 | 1821 | return None, False, _('Server-side pull request merging is disabled.') |
|
1822 | 1822 | |
|
1823 | 1823 | if pull_request.is_closed(): |
|
1824 | 1824 | return None, False, _('This pull request is closed.') |
|
1825 | 1825 | |
|
1826 | 1826 | merge_possible, msg = self._check_repo_requirements( |
|
1827 | 1827 | target=pull_request.target_repo, source=pull_request.source_repo, |
|
1828 | 1828 | translator=_) |
|
1829 | 1829 | if not merge_possible: |
|
1830 | 1830 | return None, merge_possible, msg |
|
1831 | 1831 | |
|
1832 | 1832 | try: |
|
1833 | 1833 | merge_response = self._try_merge( |
|
1834 | 1834 | pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh) |
|
1835 | 1835 | log.debug("Merge response: %s", merge_response) |
|
1836 | 1836 | return merge_response, merge_response.possible, merge_response.merge_status_message |
|
1837 | 1837 | except NotImplementedError: |
|
1838 | 1838 | return None, False, _('Pull request merging is not supported.') |
|
1839 | 1839 | |
|
1840 | 1840 | def _check_repo_requirements(self, target, source, translator): |
|
1841 | 1841 | """ |
|
1842 | 1842 | Check if `target` and `source` have compatible requirements. |
|
1843 | 1843 | |
|
1844 | 1844 | Currently this is just checking for largefiles. |
|
1845 | 1845 | """ |
|
1846 | 1846 | _ = translator |
|
1847 | 1847 | target_has_largefiles = self._has_largefiles(target) |
|
1848 | 1848 | source_has_largefiles = self._has_largefiles(source) |
|
1849 | 1849 | merge_possible = True |
|
1850 | 1850 | message = u'' |
|
1851 | 1851 | |
|
1852 | 1852 | if target_has_largefiles != source_has_largefiles: |
|
1853 | 1853 | merge_possible = False |
|
1854 | 1854 | if source_has_largefiles: |
|
1855 | 1855 | message = _( |
|
1856 | 1856 | 'Target repository large files support is disabled.') |
|
1857 | 1857 | else: |
|
1858 | 1858 | message = _( |
|
1859 | 1859 | 'Source repository large files support is disabled.') |
|
1860 | 1860 | |
|
1861 | 1861 | return merge_possible, message |
|
1862 | 1862 | |
|
1863 | 1863 | def _has_largefiles(self, repo): |
|
1864 | 1864 | largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings( |
|
1865 | 1865 | 'extensions', 'largefiles') |
|
1866 | 1866 | return largefiles_ui and largefiles_ui[0].active |
|
1867 | 1867 | |
|
1868 | 1868 | def _try_merge(self, pull_request, force_shadow_repo_refresh=False): |
|
1869 | 1869 | """ |
|
1870 | 1870 | Try to merge the pull request and return the merge status. |
|
1871 | 1871 | """ |
|
1872 | 1872 | log.debug( |
|
1873 | 1873 | "Trying out if the pull request %s can be merged. Force_refresh=%s", |
|
1874 | 1874 | pull_request.pull_request_id, force_shadow_repo_refresh) |
|
1875 | 1875 | target_vcs = pull_request.target_repo.scm_instance() |
|
1876 | 1876 | # Refresh the target reference. |
|
1877 | 1877 | try: |
|
1878 | 1878 | target_ref = self._refresh_reference( |
|
1879 | 1879 | pull_request.target_ref_parts, target_vcs) |
|
1880 | 1880 | except CommitDoesNotExistError: |
|
1881 | 1881 | merge_state = MergeResponse( |
|
1882 | 1882 | False, False, None, MergeFailureReason.MISSING_TARGET_REF, |
|
1883 | 1883 | metadata={'target_ref': pull_request.target_ref_parts}) |
|
1884 | 1884 | return merge_state |
|
1885 | 1885 | |
|
1886 | 1886 | target_locked = pull_request.target_repo.locked |
|
1887 | 1887 | if target_locked and target_locked[0]: |
|
1888 | 1888 | locked_by = 'user:{}'.format(target_locked[0]) |
|
1889 | 1889 | log.debug("The target repository is locked by %s.", locked_by) |
|
1890 | 1890 | merge_state = MergeResponse( |
|
1891 | 1891 | False, False, None, MergeFailureReason.TARGET_IS_LOCKED, |
|
1892 | 1892 | metadata={'locked_by': locked_by}) |
|
1893 | 1893 | elif force_shadow_repo_refresh or self._needs_merge_state_refresh( |
|
1894 | 1894 | pull_request, target_ref): |
|
1895 | 1895 | log.debug("Refreshing the merge status of the repository.") |
|
1896 | 1896 | merge_state = self._refresh_merge_state( |
|
1897 | 1897 | pull_request, target_vcs, target_ref) |
|
1898 | 1898 | else: |
|
1899 | 1899 | possible = pull_request.last_merge_status == MergeFailureReason.NONE |
|
1900 | 1900 | metadata = { |
|
1901 | 1901 | 'unresolved_files': '', |
|
1902 | 1902 | 'target_ref': pull_request.target_ref_parts, |
|
1903 | 1903 | 'source_ref': pull_request.source_ref_parts, |
|
1904 | 1904 | } |
|
1905 | 1905 | if pull_request.last_merge_metadata: |
|
1906 | 1906 | metadata.update(pull_request.last_merge_metadata_parsed) |
|
1907 | 1907 | |
|
1908 | 1908 | if not possible and target_ref.type == 'branch': |
|
1909 | 1909 | # NOTE(marcink): case for mercurial multiple heads on branch |
|
1910 | 1910 | heads = target_vcs._heads(target_ref.name) |
|
1911 | 1911 | if len(heads) != 1: |
|
1912 | 1912 | heads = '\n,'.join(target_vcs._heads(target_ref.name)) |
|
1913 | 1913 | metadata.update({ |
|
1914 | 1914 | 'heads': heads |
|
1915 | 1915 | }) |
|
1916 | 1916 | |
|
1917 | 1917 | merge_state = MergeResponse( |
|
1918 | 1918 | possible, False, None, pull_request.last_merge_status, metadata=metadata) |
|
1919 | 1919 | |
|
1920 | 1920 | return merge_state |
|
1921 | 1921 | |
|
1922 | 1922 | def _refresh_reference(self, reference, vcs_repository): |
|
1923 | 1923 | if reference.type in self.UPDATABLE_REF_TYPES: |
|
1924 | 1924 | name_or_id = reference.name |
|
1925 | 1925 | else: |
|
1926 | 1926 | name_or_id = reference.commit_id |
|
1927 | 1927 | |
|
1928 | 1928 | refreshed_commit = vcs_repository.get_commit(name_or_id) |
|
1929 | 1929 | refreshed_reference = Reference( |
|
1930 | 1930 | reference.type, reference.name, refreshed_commit.raw_id) |
|
1931 | 1931 | return refreshed_reference |
|
1932 | 1932 | |
|
1933 | 1933 | def _needs_merge_state_refresh(self, pull_request, target_reference): |
|
1934 | 1934 | return not( |
|
1935 | 1935 | pull_request.revisions and |
|
1936 | 1936 | pull_request.revisions[0] == pull_request._last_merge_source_rev and |
|
1937 | 1937 | target_reference.commit_id == pull_request._last_merge_target_rev) |
|
1938 | 1938 | |
|
1939 | 1939 | def _refresh_merge_state(self, pull_request, target_vcs, target_reference): |
|
1940 | 1940 | workspace_id = self._workspace_id(pull_request) |
|
1941 | 1941 | source_vcs = pull_request.source_repo.scm_instance() |
|
1942 | 1942 | repo_id = pull_request.target_repo.repo_id |
|
1943 | 1943 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
1944 | 1944 | close_branch = self._close_branch_before_merging(pull_request) |
|
1945 | 1945 | merge_state = target_vcs.merge( |
|
1946 | 1946 | repo_id, workspace_id, |
|
1947 | 1947 | target_reference, source_vcs, pull_request.source_ref_parts, |
|
1948 | 1948 | dry_run=True, use_rebase=use_rebase, |
|
1949 | 1949 | close_branch=close_branch) |
|
1950 | 1950 | |
|
1951 | 1951 | # Do not store the response if there was an unknown error. |
|
1952 | 1952 | if merge_state.failure_reason != MergeFailureReason.UNKNOWN: |
|
1953 | 1953 | pull_request._last_merge_source_rev = \ |
|
1954 | 1954 | pull_request.source_ref_parts.commit_id |
|
1955 | 1955 | pull_request._last_merge_target_rev = target_reference.commit_id |
|
1956 | 1956 | pull_request.last_merge_status = merge_state.failure_reason |
|
1957 | 1957 | pull_request.last_merge_metadata = merge_state.metadata |
|
1958 | 1958 | |
|
1959 | 1959 | pull_request.shadow_merge_ref = merge_state.merge_ref |
|
1960 | 1960 | Session().add(pull_request) |
|
1961 | 1961 | Session().commit() |
|
1962 | 1962 | |
|
1963 | 1963 | return merge_state |
|
1964 | 1964 | |
|
1965 | 1965 | def _workspace_id(self, pull_request): |
|
1966 | 1966 | workspace_id = 'pr-%s' % pull_request.pull_request_id |
|
1967 | 1967 | return workspace_id |
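# e.g. pull request 42 (hypothetical id) maps to the workspace id 'pr-42'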
|
1968 | 1968 | |
|
1969 | 1969 | def generate_repo_data(self, repo, commit_id=None, branch=None, |
|
1970 | 1970 | bookmark=None, translator=None): |
|
1971 | 1971 | from rhodecode.model.repo import RepoModel |
|
1972 | 1972 | |
|
1973 | 1973 | all_refs, selected_ref = \ |
|
1974 | 1974 | self._get_repo_pullrequest_sources( |
|
1975 | 1975 | repo.scm_instance(), commit_id=commit_id, |
|
1976 | 1976 | branch=branch, bookmark=bookmark, translator=translator) |
|
1977 | 1977 | |
|
1978 | 1978 | refs_select2 = [] |
|
1979 | 1979 | for element in all_refs: |
|
1980 | 1980 | children = [{'id': x[0], 'text': x[1]} for x in element[0]] |
|
1981 | 1981 | refs_select2.append({'text': element[1], 'children': children}) |
|
1982 | 1982 | |
|
1983 | 1983 | return { |
|
1984 | 1984 | 'user': { |
|
1985 | 1985 | 'user_id': repo.user.user_id, |
|
1986 | 1986 | 'username': repo.user.username, |
|
1987 | 1987 | 'firstname': repo.user.first_name, |
|
1988 | 1988 | 'lastname': repo.user.last_name, |
|
1989 | 1989 | 'gravatar_link': h.gravatar_url(repo.user.email, 14), |
|
1990 | 1990 | }, |
|
1991 | 1991 | 'name': repo.repo_name, |
|
1992 | 1992 | 'link': RepoModel().get_url(repo), |
|
1993 | 1993 | 'description': h.chop_at_smart(repo.description_safe, '\n'), |
|
1994 | 1994 | 'refs': { |
|
1995 | 1995 | 'all_refs': all_refs, |
|
1996 | 1996 | 'selected_ref': selected_ref, |
|
1997 | 1997 | 'select2_refs': refs_select2 |
|
1998 | 1998 | } |
|
1999 | 1999 | } |
|
2000 | 2000 | |
|
2001 | 2001 | def generate_pullrequest_title(self, source, source_ref, target): |
|
2002 | 2002 | return u'{source}#{at_ref} to {target}'.format( |
|
2003 | 2003 | source=source, |
|
2004 | 2004 | at_ref=source_ref, |
|
2005 | 2005 | target=target, |
|
2006 | 2006 | ) |
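# e.g. (hypothetical values) generate_pullrequest_title('repo-a', 'feature-x', 'repo-b')
# returns u'repo-a#feature-x to repo-b'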
|
2007 | 2007 | |
|
2008 | 2008 | def _cleanup_merge_workspace(self, pull_request): |
|
2009 | 2009 | # Merging related cleanup |
|
2010 | 2010 | repo_id = pull_request.target_repo.repo_id |
|
2011 | 2011 | target_scm = pull_request.target_repo.scm_instance() |
|
2012 | 2012 | workspace_id = self._workspace_id(pull_request) |
|
2013 | 2013 | |
|
2014 | 2014 | try: |
|
2015 | 2015 | target_scm.cleanup_merge_workspace(repo_id, workspace_id) |
|
2016 | 2016 | except NotImplementedError: |
|
2017 | 2017 | pass |
|
2018 | 2018 | |
|
2019 | 2019 | def _get_repo_pullrequest_sources( |
|
2020 | 2020 | self, repo, commit_id=None, branch=None, bookmark=None, |
|
2021 | 2021 | translator=None): |
|
2022 | 2022 | """ |
|
2023 | 2023 | Return a structure with repo's interesting commits, suitable for |
|
2024 | 2024 | the selectors in pullrequest controller |
|
2025 | 2025 | |
|
2026 | 2026 | :param commit_id: a commit that must be in the list somehow |
|
2027 | 2027 | and selected by default |
|
2028 | 2028 | :param branch: a branch that must be in the list and selected |
|
2029 | 2029 | by default - even if closed |
|
2030 | 2030 | :param bookmark: a bookmark that must be in the list and selected |
|
2031 | 2031 | """ |
|
2032 | 2032 | _ = translator or get_current_request().translate |
|
2033 | 2033 | |
|
2034 | 2034 | commit_id = safe_str(commit_id) if commit_id else None |
|
2035 | 2035 | branch = safe_unicode(branch) if branch else None |
|
2036 | 2036 | bookmark = safe_unicode(bookmark) if bookmark else None |
|
2037 | 2037 | |
|
2038 | 2038 | selected = None |
|
2039 | 2039 | |
|
2040 | 2040 | # order matters: first source that has commit_id in it will be selected |
|
2041 | 2041 | sources = [] |
|
2042 | 2042 | sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark)) |
|
2043 | 2043 | sources.append(('branch', repo.branches.items(), _('Branches'), branch)) |
|
2044 | 2044 | |
|
2045 | 2045 | if commit_id: |
|
2046 | 2046 | ref_commit = (h.short_id(commit_id), commit_id) |
|
2047 | 2047 | sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id)) |
|
2048 | 2048 | |
|
2049 | 2049 | sources.append( |
|
2050 | 2050 | ('branch', repo.branches_closed.items(), _('Closed Branches'), branch), |
|
2051 | 2051 | ) |
|
2052 | 2052 | |
|
2053 | 2053 | groups = [] |
|
2054 | 2054 | |
|
2055 | 2055 | for group_key, ref_list, group_name, match in sources: |
|
2056 | 2056 | group_refs = [] |
|
2057 | 2057 | for ref_name, ref_id in ref_list: |
|
2058 | 2058 | ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id) |
|
2059 | 2059 | group_refs.append((ref_key, ref_name)) |
|
2060 | 2060 | |
|
2061 | 2061 | if not selected: |
|
2062 | 2062 | if set([commit_id, match]) & set([ref_id, ref_name]): |
|
2063 | 2063 | selected = ref_key |
|
2064 | 2064 | |
|
2065 | 2065 | if group_refs: |
|
2066 | 2066 | groups.append((group_refs, group_name)) |
|
2067 | 2067 | |
|
2068 | 2068 | if not selected: |
|
2069 | 2069 | ref = commit_id or branch or bookmark |
|
2070 | 2070 | if ref: |
|
2071 | 2071 | raise CommitDoesNotExistError( |
|
2072 | 2072 | u'No commit refs could be found matching: {}'.format(ref)) |
|
2073 | 2073 | elif repo.DEFAULT_BRANCH_NAME in repo.branches: |
|
2074 | 2074 | selected = u'branch:{}:{}'.format( |
|
2075 | 2075 | safe_unicode(repo.DEFAULT_BRANCH_NAME), |
|
2076 | 2076 | safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME]) |
|
2077 | 2077 | ) |
|
2078 | 2078 | elif repo.commit_ids: |
|
2079 | 2079 | # make the user select in this case |
|
2080 | 2080 | selected = None |
|
2081 | 2081 | else: |
|
2082 | 2082 | raise EmptyRepositoryError() |
|
2083 | 2083 | return groups, selected |
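# Each entry in `groups` pairs a list of (ref_key, ref_name) tuples with a group
# label; ref_key (and `selected`) use the 'type:name:commit_id' form, e.g.
# (hypothetical) 'branch:default:1a2b3c4d'.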
|
2084 | 2084 | |
|
2085 | 2085 | def get_diff(self, source_repo, source_ref_id, target_ref_id, |
|
2086 | 2086 | hide_whitespace_changes, diff_context): |
|
2087 | 2087 | |
|
2088 | 2088 | return self._get_diff_from_pr_or_version( |
|
2089 | 2089 | source_repo, source_ref_id, target_ref_id, |
|
2090 | 2090 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) |
|
2091 | 2091 | |
|
2092 | 2092 | def _get_diff_from_pr_or_version( |
|
2093 | 2093 | self, source_repo, source_ref_id, target_ref_id, |
|
2094 | 2094 | hide_whitespace_changes, diff_context): |
|
2095 | 2095 | |
|
2096 | 2096 | target_commit = source_repo.get_commit( |
|
2097 | 2097 | commit_id=safe_str(target_ref_id)) |
|
2098 | 2098 | source_commit = source_repo.get_commit( |
|
2099 | 2099 | commit_id=safe_str(source_ref_id), maybe_unreachable=True) |
|
2100 | 2100 | if isinstance(source_repo, Repository): |
|
2101 | 2101 | vcs_repo = source_repo.scm_instance() |
|
2102 | 2102 | else: |
|
2103 | 2103 | vcs_repo = source_repo |
|
2104 | 2104 | |
|
2105 | 2105 | # TODO: johbo: In the context of an update, we cannot reach |
|
2106 | 2106 | # the old commit anymore with our normal mechanisms. It needs |
|
2107 | 2107 | # some sort of special support in the vcs layer to avoid this |
|
2108 | 2108 | # workaround. |
|
2109 | 2109 | if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and |
|
2110 | 2110 | vcs_repo.alias == 'git'): |
|
2111 | 2111 | source_commit.raw_id = safe_str(source_ref_id) |
|
2112 | 2112 | |
|
2113 | 2113 | log.debug('calculating diff between ' |
|
2114 | 2114 | 'source_ref:%s and target_ref:%s for repo `%s`', |
|
2115 | 2115 | target_ref_id, source_ref_id, |
|
2116 | 2116 | safe_unicode(vcs_repo.path)) |
|
2117 | 2117 | |
|
2118 | 2118 | vcs_diff = vcs_repo.get_diff( |
|
2119 | 2119 | commit1=target_commit, commit2=source_commit, |
|
2120 | 2120 | ignore_whitespace=hide_whitespace_changes, context=diff_context) |
|
2121 | 2121 | return vcs_diff |
|
2122 | 2122 | |
|
2123 | 2123 | def _is_merge_enabled(self, pull_request): |
|
2124 | 2124 | return self._get_general_setting( |
|
2125 | 2125 | pull_request, 'rhodecode_pr_merge_enabled') |
|
2126 | 2126 | |
|
2127 | 2127 | def _use_rebase_for_merging(self, pull_request): |
|
2128 | 2128 | repo_type = pull_request.target_repo.repo_type |
|
2129 | 2129 | if repo_type == 'hg': |
|
2130 | 2130 | return self._get_general_setting( |
|
2131 | 2131 | pull_request, 'rhodecode_hg_use_rebase_for_merging') |
|
2132 | 2132 | elif repo_type == 'git': |
|
2133 | 2133 | return self._get_general_setting( |
|
2134 | 2134 | pull_request, 'rhodecode_git_use_rebase_for_merging') |
|
2135 | 2135 | |
|
2136 | 2136 | return False |
|
2137 | 2137 | |
|
2138 | 2138 | def _user_name_for_merging(self, pull_request, user): |
|
2139 | 2139 | env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '') |
|
2140 | 2140 | if env_user_name_attr and hasattr(user, env_user_name_attr): |
|
2141 | 2141 | user_name_attr = env_user_name_attr |
|
2142 | 2142 | else: |
|
2143 | 2143 | user_name_attr = 'short_contact' |
|
2144 | 2144 | |
|
2145 | 2145 | user_name = getattr(user, user_name_attr) |
|
2146 | 2146 | return user_name |
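# Illustrative override (hypothetical setup): setting the environment variable
#   RC_MERGE_USER_NAME_ATTR=username
# makes merges record `user.username` instead of the default `user.short_contact`.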
|
2147 | 2147 | |
|
2148 | 2148 | def _close_branch_before_merging(self, pull_request): |
|
2149 | 2149 | repo_type = pull_request.target_repo.repo_type |
|
2150 | 2150 | if repo_type == 'hg': |
|
2151 | 2151 | return self._get_general_setting( |
|
2152 | 2152 | pull_request, 'rhodecode_hg_close_branch_before_merging') |
|
2153 | 2153 | elif repo_type == 'git': |
|
2154 | 2154 | return self._get_general_setting( |
|
2155 | 2155 | pull_request, 'rhodecode_git_close_branch_before_merging') |
|
2156 | 2156 | |
|
2157 | 2157 | return False |
|
2158 | 2158 | |
|
2159 | 2159 | def _get_general_setting(self, pull_request, settings_key, default=False): |
|
2160 | 2160 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
2161 | 2161 | settings = settings_model.get_general_settings() |
|
2162 | 2162 | return settings.get(settings_key, default) |
|
2163 | 2163 | |
|
2164 | 2164 | def _log_audit_action(self, action, action_data, user, pull_request): |
|
2165 | 2165 | audit_logger.store( |
|
2166 | 2166 | action=action, |
|
2167 | 2167 | action_data=action_data, |
|
2168 | 2168 | user=user, |
|
2169 | 2169 | repo=pull_request.target_repo) |
|
2170 | 2170 | |
|
2171 | 2171 | def get_reviewer_functions(self): |
|
2172 | 2172 | """ |
|
2173 | 2173 | Fetches functions for validation and fetching default reviewers. |
|
2174 | 2174 | If available we use the EE package, else we fall back to the CE

2175 | 2175 | package functions
|
2176 | 2176 | """ |
|
2177 | 2177 | try: |
|
2178 | 2178 | from rc_reviewers.utils import get_default_reviewers_data |
|
2179 | 2179 | from rc_reviewers.utils import validate_default_reviewers |
|
2180 | 2180 | from rc_reviewers.utils import validate_observers |
|
2181 | 2181 | except ImportError: |
|
2182 | 2182 | from rhodecode.apps.repository.utils import get_default_reviewers_data |
|
2183 | 2183 | from rhodecode.apps.repository.utils import validate_default_reviewers |
|
2184 | 2184 | from rhodecode.apps.repository.utils import validate_observers |
|
2185 | 2185 | |
|
2186 | 2186 | return get_default_reviewers_data, validate_default_reviewers, validate_observers |
|
2187 | 2187 | |
|
2188 | 2188 | |
|
2189 | 2189 | class MergeCheck(object): |
|
2190 | 2190 | """ |
|
2191 | 2191 | Performs merge checks and returns a check object which stores information

2192 | 2192 | about merge errors and merge conditions
|
2193 | 2193 | """ |
|
2194 | 2194 | TODO_CHECK = 'todo' |
|
2195 | 2195 | PERM_CHECK = 'perm' |
|
2196 | 2196 | REVIEW_CHECK = 'review' |
|
2197 | 2197 | MERGE_CHECK = 'merge' |
|
2198 | 2198 | WIP_CHECK = 'wip' |
|
2199 | 2199 | |
|
2200 | 2200 | def __init__(self): |
|
2201 | 2201 | self.review_status = None |
|
2202 | 2202 | self.merge_possible = None |
|
2203 | 2203 | self.merge_msg = '' |
|
2204 | 2204 | self.merge_response = None |
|
2205 | 2205 | self.failed = None |
|
2206 | 2206 | self.errors = [] |
|
2207 | 2207 | self.error_details = OrderedDict() |
|
2208 | 2208 | self.source_commit = AttributeDict() |
|
2209 | 2209 | self.target_commit = AttributeDict() |
|
2210 | 2210 | self.reviewers_count = 0 |
|
2211 | 2211 | self.observers_count = 0 |
|
2212 | 2212 | |
|
2213 | 2213 | def __repr__(self): |
|
2214 | 2214 | return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format( |
|
2215 | 2215 | self.merge_possible, self.failed, self.errors) |
|
2216 | 2216 | |
|
2217 | 2217 | def push_error(self, error_type, message, error_key, details): |
|
2218 | 2218 | self.failed = True |
|
2219 | 2219 | self.errors.append([error_type, message]) |
|
2220 | 2220 | self.error_details[error_key] = dict( |
|
2221 | 2221 | details=details, |
|
2222 | 2222 | error_type=error_type, |
|
2223 | 2223 | message=message |
|
2224 | 2224 | ) |
|
2225 | 2225 | |
|
2226 | 2226 | @classmethod |
|
2227 | 2227 | def validate(cls, pull_request, auth_user, translator, fail_early=False, |
|
2228 | 2228 | force_shadow_repo_refresh=False): |
|
2229 | 2229 | _ = translator |
|
2230 | 2230 | merge_check = cls() |
|
2231 | 2231 | |
|
2232 | 2232 | # title has WIP: |
|
2233 | 2233 | if pull_request.work_in_progress: |
|
2234 | 2234 | log.debug("MergeCheck: cannot merge, title has wip: marker.") |
|
2235 | 2235 | |
|
2236 | 2236 | msg = _('WIP marker in title prevents an accidental merge.')
|
2237 | 2237 | merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title) |
|
2238 | 2238 | if fail_early: |
|
2239 | 2239 | return merge_check |
|
2240 | 2240 | |
|
2241 | 2241 | # permissions to merge |
|
2242 | 2242 | user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user) |
|
2243 | 2243 | if not user_allowed_to_merge: |
|
2244 | 2244 | log.debug("MergeCheck: cannot merge, approval is pending.") |
|
2245 | 2245 | |
|
2246 | 2246 | msg = _('User `{}` is not allowed to perform a merge.').format(auth_user.username)
|
2247 | 2247 | merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username) |
|
2248 | 2248 | if fail_early: |
|
2249 | 2249 | return merge_check |
|
2250 | 2250 | |
|
2251 | 2251 | # permission to merge into the target branch |
|
2252 | 2252 | target_commit_id = pull_request.target_ref_parts.commit_id |
|
2253 | 2253 | if pull_request.target_ref_parts.type == 'branch': |
|
2254 | 2254 | branch_name = pull_request.target_ref_parts.name |
|
2255 | 2255 | else: |
|
2256 | 2256 | # for mercurial we can always figure out the branch from the commit |
|
2257 | 2257 | # in case of bookmark |
|
2258 | 2258 | target_commit = pull_request.target_repo.get_commit(target_commit_id) |
|
2259 | 2259 | branch_name = target_commit.branch |
|
2260 | 2260 | |
|
2261 | 2261 | rule, branch_perm = auth_user.get_rule_and_branch_permission( |
|
2262 | 2262 | pull_request.target_repo.repo_name, branch_name) |
|
2263 | 2263 | if branch_perm and branch_perm == 'branch.none': |
|
2264 | 2264 | msg = _('Target branch `{}` changes rejected by rule {}.').format( |
|
2265 | 2265 | branch_name, rule) |
|
2266 | 2266 | merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username) |
|
2267 | 2267 | if fail_early: |
|
2268 | 2268 | return merge_check |
|
2269 | 2269 | |
|
2270 | 2270 | # review status, must be always present |
|
2271 | 2271 | review_status = pull_request.calculated_review_status() |
|
2272 | 2272 | merge_check.review_status = review_status |
|
2273 | 2273 | merge_check.reviewers_count = pull_request.reviewers_count |
|
2274 | 2274 | merge_check.observers_count = pull_request.observers_count |
|
2275 | 2275 | |
|
2276 | 2276 | status_approved = review_status == ChangesetStatus.STATUS_APPROVED |
|
2277 | 2277 | if not status_approved and merge_check.reviewers_count: |
|
2278 | 2278 | log.debug("MergeCheck: cannot merge, approval is pending.") |
|
2279 | 2279 | msg = _('Pull request reviewer approval is pending.') |
|
2280 | 2280 | |
|
2281 | 2281 | merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status) |
|
2282 | 2282 | |
|
2283 | 2283 | if fail_early: |
|
2284 | 2284 | return merge_check |
|
2285 | 2285 | |
|
2286 | 2286 | # left over TODOs |
|
2287 | 2287 | todos = CommentsModel().get_pull_request_unresolved_todos(pull_request) |
|
2288 | 2288 | if todos: |
|
2289 | 2289 | log.debug("MergeCheck: cannot merge, {} " |
|
2290 | 2290 | "unresolved TODOs left.".format(len(todos))) |
|
2291 | 2291 | |
|
2292 | 2292 | if len(todos) == 1: |
|
2293 | 2293 | msg = _('Cannot merge, {} TODO still not resolved.').format( |
|
2294 | 2294 | len(todos)) |
|
2295 | 2295 | else: |
|
2296 | 2296 | msg = _('Cannot merge, {} TODOs still not resolved.').format( |
|
2297 | 2297 | len(todos)) |
|
2298 | 2298 | |
|
2299 | 2299 | merge_check.push_error('warning', msg, cls.TODO_CHECK, todos) |
|
2300 | 2300 | |
|
2301 | 2301 | if fail_early: |
|
2302 | 2302 | return merge_check |
|
2303 | 2303 | |
|
2304 | 2304 | # merge possible: simulate the merge on the filesystem using the shadow repo
|
2305 | 2305 | merge_response, merge_status, msg = PullRequestModel().merge_status( |
|
2306 | 2306 | pull_request, translator=translator, |
|
2307 | 2307 | force_shadow_repo_refresh=force_shadow_repo_refresh) |
|
2308 | 2308 | |
|
2309 | 2309 | merge_check.merge_possible = merge_status |
|
2310 | 2310 | merge_check.merge_msg = msg |
|
2311 | 2311 | merge_check.merge_response = merge_response |
|
2312 | 2312 | |
|
2313 | 2313 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
2314 | 2314 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
2315 | 2315 | |
|
2316 | 2316 | try: |
|
2317 | 2317 | source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request) |
|
2318 | 2318 | merge_check.source_commit.changed = source_ref_id != source_commit.raw_id |
|
2319 | 2319 | merge_check.source_commit.ref_spec = pull_request.source_ref_parts |
|
2320 | 2320 | merge_check.source_commit.current_raw_id = source_commit.raw_id |
|
2321 | 2321 | merge_check.source_commit.previous_raw_id = source_ref_id |
|
2322 | 2322 | |
|
2323 | 2323 | merge_check.target_commit.changed = target_ref_id != target_commit.raw_id |
|
2324 | 2324 | merge_check.target_commit.ref_spec = pull_request.target_ref_parts |
|
2325 | 2325 | merge_check.target_commit.current_raw_id = target_commit.raw_id |
|
2326 | 2326 | merge_check.target_commit.previous_raw_id = target_ref_id |
|
2327 | 2327 | except (SourceRefMissing, TargetRefMissing): |
|
2328 | 2328 | pass |
|
2329 | 2329 | |
|
2330 | 2330 | if not merge_status: |
|
2331 | 2331 | log.debug("MergeCheck: cannot merge, pull request merge not possible.") |
|
2332 | 2332 | merge_check.push_error('warning', msg, cls.MERGE_CHECK, None) |
|
2333 | 2333 | |
|
2334 | 2334 | if fail_early: |
|
2335 | 2335 | return merge_check |
|
2336 | 2336 | |
|
2337 | 2337 | log.debug('MergeCheck: is failed: %s', merge_check.failed) |
|
2338 | 2338 | return merge_check |
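A hedged usage sketch of the classmethod above, roughly as a view might call it; `pull_request`, `auth_user` and `request.translate` are assumed to be provided by the caller and are not defined in this hunk.

# Illustrative only: run all merge checks, stopping at the first failure.
merge_check = MergeCheck.validate(
    pull_request, auth_user=auth_user, translator=request.translate,
    fail_early=True)
if merge_check.failed:
    for error_type, message in merge_check.errors:
        log.warning('merge blocked (%s): %s', error_type, message)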
|
2339 | 2339 | |
|
2340 | 2340 | @classmethod |
|
2341 | 2341 | def get_merge_conditions(cls, pull_request, translator): |
|
2342 | 2342 | _ = translator |
|
2343 | 2343 | merge_details = {} |
|
2344 | 2344 | |
|
2345 | 2345 | model = PullRequestModel() |
|
2346 | 2346 | use_rebase = model._use_rebase_for_merging(pull_request) |
|
2347 | 2347 | |
|
2348 | 2348 | if use_rebase: |
|
2349 | 2349 | merge_details['merge_strategy'] = dict( |
|
2350 | 2350 | details={}, |
|
2351 | 2351 | message=_('Merge strategy: rebase') |
|
2352 | 2352 | ) |
|
2353 | 2353 | else: |
|
2354 | 2354 | merge_details['merge_strategy'] = dict( |
|
2355 | 2355 | details={}, |
|
2356 | 2356 | message=_('Merge strategy: explicit merge commit') |
|
2357 | 2357 | ) |
|
2358 | 2358 | |
|
2359 | 2359 | close_branch = model._close_branch_before_merging(pull_request) |
|
2360 | 2360 | if close_branch: |
|
2361 | 2361 | repo_type = pull_request.target_repo.repo_type |
|
2362 | 2362 | close_msg = '' |
|
2363 | 2363 | if repo_type == 'hg': |
|
2364 | 2364 | close_msg = _('Source branch will be closed before the merge.') |
|
2365 | 2365 | elif repo_type == 'git': |
|
2366 | 2366 | close_msg = _('Source branch will be deleted after the merge.') |
|
2367 | 2367 | |
|
2368 | 2368 | merge_details['close_branch'] = dict( |
|
2369 | 2369 | details={}, |
|
2370 | 2370 | message=close_msg |
|
2371 | 2371 | ) |
|
2372 | 2372 | |
|
2373 | 2373 | return merge_details |
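A short sketch of consuming the returned mapping, based only on the structure built above; `pull_request` and `request.translate` are again assumed to come from the caller.

# Illustrative only: each entry carries a 'details' dict and a translated
# 'message' describing the merge condition.
conditions = MergeCheck.get_merge_conditions(
    pull_request, translator=request.translate)
for key, condition in conditions.items():
    log.debug('merge condition %s: %s', key, condition['message'])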
|
2374 | 2374 | |
|
2375 | 2375 | |
|
2376 | 2376 | ChangeTuple = collections.namedtuple( |
|
2377 | 2377 | 'ChangeTuple', ['added', 'common', 'removed', 'total']) |
|
2378 | 2378 | |
|
2379 | 2379 | FileChangeTuple = collections.namedtuple( |
|
2380 | 2380 | 'FileChangeTuple', ['added', 'modified', 'removed']) |
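For completeness, a trivial construction of the two tuples defined above; the commit id and file name are made up for illustration.

# Illustrative only.
change = ChangeTuple(added=['deadbeef'], common=[], removed=[], total=1)
file_change = FileChangeTuple(added=['README.rst'], modified=[], removed=[])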
|