Show More
@@ -1,2491 +1,2506 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2011-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import logging |
|
21 | import logging | |
22 | import time |
|
22 | import time | |
23 |
|
23 | |||
24 | import rhodecode |
|
24 | import rhodecode | |
25 | from rhodecode.api import ( |
|
25 | from rhodecode.api import ( | |
26 | jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError) |
|
26 | jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError) | |
27 | from rhodecode.api.utils import ( |
|
27 | from rhodecode.api.utils import ( | |
28 | has_superadmin_permission, Optional, OAttr, get_repo_or_error, |
|
28 | has_superadmin_permission, Optional, OAttr, get_repo_or_error, | |
29 | get_user_group_or_error, get_user_or_error, validate_repo_permissions, |
|
29 | get_user_group_or_error, get_user_or_error, validate_repo_permissions, | |
30 | get_perm_or_error, parse_args, get_origin, build_commit_data, |
|
30 | get_perm_or_error, parse_args, get_origin, build_commit_data, | |
31 | validate_set_owner_permissions) |
|
31 | validate_set_owner_permissions) | |
32 | from rhodecode.lib import audit_logger, rc_cache |
|
32 | from rhodecode.lib import audit_logger, rc_cache | |
33 | from rhodecode.lib import repo_maintenance |
|
33 | from rhodecode.lib import repo_maintenance | |
34 | from rhodecode.lib.auth import ( |
|
34 | from rhodecode.lib.auth import ( | |
35 | HasPermissionAnyApi, HasUserGroupPermissionAnyApi, |
|
35 | HasPermissionAnyApi, HasUserGroupPermissionAnyApi, | |
36 | HasRepoPermissionAnyApi) |
|
36 | HasRepoPermissionAnyApi) | |
37 | from rhodecode.lib.celerylib.utils import get_task_id |
|
37 | from rhodecode.lib.celerylib.utils import get_task_id | |
38 | from rhodecode.lib.utils2 import ( |
|
38 | from rhodecode.lib.utils2 import ( | |
39 | str2bool, time_to_datetime, safe_str, safe_int, safe_unicode) |
|
39 | str2bool, time_to_datetime, safe_str, safe_int, safe_unicode) | |
40 | from rhodecode.lib.ext_json import json |
|
40 | from rhodecode.lib.ext_json import json | |
41 | from rhodecode.lib.exceptions import ( |
|
41 | from rhodecode.lib.exceptions import ( | |
42 | StatusChangeOnClosedPullRequestError, CommentVersionMismatch) |
|
42 | StatusChangeOnClosedPullRequestError, CommentVersionMismatch) | |
43 | from rhodecode.lib.vcs import RepositoryError |
|
43 | from rhodecode.lib.vcs import RepositoryError | |
44 | from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError |
|
44 | from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError | |
45 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
45 | from rhodecode.model.changeset_status import ChangesetStatusModel | |
46 | from rhodecode.model.comment import CommentsModel |
|
46 | from rhodecode.model.comment import CommentsModel | |
47 | from rhodecode.model.db import ( |
|
47 | from rhodecode.model.db import ( | |
48 | Session, ChangesetStatus, RepositoryField, Repository, RepoGroup, |
|
48 | Session, ChangesetStatus, RepositoryField, Repository, RepoGroup, | |
49 | ChangesetComment) |
|
49 | ChangesetComment) | |
50 | from rhodecode.model.permission import PermissionModel |
|
50 | from rhodecode.model.permission import PermissionModel | |
|
51 | from rhodecode.model.pull_request import PullRequestModel | |||
51 | from rhodecode.model.repo import RepoModel |
|
52 | from rhodecode.model.repo import RepoModel | |
52 | from rhodecode.model.scm import ScmModel, RepoList |
|
53 | from rhodecode.model.scm import ScmModel, RepoList | |
53 | from rhodecode.model.settings import SettingsModel, VcsSettingsModel |
|
54 | from rhodecode.model.settings import SettingsModel, VcsSettingsModel | |
54 | from rhodecode.model import validation_schema |
|
55 | from rhodecode.model import validation_schema | |
55 | from rhodecode.model.validation_schema.schemas import repo_schema |
|
56 | from rhodecode.model.validation_schema.schemas import repo_schema | |
56 |
|
57 | |||
57 | log = logging.getLogger(__name__) |
|
58 | log = logging.getLogger(__name__) | |
58 |
|
59 | |||
59 |
|
60 | |||
@jsonrpc_method()
def get_repo(request, apiuser, repoid, cache=Optional(True)):
    """
    Gets an existing repository by its name or repository_id.

    The members section so the output returns users groups or users
    associated with that repository.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id.
    :type repoid: str or int
    :param cache: use the cached value for last changeset
    :type: cache: Optional(bool)

    Returns a dict with the repository API data, enriched with a
    ``permissions`` list (users and user groups with their permission and
    origin) and a ``followers`` list of user API payloads.
    """
    repo = get_repo_or_error(repoid)
    use_cached_commit = Optional.extract(cache)

    # Super-admins receive secret fields in the payload; everyone else
    # must hold at least read permission on this repository.
    if has_superadmin_permission(apiuser):
        include_secrets = True
    else:
        include_secrets = False
        # check if we have at least read permission for this repo !
        validate_repo_permissions(
            apiuser, repoid, repo,
            ('repository.admin', 'repository.write', 'repository.read',))

    def _user_perm_entry(member):
        # flat permission record for a single user
        return {
            'name': member.username,
            'permission': member.permission,
            'origin': get_origin(member),
            'type': "user",
        }

    def _group_perm_entry(member):
        # flat permission record for a single user group
        return {
            'name': member.users_group_name,
            'permission': member.permission,
            'origin': get_origin(member),
            'type': "user_group",
        }

    permissions = [_user_perm_entry(u) for u in repo.permissions()]
    permissions.extend(
        _group_perm_entry(g) for g in repo.permission_user_groups())

    following_users = [
        follower.user.get_api_data(include_secrets=include_secrets)
        for follower in repo.followers]

    if not use_cached_commit:
        # refresh the stored last-changeset information before reading it
        repo.update_commit_cache()

    data = repo.get_api_data(include_secrets=include_secrets)
    data['permissions'] = permissions
    data['followers'] = following_users
    return data
207 |
|
208 | |||
208 |
|
209 | |||
@jsonrpc_method()
def get_repos(request, apiuser, root=Optional(None), traverse=Optional(True)):
    """
    Lists all existing repositories.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param root: specify root repository group to fetch repositories.
        filters the returned repositories to be members of given root group.
    :type root: Optional(None)
    :param traverse: traverse given root into subrepositories. With this flag
        set to False, it will only return top-level repositories from `root`.
        if root is empty it will return just top-level repositories.
    :type traverse: Optional(True)

    Returns a list of repository API payloads, filtered to those the
    calling user can at least read.
    """
    include_secrets = has_superadmin_permission(apiuser)
    _perms = ('repository.read', 'repository.write', 'repository.admin',)
    extras = {'user': apiuser}

    root = Optional.extract(root)
    traverse = Optional.extract(traverse, binary=True)

    model = RepoModel()
    if root:
        # verify parent existance, if it's empty return an error
        parent = RepoGroup.get_by_group_name(root)
        if not parent:
            raise JSONRPCError(
                'Root repository group `{}` does not exist'.format(root))
        if traverse:
            repos = model.get_repos_for_root(root=root, traverse=traverse)
        else:
            repos = model.get_repos_for_root(root=parent)
    elif traverse:
        repos = model.get_all()
    else:
        # return just top-level
        repos = model.get_repos_for_root(root=None)

    # RepoList applies per-repo permission filtering on iteration
    repo_list = RepoList(repos, perm_set=_perms, extra_kwargs=extras)
    return [
        repo.get_api_data(include_secrets=include_secrets)
        for repo in repo_list]
282 |
|
283 | |||
283 |
|
284 | |||
@jsonrpc_method()
def get_repo_changeset(request, apiuser, repoid, revision,
                       details=Optional('basic')):
    """
    Returns information about a changeset.

    Additionally parameters define the amount of details returned by
    this function.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id
    :type repoid: str or int
    :param revision: revision for which listing should be done
    :type revision: str
    :param details: details can be 'basic|extended|full' full gives diff
        info details like the diff itself, and number of changed files etc.
    :type details: Optional(str)
    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # non-admins need at least read access to the repository
        validate_repo_permissions(
            apiuser, repoid, repo,
            ('repository.admin', 'repository.write', 'repository.read',))

    changes_details = Optional.extract(details)
    _changes_details_types = ['basic', 'extended', 'full']
    if changes_details not in _changes_details_types:
        raise JSONRPCError(
            'ret_type must be one of %s' % (
                ','.join(_changes_details_types)))

    # attributes eagerly loaded on the commit object to avoid lazy hits
    pre_load = ['author', 'branch', 'date', 'message', 'parents',
                'status', '_commit', '_file_paths']

    try:
        commit = repo.get_commit(commit_id=revision, pre_load=pre_load)
    except TypeError as e:
        raise JSONRPCError(safe_str(e))

    payload = commit.__json__()
    payload['diff'] = build_commit_data(commit, changes_details)
    if changes_details == 'full':
        payload['refs'] = commit._get_refs()
    return payload
332 |
|
333 | |||
333 |
|
334 | |||
@jsonrpc_method()
def get_repo_changesets(request, apiuser, repoid, start_rev, limit,
                        details=Optional('basic')):
    """
    Returns a set of commits limited by the number starting
    from the `start_rev` option.

    Additional parameters define the amount of details returned by this
    function.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param start_rev: The starting revision from where to get changesets.
    :type start_rev: str
    :param limit: Limit the number of commits to this amount
    :type limit: str or int
    :param details: Set the level of detail returned. Valid option are:
        ``basic``, ``extended`` and ``full``.
    :type details: Optional(str)

    .. note::

       Setting the parameter `details` to the value ``full`` is extensive
       and returns details like the diff itself, and the number
       of changed files.
    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # non-admins need at least read access to the repository
        validate_repo_permissions(
            apiuser, repoid, repo,
            ('repository.admin', 'repository.write', 'repository.read',))

    changes_details = Optional.extract(details)
    _changes_details_types = ['basic', 'extended', 'full']
    if changes_details not in _changes_details_types:
        raise JSONRPCError(
            'ret_type must be one of %s' % (
                ','.join(_changes_details_types)))

    limit = int(limit)
    # attributes eagerly loaded on each commit object to avoid lazy hits
    pre_load = ['author', 'branch', 'date', 'message', 'parents',
                'status', '_commit', '_file_paths']

    vcs_repo = repo.scm_instance()
    # SVN needs a special case to distinguish its index and commit id
    if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'):
        start_rev = vcs_repo.commit_ids[0]

    try:
        commits = vcs_repo.get_commits(
            start_id=start_rev, pre_load=pre_load, translate_tags=False)
    except TypeError as e:
        raise JSONRPCError(safe_str(e))
    except Exception:
        log.exception('Fetching of commits failed')
        raise JSONRPCError('Error occurred during commit fetching')

    ret = []
    for idx, commit in enumerate(commits):
        # a limit of -1 means "no limit"; otherwise stop after `limit` items
        if limit != -1 and idx >= limit:
            break
        entry = commit.__json__()
        entry['diff'] = build_commit_data(commit, changes_details)
        if changes_details == 'full':
            entry['refs'] = {
                'branches': [commit.branch],
                'bookmarks': getattr(commit, 'bookmarks', []),
                'tags': commit.tags
            }
        ret.append(entry)
    return ret
411 |
|
412 | |||
412 |
|
413 | |||
@jsonrpc_method()
def get_repo_nodes(request, apiuser, repoid, revision, root_path,
                   ret_type=Optional('all'), details=Optional('basic'),
                   max_file_bytes=Optional(None)):
    """
    Returns a list of nodes and children in a flat list for a given
    path at given revision.

    It's possible to specify ret_type to show only `files` or `dirs`.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param revision: The revision for which listing should be done.
    :type revision: str
    :param root_path: The path from which to start displaying.
    :type root_path: str
    :param ret_type: Set the return type. Valid options are
        ``all`` (default), ``files`` and ``dirs``.
    :type ret_type: Optional(str)
    :param details: Returns extended information about nodes, such as
        md5, binary, and or content.
        The valid options are ``basic`` and ``full``.
    :type details: Optional(str)
    :param max_file_bytes: Only return file content under this file size bytes
    :type max_file_bytes: Optional(int)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: [
                  {
                    "binary": false,
                    "content": "File line",
                    "extension": "md",
                    "lines": 2,
                    "md5": "059fa5d29b19c0657e384749480f6422",
                    "mimetype": "text/x-minidsrc",
                    "name": "file.md",
                    "size": 580,
                    "type": "file"
                  },
                  ...
                ]
        error:  null
    """

    repo = get_repo_or_error(repoid)
    # super-admins skip the per-repo permission check; everyone else needs
    # at least read access to the repository.
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    ret_type = Optional.extract(ret_type)
    details = Optional.extract(details)
    # FIX: extract max_file_bytes like the other Optional parameters, so the
    # raw Optional wrapper is never forwarded to ScmModel().get_nodes().
    max_file_bytes = Optional.extract(max_file_bytes)

    _extended_types = ['basic', 'full']
    if details not in _extended_types:
        # FIX: the original message said "ret_type" here, but this branch
        # validates the `details` parameter.
        raise JSONRPCError('details must be one of %s' % (','.join(_extended_types)))

    # `details` controls how much node metadata is computed:
    #   basic -> extended info (md5, mimetype, ...), full -> also file content
    extended_info = False
    content = False
    if details == 'basic':
        extended_info = True

    if details == 'full':
        extended_info = content = True

    _map = {}
    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return []

        _d, _f = ScmModel().get_nodes(
            repo, revision, root_path, flat=False,
            extended_info=extended_info, content=content,
            max_file_bytes=max_file_bytes)
        _map = {
            'all': _d + _f,
            'files': _f,
            'dirs': _d,
        }
        # KeyError here means an invalid ret_type and is reported below.
        return _map[ret_type]
    except KeyError:
        raise JSONRPCError(
            'ret_type must be one of %s' % (','.join(sorted(_map.keys()))))
    except Exception:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError(
            'failed to get repo: `%s` nodes' % repo.repo_name
        )
509 |
|
510 | |||
510 |
|
511 | |||
@jsonrpc_method()
def get_repo_file(request, apiuser, repoid, commit_id, file_path,
                  max_file_bytes=Optional(None), details=Optional('basic'),
                  cache=Optional(True)):
    """
    Returns a single file from repository at given revision.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param commit_id: The revision for which listing should be done.
    :type commit_id: str
    :param file_path: The path from which to start displaying.
    :type file_path: str
    :param details: Returns different set of information about nodes.
        The valid options are ``minimal`` ``basic`` and ``full``.
    :type details: Optional(str)
    :param max_file_bytes: Only return file content under this file size bytes
    :type max_file_bytes: Optional(int)
    :param cache: Use internal caches for fetching files. If disabled fetching
        files is slower but more memory efficient
    :type cache: Optional(bool)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "binary": false,
            "extension": "py",
            "lines": 35,
            "content": "....",
            "md5": "76318336366b0f17ee249e11b0c99c41",
            "mimetype": "text/x-python",
            "name": "python.py",
            "size": 817,
            "type": "file",
        }
        error:  null
    """

    repo = get_repo_or_error(repoid)
    # super-admins skip the per-repo permission check; everyone else needs
    # at least read access to the repository.
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    cache = Optional.extract(cache, binary=True)
    details = Optional.extract(details)
    # FIX: extract max_file_bytes like the other Optional parameters, so the
    # raw Optional wrapper is never forwarded to ScmModel().get_node().
    max_file_bytes = Optional.extract(max_file_bytes)

    _extended_types = ['minimal', 'minimal+search', 'basic', 'full']
    if details not in _extended_types:
        # FIX: `details` was previously outside the % tuple, which raised
        # TypeError ("not enough arguments for format string") instead of
        # the intended JSONRPCError.
        raise JSONRPCError(
            'ret_type must be one of %s, got %s' % (','.join(_extended_types), details))

    # `details` controls how much node metadata is computed:
    #   minimal -> names only, basic -> extended info, full -> also content
    extended_info = False
    content = False

    if details == 'minimal':
        extended_info = False

    elif details == 'basic':
        extended_info = True

    elif details == 'full':
        extended_info = content = True

    file_path = safe_unicode(file_path)
    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return None

        node = ScmModel().get_node(
            repo, commit_id, file_path, extended_info=extended_info,
            content=content, max_file_bytes=max_file_bytes, cache=cache)
    except NodeDoesNotExistError:
        raise JSONRPCError(u'There is no file in repo: `{}` at path `{}` for commit: `{}`'.format(
            repo.repo_name, file_path, commit_id))
    except Exception:
        log.exception(u"Exception occurred while trying to get repo %s file",
                      repo.repo_name)
        raise JSONRPCError(u'failed to get repo: `{}` file at path {}'.format(
            repo.repo_name, file_path))

    return node
600 |
|
601 | |||
601 |
|
602 | |||
@jsonrpc_method()
def get_repo_fts_tree(request, apiuser, repoid, commit_id, root_path):
    """
    Returns a list of tree nodes for path at given revision. This api is built
    strictly for usage in full text search building, and shouldn't be consumed

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    """

    repo = get_repo_or_error(repoid)
    # super-admins skip the per-repo permission check; everyone else needs
    # at least read access to the repository.
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    repo_id = repo.repo_id
    cache_seconds = safe_int(rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
    cache_on = cache_seconds > 0

    # NOTE(review): the cache region is created here but never applied to
    # compute_fts_tree -- caching appears to be effectively disabled; confirm
    # whether a region decorator was intended on compute_fts_tree.
    cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
    region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)

    def compute_fts_tree(cache_ver, repo_id, commit_id, root_path):
        # cache_ver participates in the (intended) cache key only.
        return ScmModel().get_fts_data(repo_id, commit_id, root_path)

    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return []
    except RepositoryError:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)

    try:
        # we need to resolve commit_id to a FULL sha for cache to work correctly.
        # sending 'master' is a pointer that needs to be translated to current commit.
        commit_id = _scm.get_commit(commit_id=commit_id).raw_id
        # FIX: use lazy %-style logging args instead of eager string
        # formatting, consistent with the other log calls in this module.
        log.debug(
            'Computing FTS REPO TREE for repo_id %s commit_id `%s` '
            'with caching: %s[TTL: %ss]',
            repo_id, commit_id, cache_on, cache_seconds or 0)

        tree_files = compute_fts_tree(rc_cache.FILE_TREE_CACHE_VER, repo_id, commit_id, root_path)
        return tree_files

    except Exception:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)
652 |
|
653 | |||
653 |
|
654 | |||
@jsonrpc_method()
def get_repo_refs(request, apiuser, repoid):
    """
    Returns a dictionary of current references. It returns
    bookmarks, branches, closed_branches, and tags for given repository

    It's possible to specify ret_type to show only `files` or `dirs`.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        "result": {
            "bookmarks": {
              "dev": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
              "master": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
            },
            "branches": {
              "default": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
              "stable": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
            },
            "branches_closed": {},
            "tags": {
              "tip": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
              "v4.4.0": "1232313f9e6adac5ce5399c2a891dc1e72b79022",
              "v4.4.1": "cbb9f1d329ae5768379cdec55a62ebdd546c4e27",
              "v4.4.2": "24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17",
            }
        }
        error:  null
    """

    repo = get_repo_or_error(repoid)

    # super-admins bypass the per-repository permission check.
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        # fetch all references straight from the scm backend instance.
        scm = repo.scm_instance()
        return scm.refs()
    except Exception:
        log.exception("Exception occurred while trying to get repo refs")
        raise JSONRPCError(
            'failed to get repo: `%s` references' % repo.repo_name
        )
710 |
|
711 | |||
711 |
|
712 | |||
712 | @jsonrpc_method() |
|
713 | @jsonrpc_method() | |
713 | def create_repo( |
|
714 | def create_repo( | |
714 | request, apiuser, repo_name, repo_type, |
|
715 | request, apiuser, repo_name, repo_type, | |
715 | owner=Optional(OAttr('apiuser')), |
|
716 | owner=Optional(OAttr('apiuser')), | |
716 | description=Optional(''), |
|
717 | description=Optional(''), | |
717 | private=Optional(False), |
|
718 | private=Optional(False), | |
718 | clone_uri=Optional(None), |
|
719 | clone_uri=Optional(None), | |
719 | push_uri=Optional(None), |
|
720 | push_uri=Optional(None), | |
720 | landing_rev=Optional(None), |
|
721 | landing_rev=Optional(None), | |
721 | enable_statistics=Optional(False), |
|
722 | enable_statistics=Optional(False), | |
722 | enable_locking=Optional(False), |
|
723 | enable_locking=Optional(False), | |
723 | enable_downloads=Optional(False), |
|
724 | enable_downloads=Optional(False), | |
724 | copy_permissions=Optional(False)): |
|
725 | copy_permissions=Optional(False)): | |
725 | """ |
|
726 | """ | |
726 | Creates a repository. |
|
727 | Creates a repository. | |
727 |
|
728 | |||
728 | * If the repository name contains "/", repository will be created inside |
|
729 | * If the repository name contains "/", repository will be created inside | |
729 | a repository group or nested repository groups |
|
730 | a repository group or nested repository groups | |
730 |
|
731 | |||
731 | For example "foo/bar/repo1" will create |repo| called "repo1" inside |
|
732 | For example "foo/bar/repo1" will create |repo| called "repo1" inside | |
732 | group "foo/bar". You have to have permissions to access and write to |
|
733 | group "foo/bar". You have to have permissions to access and write to | |
733 | the last repository group ("bar" in this example) |
|
734 | the last repository group ("bar" in this example) | |
734 |
|
735 | |||
735 | This command can only be run using an |authtoken| with at least |
|
736 | This command can only be run using an |authtoken| with at least | |
736 | permissions to create repositories, or write permissions to |
|
737 | permissions to create repositories, or write permissions to | |
737 | parent repository groups. |
|
738 | parent repository groups. | |
738 |
|
739 | |||
739 | :param apiuser: This is filled automatically from the |authtoken|. |
|
740 | :param apiuser: This is filled automatically from the |authtoken|. | |
740 | :type apiuser: AuthUser |
|
741 | :type apiuser: AuthUser | |
741 | :param repo_name: Set the repository name. |
|
742 | :param repo_name: Set the repository name. | |
742 | :type repo_name: str |
|
743 | :type repo_name: str | |
743 | :param repo_type: Set the repository type; 'hg','git', or 'svn'. |
|
744 | :param repo_type: Set the repository type; 'hg','git', or 'svn'. | |
744 | :type repo_type: str |
|
745 | :type repo_type: str | |
745 | :param owner: user_id or username |
|
746 | :param owner: user_id or username | |
746 | :type owner: Optional(str) |
|
747 | :type owner: Optional(str) | |
747 | :param description: Set the repository description. |
|
748 | :param description: Set the repository description. | |
748 | :type description: Optional(str) |
|
749 | :type description: Optional(str) | |
749 | :param private: set repository as private |
|
750 | :param private: set repository as private | |
750 | :type private: bool |
|
751 | :type private: bool | |
751 | :param clone_uri: set clone_uri |
|
752 | :param clone_uri: set clone_uri | |
752 | :type clone_uri: str |
|
753 | :type clone_uri: str | |
753 | :param push_uri: set push_uri |
|
754 | :param push_uri: set push_uri | |
754 | :type push_uri: str |
|
755 | :type push_uri: str | |
755 | :param landing_rev: <rev_type>:<rev>, e.g branch:default, book:dev, rev:abcd |
|
756 | :param landing_rev: <rev_type>:<rev>, e.g branch:default, book:dev, rev:abcd | |
756 | :type landing_rev: str |
|
757 | :type landing_rev: str | |
757 | :param enable_locking: |
|
758 | :param enable_locking: | |
758 | :type enable_locking: bool |
|
759 | :type enable_locking: bool | |
759 | :param enable_downloads: |
|
760 | :param enable_downloads: | |
760 | :type enable_downloads: bool |
|
761 | :type enable_downloads: bool | |
761 | :param enable_statistics: |
|
762 | :param enable_statistics: | |
762 | :type enable_statistics: bool |
|
763 | :type enable_statistics: bool | |
763 | :param copy_permissions: Copy permission from group in which the |
|
764 | :param copy_permissions: Copy permission from group in which the | |
764 | repository is being created. |
|
765 | repository is being created. | |
765 | :type copy_permissions: bool |
|
766 | :type copy_permissions: bool | |
766 |
|
767 | |||
767 |
|
768 | |||
768 | Example output: |
|
769 | Example output: | |
769 |
|
770 | |||
770 | .. code-block:: bash |
|
771 | .. code-block:: bash | |
771 |
|
772 | |||
772 | id : <id_given_in_input> |
|
773 | id : <id_given_in_input> | |
773 | result: { |
|
774 | result: { | |
774 | "msg": "Created new repository `<reponame>`", |
|
775 | "msg": "Created new repository `<reponame>`", | |
775 | "success": true, |
|
776 | "success": true, | |
776 | "task": "<celery task id or None if done sync>" |
|
777 | "task": "<celery task id or None if done sync>" | |
777 | } |
|
778 | } | |
778 | error: null |
|
779 | error: null | |
779 |
|
780 | |||
780 |
|
781 | |||
781 | Example error output: |
|
782 | Example error output: | |
782 |
|
783 | |||
783 | .. code-block:: bash |
|
784 | .. code-block:: bash | |
784 |
|
785 | |||
785 | id : <id_given_in_input> |
|
786 | id : <id_given_in_input> | |
786 | result : null |
|
787 | result : null | |
787 | error : { |
|
788 | error : { | |
788 | 'failed to create repository `<repo_name>`' |
|
789 | 'failed to create repository `<repo_name>`' | |
789 | } |
|
790 | } | |
790 |
|
791 | |||
791 | """ |
|
792 | """ | |
792 |
|
793 | |||
793 | owner = validate_set_owner_permissions(apiuser, owner) |
|
794 | owner = validate_set_owner_permissions(apiuser, owner) | |
794 |
|
795 | |||
795 | description = Optional.extract(description) |
|
796 | description = Optional.extract(description) | |
796 | copy_permissions = Optional.extract(copy_permissions) |
|
797 | copy_permissions = Optional.extract(copy_permissions) | |
797 | clone_uri = Optional.extract(clone_uri) |
|
798 | clone_uri = Optional.extract(clone_uri) | |
798 | push_uri = Optional.extract(push_uri) |
|
799 | push_uri = Optional.extract(push_uri) | |
799 |
|
800 | |||
800 | defs = SettingsModel().get_default_repo_settings(strip_prefix=True) |
|
801 | defs = SettingsModel().get_default_repo_settings(strip_prefix=True) | |
801 | if isinstance(private, Optional): |
|
802 | if isinstance(private, Optional): | |
802 | private = defs.get('repo_private') or Optional.extract(private) |
|
803 | private = defs.get('repo_private') or Optional.extract(private) | |
803 | if isinstance(repo_type, Optional): |
|
804 | if isinstance(repo_type, Optional): | |
804 | repo_type = defs.get('repo_type') |
|
805 | repo_type = defs.get('repo_type') | |
805 | if isinstance(enable_statistics, Optional): |
|
806 | if isinstance(enable_statistics, Optional): | |
806 | enable_statistics = defs.get('repo_enable_statistics') |
|
807 | enable_statistics = defs.get('repo_enable_statistics') | |
807 | if isinstance(enable_locking, Optional): |
|
808 | if isinstance(enable_locking, Optional): | |
808 | enable_locking = defs.get('repo_enable_locking') |
|
809 | enable_locking = defs.get('repo_enable_locking') | |
809 | if isinstance(enable_downloads, Optional): |
|
810 | if isinstance(enable_downloads, Optional): | |
810 | enable_downloads = defs.get('repo_enable_downloads') |
|
811 | enable_downloads = defs.get('repo_enable_downloads') | |
811 |
|
812 | |||
812 | landing_ref, _label = ScmModel.backend_landing_ref(repo_type) |
|
813 | landing_ref, _label = ScmModel.backend_landing_ref(repo_type) | |
813 | ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate) |
|
814 | ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate) | |
814 | ref_choices = list(set(ref_choices + [landing_ref])) |
|
815 | ref_choices = list(set(ref_choices + [landing_ref])) | |
815 |
|
816 | |||
816 | landing_commit_ref = Optional.extract(landing_rev) or landing_ref |
|
817 | landing_commit_ref = Optional.extract(landing_rev) or landing_ref | |
817 |
|
818 | |||
818 | schema = repo_schema.RepoSchema().bind( |
|
819 | schema = repo_schema.RepoSchema().bind( | |
819 | repo_type_options=rhodecode.BACKENDS.keys(), |
|
820 | repo_type_options=rhodecode.BACKENDS.keys(), | |
820 | repo_ref_options=ref_choices, |
|
821 | repo_ref_options=ref_choices, | |
821 | repo_type=repo_type, |
|
822 | repo_type=repo_type, | |
822 | # user caller |
|
823 | # user caller | |
823 | user=apiuser) |
|
824 | user=apiuser) | |
824 |
|
825 | |||
825 | try: |
|
826 | try: | |
826 | schema_data = schema.deserialize(dict( |
|
827 | schema_data = schema.deserialize(dict( | |
827 | repo_name=repo_name, |
|
828 | repo_name=repo_name, | |
828 | repo_type=repo_type, |
|
829 | repo_type=repo_type, | |
829 | repo_owner=owner.username, |
|
830 | repo_owner=owner.username, | |
830 | repo_description=description, |
|
831 | repo_description=description, | |
831 | repo_landing_commit_ref=landing_commit_ref, |
|
832 | repo_landing_commit_ref=landing_commit_ref, | |
832 | repo_clone_uri=clone_uri, |
|
833 | repo_clone_uri=clone_uri, | |
833 | repo_push_uri=push_uri, |
|
834 | repo_push_uri=push_uri, | |
834 | repo_private=private, |
|
835 | repo_private=private, | |
835 | repo_copy_permissions=copy_permissions, |
|
836 | repo_copy_permissions=copy_permissions, | |
836 | repo_enable_statistics=enable_statistics, |
|
837 | repo_enable_statistics=enable_statistics, | |
837 | repo_enable_downloads=enable_downloads, |
|
838 | repo_enable_downloads=enable_downloads, | |
838 | repo_enable_locking=enable_locking)) |
|
839 | repo_enable_locking=enable_locking)) | |
839 | except validation_schema.Invalid as err: |
|
840 | except validation_schema.Invalid as err: | |
840 | raise JSONRPCValidationError(colander_exc=err) |
|
841 | raise JSONRPCValidationError(colander_exc=err) | |
841 |
|
842 | |||
842 | try: |
|
843 | try: | |
843 | data = { |
|
844 | data = { | |
844 | 'owner': owner, |
|
845 | 'owner': owner, | |
845 | 'repo_name': schema_data['repo_group']['repo_name_without_group'], |
|
846 | 'repo_name': schema_data['repo_group']['repo_name_without_group'], | |
846 | 'repo_name_full': schema_data['repo_name'], |
|
847 | 'repo_name_full': schema_data['repo_name'], | |
847 | 'repo_group': schema_data['repo_group']['repo_group_id'], |
|
848 | 'repo_group': schema_data['repo_group']['repo_group_id'], | |
848 | 'repo_type': schema_data['repo_type'], |
|
849 | 'repo_type': schema_data['repo_type'], | |
849 | 'repo_description': schema_data['repo_description'], |
|
850 | 'repo_description': schema_data['repo_description'], | |
850 | 'repo_private': schema_data['repo_private'], |
|
851 | 'repo_private': schema_data['repo_private'], | |
851 | 'clone_uri': schema_data['repo_clone_uri'], |
|
852 | 'clone_uri': schema_data['repo_clone_uri'], | |
852 | 'push_uri': schema_data['repo_push_uri'], |
|
853 | 'push_uri': schema_data['repo_push_uri'], | |
853 | 'repo_landing_rev': schema_data['repo_landing_commit_ref'], |
|
854 | 'repo_landing_rev': schema_data['repo_landing_commit_ref'], | |
854 | 'enable_statistics': schema_data['repo_enable_statistics'], |
|
855 | 'enable_statistics': schema_data['repo_enable_statistics'], | |
855 | 'enable_locking': schema_data['repo_enable_locking'], |
|
856 | 'enable_locking': schema_data['repo_enable_locking'], | |
856 | 'enable_downloads': schema_data['repo_enable_downloads'], |
|
857 | 'enable_downloads': schema_data['repo_enable_downloads'], | |
857 | 'repo_copy_permissions': schema_data['repo_copy_permissions'], |
|
858 | 'repo_copy_permissions': schema_data['repo_copy_permissions'], | |
858 | } |
|
859 | } | |
859 |
|
860 | |||
860 | task = RepoModel().create(form_data=data, cur_user=owner.user_id) |
|
861 | task = RepoModel().create(form_data=data, cur_user=owner.user_id) | |
861 | task_id = get_task_id(task) |
|
862 | task_id = get_task_id(task) | |
862 | # no commit, it's done in RepoModel, or async via celery |
|
863 | # no commit, it's done in RepoModel, or async via celery | |
863 | return { |
|
864 | return { | |
864 | 'msg': "Created new repository `%s`" % (schema_data['repo_name'],), |
|
865 | 'msg': "Created new repository `%s`" % (schema_data['repo_name'],), | |
865 | 'success': True, # cannot return the repo data here since fork |
|
866 | 'success': True, # cannot return the repo data here since fork | |
866 | # can be done async |
|
867 | # can be done async | |
867 | 'task': task_id |
|
868 | 'task': task_id | |
868 | } |
|
869 | } | |
869 | except Exception: |
|
870 | except Exception: | |
870 | log.exception( |
|
871 | log.exception( | |
871 | u"Exception while trying to create the repository %s", |
|
872 | u"Exception while trying to create the repository %s", | |
872 | schema_data['repo_name']) |
|
873 | schema_data['repo_name']) | |
873 | raise JSONRPCError( |
|
874 | raise JSONRPCError( | |
874 | 'failed to create repository `%s`' % (schema_data['repo_name'],)) |
|
875 | 'failed to create repository `%s`' % (schema_data['repo_name'],)) | |
875 |
|
876 | |||
876 |
|
877 | |||
877 | @jsonrpc_method() |
|
878 | @jsonrpc_method() | |
878 | def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''), |
|
879 | def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''), | |
879 | description=Optional('')): |
|
880 | description=Optional('')): | |
880 | """ |
|
881 | """ | |
881 | Adds an extra field to a repository. |
|
882 | Adds an extra field to a repository. | |
882 |
|
883 | |||
883 | This command can only be run using an |authtoken| with at least |
|
884 | This command can only be run using an |authtoken| with at least | |
884 | write permissions to the |repo|. |
|
885 | write permissions to the |repo|. | |
885 |
|
886 | |||
886 | :param apiuser: This is filled automatically from the |authtoken|. |
|
887 | :param apiuser: This is filled automatically from the |authtoken|. | |
887 | :type apiuser: AuthUser |
|
888 | :type apiuser: AuthUser | |
888 | :param repoid: Set the repository name or repository id. |
|
889 | :param repoid: Set the repository name or repository id. | |
889 | :type repoid: str or int |
|
890 | :type repoid: str or int | |
890 | :param key: Create a unique field key for this repository. |
|
891 | :param key: Create a unique field key for this repository. | |
891 | :type key: str |
|
892 | :type key: str | |
892 | :param label: |
|
893 | :param label: | |
893 | :type label: Optional(str) |
|
894 | :type label: Optional(str) | |
894 | :param description: |
|
895 | :param description: | |
895 | :type description: Optional(str) |
|
896 | :type description: Optional(str) | |
896 | """ |
|
897 | """ | |
897 | repo = get_repo_or_error(repoid) |
|
898 | repo = get_repo_or_error(repoid) | |
898 | if not has_superadmin_permission(apiuser): |
|
899 | if not has_superadmin_permission(apiuser): | |
899 | _perms = ('repository.admin',) |
|
900 | _perms = ('repository.admin',) | |
900 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
901 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
901 |
|
902 | |||
902 | label = Optional.extract(label) or key |
|
903 | label = Optional.extract(label) or key | |
903 | description = Optional.extract(description) |
|
904 | description = Optional.extract(description) | |
904 |
|
905 | |||
905 | field = RepositoryField.get_by_key_name(key, repo) |
|
906 | field = RepositoryField.get_by_key_name(key, repo) | |
906 | if field: |
|
907 | if field: | |
907 | raise JSONRPCError('Field with key ' |
|
908 | raise JSONRPCError('Field with key ' | |
908 | '`%s` exists for repo `%s`' % (key, repoid)) |
|
909 | '`%s` exists for repo `%s`' % (key, repoid)) | |
909 |
|
910 | |||
910 | try: |
|
911 | try: | |
911 | RepoModel().add_repo_field(repo, key, field_label=label, |
|
912 | RepoModel().add_repo_field(repo, key, field_label=label, | |
912 | field_desc=description) |
|
913 | field_desc=description) | |
913 | Session().commit() |
|
914 | Session().commit() | |
914 | return { |
|
915 | return { | |
915 | 'msg': "Added new repository field `%s`" % (key,), |
|
916 | 'msg': "Added new repository field `%s`" % (key,), | |
916 | 'success': True, |
|
917 | 'success': True, | |
917 | } |
|
918 | } | |
918 | except Exception: |
|
919 | except Exception: | |
919 | log.exception("Exception occurred while trying to add field to repo") |
|
920 | log.exception("Exception occurred while trying to add field to repo") | |
920 | raise JSONRPCError( |
|
921 | raise JSONRPCError( | |
921 | 'failed to create new field for repository `%s`' % (repoid,)) |
|
922 | 'failed to create new field for repository `%s`' % (repoid,)) | |
922 |
|
923 | |||
923 |
|
924 | |||
924 | @jsonrpc_method() |
|
925 | @jsonrpc_method() | |
925 | def remove_field_from_repo(request, apiuser, repoid, key): |
|
926 | def remove_field_from_repo(request, apiuser, repoid, key): | |
926 | """ |
|
927 | """ | |
927 | Removes an extra field from a repository. |
|
928 | Removes an extra field from a repository. | |
928 |
|
929 | |||
929 | This command can only be run using an |authtoken| with at least |
|
930 | This command can only be run using an |authtoken| with at least | |
930 | write permissions to the |repo|. |
|
931 | write permissions to the |repo|. | |
931 |
|
932 | |||
932 | :param apiuser: This is filled automatically from the |authtoken|. |
|
933 | :param apiuser: This is filled automatically from the |authtoken|. | |
933 | :type apiuser: AuthUser |
|
934 | :type apiuser: AuthUser | |
934 | :param repoid: Set the repository name or repository ID. |
|
935 | :param repoid: Set the repository name or repository ID. | |
935 | :type repoid: str or int |
|
936 | :type repoid: str or int | |
936 | :param key: Set the unique field key for this repository. |
|
937 | :param key: Set the unique field key for this repository. | |
937 | :type key: str |
|
938 | :type key: str | |
938 | """ |
|
939 | """ | |
939 |
|
940 | |||
940 | repo = get_repo_or_error(repoid) |
|
941 | repo = get_repo_or_error(repoid) | |
941 | if not has_superadmin_permission(apiuser): |
|
942 | if not has_superadmin_permission(apiuser): | |
942 | _perms = ('repository.admin',) |
|
943 | _perms = ('repository.admin',) | |
943 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
944 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
944 |
|
945 | |||
945 | field = RepositoryField.get_by_key_name(key, repo) |
|
946 | field = RepositoryField.get_by_key_name(key, repo) | |
946 | if not field: |
|
947 | if not field: | |
947 | raise JSONRPCError('Field with key `%s` does not ' |
|
948 | raise JSONRPCError('Field with key `%s` does not ' | |
948 | 'exists for repo `%s`' % (key, repoid)) |
|
949 | 'exists for repo `%s`' % (key, repoid)) | |
949 |
|
950 | |||
950 | try: |
|
951 | try: | |
951 | RepoModel().delete_repo_field(repo, field_key=key) |
|
952 | RepoModel().delete_repo_field(repo, field_key=key) | |
952 | Session().commit() |
|
953 | Session().commit() | |
953 | return { |
|
954 | return { | |
954 | 'msg': "Deleted repository field `%s`" % (key,), |
|
955 | 'msg': "Deleted repository field `%s`" % (key,), | |
955 | 'success': True, |
|
956 | 'success': True, | |
956 | } |
|
957 | } | |
957 | except Exception: |
|
958 | except Exception: | |
958 | log.exception( |
|
959 | log.exception( | |
959 | "Exception occurred while trying to delete field from repo") |
|
960 | "Exception occurred while trying to delete field from repo") | |
960 | raise JSONRPCError( |
|
961 | raise JSONRPCError( | |
961 | 'failed to delete field for repository `%s`' % (repoid,)) |
|
962 | 'failed to delete field for repository `%s`' % (repoid,)) | |
962 |
|
963 | |||
963 |
|
964 | |||
964 | @jsonrpc_method() |
|
965 | @jsonrpc_method() | |
965 | def update_repo( |
|
966 | def update_repo( | |
966 | request, apiuser, repoid, repo_name=Optional(None), |
|
967 | request, apiuser, repoid, repo_name=Optional(None), | |
967 | owner=Optional(OAttr('apiuser')), description=Optional(''), |
|
968 | owner=Optional(OAttr('apiuser')), description=Optional(''), | |
968 | private=Optional(False), |
|
969 | private=Optional(False), | |
969 | clone_uri=Optional(None), push_uri=Optional(None), |
|
970 | clone_uri=Optional(None), push_uri=Optional(None), | |
970 | landing_rev=Optional(None), fork_of=Optional(None), |
|
971 | landing_rev=Optional(None), fork_of=Optional(None), | |
971 | enable_statistics=Optional(False), |
|
972 | enable_statistics=Optional(False), | |
972 | enable_locking=Optional(False), |
|
973 | enable_locking=Optional(False), | |
973 | enable_downloads=Optional(False), fields=Optional('')): |
|
974 | enable_downloads=Optional(False), fields=Optional('')): | |
974 | """ |
|
975 | """ | |
975 | Updates a repository with the given information. |
|
976 | Updates a repository with the given information. | |
976 |
|
977 | |||
977 | This command can only be run using an |authtoken| with at least |
|
978 | This command can only be run using an |authtoken| with at least | |
978 | admin permissions to the |repo|. |
|
979 | admin permissions to the |repo|. | |
979 |
|
980 | |||
980 | * If the repository name contains "/", repository will be updated |
|
981 | * If the repository name contains "/", repository will be updated | |
981 | accordingly with a repository group or nested repository groups |
|
982 | accordingly with a repository group or nested repository groups | |
982 |
|
983 | |||
983 | For example repoid=repo-test name="foo/bar/repo-test" will update |repo| |
|
984 | For example repoid=repo-test name="foo/bar/repo-test" will update |repo| | |
984 | called "repo-test" and place it inside group "foo/bar". |
|
985 | called "repo-test" and place it inside group "foo/bar". | |
985 | You have to have permissions to access and write to the last repository |
|
986 | You have to have permissions to access and write to the last repository | |
986 | group ("bar" in this example) |
|
987 | group ("bar" in this example) | |
987 |
|
988 | |||
988 | :param apiuser: This is filled automatically from the |authtoken|. |
|
989 | :param apiuser: This is filled automatically from the |authtoken|. | |
989 | :type apiuser: AuthUser |
|
990 | :type apiuser: AuthUser | |
990 | :param repoid: repository name or repository ID. |
|
991 | :param repoid: repository name or repository ID. | |
991 | :type repoid: str or int |
|
992 | :type repoid: str or int | |
992 | :param repo_name: Update the |repo| name, including the |
|
993 | :param repo_name: Update the |repo| name, including the | |
993 | repository group it's in. |
|
994 | repository group it's in. | |
994 | :type repo_name: str |
|
995 | :type repo_name: str | |
995 | :param owner: Set the |repo| owner. |
|
996 | :param owner: Set the |repo| owner. | |
996 | :type owner: str |
|
997 | :type owner: str | |
997 | :param fork_of: Set the |repo| as fork of another |repo|. |
|
998 | :param fork_of: Set the |repo| as fork of another |repo|. | |
998 | :type fork_of: str |
|
999 | :type fork_of: str | |
999 | :param description: Update the |repo| description. |
|
1000 | :param description: Update the |repo| description. | |
1000 | :type description: str |
|
1001 | :type description: str | |
1001 | :param private: Set the |repo| as private. (True | False) |
|
1002 | :param private: Set the |repo| as private. (True | False) | |
1002 | :type private: bool |
|
1003 | :type private: bool | |
1003 | :param clone_uri: Update the |repo| clone URI. |
|
1004 | :param clone_uri: Update the |repo| clone URI. | |
1004 | :type clone_uri: str |
|
1005 | :type clone_uri: str | |
1005 | :param landing_rev: Set the |repo| landing revision. e.g branch:default, book:dev, rev:abcd |
|
1006 | :param landing_rev: Set the |repo| landing revision. e.g branch:default, book:dev, rev:abcd | |
1006 | :type landing_rev: str |
|
1007 | :type landing_rev: str | |
1007 | :param enable_statistics: Enable statistics on the |repo|, (True | False). |
|
1008 | :param enable_statistics: Enable statistics on the |repo|, (True | False). | |
1008 | :type enable_statistics: bool |
|
1009 | :type enable_statistics: bool | |
1009 | :param enable_locking: Enable |repo| locking. |
|
1010 | :param enable_locking: Enable |repo| locking. | |
1010 | :type enable_locking: bool |
|
1011 | :type enable_locking: bool | |
1011 | :param enable_downloads: Enable downloads from the |repo|, (True | False). |
|
1012 | :param enable_downloads: Enable downloads from the |repo|, (True | False). | |
1012 | :type enable_downloads: bool |
|
1013 | :type enable_downloads: bool | |
1013 | :param fields: Add extra fields to the |repo|. Use the following |
|
1014 | :param fields: Add extra fields to the |repo|. Use the following | |
1014 | example format: ``field_key=field_val,field_key2=fieldval2``. |
|
1015 | example format: ``field_key=field_val,field_key2=fieldval2``. | |
1015 | Escape ', ' with \, |
|
1016 | Escape ', ' with \, | |
1016 | :type fields: str |
|
1017 | :type fields: str | |
1017 | """ |
|
1018 | """ | |
1018 |
|
1019 | |||
1019 | repo = get_repo_or_error(repoid) |
|
1020 | repo = get_repo_or_error(repoid) | |
1020 |
|
1021 | |||
1021 | include_secrets = False |
|
1022 | include_secrets = False | |
1022 | if not has_superadmin_permission(apiuser): |
|
1023 | if not has_superadmin_permission(apiuser): | |
1023 | validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',)) |
|
1024 | validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',)) | |
1024 | else: |
|
1025 | else: | |
1025 | include_secrets = True |
|
1026 | include_secrets = True | |
1026 |
|
1027 | |||
1027 | updates = dict( |
|
1028 | updates = dict( | |
1028 | repo_name=repo_name |
|
1029 | repo_name=repo_name | |
1029 | if not isinstance(repo_name, Optional) else repo.repo_name, |
|
1030 | if not isinstance(repo_name, Optional) else repo.repo_name, | |
1030 |
|
1031 | |||
1031 | fork_id=fork_of |
|
1032 | fork_id=fork_of | |
1032 | if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None, |
|
1033 | if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None, | |
1033 |
|
1034 | |||
1034 | user=owner |
|
1035 | user=owner | |
1035 | if not isinstance(owner, Optional) else repo.user.username, |
|
1036 | if not isinstance(owner, Optional) else repo.user.username, | |
1036 |
|
1037 | |||
1037 | repo_description=description |
|
1038 | repo_description=description | |
1038 | if not isinstance(description, Optional) else repo.description, |
|
1039 | if not isinstance(description, Optional) else repo.description, | |
1039 |
|
1040 | |||
1040 | repo_private=private |
|
1041 | repo_private=private | |
1041 | if not isinstance(private, Optional) else repo.private, |
|
1042 | if not isinstance(private, Optional) else repo.private, | |
1042 |
|
1043 | |||
1043 | clone_uri=clone_uri |
|
1044 | clone_uri=clone_uri | |
1044 | if not isinstance(clone_uri, Optional) else repo.clone_uri, |
|
1045 | if not isinstance(clone_uri, Optional) else repo.clone_uri, | |
1045 |
|
1046 | |||
1046 | push_uri=push_uri |
|
1047 | push_uri=push_uri | |
1047 | if not isinstance(push_uri, Optional) else repo.push_uri, |
|
1048 | if not isinstance(push_uri, Optional) else repo.push_uri, | |
1048 |
|
1049 | |||
1049 | repo_landing_rev=landing_rev |
|
1050 | repo_landing_rev=landing_rev | |
1050 | if not isinstance(landing_rev, Optional) else repo._landing_revision, |
|
1051 | if not isinstance(landing_rev, Optional) else repo._landing_revision, | |
1051 |
|
1052 | |||
1052 | repo_enable_statistics=enable_statistics |
|
1053 | repo_enable_statistics=enable_statistics | |
1053 | if not isinstance(enable_statistics, Optional) else repo.enable_statistics, |
|
1054 | if not isinstance(enable_statistics, Optional) else repo.enable_statistics, | |
1054 |
|
1055 | |||
1055 | repo_enable_locking=enable_locking |
|
1056 | repo_enable_locking=enable_locking | |
1056 | if not isinstance(enable_locking, Optional) else repo.enable_locking, |
|
1057 | if not isinstance(enable_locking, Optional) else repo.enable_locking, | |
1057 |
|
1058 | |||
1058 | repo_enable_downloads=enable_downloads |
|
1059 | repo_enable_downloads=enable_downloads | |
1059 | if not isinstance(enable_downloads, Optional) else repo.enable_downloads) |
|
1060 | if not isinstance(enable_downloads, Optional) else repo.enable_downloads) | |
1060 |
|
1061 | |||
1061 | landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type) |
|
1062 | landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type) | |
1062 | ref_choices, _labels = ScmModel().get_repo_landing_revs( |
|
1063 | ref_choices, _labels = ScmModel().get_repo_landing_revs( | |
1063 | request.translate, repo=repo) |
|
1064 | request.translate, repo=repo) | |
1064 | ref_choices = list(set(ref_choices + [landing_ref])) |
|
1065 | ref_choices = list(set(ref_choices + [landing_ref])) | |
1065 |
|
1066 | |||
1066 | old_values = repo.get_api_data() |
|
1067 | old_values = repo.get_api_data() | |
1067 | repo_type = repo.repo_type |
|
1068 | repo_type = repo.repo_type | |
1068 | schema = repo_schema.RepoSchema().bind( |
|
1069 | schema = repo_schema.RepoSchema().bind( | |
1069 | repo_type_options=rhodecode.BACKENDS.keys(), |
|
1070 | repo_type_options=rhodecode.BACKENDS.keys(), | |
1070 | repo_ref_options=ref_choices, |
|
1071 | repo_ref_options=ref_choices, | |
1071 | repo_type=repo_type, |
|
1072 | repo_type=repo_type, | |
1072 | # user caller |
|
1073 | # user caller | |
1073 | user=apiuser, |
|
1074 | user=apiuser, | |
1074 | old_values=old_values) |
|
1075 | old_values=old_values) | |
1075 | try: |
|
1076 | try: | |
1076 | schema_data = schema.deserialize(dict( |
|
1077 | schema_data = schema.deserialize(dict( | |
1077 | # we save old value, users cannot change type |
|
1078 | # we save old value, users cannot change type | |
1078 | repo_type=repo_type, |
|
1079 | repo_type=repo_type, | |
1079 |
|
1080 | |||
1080 | repo_name=updates['repo_name'], |
|
1081 | repo_name=updates['repo_name'], | |
1081 | repo_owner=updates['user'], |
|
1082 | repo_owner=updates['user'], | |
1082 | repo_description=updates['repo_description'], |
|
1083 | repo_description=updates['repo_description'], | |
1083 | repo_clone_uri=updates['clone_uri'], |
|
1084 | repo_clone_uri=updates['clone_uri'], | |
1084 | repo_push_uri=updates['push_uri'], |
|
1085 | repo_push_uri=updates['push_uri'], | |
1085 | repo_fork_of=updates['fork_id'], |
|
1086 | repo_fork_of=updates['fork_id'], | |
1086 | repo_private=updates['repo_private'], |
|
1087 | repo_private=updates['repo_private'], | |
1087 | repo_landing_commit_ref=updates['repo_landing_rev'], |
|
1088 | repo_landing_commit_ref=updates['repo_landing_rev'], | |
1088 | repo_enable_statistics=updates['repo_enable_statistics'], |
|
1089 | repo_enable_statistics=updates['repo_enable_statistics'], | |
1089 | repo_enable_downloads=updates['repo_enable_downloads'], |
|
1090 | repo_enable_downloads=updates['repo_enable_downloads'], | |
1090 | repo_enable_locking=updates['repo_enable_locking'])) |
|
1091 | repo_enable_locking=updates['repo_enable_locking'])) | |
1091 | except validation_schema.Invalid as err: |
|
1092 | except validation_schema.Invalid as err: | |
1092 | raise JSONRPCValidationError(colander_exc=err) |
|
1093 | raise JSONRPCValidationError(colander_exc=err) | |
1093 |
|
1094 | |||
1094 | # save validated data back into the updates dict |
|
1095 | # save validated data back into the updates dict | |
1095 | validated_updates = dict( |
|
1096 | validated_updates = dict( | |
1096 | repo_name=schema_data['repo_group']['repo_name_without_group'], |
|
1097 | repo_name=schema_data['repo_group']['repo_name_without_group'], | |
1097 | repo_group=schema_data['repo_group']['repo_group_id'], |
|
1098 | repo_group=schema_data['repo_group']['repo_group_id'], | |
1098 |
|
1099 | |||
1099 | user=schema_data['repo_owner'], |
|
1100 | user=schema_data['repo_owner'], | |
1100 | repo_description=schema_data['repo_description'], |
|
1101 | repo_description=schema_data['repo_description'], | |
1101 | repo_private=schema_data['repo_private'], |
|
1102 | repo_private=schema_data['repo_private'], | |
1102 | clone_uri=schema_data['repo_clone_uri'], |
|
1103 | clone_uri=schema_data['repo_clone_uri'], | |
1103 | push_uri=schema_data['repo_push_uri'], |
|
1104 | push_uri=schema_data['repo_push_uri'], | |
1104 | repo_landing_rev=schema_data['repo_landing_commit_ref'], |
|
1105 | repo_landing_rev=schema_data['repo_landing_commit_ref'], | |
1105 | repo_enable_statistics=schema_data['repo_enable_statistics'], |
|
1106 | repo_enable_statistics=schema_data['repo_enable_statistics'], | |
1106 | repo_enable_locking=schema_data['repo_enable_locking'], |
|
1107 | repo_enable_locking=schema_data['repo_enable_locking'], | |
1107 | repo_enable_downloads=schema_data['repo_enable_downloads'], |
|
1108 | repo_enable_downloads=schema_data['repo_enable_downloads'], | |
1108 | ) |
|
1109 | ) | |
1109 |
|
1110 | |||
1110 | if schema_data['repo_fork_of']: |
|
1111 | if schema_data['repo_fork_of']: | |
1111 | fork_repo = get_repo_or_error(schema_data['repo_fork_of']) |
|
1112 | fork_repo = get_repo_or_error(schema_data['repo_fork_of']) | |
1112 | validated_updates['fork_id'] = fork_repo.repo_id |
|
1113 | validated_updates['fork_id'] = fork_repo.repo_id | |
1113 |
|
1114 | |||
1114 | # extra fields |
|
1115 | # extra fields | |
1115 | fields = parse_args(Optional.extract(fields), key_prefix='ex_') |
|
1116 | fields = parse_args(Optional.extract(fields), key_prefix='ex_') | |
1116 | if fields: |
|
1117 | if fields: | |
1117 | validated_updates.update(fields) |
|
1118 | validated_updates.update(fields) | |
1118 |
|
1119 | |||
1119 | try: |
|
1120 | try: | |
1120 | RepoModel().update(repo, **validated_updates) |
|
1121 | RepoModel().update(repo, **validated_updates) | |
1121 | audit_logger.store_api( |
|
1122 | audit_logger.store_api( | |
1122 | 'repo.edit', action_data={'old_data': old_values}, |
|
1123 | 'repo.edit', action_data={'old_data': old_values}, | |
1123 | user=apiuser, repo=repo) |
|
1124 | user=apiuser, repo=repo) | |
1124 | Session().commit() |
|
1125 | Session().commit() | |
1125 | return { |
|
1126 | return { | |
1126 | 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name), |
|
1127 | 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name), | |
1127 | 'repository': repo.get_api_data(include_secrets=include_secrets) |
|
1128 | 'repository': repo.get_api_data(include_secrets=include_secrets) | |
1128 | } |
|
1129 | } | |
1129 | except Exception: |
|
1130 | except Exception: | |
1130 | log.exception( |
|
1131 | log.exception( | |
1131 | u"Exception while trying to update the repository %s", |
|
1132 | u"Exception while trying to update the repository %s", | |
1132 | repoid) |
|
1133 | repoid) | |
1133 | raise JSONRPCError('failed to update repo `%s`' % repoid) |
|
1134 | raise JSONRPCError('failed to update repo `%s`' % repoid) | |
1134 |
|
1135 | |||
1135 |
|
1136 | |||
1136 | @jsonrpc_method() |
|
1137 | @jsonrpc_method() | |
1137 | def fork_repo(request, apiuser, repoid, fork_name, |
|
1138 | def fork_repo(request, apiuser, repoid, fork_name, | |
1138 | owner=Optional(OAttr('apiuser')), |
|
1139 | owner=Optional(OAttr('apiuser')), | |
1139 | description=Optional(''), |
|
1140 | description=Optional(''), | |
1140 | private=Optional(False), |
|
1141 | private=Optional(False), | |
1141 | clone_uri=Optional(None), |
|
1142 | clone_uri=Optional(None), | |
1142 | landing_rev=Optional(None), |
|
1143 | landing_rev=Optional(None), | |
1143 | copy_permissions=Optional(False)): |
|
1144 | copy_permissions=Optional(False)): | |
1144 | """ |
|
1145 | """ | |
1145 | Creates a fork of the specified |repo|. |
|
1146 | Creates a fork of the specified |repo|. | |
1146 |
|
1147 | |||
1147 | * If the fork_name contains "/", fork will be created inside |
|
1148 | * If the fork_name contains "/", fork will be created inside | |
1148 | a repository group or nested repository groups |
|
1149 | a repository group or nested repository groups | |
1149 |
|
1150 | |||
1150 | For example "foo/bar/fork-repo" will create fork called "fork-repo" |
|
1151 | For example "foo/bar/fork-repo" will create fork called "fork-repo" | |
1151 | inside group "foo/bar". You have to have permissions to access and |
|
1152 | inside group "foo/bar". You have to have permissions to access and | |
1152 | write to the last repository group ("bar" in this example) |
|
1153 | write to the last repository group ("bar" in this example) | |
1153 |
|
1154 | |||
1154 | This command can only be run using an |authtoken| with minimum |
|
1155 | This command can only be run using an |authtoken| with minimum | |
1155 | read permissions of the forked repo, create fork permissions for an user. |
|
1156 | read permissions of the forked repo, create fork permissions for an user. | |
1156 |
|
1157 | |||
1157 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1158 | :param apiuser: This is filled automatically from the |authtoken|. | |
1158 | :type apiuser: AuthUser |
|
1159 | :type apiuser: AuthUser | |
1159 | :param repoid: Set repository name or repository ID. |
|
1160 | :param repoid: Set repository name or repository ID. | |
1160 | :type repoid: str or int |
|
1161 | :type repoid: str or int | |
1161 | :param fork_name: Set the fork name, including it's repository group membership. |
|
1162 | :param fork_name: Set the fork name, including it's repository group membership. | |
1162 | :type fork_name: str |
|
1163 | :type fork_name: str | |
1163 | :param owner: Set the fork owner. |
|
1164 | :param owner: Set the fork owner. | |
1164 | :type owner: str |
|
1165 | :type owner: str | |
1165 | :param description: Set the fork description. |
|
1166 | :param description: Set the fork description. | |
1166 | :type description: str |
|
1167 | :type description: str | |
1167 | :param copy_permissions: Copy permissions from parent |repo|. The |
|
1168 | :param copy_permissions: Copy permissions from parent |repo|. The | |
1168 | default is False. |
|
1169 | default is False. | |
1169 | :type copy_permissions: bool |
|
1170 | :type copy_permissions: bool | |
1170 | :param private: Make the fork private. The default is False. |
|
1171 | :param private: Make the fork private. The default is False. | |
1171 | :type private: bool |
|
1172 | :type private: bool | |
1172 | :param landing_rev: Set the landing revision. E.g branch:default, book:dev, rev:abcd |
|
1173 | :param landing_rev: Set the landing revision. E.g branch:default, book:dev, rev:abcd | |
1173 |
|
1174 | |||
1174 | Example output: |
|
1175 | Example output: | |
1175 |
|
1176 | |||
1176 | .. code-block:: bash |
|
1177 | .. code-block:: bash | |
1177 |
|
1178 | |||
1178 | id : <id_for_response> |
|
1179 | id : <id_for_response> | |
1179 | api_key : "<api_key>" |
|
1180 | api_key : "<api_key>" | |
1180 | args: { |
|
1181 | args: { | |
1181 | "repoid" : "<reponame or repo_id>", |
|
1182 | "repoid" : "<reponame or repo_id>", | |
1182 | "fork_name": "<forkname>", |
|
1183 | "fork_name": "<forkname>", | |
1183 | "owner": "<username or user_id = Optional(=apiuser)>", |
|
1184 | "owner": "<username or user_id = Optional(=apiuser)>", | |
1184 | "description": "<description>", |
|
1185 | "description": "<description>", | |
1185 | "copy_permissions": "<bool>", |
|
1186 | "copy_permissions": "<bool>", | |
1186 | "private": "<bool>", |
|
1187 | "private": "<bool>", | |
1187 | "landing_rev": "<landing_rev>" |
|
1188 | "landing_rev": "<landing_rev>" | |
1188 | } |
|
1189 | } | |
1189 |
|
1190 | |||
1190 | Example error output: |
|
1191 | Example error output: | |
1191 |
|
1192 | |||
1192 | .. code-block:: bash |
|
1193 | .. code-block:: bash | |
1193 |
|
1194 | |||
1194 | id : <id_given_in_input> |
|
1195 | id : <id_given_in_input> | |
1195 | result: { |
|
1196 | result: { | |
1196 | "msg": "Created fork of `<reponame>` as `<forkname>`", |
|
1197 | "msg": "Created fork of `<reponame>` as `<forkname>`", | |
1197 | "success": true, |
|
1198 | "success": true, | |
1198 | "task": "<celery task id or None if done sync>" |
|
1199 | "task": "<celery task id or None if done sync>" | |
1199 | } |
|
1200 | } | |
1200 | error: null |
|
1201 | error: null | |
1201 |
|
1202 | |||
1202 | """ |
|
1203 | """ | |
1203 |
|
1204 | |||
1204 | repo = get_repo_or_error(repoid) |
|
1205 | repo = get_repo_or_error(repoid) | |
1205 | repo_name = repo.repo_name |
|
1206 | repo_name = repo.repo_name | |
1206 |
|
1207 | |||
1207 | if not has_superadmin_permission(apiuser): |
|
1208 | if not has_superadmin_permission(apiuser): | |
1208 | # check if we have at least read permission for |
|
1209 | # check if we have at least read permission for | |
1209 | # this repo that we fork ! |
|
1210 | # this repo that we fork ! | |
1210 | _perms = ( |
|
1211 | _perms = ( | |
1211 | 'repository.admin', 'repository.write', 'repository.read') |
|
1212 | 'repository.admin', 'repository.write', 'repository.read') | |
1212 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1213 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
1213 |
|
1214 | |||
1214 | # check if the regular user has at least fork permissions as well |
|
1215 | # check if the regular user has at least fork permissions as well | |
1215 | if not HasPermissionAnyApi('hg.fork.repository')(user=apiuser): |
|
1216 | if not HasPermissionAnyApi('hg.fork.repository')(user=apiuser): | |
1216 | raise JSONRPCForbidden() |
|
1217 | raise JSONRPCForbidden() | |
1217 |
|
1218 | |||
1218 | # check if user can set owner parameter |
|
1219 | # check if user can set owner parameter | |
1219 | owner = validate_set_owner_permissions(apiuser, owner) |
|
1220 | owner = validate_set_owner_permissions(apiuser, owner) | |
1220 |
|
1221 | |||
1221 | description = Optional.extract(description) |
|
1222 | description = Optional.extract(description) | |
1222 | copy_permissions = Optional.extract(copy_permissions) |
|
1223 | copy_permissions = Optional.extract(copy_permissions) | |
1223 | clone_uri = Optional.extract(clone_uri) |
|
1224 | clone_uri = Optional.extract(clone_uri) | |
1224 |
|
1225 | |||
1225 | landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type) |
|
1226 | landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type) | |
1226 | ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate) |
|
1227 | ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate) | |
1227 | ref_choices = list(set(ref_choices + [landing_ref])) |
|
1228 | ref_choices = list(set(ref_choices + [landing_ref])) | |
1228 | landing_commit_ref = Optional.extract(landing_rev) or landing_ref |
|
1229 | landing_commit_ref = Optional.extract(landing_rev) or landing_ref | |
1229 |
|
1230 | |||
1230 | private = Optional.extract(private) |
|
1231 | private = Optional.extract(private) | |
1231 |
|
1232 | |||
1232 | schema = repo_schema.RepoSchema().bind( |
|
1233 | schema = repo_schema.RepoSchema().bind( | |
1233 | repo_type_options=rhodecode.BACKENDS.keys(), |
|
1234 | repo_type_options=rhodecode.BACKENDS.keys(), | |
1234 | repo_ref_options=ref_choices, |
|
1235 | repo_ref_options=ref_choices, | |
1235 | repo_type=repo.repo_type, |
|
1236 | repo_type=repo.repo_type, | |
1236 | # user caller |
|
1237 | # user caller | |
1237 | user=apiuser) |
|
1238 | user=apiuser) | |
1238 |
|
1239 | |||
1239 | try: |
|
1240 | try: | |
1240 | schema_data = schema.deserialize(dict( |
|
1241 | schema_data = schema.deserialize(dict( | |
1241 | repo_name=fork_name, |
|
1242 | repo_name=fork_name, | |
1242 | repo_type=repo.repo_type, |
|
1243 | repo_type=repo.repo_type, | |
1243 | repo_owner=owner.username, |
|
1244 | repo_owner=owner.username, | |
1244 | repo_description=description, |
|
1245 | repo_description=description, | |
1245 | repo_landing_commit_ref=landing_commit_ref, |
|
1246 | repo_landing_commit_ref=landing_commit_ref, | |
1246 | repo_clone_uri=clone_uri, |
|
1247 | repo_clone_uri=clone_uri, | |
1247 | repo_private=private, |
|
1248 | repo_private=private, | |
1248 | repo_copy_permissions=copy_permissions)) |
|
1249 | repo_copy_permissions=copy_permissions)) | |
1249 | except validation_schema.Invalid as err: |
|
1250 | except validation_schema.Invalid as err: | |
1250 | raise JSONRPCValidationError(colander_exc=err) |
|
1251 | raise JSONRPCValidationError(colander_exc=err) | |
1251 |
|
1252 | |||
1252 | try: |
|
1253 | try: | |
1253 | data = { |
|
1254 | data = { | |
1254 | 'fork_parent_id': repo.repo_id, |
|
1255 | 'fork_parent_id': repo.repo_id, | |
1255 |
|
1256 | |||
1256 | 'repo_name': schema_data['repo_group']['repo_name_without_group'], |
|
1257 | 'repo_name': schema_data['repo_group']['repo_name_without_group'], | |
1257 | 'repo_name_full': schema_data['repo_name'], |
|
1258 | 'repo_name_full': schema_data['repo_name'], | |
1258 | 'repo_group': schema_data['repo_group']['repo_group_id'], |
|
1259 | 'repo_group': schema_data['repo_group']['repo_group_id'], | |
1259 | 'repo_type': schema_data['repo_type'], |
|
1260 | 'repo_type': schema_data['repo_type'], | |
1260 | 'description': schema_data['repo_description'], |
|
1261 | 'description': schema_data['repo_description'], | |
1261 | 'private': schema_data['repo_private'], |
|
1262 | 'private': schema_data['repo_private'], | |
1262 | 'copy_permissions': schema_data['repo_copy_permissions'], |
|
1263 | 'copy_permissions': schema_data['repo_copy_permissions'], | |
1263 | 'landing_rev': schema_data['repo_landing_commit_ref'], |
|
1264 | 'landing_rev': schema_data['repo_landing_commit_ref'], | |
1264 | } |
|
1265 | } | |
1265 |
|
1266 | |||
1266 | task = RepoModel().create_fork(data, cur_user=owner.user_id) |
|
1267 | task = RepoModel().create_fork(data, cur_user=owner.user_id) | |
1267 | # no commit, it's done in RepoModel, or async via celery |
|
1268 | # no commit, it's done in RepoModel, or async via celery | |
1268 | task_id = get_task_id(task) |
|
1269 | task_id = get_task_id(task) | |
1269 |
|
1270 | |||
1270 | return { |
|
1271 | return { | |
1271 | 'msg': 'Created fork of `%s` as `%s`' % ( |
|
1272 | 'msg': 'Created fork of `%s` as `%s`' % ( | |
1272 | repo.repo_name, schema_data['repo_name']), |
|
1273 | repo.repo_name, schema_data['repo_name']), | |
1273 | 'success': True, # cannot return the repo data here since fork |
|
1274 | 'success': True, # cannot return the repo data here since fork | |
1274 | # can be done async |
|
1275 | # can be done async | |
1275 | 'task': task_id |
|
1276 | 'task': task_id | |
1276 | } |
|
1277 | } | |
1277 | except Exception: |
|
1278 | except Exception: | |
1278 | log.exception( |
|
1279 | log.exception( | |
1279 | u"Exception while trying to create fork %s", |
|
1280 | u"Exception while trying to create fork %s", | |
1280 | schema_data['repo_name']) |
|
1281 | schema_data['repo_name']) | |
1281 | raise JSONRPCError( |
|
1282 | raise JSONRPCError( | |
1282 | 'failed to fork repository `%s` as `%s`' % ( |
|
1283 | 'failed to fork repository `%s` as `%s`' % ( | |
1283 | repo_name, schema_data['repo_name'])) |
|
1284 | repo_name, schema_data['repo_name'])) | |
1284 |
|
1285 | |||
1285 |
|
1286 | |||
1286 | @jsonrpc_method() |
|
1287 | @jsonrpc_method() | |
1287 | def delete_repo(request, apiuser, repoid, forks=Optional('')): |
|
1288 | def delete_repo(request, apiuser, repoid, forks=Optional('')): | |
1288 | """ |
|
1289 | """ | |
1289 | Deletes a repository. |
|
1290 | Deletes a repository. | |
1290 |
|
1291 | |||
1291 | * When the `forks` parameter is set it's possible to detach or delete |
|
1292 | * When the `forks` parameter is set it's possible to detach or delete | |
1292 | forks of deleted repository. |
|
1293 | forks of deleted repository. | |
1293 |
|
1294 | |||
1294 | This command can only be run using an |authtoken| with admin |
|
1295 | This command can only be run using an |authtoken| with admin | |
1295 | permissions on the |repo|. |
|
1296 | permissions on the |repo|. | |
1296 |
|
1297 | |||
1297 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1298 | :param apiuser: This is filled automatically from the |authtoken|. | |
1298 | :type apiuser: AuthUser |
|
1299 | :type apiuser: AuthUser | |
1299 | :param repoid: Set the repository name or repository ID. |
|
1300 | :param repoid: Set the repository name or repository ID. | |
1300 | :type repoid: str or int |
|
1301 | :type repoid: str or int | |
1301 | :param forks: Set to `detach` or `delete` forks from the |repo|. |
|
1302 | :param forks: Set to `detach` or `delete` forks from the |repo|. | |
1302 | :type forks: Optional(str) |
|
1303 | :type forks: Optional(str) | |
1303 |
|
1304 | |||
1304 | Example error output: |
|
1305 | Example error output: | |
1305 |
|
1306 | |||
1306 | .. code-block:: bash |
|
1307 | .. code-block:: bash | |
1307 |
|
1308 | |||
1308 | id : <id_given_in_input> |
|
1309 | id : <id_given_in_input> | |
1309 | result: { |
|
1310 | result: { | |
1310 | "msg": "Deleted repository `<reponame>`", |
|
1311 | "msg": "Deleted repository `<reponame>`", | |
1311 | "success": true |
|
1312 | "success": true | |
1312 | } |
|
1313 | } | |
1313 | error: null |
|
1314 | error: null | |
1314 | """ |
|
1315 | """ | |
1315 |
|
1316 | |||
1316 | repo = get_repo_or_error(repoid) |
|
1317 | repo = get_repo_or_error(repoid) | |
1317 | repo_name = repo.repo_name |
|
1318 | repo_name = repo.repo_name | |
1318 | if not has_superadmin_permission(apiuser): |
|
1319 | if not has_superadmin_permission(apiuser): | |
1319 | _perms = ('repository.admin',) |
|
1320 | _perms = ('repository.admin',) | |
1320 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1321 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
1321 |
|
1322 | |||
1322 | try: |
|
1323 | try: | |
1323 | handle_forks = Optional.extract(forks) |
|
1324 | handle_forks = Optional.extract(forks) | |
1324 | _forks_msg = '' |
|
1325 | _forks_msg = '' | |
1325 | _forks = [f for f in repo.forks] |
|
1326 | _forks = [f for f in repo.forks] | |
1326 | if handle_forks == 'detach': |
|
1327 | if handle_forks == 'detach': | |
1327 | _forks_msg = ' ' + 'Detached %s forks' % len(_forks) |
|
1328 | _forks_msg = ' ' + 'Detached %s forks' % len(_forks) | |
1328 | elif handle_forks == 'delete': |
|
1329 | elif handle_forks == 'delete': | |
1329 | _forks_msg = ' ' + 'Deleted %s forks' % len(_forks) |
|
1330 | _forks_msg = ' ' + 'Deleted %s forks' % len(_forks) | |
1330 | elif _forks: |
|
1331 | elif _forks: | |
1331 | raise JSONRPCError( |
|
1332 | raise JSONRPCError( | |
1332 | 'Cannot delete `%s` it still contains attached forks' % |
|
1333 | 'Cannot delete `%s` it still contains attached forks' % | |
1333 | (repo.repo_name,) |
|
1334 | (repo.repo_name,) | |
1334 | ) |
|
1335 | ) | |
1335 | old_data = repo.get_api_data() |
|
1336 | old_data = repo.get_api_data() | |
1336 | RepoModel().delete(repo, forks=forks) |
|
1337 | RepoModel().delete(repo, forks=forks) | |
1337 |
|
1338 | |||
1338 | repo = audit_logger.RepoWrap(repo_id=None, |
|
1339 | repo = audit_logger.RepoWrap(repo_id=None, | |
1339 | repo_name=repo.repo_name) |
|
1340 | repo_name=repo.repo_name) | |
1340 |
|
1341 | |||
1341 | audit_logger.store_api( |
|
1342 | audit_logger.store_api( | |
1342 | 'repo.delete', action_data={'old_data': old_data}, |
|
1343 | 'repo.delete', action_data={'old_data': old_data}, | |
1343 | user=apiuser, repo=repo) |
|
1344 | user=apiuser, repo=repo) | |
1344 |
|
1345 | |||
1345 | ScmModel().mark_for_invalidation(repo_name, delete=True) |
|
1346 | ScmModel().mark_for_invalidation(repo_name, delete=True) | |
1346 | Session().commit() |
|
1347 | Session().commit() | |
1347 | return { |
|
1348 | return { | |
1348 | 'msg': 'Deleted repository `%s`%s' % (repo_name, _forks_msg), |
|
1349 | 'msg': 'Deleted repository `%s`%s' % (repo_name, _forks_msg), | |
1349 | 'success': True |
|
1350 | 'success': True | |
1350 | } |
|
1351 | } | |
1351 | except Exception: |
|
1352 | except Exception: | |
1352 | log.exception("Exception occurred while trying to delete repo") |
|
1353 | log.exception("Exception occurred while trying to delete repo") | |
1353 | raise JSONRPCError( |
|
1354 | raise JSONRPCError( | |
1354 | 'failed to delete repository `%s`' % (repo_name,) |
|
1355 | 'failed to delete repository `%s`' % (repo_name,) | |
1355 | ) |
|
1356 | ) | |
1356 |
|
1357 | |||
1357 |
|
1358 | |||
1358 | #TODO: marcink, change name ? |
|
1359 | #TODO: marcink, change name ? | |
1359 | @jsonrpc_method() |
|
1360 | @jsonrpc_method() | |
1360 | def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)): |
|
1361 | def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)): | |
1361 | """ |
|
1362 | """ | |
1362 | Invalidates the cache for the specified repository. |
|
1363 | Invalidates the cache for the specified repository. | |
1363 |
|
1364 | |||
1364 | This command can only be run using an |authtoken| with admin rights to |
|
1365 | This command can only be run using an |authtoken| with admin rights to | |
1365 | the specified repository. |
|
1366 | the specified repository. | |
1366 |
|
1367 | |||
1367 | This command takes the following options: |
|
1368 | This command takes the following options: | |
1368 |
|
1369 | |||
1369 | :param apiuser: This is filled automatically from |authtoken|. |
|
1370 | :param apiuser: This is filled automatically from |authtoken|. | |
1370 | :type apiuser: AuthUser |
|
1371 | :type apiuser: AuthUser | |
1371 | :param repoid: Sets the repository name or repository ID. |
|
1372 | :param repoid: Sets the repository name or repository ID. | |
1372 | :type repoid: str or int |
|
1373 | :type repoid: str or int | |
1373 | :param delete_keys: This deletes the invalidated keys instead of |
|
1374 | :param delete_keys: This deletes the invalidated keys instead of | |
1374 | just flagging them. |
|
1375 | just flagging them. | |
1375 | :type delete_keys: Optional(``True`` | ``False``) |
|
1376 | :type delete_keys: Optional(``True`` | ``False``) | |
1376 |
|
1377 | |||
1377 | Example output: |
|
1378 | Example output: | |
1378 |
|
1379 | |||
1379 | .. code-block:: bash |
|
1380 | .. code-block:: bash | |
1380 |
|
1381 | |||
1381 | id : <id_given_in_input> |
|
1382 | id : <id_given_in_input> | |
1382 | result : { |
|
1383 | result : { | |
1383 | 'msg': Cache for repository `<repository name>` was invalidated, |
|
1384 | 'msg': Cache for repository `<repository name>` was invalidated, | |
1384 | 'repository': <repository name> |
|
1385 | 'repository': <repository name> | |
1385 | } |
|
1386 | } | |
1386 | error : null |
|
1387 | error : null | |
1387 |
|
1388 | |||
1388 | Example error output: |
|
1389 | Example error output: | |
1389 |
|
1390 | |||
1390 | .. code-block:: bash |
|
1391 | .. code-block:: bash | |
1391 |
|
1392 | |||
1392 | id : <id_given_in_input> |
|
1393 | id : <id_given_in_input> | |
1393 | result : null |
|
1394 | result : null | |
1394 | error : { |
|
1395 | error : { | |
1395 | 'Error occurred during cache invalidation action' |
|
1396 | 'Error occurred during cache invalidation action' | |
1396 | } |
|
1397 | } | |
1397 |
|
1398 | |||
1398 | """ |
|
1399 | """ | |
1399 |
|
1400 | |||
1400 | repo = get_repo_or_error(repoid) |
|
1401 | repo = get_repo_or_error(repoid) | |
1401 | if not has_superadmin_permission(apiuser): |
|
1402 | if not has_superadmin_permission(apiuser): | |
1402 | _perms = ('repository.admin', 'repository.write',) |
|
1403 | _perms = ('repository.admin', 'repository.write',) | |
1403 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1404 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
1404 |
|
1405 | |||
1405 | delete = Optional.extract(delete_keys) |
|
1406 | delete = Optional.extract(delete_keys) | |
1406 | try: |
|
1407 | try: | |
1407 | ScmModel().mark_for_invalidation(repo.repo_name, delete=delete) |
|
1408 | ScmModel().mark_for_invalidation(repo.repo_name, delete=delete) | |
1408 | return { |
|
1409 | return { | |
1409 | 'msg': 'Cache for repository `%s` was invalidated' % (repoid,), |
|
1410 | 'msg': 'Cache for repository `%s` was invalidated' % (repoid,), | |
1410 | 'repository': repo.repo_name |
|
1411 | 'repository': repo.repo_name | |
1411 | } |
|
1412 | } | |
1412 | except Exception: |
|
1413 | except Exception: | |
1413 | log.exception( |
|
1414 | log.exception( | |
1414 | "Exception occurred while trying to invalidate repo cache") |
|
1415 | "Exception occurred while trying to invalidate repo cache") | |
1415 | raise JSONRPCError( |
|
1416 | raise JSONRPCError( | |
1416 | 'Error occurred during cache invalidation action' |
|
1417 | 'Error occurred during cache invalidation action' | |
1417 | ) |
|
1418 | ) | |
1418 |
|
1419 | |||
1419 |
|
1420 | |||
1420 | #TODO: marcink, change name ? |
|
1421 | #TODO: marcink, change name ? | |
1421 | @jsonrpc_method() |
|
1422 | @jsonrpc_method() | |
1422 | def lock(request, apiuser, repoid, locked=Optional(None), |
|
1423 | def lock(request, apiuser, repoid, locked=Optional(None), | |
1423 | userid=Optional(OAttr('apiuser'))): |
|
1424 | userid=Optional(OAttr('apiuser'))): | |
1424 | """ |
|
1425 | """ | |
1425 | Sets the lock state of the specified |repo| by the given user. |
|
1426 | Sets the lock state of the specified |repo| by the given user. | |
1426 | From more information, see :ref:`repo-locking`. |
|
1427 | From more information, see :ref:`repo-locking`. | |
1427 |
|
1428 | |||
1428 | * If the ``userid`` option is not set, the repository is locked to the |
|
1429 | * If the ``userid`` option is not set, the repository is locked to the | |
1429 | user who called the method. |
|
1430 | user who called the method. | |
1430 | * If the ``locked`` parameter is not set, the current lock state of the |
|
1431 | * If the ``locked`` parameter is not set, the current lock state of the | |
1431 | repository is displayed. |
|
1432 | repository is displayed. | |
1432 |
|
1433 | |||
1433 | This command can only be run using an |authtoken| with admin rights to |
|
1434 | This command can only be run using an |authtoken| with admin rights to | |
1434 | the specified repository. |
|
1435 | the specified repository. | |
1435 |
|
1436 | |||
1436 | This command takes the following options: |
|
1437 | This command takes the following options: | |
1437 |
|
1438 | |||
1438 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1439 | :param apiuser: This is filled automatically from the |authtoken|. | |
1439 | :type apiuser: AuthUser |
|
1440 | :type apiuser: AuthUser | |
1440 | :param repoid: Sets the repository name or repository ID. |
|
1441 | :param repoid: Sets the repository name or repository ID. | |
1441 | :type repoid: str or int |
|
1442 | :type repoid: str or int | |
1442 | :param locked: Sets the lock state. |
|
1443 | :param locked: Sets the lock state. | |
1443 | :type locked: Optional(``True`` | ``False``) |
|
1444 | :type locked: Optional(``True`` | ``False``) | |
1444 | :param userid: Set the repository lock to this user. |
|
1445 | :param userid: Set the repository lock to this user. | |
1445 | :type userid: Optional(str or int) |
|
1446 | :type userid: Optional(str or int) | |
1446 |
|
1447 | |||
1447 | Example error output: |
|
1448 | Example error output: | |
1448 |
|
1449 | |||
1449 | .. code-block:: bash |
|
1450 | .. code-block:: bash | |
1450 |
|
1451 | |||
1451 | id : <id_given_in_input> |
|
1452 | id : <id_given_in_input> | |
1452 | result : { |
|
1453 | result : { | |
1453 | 'repo': '<reponame>', |
|
1454 | 'repo': '<reponame>', | |
1454 | 'locked': <bool: lock state>, |
|
1455 | 'locked': <bool: lock state>, | |
1455 | 'locked_since': <int: lock timestamp>, |
|
1456 | 'locked_since': <int: lock timestamp>, | |
1456 | 'locked_by': <username of person who made the lock>, |
|
1457 | 'locked_by': <username of person who made the lock>, | |
1457 | 'lock_reason': <str: reason for locking>, |
|
1458 | 'lock_reason': <str: reason for locking>, | |
1458 | 'lock_state_changed': <bool: True if lock state has been changed in this request>, |
|
1459 | 'lock_state_changed': <bool: True if lock state has been changed in this request>, | |
1459 | 'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.' |
|
1460 | 'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.' | |
1460 | or |
|
1461 | or | |
1461 | 'msg': 'Repo `<repository name>` not locked.' |
|
1462 | 'msg': 'Repo `<repository name>` not locked.' | |
1462 | or |
|
1463 | or | |
1463 | 'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`' |
|
1464 | 'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`' | |
1464 | } |
|
1465 | } | |
1465 | error : null |
|
1466 | error : null | |
1466 |
|
1467 | |||
1467 | Example error output: |
|
1468 | Example error output: | |
1468 |
|
1469 | |||
1469 | .. code-block:: bash |
|
1470 | .. code-block:: bash | |
1470 |
|
1471 | |||
1471 | id : <id_given_in_input> |
|
1472 | id : <id_given_in_input> | |
1472 | result : null |
|
1473 | result : null | |
1473 | error : { |
|
1474 | error : { | |
1474 | 'Error occurred locking repository `<reponame>`' |
|
1475 | 'Error occurred locking repository `<reponame>`' | |
1475 | } |
|
1476 | } | |
1476 | """ |
|
1477 | """ | |
1477 |
|
1478 | |||
1478 | repo = get_repo_or_error(repoid) |
|
1479 | repo = get_repo_or_error(repoid) | |
1479 | if not has_superadmin_permission(apiuser): |
|
1480 | if not has_superadmin_permission(apiuser): | |
1480 | # check if we have at least write permission for this repo ! |
|
1481 | # check if we have at least write permission for this repo ! | |
1481 | _perms = ('repository.admin', 'repository.write',) |
|
1482 | _perms = ('repository.admin', 'repository.write',) | |
1482 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1483 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
1483 |
|
1484 | |||
1484 | # make sure normal user does not pass someone else userid, |
|
1485 | # make sure normal user does not pass someone else userid, | |
1485 | # he is not allowed to do that |
|
1486 | # he is not allowed to do that | |
1486 | if not isinstance(userid, Optional) and userid != apiuser.user_id: |
|
1487 | if not isinstance(userid, Optional) and userid != apiuser.user_id: | |
1487 | raise JSONRPCError('userid is not the same as your user') |
|
1488 | raise JSONRPCError('userid is not the same as your user') | |
1488 |
|
1489 | |||
1489 | if isinstance(userid, Optional): |
|
1490 | if isinstance(userid, Optional): | |
1490 | userid = apiuser.user_id |
|
1491 | userid = apiuser.user_id | |
1491 |
|
1492 | |||
1492 | user = get_user_or_error(userid) |
|
1493 | user = get_user_or_error(userid) | |
1493 |
|
1494 | |||
1494 | if isinstance(locked, Optional): |
|
1495 | if isinstance(locked, Optional): | |
1495 | lockobj = repo.locked |
|
1496 | lockobj = repo.locked | |
1496 |
|
1497 | |||
1497 | if lockobj[0] is None: |
|
1498 | if lockobj[0] is None: | |
1498 | _d = { |
|
1499 | _d = { | |
1499 | 'repo': repo.repo_name, |
|
1500 | 'repo': repo.repo_name, | |
1500 | 'locked': False, |
|
1501 | 'locked': False, | |
1501 | 'locked_since': None, |
|
1502 | 'locked_since': None, | |
1502 | 'locked_by': None, |
|
1503 | 'locked_by': None, | |
1503 | 'lock_reason': None, |
|
1504 | 'lock_reason': None, | |
1504 | 'lock_state_changed': False, |
|
1505 | 'lock_state_changed': False, | |
1505 | 'msg': 'Repo `%s` not locked.' % repo.repo_name |
|
1506 | 'msg': 'Repo `%s` not locked.' % repo.repo_name | |
1506 | } |
|
1507 | } | |
1507 | return _d |
|
1508 | return _d | |
1508 | else: |
|
1509 | else: | |
1509 | _user_id, _time, _reason = lockobj |
|
1510 | _user_id, _time, _reason = lockobj | |
1510 | lock_user = get_user_or_error(userid) |
|
1511 | lock_user = get_user_or_error(userid) | |
1511 | _d = { |
|
1512 | _d = { | |
1512 | 'repo': repo.repo_name, |
|
1513 | 'repo': repo.repo_name, | |
1513 | 'locked': True, |
|
1514 | 'locked': True, | |
1514 | 'locked_since': _time, |
|
1515 | 'locked_since': _time, | |
1515 | 'locked_by': lock_user.username, |
|
1516 | 'locked_by': lock_user.username, | |
1516 | 'lock_reason': _reason, |
|
1517 | 'lock_reason': _reason, | |
1517 | 'lock_state_changed': False, |
|
1518 | 'lock_state_changed': False, | |
1518 | 'msg': ('Repo `%s` locked by `%s` on `%s`.' |
|
1519 | 'msg': ('Repo `%s` locked by `%s` on `%s`.' | |
1519 | % (repo.repo_name, lock_user.username, |
|
1520 | % (repo.repo_name, lock_user.username, | |
1520 | json.dumps(time_to_datetime(_time)))) |
|
1521 | json.dumps(time_to_datetime(_time)))) | |
1521 | } |
|
1522 | } | |
1522 | return _d |
|
1523 | return _d | |
1523 |
|
1524 | |||
1524 | # force locked state through a flag |
|
1525 | # force locked state through a flag | |
1525 | else: |
|
1526 | else: | |
1526 | locked = str2bool(locked) |
|
1527 | locked = str2bool(locked) | |
1527 | lock_reason = Repository.LOCK_API |
|
1528 | lock_reason = Repository.LOCK_API | |
1528 | try: |
|
1529 | try: | |
1529 | if locked: |
|
1530 | if locked: | |
1530 | lock_time = time.time() |
|
1531 | lock_time = time.time() | |
1531 | Repository.lock(repo, user.user_id, lock_time, lock_reason) |
|
1532 | Repository.lock(repo, user.user_id, lock_time, lock_reason) | |
1532 | else: |
|
1533 | else: | |
1533 | lock_time = None |
|
1534 | lock_time = None | |
1534 | Repository.unlock(repo) |
|
1535 | Repository.unlock(repo) | |
1535 | _d = { |
|
1536 | _d = { | |
1536 | 'repo': repo.repo_name, |
|
1537 | 'repo': repo.repo_name, | |
1537 | 'locked': locked, |
|
1538 | 'locked': locked, | |
1538 | 'locked_since': lock_time, |
|
1539 | 'locked_since': lock_time, | |
1539 | 'locked_by': user.username, |
|
1540 | 'locked_by': user.username, | |
1540 | 'lock_reason': lock_reason, |
|
1541 | 'lock_reason': lock_reason, | |
1541 | 'lock_state_changed': True, |
|
1542 | 'lock_state_changed': True, | |
1542 | 'msg': ('User `%s` set lock state for repo `%s` to `%s`' |
|
1543 | 'msg': ('User `%s` set lock state for repo `%s` to `%s`' | |
1543 | % (user.username, repo.repo_name, locked)) |
|
1544 | % (user.username, repo.repo_name, locked)) | |
1544 | } |
|
1545 | } | |
1545 | return _d |
|
1546 | return _d | |
1546 | except Exception: |
|
1547 | except Exception: | |
1547 | log.exception( |
|
1548 | log.exception( | |
1548 | "Exception occurred while trying to lock repository") |
|
1549 | "Exception occurred while trying to lock repository") | |
1549 | raise JSONRPCError( |
|
1550 | raise JSONRPCError( | |
1550 | 'Error occurred locking repository `%s`' % repo.repo_name |
|
1551 | 'Error occurred locking repository `%s`' % repo.repo_name | |
1551 | ) |
|
1552 | ) | |
1552 |
|
1553 | |||
1553 |
|
1554 | |||
1554 | @jsonrpc_method() |
|
1555 | @jsonrpc_method() | |
1555 | def comment_commit( |
|
1556 | def comment_commit( | |
1556 | request, apiuser, repoid, commit_id, message, status=Optional(None), |
|
1557 | request, apiuser, repoid, commit_id, message, status=Optional(None), | |
1557 | comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE), |
|
1558 | comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE), | |
1558 | resolves_comment_id=Optional(None), extra_recipients=Optional([]), |
|
1559 | resolves_comment_id=Optional(None), extra_recipients=Optional([]), | |
1559 | userid=Optional(OAttr('apiuser')), send_email=Optional(True)): |
|
1560 | userid=Optional(OAttr('apiuser')), send_email=Optional(True)): | |
1560 | """ |
|
1561 | """ | |
1561 | Set a commit comment, and optionally change the status of the commit. |
|
1562 | Set a commit comment, and optionally change the status of the commit. | |
1562 |
|
1563 | |||
1563 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1564 | :param apiuser: This is filled automatically from the |authtoken|. | |
1564 | :type apiuser: AuthUser |
|
1565 | :type apiuser: AuthUser | |
1565 | :param repoid: Set the repository name or repository ID. |
|
1566 | :param repoid: Set the repository name or repository ID. | |
1566 | :type repoid: str or int |
|
1567 | :type repoid: str or int | |
1567 | :param commit_id: Specify the commit_id for which to set a comment. |
|
1568 | :param commit_id: Specify the commit_id for which to set a comment. | |
1568 | :type commit_id: str |
|
1569 | :type commit_id: str | |
1569 | :param message: The comment text. |
|
1570 | :param message: The comment text. | |
1570 | :type message: str |
|
1571 | :type message: str | |
1571 | :param status: (**Optional**) status of commit, one of: 'not_reviewed', |
|
1572 | :param status: (**Optional**) status of commit, one of: 'not_reviewed', | |
1572 | 'approved', 'rejected', 'under_review' |
|
1573 | 'approved', 'rejected', 'under_review' | |
1573 | :type status: str |
|
1574 | :type status: str | |
1574 | :param comment_type: Comment type, one of: 'note', 'todo' |
|
1575 | :param comment_type: Comment type, one of: 'note', 'todo' | |
1575 | :type comment_type: Optional(str), default: 'note' |
|
1576 | :type comment_type: Optional(str), default: 'note' | |
1576 | :param resolves_comment_id: id of comment which this one will resolve |
|
1577 | :param resolves_comment_id: id of comment which this one will resolve | |
1577 | :type resolves_comment_id: Optional(int) |
|
1578 | :type resolves_comment_id: Optional(int) | |
1578 | :param extra_recipients: list of user ids or usernames to add |
|
1579 | :param extra_recipients: list of user ids or usernames to add | |
1579 | notifications for this comment. Acts like a CC for notification |
|
1580 | notifications for this comment. Acts like a CC for notification | |
1580 | :type extra_recipients: Optional(list) |
|
1581 | :type extra_recipients: Optional(list) | |
1581 | :param userid: Set the user name of the comment creator. |
|
1582 | :param userid: Set the user name of the comment creator. | |
1582 | :type userid: Optional(str or int) |
|
1583 | :type userid: Optional(str or int) | |
1583 | :param send_email: Define if this comment should also send email notification |
|
1584 | :param send_email: Define if this comment should also send email notification | |
1584 | :type send_email: Optional(bool) |
|
1585 | :type send_email: Optional(bool) | |
1585 |
|
1586 | |||
1586 | Example error output: |
|
1587 | Example error output: | |
1587 |
|
1588 | |||
1588 | .. code-block:: bash |
|
1589 | .. code-block:: bash | |
1589 |
|
1590 | |||
1590 | { |
|
1591 | { | |
1591 | "id" : <id_given_in_input>, |
|
1592 | "id" : <id_given_in_input>, | |
1592 | "result" : { |
|
1593 | "result" : { | |
1593 | "msg": "Commented on commit `<commit_id>` for repository `<repoid>`", |
|
1594 | "msg": "Commented on commit `<commit_id>` for repository `<repoid>`", | |
1594 | "status_change": null or <status>, |
|
1595 | "status_change": null or <status>, | |
1595 | "success": true |
|
1596 | "success": true | |
1596 | }, |
|
1597 | }, | |
1597 | "error" : null |
|
1598 | "error" : null | |
1598 | } |
|
1599 | } | |
1599 |
|
1600 | |||
1600 | """ |
|
1601 | """ | |
1601 | repo = get_repo_or_error(repoid) |
|
1602 | repo = get_repo_or_error(repoid) | |
1602 | if not has_superadmin_permission(apiuser): |
|
1603 | if not has_superadmin_permission(apiuser): | |
1603 | _perms = ('repository.read', 'repository.write', 'repository.admin') |
|
1604 | _perms = ('repository.read', 'repository.write', 'repository.admin') | |
1604 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1605 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
1605 |
|
1606 | |||
1606 | try: |
|
1607 | try: | |
1607 | commit = repo.scm_instance().get_commit(commit_id=commit_id) |
|
1608 | commit = repo.scm_instance().get_commit(commit_id=commit_id) | |
1608 | commit_id = commit.raw_id |
|
1609 | commit_id = commit.raw_id | |
1609 | except Exception as e: |
|
1610 | except Exception as e: | |
1610 | log.exception('Failed to fetch commit') |
|
1611 | log.exception('Failed to fetch commit') | |
1611 | raise JSONRPCError(safe_str(e)) |
|
1612 | raise JSONRPCError(safe_str(e)) | |
1612 |
|
1613 | |||
1613 | if isinstance(userid, Optional): |
|
1614 | if isinstance(userid, Optional): | |
1614 | userid = apiuser.user_id |
|
1615 | userid = apiuser.user_id | |
1615 |
|
1616 | |||
1616 | user = get_user_or_error(userid) |
|
1617 | user = get_user_or_error(userid) | |
1617 | status = Optional.extract(status) |
|
1618 | status = Optional.extract(status) | |
1618 | comment_type = Optional.extract(comment_type) |
|
1619 | comment_type = Optional.extract(comment_type) | |
1619 | resolves_comment_id = Optional.extract(resolves_comment_id) |
|
1620 | resolves_comment_id = Optional.extract(resolves_comment_id) | |
1620 | extra_recipients = Optional.extract(extra_recipients) |
|
1621 | extra_recipients = Optional.extract(extra_recipients) | |
1621 | send_email = Optional.extract(send_email, binary=True) |
|
1622 | send_email = Optional.extract(send_email, binary=True) | |
1622 |
|
1623 | |||
1623 | allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES] |
|
1624 | allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES] | |
1624 | if status and status not in allowed_statuses: |
|
1625 | if status and status not in allowed_statuses: | |
1625 | raise JSONRPCError('Bad status, must be on ' |
|
1626 | raise JSONRPCError('Bad status, must be on ' | |
1626 | 'of %s got %s' % (allowed_statuses, status,)) |
|
1627 | 'of %s got %s' % (allowed_statuses, status,)) | |
1627 |
|
1628 | |||
1628 | if resolves_comment_id: |
|
1629 | if resolves_comment_id: | |
1629 | comment = ChangesetComment.get(resolves_comment_id) |
|
1630 | comment = ChangesetComment.get(resolves_comment_id) | |
1630 | if not comment: |
|
1631 | if not comment: | |
1631 | raise JSONRPCError( |
|
1632 | raise JSONRPCError( | |
1632 | 'Invalid resolves_comment_id `%s` for this commit.' |
|
1633 | 'Invalid resolves_comment_id `%s` for this commit.' | |
1633 | % resolves_comment_id) |
|
1634 | % resolves_comment_id) | |
1634 | if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO: |
|
1635 | if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO: | |
1635 | raise JSONRPCError( |
|
1636 | raise JSONRPCError( | |
1636 | 'Comment `%s` is wrong type for setting status to resolved.' |
|
1637 | 'Comment `%s` is wrong type for setting status to resolved.' | |
1637 | % resolves_comment_id) |
|
1638 | % resolves_comment_id) | |
1638 |
|
1639 | |||
1639 | try: |
|
1640 | try: | |
1640 | rc_config = SettingsModel().get_all_settings() |
|
1641 | rc_config = SettingsModel().get_all_settings() | |
1641 | renderer = rc_config.get('rhodecode_markup_renderer', 'rst') |
|
1642 | renderer = rc_config.get('rhodecode_markup_renderer', 'rst') | |
1642 | status_change_label = ChangesetStatus.get_status_lbl(status) |
|
1643 | status_change_label = ChangesetStatus.get_status_lbl(status) | |
1643 | comment = CommentsModel().create( |
|
1644 | comment = CommentsModel().create( | |
1644 | message, repo, user, commit_id=commit_id, |
|
1645 | message, repo, user, commit_id=commit_id, | |
1645 | status_change=status_change_label, |
|
1646 | status_change=status_change_label, | |
1646 | status_change_type=status, |
|
1647 | status_change_type=status, | |
1647 | renderer=renderer, |
|
1648 | renderer=renderer, | |
1648 | comment_type=comment_type, |
|
1649 | comment_type=comment_type, | |
1649 | resolves_comment_id=resolves_comment_id, |
|
1650 | resolves_comment_id=resolves_comment_id, | |
1650 | auth_user=apiuser, |
|
1651 | auth_user=apiuser, | |
1651 | extra_recipients=extra_recipients, |
|
1652 | extra_recipients=extra_recipients, | |
1652 | send_email=send_email |
|
1653 | send_email=send_email | |
1653 | ) |
|
1654 | ) | |
1654 | if status: |
|
1655 | if status: | |
1655 | # also do a status change |
|
1656 | # also do a status change | |
1656 | try: |
|
1657 | try: | |
1657 | ChangesetStatusModel().set_status( |
|
1658 | ChangesetStatusModel().set_status( | |
1658 | repo, status, user, comment, revision=commit_id, |
|
1659 | repo, status, user, comment, revision=commit_id, | |
1659 | dont_allow_on_closed_pull_request=True |
|
1660 | dont_allow_on_closed_pull_request=True | |
1660 | ) |
|
1661 | ) | |
1661 | except StatusChangeOnClosedPullRequestError: |
|
1662 | except StatusChangeOnClosedPullRequestError: | |
1662 | log.exception( |
|
1663 | log.exception( | |
1663 | "Exception occurred while trying to change repo commit status") |
|
1664 | "Exception occurred while trying to change repo commit status") | |
1664 | msg = ('Changing status on a commit associated with ' |
|
1665 | msg = ('Changing status on a commit associated with ' | |
1665 | 'a closed pull request is not allowed') |
|
1666 | 'a closed pull request is not allowed') | |
1666 | raise JSONRPCError(msg) |
|
1667 | raise JSONRPCError(msg) | |
1667 |
|
1668 | |||
1668 | CommentsModel().trigger_commit_comment_hook( |
|
1669 | CommentsModel().trigger_commit_comment_hook( | |
1669 | repo, apiuser, 'create', |
|
1670 | repo, apiuser, 'create', | |
1670 | data={'comment': comment, 'commit': commit}) |
|
1671 | data={'comment': comment, 'commit': commit}) | |
1671 |
|
1672 | |||
1672 | Session().commit() |
|
1673 | Session().commit() | |
1673 | return { |
|
1674 | return { | |
1674 | 'msg': ( |
|
1675 | 'msg': ( | |
1675 | 'Commented on commit `%s` for repository `%s`' % ( |
|
1676 | 'Commented on commit `%s` for repository `%s`' % ( | |
1676 | comment.revision, repo.repo_name)), |
|
1677 | comment.revision, repo.repo_name)), | |
1677 | 'status_change': status, |
|
1678 | 'status_change': status, | |
1678 | 'success': True, |
|
1679 | 'success': True, | |
1679 | } |
|
1680 | } | |
1680 | except JSONRPCError: |
|
1681 | except JSONRPCError: | |
1681 | # catch any inside errors, and re-raise them to prevent from |
|
1682 | # catch any inside errors, and re-raise them to prevent from | |
1682 | # below global catch to silence them |
|
1683 | # below global catch to silence them | |
1683 | raise |
|
1684 | raise | |
1684 | except Exception: |
|
1685 | except Exception: | |
1685 | log.exception("Exception occurred while trying to comment on commit") |
|
1686 | log.exception("Exception occurred while trying to comment on commit") | |
1686 | raise JSONRPCError( |
|
1687 | raise JSONRPCError( | |
1687 | 'failed to set comment on repository `%s`' % (repo.repo_name,) |
|
1688 | 'failed to set comment on repository `%s`' % (repo.repo_name,) | |
1688 | ) |
|
1689 | ) | |
1689 |
|
1690 | |||
1690 |
|
1691 | |||
1691 | @jsonrpc_method() |
|
1692 | @jsonrpc_method() | |
1692 | def get_repo_comments(request, apiuser, repoid, |
|
1693 | def get_repo_comments(request, apiuser, repoid, | |
1693 | commit_id=Optional(None), comment_type=Optional(None), |
|
1694 | commit_id=Optional(None), comment_type=Optional(None), | |
1694 | userid=Optional(None)): |
|
1695 | userid=Optional(None)): | |
1695 | """ |
|
1696 | """ | |
1696 | Get all comments for a repository |
|
1697 | Get all comments for a repository | |
1697 |
|
1698 | |||
1698 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1699 | :param apiuser: This is filled automatically from the |authtoken|. | |
1699 | :type apiuser: AuthUser |
|
1700 | :type apiuser: AuthUser | |
1700 | :param repoid: Set the repository name or repository ID. |
|
1701 | :param repoid: Set the repository name or repository ID. | |
1701 | :type repoid: str or int |
|
1702 | :type repoid: str or int | |
1702 | :param commit_id: Optionally filter the comments by the commit_id |
|
1703 | :param commit_id: Optionally filter the comments by the commit_id | |
1703 | :type commit_id: Optional(str), default: None |
|
1704 | :type commit_id: Optional(str), default: None | |
1704 | :param comment_type: Optionally filter the comments by the comment_type |
|
1705 | :param comment_type: Optionally filter the comments by the comment_type | |
1705 | one of: 'note', 'todo' |
|
1706 | one of: 'note', 'todo' | |
1706 | :type comment_type: Optional(str), default: None |
|
1707 | :type comment_type: Optional(str), default: None | |
1707 | :param userid: Optionally filter the comments by the author of comment |
|
1708 | :param userid: Optionally filter the comments by the author of comment | |
1708 | :type userid: Optional(str or int), Default: None |
|
1709 | :type userid: Optional(str or int), Default: None | |
1709 |
|
1710 | |||
1710 | Example error output: |
|
1711 | Example error output: | |
1711 |
|
1712 | |||
1712 | .. code-block:: bash |
|
1713 | .. code-block:: bash | |
1713 |
|
1714 | |||
1714 | { |
|
1715 | { | |
1715 | "id" : <id_given_in_input>, |
|
1716 | "id" : <id_given_in_input>, | |
1716 | "result" : [ |
|
1717 | "result" : [ | |
1717 | { |
|
1718 | { | |
1718 | "comment_author": <USER_DETAILS>, |
|
1719 | "comment_author": <USER_DETAILS>, | |
1719 | "comment_created_on": "2017-02-01T14:38:16.309", |
|
1720 | "comment_created_on": "2017-02-01T14:38:16.309", | |
1720 | "comment_f_path": "file.txt", |
|
1721 | "comment_f_path": "file.txt", | |
1721 | "comment_id": 282, |
|
1722 | "comment_id": 282, | |
1722 | "comment_lineno": "n1", |
|
1723 | "comment_lineno": "n1", | |
1723 | "comment_resolved_by": null, |
|
1724 | "comment_resolved_by": null, | |
1724 | "comment_status": [], |
|
1725 | "comment_status": [], | |
1725 | "comment_text": "This file needs a header", |
|
1726 | "comment_text": "This file needs a header", | |
1726 | "comment_type": "todo", |
|
1727 | "comment_type": "todo", | |
1727 | "comment_last_version: 0 |
|
1728 | "comment_last_version: 0 | |
1728 | } |
|
1729 | } | |
1729 | ], |
|
1730 | ], | |
1730 | "error" : null |
|
1731 | "error" : null | |
1731 | } |
|
1732 | } | |
1732 |
|
1733 | |||
1733 | """ |
|
1734 | """ | |
1734 | repo = get_repo_or_error(repoid) |
|
1735 | repo = get_repo_or_error(repoid) | |
1735 | if not has_superadmin_permission(apiuser): |
|
1736 | if not has_superadmin_permission(apiuser): | |
1736 | _perms = ('repository.read', 'repository.write', 'repository.admin') |
|
1737 | _perms = ('repository.read', 'repository.write', 'repository.admin') | |
1737 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1738 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
1738 |
|
1739 | |||
1739 | commit_id = Optional.extract(commit_id) |
|
1740 | commit_id = Optional.extract(commit_id) | |
1740 |
|
1741 | |||
1741 | userid = Optional.extract(userid) |
|
1742 | userid = Optional.extract(userid) | |
1742 | if userid: |
|
1743 | if userid: | |
1743 | user = get_user_or_error(userid) |
|
1744 | user = get_user_or_error(userid) | |
1744 | else: |
|
1745 | else: | |
1745 | user = None |
|
1746 | user = None | |
1746 |
|
1747 | |||
1747 | comment_type = Optional.extract(comment_type) |
|
1748 | comment_type = Optional.extract(comment_type) | |
1748 | if comment_type and comment_type not in ChangesetComment.COMMENT_TYPES: |
|
1749 | if comment_type and comment_type not in ChangesetComment.COMMENT_TYPES: | |
1749 | raise JSONRPCError( |
|
1750 | raise JSONRPCError( | |
1750 | 'comment_type must be one of `{}` got {}'.format( |
|
1751 | 'comment_type must be one of `{}` got {}'.format( | |
1751 | ChangesetComment.COMMENT_TYPES, comment_type) |
|
1752 | ChangesetComment.COMMENT_TYPES, comment_type) | |
1752 | ) |
|
1753 | ) | |
1753 |
|
1754 | |||
1754 | comments = CommentsModel().get_repository_comments( |
|
1755 | comments = CommentsModel().get_repository_comments( | |
1755 | repo=repo, comment_type=comment_type, user=user, commit_id=commit_id) |
|
1756 | repo=repo, comment_type=comment_type, user=user, commit_id=commit_id) | |
1756 | return comments |
|
1757 | return comments | |
1757 |
|
1758 | |||
1758 |
|
1759 | |||
1759 | @jsonrpc_method() |
|
1760 | @jsonrpc_method() | |
1760 | def get_comment(request, apiuser, comment_id): |
|
1761 | def get_comment(request, apiuser, comment_id): | |
1761 | """ |
|
1762 | """ | |
1762 | Get single comment from repository or pull_request |
|
1763 | Get single comment from repository or pull_request | |
1763 |
|
1764 | |||
1764 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1765 | :param apiuser: This is filled automatically from the |authtoken|. | |
1765 | :type apiuser: AuthUser |
|
1766 | :type apiuser: AuthUser | |
1766 | :param comment_id: comment id found in the URL of comment |
|
1767 | :param comment_id: comment id found in the URL of comment | |
1767 | :type comment_id: str or int |
|
1768 | :type comment_id: str or int | |
1768 |
|
1769 | |||
1769 | Example error output: |
|
1770 | Example error output: | |
1770 |
|
1771 | |||
1771 | .. code-block:: bash |
|
1772 | .. code-block:: bash | |
1772 |
|
1773 | |||
1773 | { |
|
1774 | { | |
1774 | "id" : <id_given_in_input>, |
|
1775 | "id" : <id_given_in_input>, | |
1775 | "result" : { |
|
1776 | "result" : { | |
1776 | "comment_author": <USER_DETAILS>, |
|
1777 | "comment_author": <USER_DETAILS>, | |
1777 | "comment_created_on": "2017-02-01T14:38:16.309", |
|
1778 | "comment_created_on": "2017-02-01T14:38:16.309", | |
1778 | "comment_f_path": "file.txt", |
|
1779 | "comment_f_path": "file.txt", | |
1779 | "comment_id": 282, |
|
1780 | "comment_id": 282, | |
1780 | "comment_lineno": "n1", |
|
1781 | "comment_lineno": "n1", | |
1781 | "comment_resolved_by": null, |
|
1782 | "comment_resolved_by": null, | |
1782 | "comment_status": [], |
|
1783 | "comment_status": [], | |
1783 | "comment_text": "This file needs a header", |
|
1784 | "comment_text": "This file needs a header", | |
1784 | "comment_type": "todo", |
|
1785 | "comment_type": "todo", | |
1785 | "comment_last_version: 0 |
|
1786 | "comment_last_version: 0 | |
1786 | }, |
|
1787 | }, | |
1787 | "error" : null |
|
1788 | "error" : null | |
1788 | } |
|
1789 | } | |
1789 |
|
1790 | |||
1790 | """ |
|
1791 | """ | |
1791 |
|
1792 | |||
1792 | comment = ChangesetComment.get(comment_id) |
|
1793 | comment = ChangesetComment.get(comment_id) | |
1793 | if not comment: |
|
1794 | if not comment: | |
1794 | raise JSONRPCError('comment `%s` does not exist' % (comment_id,)) |
|
1795 | raise JSONRPCError('comment `%s` does not exist' % (comment_id,)) | |
1795 |
|
1796 | |||
1796 | perms = ('repository.read', 'repository.write', 'repository.admin') |
|
1797 | perms = ('repository.read', 'repository.write', 'repository.admin') | |
1797 | has_comment_perm = HasRepoPermissionAnyApi(*perms)\ |
|
1798 | has_comment_perm = HasRepoPermissionAnyApi(*perms)\ | |
1798 | (user=apiuser, repo_name=comment.repo.repo_name) |
|
1799 | (user=apiuser, repo_name=comment.repo.repo_name) | |
1799 |
|
1800 | |||
1800 | if not has_comment_perm: |
|
1801 | if not has_comment_perm: | |
1801 | raise JSONRPCError('comment `%s` does not exist' % (comment_id,)) |
|
1802 | raise JSONRPCError('comment `%s` does not exist' % (comment_id,)) | |
1802 |
|
1803 | |||
1803 | return comment |
|
1804 | return comment | |
1804 |
|
1805 | |||
1805 |
|
1806 | |||
1806 | @jsonrpc_method() |
|
1807 | @jsonrpc_method() | |
1807 | def edit_comment(request, apiuser, message, comment_id, version, |
|
1808 | def edit_comment(request, apiuser, message, comment_id, version, | |
1808 | userid=Optional(OAttr('apiuser'))): |
|
1809 | userid=Optional(OAttr('apiuser'))): | |
1809 | """ |
|
1810 | """ | |
1810 | Edit comment on the pull request or commit, |
|
1811 | Edit comment on the pull request or commit, | |
1811 | specified by the `comment_id` and version. Initially version should be 0 |
|
1812 | specified by the `comment_id` and version. Initially version should be 0 | |
1812 |
|
1813 | |||
1813 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1814 | :param apiuser: This is filled automatically from the |authtoken|. | |
1814 | :type apiuser: AuthUser |
|
1815 | :type apiuser: AuthUser | |
1815 | :param comment_id: Specify the comment_id for editing |
|
1816 | :param comment_id: Specify the comment_id for editing | |
1816 | :type comment_id: int |
|
1817 | :type comment_id: int | |
1817 | :param version: version of the comment that will be created, starts from 0 |
|
1818 | :param version: version of the comment that will be created, starts from 0 | |
1818 | :type version: int |
|
1819 | :type version: int | |
1819 | :param message: The text content of the comment. |
|
1820 | :param message: The text content of the comment. | |
1820 | :type message: str |
|
1821 | :type message: str | |
1821 | :param userid: Comment on the pull request as this user |
|
1822 | :param userid: Comment on the pull request as this user | |
1822 | :type userid: Optional(str or int) |
|
1823 | :type userid: Optional(str or int) | |
1823 |
|
1824 | |||
1824 | Example output: |
|
1825 | Example output: | |
1825 |
|
1826 | |||
1826 | .. code-block:: bash |
|
1827 | .. code-block:: bash | |
1827 |
|
1828 | |||
1828 | id : <id_given_in_input> |
|
1829 | id : <id_given_in_input> | |
1829 | result : { |
|
1830 | result : { | |
1830 | "comment": "<comment data>", |
|
1831 | "comment": "<comment data>", | |
1831 | "version": "<Integer>", |
|
1832 | "version": "<Integer>", | |
1832 | }, |
|
1833 | }, | |
1833 | error : null |
|
1834 | error : null | |
1834 | """ |
|
1835 | """ | |
1835 |
|
1836 | |||
1836 | auth_user = apiuser |
|
1837 | auth_user = apiuser | |
1837 | comment = ChangesetComment.get(comment_id) |
|
1838 | comment = ChangesetComment.get(comment_id) | |
1838 | if not comment: |
|
1839 | if not comment: | |
1839 | raise JSONRPCError('comment `%s` does not exist' % (comment_id,)) |
|
1840 | raise JSONRPCError('comment `%s` does not exist' % (comment_id,)) | |
1840 |
|
1841 | |||
1841 | is_super_admin = has_superadmin_permission(apiuser) |
|
1842 | is_super_admin = has_superadmin_permission(apiuser) | |
1842 | is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\ |
|
1843 | is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\ | |
1843 | (user=apiuser, repo_name=comment.repo.repo_name) |
|
1844 | (user=apiuser, repo_name=comment.repo.repo_name) | |
1844 |
|
1845 | |||
1845 | if not isinstance(userid, Optional): |
|
1846 | if not isinstance(userid, Optional): | |
1846 | if is_super_admin or is_repo_admin: |
|
1847 | if is_super_admin or is_repo_admin: | |
1847 | apiuser = get_user_or_error(userid) |
|
1848 | apiuser = get_user_or_error(userid) | |
1848 | auth_user = apiuser.AuthUser() |
|
1849 | auth_user = apiuser.AuthUser() | |
1849 | else: |
|
1850 | else: | |
1850 | raise JSONRPCError('userid is not the same as your user') |
|
1851 | raise JSONRPCError('userid is not the same as your user') | |
1851 |
|
1852 | |||
1852 | comment_author = comment.author.user_id == auth_user.user_id |
|
1853 | comment_author = comment.author.user_id == auth_user.user_id | |
1853 | if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author): |
|
1854 | if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author): | |
1854 | raise JSONRPCError("you don't have access to edit this comment") |
|
1855 | raise JSONRPCError("you don't have access to edit this comment") | |
1855 |
|
1856 | |||
1856 | try: |
|
1857 | try: | |
1857 | comment_history = CommentsModel().edit( |
|
1858 | comment_history = CommentsModel().edit( | |
1858 | comment_id=comment_id, |
|
1859 | comment_id=comment_id, | |
1859 | text=message, |
|
1860 | text=message, | |
1860 | auth_user=auth_user, |
|
1861 | auth_user=auth_user, | |
1861 | version=version, |
|
1862 | version=version, | |
1862 | ) |
|
1863 | ) | |
1863 | Session().commit() |
|
1864 | Session().commit() | |
1864 | except CommentVersionMismatch: |
|
1865 | except CommentVersionMismatch: | |
1865 | raise JSONRPCError( |
|
1866 | raise JSONRPCError( | |
1866 | 'comment ({}) version ({}) mismatch'.format(comment_id, version) |
|
1867 | 'comment ({}) version ({}) mismatch'.format(comment_id, version) | |
1867 | ) |
|
1868 | ) | |
1868 | if not comment_history and not message: |
|
1869 | if not comment_history and not message: | |
1869 | raise JSONRPCError( |
|
1870 | raise JSONRPCError( | |
1870 | "comment ({}) can't be changed with empty string".format(comment_id) |
|
1871 | "comment ({}) can't be changed with empty string".format(comment_id) | |
1871 | ) |
|
1872 | ) | |
|
1873 | ||||
|
1874 | if comment.pull_request: | |||
|
1875 | pull_request = comment.pull_request | |||
|
1876 | PullRequestModel().trigger_pull_request_hook( | |||
|
1877 | pull_request, apiuser, 'comment_edit', | |||
|
1878 | data={'comment': comment}) | |||
|
1879 | else: | |||
|
1880 | db_repo = comment.repo | |||
|
1881 | commit_id = comment.revision | |||
|
1882 | commit = db_repo.get_commit(commit_id) | |||
|
1883 | CommentsModel().trigger_commit_comment_hook( | |||
|
1884 | db_repo, apiuser, 'edit', | |||
|
1885 | data={'comment': comment, 'commit': commit}) | |||
|
1886 | ||||
1872 | data = { |
|
1887 | data = { | |
1873 | 'comment': comment, |
|
1888 | 'comment': comment, | |
1874 | 'version': comment_history.version if comment_history else None, |
|
1889 | 'version': comment_history.version if comment_history else None, | |
1875 | } |
|
1890 | } | |
1876 | return data |
|
1891 | return data | |
1877 |
|
1892 | |||
1878 |
|
1893 | |||
1879 | # TODO(marcink): write this with all required logic for deleting a comments in PR or commits |
|
1894 | # TODO(marcink): write this with all required logic for deleting a comments in PR or commits | |
1880 | # @jsonrpc_method() |
|
1895 | # @jsonrpc_method() | |
1881 | # def delete_comment(request, apiuser, comment_id): |
|
1896 | # def delete_comment(request, apiuser, comment_id): | |
1882 | # auth_user = apiuser |
|
1897 | # auth_user = apiuser | |
1883 | # |
|
1898 | # | |
1884 | # comment = ChangesetComment.get(comment_id) |
|
1899 | # comment = ChangesetComment.get(comment_id) | |
1885 | # if not comment: |
|
1900 | # if not comment: | |
1886 | # raise JSONRPCError('comment `%s` does not exist' % (comment_id,)) |
|
1901 | # raise JSONRPCError('comment `%s` does not exist' % (comment_id,)) | |
1887 | # |
|
1902 | # | |
1888 | # is_super_admin = has_superadmin_permission(apiuser) |
|
1903 | # is_super_admin = has_superadmin_permission(apiuser) | |
1889 | # is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\ |
|
1904 | # is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\ | |
1890 | # (user=apiuser, repo_name=comment.repo.repo_name) |
|
1905 | # (user=apiuser, repo_name=comment.repo.repo_name) | |
1891 | # |
|
1906 | # | |
1892 | # comment_author = comment.author.user_id == auth_user.user_id |
|
1907 | # comment_author = comment.author.user_id == auth_user.user_id | |
1893 | # if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author): |
|
1908 | # if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author): | |
1894 | # raise JSONRPCError("you don't have access to edit this comment") |
|
1909 | # raise JSONRPCError("you don't have access to edit this comment") | |
1895 |
|
1910 | |||
1896 | @jsonrpc_method() |
|
1911 | @jsonrpc_method() | |
1897 | def grant_user_permission(request, apiuser, repoid, userid, perm): |
|
1912 | def grant_user_permission(request, apiuser, repoid, userid, perm): | |
1898 | """ |
|
1913 | """ | |
1899 | Grant permissions for the specified user on the given repository, |
|
1914 | Grant permissions for the specified user on the given repository, | |
1900 | or update existing permissions if found. |
|
1915 | or update existing permissions if found. | |
1901 |
|
1916 | |||
1902 | This command can only be run using an |authtoken| with admin |
|
1917 | This command can only be run using an |authtoken| with admin | |
1903 | permissions on the |repo|. |
|
1918 | permissions on the |repo|. | |
1904 |
|
1919 | |||
1905 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1920 | :param apiuser: This is filled automatically from the |authtoken|. | |
1906 | :type apiuser: AuthUser |
|
1921 | :type apiuser: AuthUser | |
1907 | :param repoid: Set the repository name or repository ID. |
|
1922 | :param repoid: Set the repository name or repository ID. | |
1908 | :type repoid: str or int |
|
1923 | :type repoid: str or int | |
1909 | :param userid: Set the user name. |
|
1924 | :param userid: Set the user name. | |
1910 | :type userid: str |
|
1925 | :type userid: str | |
1911 | :param perm: Set the user permissions, using the following format |
|
1926 | :param perm: Set the user permissions, using the following format | |
1912 | ``(repository.(none|read|write|admin))`` |
|
1927 | ``(repository.(none|read|write|admin))`` | |
1913 | :type perm: str |
|
1928 | :type perm: str | |
1914 |
|
1929 | |||
1915 | Example output: |
|
1930 | Example output: | |
1916 |
|
1931 | |||
1917 | .. code-block:: bash |
|
1932 | .. code-block:: bash | |
1918 |
|
1933 | |||
1919 | id : <id_given_in_input> |
|
1934 | id : <id_given_in_input> | |
1920 | result: { |
|
1935 | result: { | |
1921 | "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`", |
|
1936 | "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`", | |
1922 | "success": true |
|
1937 | "success": true | |
1923 | } |
|
1938 | } | |
1924 | error: null |
|
1939 | error: null | |
1925 | """ |
|
1940 | """ | |
1926 |
|
1941 | |||
1927 | repo = get_repo_or_error(repoid) |
|
1942 | repo = get_repo_or_error(repoid) | |
1928 | user = get_user_or_error(userid) |
|
1943 | user = get_user_or_error(userid) | |
1929 | perm = get_perm_or_error(perm) |
|
1944 | perm = get_perm_or_error(perm) | |
1930 | if not has_superadmin_permission(apiuser): |
|
1945 | if not has_superadmin_permission(apiuser): | |
1931 | _perms = ('repository.admin',) |
|
1946 | _perms = ('repository.admin',) | |
1932 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1947 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
1933 |
|
1948 | |||
1934 | perm_additions = [[user.user_id, perm.permission_name, "user"]] |
|
1949 | perm_additions = [[user.user_id, perm.permission_name, "user"]] | |
1935 | try: |
|
1950 | try: | |
1936 | changes = RepoModel().update_permissions( |
|
1951 | changes = RepoModel().update_permissions( | |
1937 | repo=repo, perm_additions=perm_additions, cur_user=apiuser) |
|
1952 | repo=repo, perm_additions=perm_additions, cur_user=apiuser) | |
1938 |
|
1953 | |||
1939 | action_data = { |
|
1954 | action_data = { | |
1940 | 'added': changes['added'], |
|
1955 | 'added': changes['added'], | |
1941 | 'updated': changes['updated'], |
|
1956 | 'updated': changes['updated'], | |
1942 | 'deleted': changes['deleted'], |
|
1957 | 'deleted': changes['deleted'], | |
1943 | } |
|
1958 | } | |
1944 | audit_logger.store_api( |
|
1959 | audit_logger.store_api( | |
1945 | 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo) |
|
1960 | 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo) | |
1946 | Session().commit() |
|
1961 | Session().commit() | |
1947 | PermissionModel().flush_user_permission_caches(changes) |
|
1962 | PermissionModel().flush_user_permission_caches(changes) | |
1948 |
|
1963 | |||
1949 | return { |
|
1964 | return { | |
1950 | 'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % ( |
|
1965 | 'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % ( | |
1951 | perm.permission_name, user.username, repo.repo_name |
|
1966 | perm.permission_name, user.username, repo.repo_name | |
1952 | ), |
|
1967 | ), | |
1953 | 'success': True |
|
1968 | 'success': True | |
1954 | } |
|
1969 | } | |
1955 | except Exception: |
|
1970 | except Exception: | |
1956 | log.exception("Exception occurred while trying edit permissions for repo") |
|
1971 | log.exception("Exception occurred while trying edit permissions for repo") | |
1957 | raise JSONRPCError( |
|
1972 | raise JSONRPCError( | |
1958 | 'failed to edit permission for user: `%s` in repo: `%s`' % ( |
|
1973 | 'failed to edit permission for user: `%s` in repo: `%s`' % ( | |
1959 | userid, repoid |
|
1974 | userid, repoid | |
1960 | ) |
|
1975 | ) | |
1961 | ) |
|
1976 | ) | |
1962 |
|
1977 | |||
1963 |
|
1978 | |||
1964 | @jsonrpc_method() |
|
1979 | @jsonrpc_method() | |
1965 | def revoke_user_permission(request, apiuser, repoid, userid): |
|
1980 | def revoke_user_permission(request, apiuser, repoid, userid): | |
1966 | """ |
|
1981 | """ | |
1967 | Revoke permission for a user on the specified repository. |
|
1982 | Revoke permission for a user on the specified repository. | |
1968 |
|
1983 | |||
1969 | This command can only be run using an |authtoken| with admin |
|
1984 | This command can only be run using an |authtoken| with admin | |
1970 | permissions on the |repo|. |
|
1985 | permissions on the |repo|. | |
1971 |
|
1986 | |||
1972 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1987 | :param apiuser: This is filled automatically from the |authtoken|. | |
1973 | :type apiuser: AuthUser |
|
1988 | :type apiuser: AuthUser | |
1974 | :param repoid: Set the repository name or repository ID. |
|
1989 | :param repoid: Set the repository name or repository ID. | |
1975 | :type repoid: str or int |
|
1990 | :type repoid: str or int | |
1976 | :param userid: Set the user name of revoked user. |
|
1991 | :param userid: Set the user name of revoked user. | |
1977 | :type userid: str or int |
|
1992 | :type userid: str or int | |
1978 |
|
1993 | |||
1979 | Example error output: |
|
1994 | Example error output: | |
1980 |
|
1995 | |||
1981 | .. code-block:: bash |
|
1996 | .. code-block:: bash | |
1982 |
|
1997 | |||
1983 | id : <id_given_in_input> |
|
1998 | id : <id_given_in_input> | |
1984 | result: { |
|
1999 | result: { | |
1985 | "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`", |
|
2000 | "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`", | |
1986 | "success": true |
|
2001 | "success": true | |
1987 | } |
|
2002 | } | |
1988 | error: null |
|
2003 | error: null | |
1989 | """ |
|
2004 | """ | |
1990 |
|
2005 | |||
1991 | repo = get_repo_or_error(repoid) |
|
2006 | repo = get_repo_or_error(repoid) | |
1992 | user = get_user_or_error(userid) |
|
2007 | user = get_user_or_error(userid) | |
1993 | if not has_superadmin_permission(apiuser): |
|
2008 | if not has_superadmin_permission(apiuser): | |
1994 | _perms = ('repository.admin',) |
|
2009 | _perms = ('repository.admin',) | |
1995 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2010 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
1996 |
|
2011 | |||
1997 | perm_deletions = [[user.user_id, None, "user"]] |
|
2012 | perm_deletions = [[user.user_id, None, "user"]] | |
1998 | try: |
|
2013 | try: | |
1999 | changes = RepoModel().update_permissions( |
|
2014 | changes = RepoModel().update_permissions( | |
2000 | repo=repo, perm_deletions=perm_deletions, cur_user=user) |
|
2015 | repo=repo, perm_deletions=perm_deletions, cur_user=user) | |
2001 |
|
2016 | |||
2002 | action_data = { |
|
2017 | action_data = { | |
2003 | 'added': changes['added'], |
|
2018 | 'added': changes['added'], | |
2004 | 'updated': changes['updated'], |
|
2019 | 'updated': changes['updated'], | |
2005 | 'deleted': changes['deleted'], |
|
2020 | 'deleted': changes['deleted'], | |
2006 | } |
|
2021 | } | |
2007 | audit_logger.store_api( |
|
2022 | audit_logger.store_api( | |
2008 | 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo) |
|
2023 | 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo) | |
2009 | Session().commit() |
|
2024 | Session().commit() | |
2010 | PermissionModel().flush_user_permission_caches(changes) |
|
2025 | PermissionModel().flush_user_permission_caches(changes) | |
2011 |
|
2026 | |||
2012 | return { |
|
2027 | return { | |
2013 | 'msg': 'Revoked perm for user: `%s` in repo: `%s`' % ( |
|
2028 | 'msg': 'Revoked perm for user: `%s` in repo: `%s`' % ( | |
2014 | user.username, repo.repo_name |
|
2029 | user.username, repo.repo_name | |
2015 | ), |
|
2030 | ), | |
2016 | 'success': True |
|
2031 | 'success': True | |
2017 | } |
|
2032 | } | |
2018 | except Exception: |
|
2033 | except Exception: | |
2019 | log.exception("Exception occurred while trying revoke permissions to repo") |
|
2034 | log.exception("Exception occurred while trying revoke permissions to repo") | |
2020 | raise JSONRPCError( |
|
2035 | raise JSONRPCError( | |
2021 | 'failed to edit permission for user: `%s` in repo: `%s`' % ( |
|
2036 | 'failed to edit permission for user: `%s` in repo: `%s`' % ( | |
2022 | userid, repoid |
|
2037 | userid, repoid | |
2023 | ) |
|
2038 | ) | |
2024 | ) |
|
2039 | ) | |
2025 |
|
2040 | |||
2026 |
|
2041 | |||
2027 | @jsonrpc_method() |
|
2042 | @jsonrpc_method() | |
2028 | def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm): |
|
2043 | def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm): | |
2029 | """ |
|
2044 | """ | |
2030 | Grant permission for a user group on the specified repository, |
|
2045 | Grant permission for a user group on the specified repository, | |
2031 | or update existing permissions. |
|
2046 | or update existing permissions. | |
2032 |
|
2047 | |||
2033 | This command can only be run using an |authtoken| with admin |
|
2048 | This command can only be run using an |authtoken| with admin | |
2034 | permissions on the |repo|. |
|
2049 | permissions on the |repo|. | |
2035 |
|
2050 | |||
2036 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2051 | :param apiuser: This is filled automatically from the |authtoken|. | |
2037 | :type apiuser: AuthUser |
|
2052 | :type apiuser: AuthUser | |
2038 | :param repoid: Set the repository name or repository ID. |
|
2053 | :param repoid: Set the repository name or repository ID. | |
2039 | :type repoid: str or int |
|
2054 | :type repoid: str or int | |
2040 | :param usergroupid: Specify the ID of the user group. |
|
2055 | :param usergroupid: Specify the ID of the user group. | |
2041 | :type usergroupid: str or int |
|
2056 | :type usergroupid: str or int | |
2042 | :param perm: Set the user group permissions using the following |
|
2057 | :param perm: Set the user group permissions using the following | |
2043 | format: (repository.(none|read|write|admin)) |
|
2058 | format: (repository.(none|read|write|admin)) | |
2044 | :type perm: str |
|
2059 | :type perm: str | |
2045 |
|
2060 | |||
2046 | Example output: |
|
2061 | Example output: | |
2047 |
|
2062 | |||
2048 | .. code-block:: bash |
|
2063 | .. code-block:: bash | |
2049 |
|
2064 | |||
2050 | id : <id_given_in_input> |
|
2065 | id : <id_given_in_input> | |
2051 | result : { |
|
2066 | result : { | |
2052 | "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`", |
|
2067 | "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`", | |
2053 | "success": true |
|
2068 | "success": true | |
2054 |
|
2069 | |||
2055 | } |
|
2070 | } | |
2056 | error : null |
|
2071 | error : null | |
2057 |
|
2072 | |||
2058 | Example error output: |
|
2073 | Example error output: | |
2059 |
|
2074 | |||
2060 | .. code-block:: bash |
|
2075 | .. code-block:: bash | |
2061 |
|
2076 | |||
2062 | id : <id_given_in_input> |
|
2077 | id : <id_given_in_input> | |
2063 | result : null |
|
2078 | result : null | |
2064 | error : { |
|
2079 | error : { | |
2065 | "failed to edit permission for user group: `<usergroup>` in repo `<repo>`' |
|
2080 | "failed to edit permission for user group: `<usergroup>` in repo `<repo>`' | |
2066 | } |
|
2081 | } | |
2067 |
|
2082 | |||
2068 | """ |
|
2083 | """ | |
2069 |
|
2084 | |||
2070 | repo = get_repo_or_error(repoid) |
|
2085 | repo = get_repo_or_error(repoid) | |
2071 | perm = get_perm_or_error(perm) |
|
2086 | perm = get_perm_or_error(perm) | |
2072 | if not has_superadmin_permission(apiuser): |
|
2087 | if not has_superadmin_permission(apiuser): | |
2073 | _perms = ('repository.admin',) |
|
2088 | _perms = ('repository.admin',) | |
2074 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2089 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
2075 |
|
2090 | |||
2076 | user_group = get_user_group_or_error(usergroupid) |
|
2091 | user_group = get_user_group_or_error(usergroupid) | |
2077 | if not has_superadmin_permission(apiuser): |
|
2092 | if not has_superadmin_permission(apiuser): | |
2078 | # check if we have at least read permission for this user group ! |
|
2093 | # check if we have at least read permission for this user group ! | |
2079 | _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',) |
|
2094 | _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',) | |
2080 | if not HasUserGroupPermissionAnyApi(*_perms)( |
|
2095 | if not HasUserGroupPermissionAnyApi(*_perms)( | |
2081 | user=apiuser, user_group_name=user_group.users_group_name): |
|
2096 | user=apiuser, user_group_name=user_group.users_group_name): | |
2082 | raise JSONRPCError( |
|
2097 | raise JSONRPCError( | |
2083 | 'user group `%s` does not exist' % (usergroupid,)) |
|
2098 | 'user group `%s` does not exist' % (usergroupid,)) | |
2084 |
|
2099 | |||
2085 | perm_additions = [[user_group.users_group_id, perm.permission_name, "user_group"]] |
|
2100 | perm_additions = [[user_group.users_group_id, perm.permission_name, "user_group"]] | |
2086 | try: |
|
2101 | try: | |
2087 | changes = RepoModel().update_permissions( |
|
2102 | changes = RepoModel().update_permissions( | |
2088 | repo=repo, perm_additions=perm_additions, cur_user=apiuser) |
|
2103 | repo=repo, perm_additions=perm_additions, cur_user=apiuser) | |
2089 | action_data = { |
|
2104 | action_data = { | |
2090 | 'added': changes['added'], |
|
2105 | 'added': changes['added'], | |
2091 | 'updated': changes['updated'], |
|
2106 | 'updated': changes['updated'], | |
2092 | 'deleted': changes['deleted'], |
|
2107 | 'deleted': changes['deleted'], | |
2093 | } |
|
2108 | } | |
2094 | audit_logger.store_api( |
|
2109 | audit_logger.store_api( | |
2095 | 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo) |
|
2110 | 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo) | |
2096 | Session().commit() |
|
2111 | Session().commit() | |
2097 | PermissionModel().flush_user_permission_caches(changes) |
|
2112 | PermissionModel().flush_user_permission_caches(changes) | |
2098 |
|
2113 | |||
2099 | return { |
|
2114 | return { | |
2100 | 'msg': 'Granted perm: `%s` for user group: `%s` in ' |
|
2115 | 'msg': 'Granted perm: `%s` for user group: `%s` in ' | |
2101 | 'repo: `%s`' % ( |
|
2116 | 'repo: `%s`' % ( | |
2102 | perm.permission_name, user_group.users_group_name, |
|
2117 | perm.permission_name, user_group.users_group_name, | |
2103 | repo.repo_name |
|
2118 | repo.repo_name | |
2104 | ), |
|
2119 | ), | |
2105 | 'success': True |
|
2120 | 'success': True | |
2106 | } |
|
2121 | } | |
2107 | except Exception: |
|
2122 | except Exception: | |
2108 | log.exception( |
|
2123 | log.exception( | |
2109 | "Exception occurred while trying change permission on repo") |
|
2124 | "Exception occurred while trying change permission on repo") | |
2110 | raise JSONRPCError( |
|
2125 | raise JSONRPCError( | |
2111 | 'failed to edit permission for user group: `%s` in ' |
|
2126 | 'failed to edit permission for user group: `%s` in ' | |
2112 | 'repo: `%s`' % ( |
|
2127 | 'repo: `%s`' % ( | |
2113 | usergroupid, repo.repo_name |
|
2128 | usergroupid, repo.repo_name | |
2114 | ) |
|
2129 | ) | |
2115 | ) |
|
2130 | ) | |
2116 |
|
2131 | |||
2117 |
|
2132 | |||
2118 | @jsonrpc_method() |
|
2133 | @jsonrpc_method() | |
2119 | def revoke_user_group_permission(request, apiuser, repoid, usergroupid): |
|
2134 | def revoke_user_group_permission(request, apiuser, repoid, usergroupid): | |
2120 | """ |
|
2135 | """ | |
2121 | Revoke the permissions of a user group on a given repository. |
|
2136 | Revoke the permissions of a user group on a given repository. | |
2122 |
|
2137 | |||
2123 | This command can only be run using an |authtoken| with admin |
|
2138 | This command can only be run using an |authtoken| with admin | |
2124 | permissions on the |repo|. |
|
2139 | permissions on the |repo|. | |
2125 |
|
2140 | |||
2126 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2141 | :param apiuser: This is filled automatically from the |authtoken|. | |
2127 | :type apiuser: AuthUser |
|
2142 | :type apiuser: AuthUser | |
2128 | :param repoid: Set the repository name or repository ID. |
|
2143 | :param repoid: Set the repository name or repository ID. | |
2129 | :type repoid: str or int |
|
2144 | :type repoid: str or int | |
2130 | :param usergroupid: Specify the user group ID. |
|
2145 | :param usergroupid: Specify the user group ID. | |
2131 | :type usergroupid: str or int |
|
2146 | :type usergroupid: str or int | |
2132 |
|
2147 | |||
2133 | Example output: |
|
2148 | Example output: | |
2134 |
|
2149 | |||
2135 | .. code-block:: bash |
|
2150 | .. code-block:: bash | |
2136 |
|
2151 | |||
2137 | id : <id_given_in_input> |
|
2152 | id : <id_given_in_input> | |
2138 | result: { |
|
2153 | result: { | |
2139 | "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`", |
|
2154 | "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`", | |
2140 | "success": true |
|
2155 | "success": true | |
2141 | } |
|
2156 | } | |
2142 | error: null |
|
2157 | error: null | |
2143 | """ |
|
2158 | """ | |
2144 |
|
2159 | |||
2145 | repo = get_repo_or_error(repoid) |
|
2160 | repo = get_repo_or_error(repoid) | |
2146 | if not has_superadmin_permission(apiuser): |
|
2161 | if not has_superadmin_permission(apiuser): | |
2147 | _perms = ('repository.admin',) |
|
2162 | _perms = ('repository.admin',) | |
2148 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2163 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
2149 |
|
2164 | |||
2150 | user_group = get_user_group_or_error(usergroupid) |
|
2165 | user_group = get_user_group_or_error(usergroupid) | |
2151 | if not has_superadmin_permission(apiuser): |
|
2166 | if not has_superadmin_permission(apiuser): | |
2152 | # check if we have at least read permission for this user group ! |
|
2167 | # check if we have at least read permission for this user group ! | |
2153 | _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',) |
|
2168 | _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',) | |
2154 | if not HasUserGroupPermissionAnyApi(*_perms)( |
|
2169 | if not HasUserGroupPermissionAnyApi(*_perms)( | |
2155 | user=apiuser, user_group_name=user_group.users_group_name): |
|
2170 | user=apiuser, user_group_name=user_group.users_group_name): | |
2156 | raise JSONRPCError( |
|
2171 | raise JSONRPCError( | |
2157 | 'user group `%s` does not exist' % (usergroupid,)) |
|
2172 | 'user group `%s` does not exist' % (usergroupid,)) | |
2158 |
|
2173 | |||
2159 | perm_deletions = [[user_group.users_group_id, None, "user_group"]] |
|
2174 | perm_deletions = [[user_group.users_group_id, None, "user_group"]] | |
2160 | try: |
|
2175 | try: | |
2161 | changes = RepoModel().update_permissions( |
|
2176 | changes = RepoModel().update_permissions( | |
2162 | repo=repo, perm_deletions=perm_deletions, cur_user=apiuser) |
|
2177 | repo=repo, perm_deletions=perm_deletions, cur_user=apiuser) | |
2163 | action_data = { |
|
2178 | action_data = { | |
2164 | 'added': changes['added'], |
|
2179 | 'added': changes['added'], | |
2165 | 'updated': changes['updated'], |
|
2180 | 'updated': changes['updated'], | |
2166 | 'deleted': changes['deleted'], |
|
2181 | 'deleted': changes['deleted'], | |
2167 | } |
|
2182 | } | |
2168 | audit_logger.store_api( |
|
2183 | audit_logger.store_api( | |
2169 | 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo) |
|
2184 | 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo) | |
2170 | Session().commit() |
|
2185 | Session().commit() | |
2171 | PermissionModel().flush_user_permission_caches(changes) |
|
2186 | PermissionModel().flush_user_permission_caches(changes) | |
2172 |
|
2187 | |||
2173 | return { |
|
2188 | return { | |
2174 | 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % ( |
|
2189 | 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % ( | |
2175 | user_group.users_group_name, repo.repo_name |
|
2190 | user_group.users_group_name, repo.repo_name | |
2176 | ), |
|
2191 | ), | |
2177 | 'success': True |
|
2192 | 'success': True | |
2178 | } |
|
2193 | } | |
2179 | except Exception: |
|
2194 | except Exception: | |
2180 | log.exception("Exception occurred while trying revoke " |
|
2195 | log.exception("Exception occurred while trying revoke " | |
2181 | "user group permission on repo") |
|
2196 | "user group permission on repo") | |
2182 | raise JSONRPCError( |
|
2197 | raise JSONRPCError( | |
2183 | 'failed to edit permission for user group: `%s` in ' |
|
2198 | 'failed to edit permission for user group: `%s` in ' | |
2184 | 'repo: `%s`' % ( |
|
2199 | 'repo: `%s`' % ( | |
2185 | user_group.users_group_name, repo.repo_name |
|
2200 | user_group.users_group_name, repo.repo_name | |
2186 | ) |
|
2201 | ) | |
2187 | ) |
|
2202 | ) | |
2188 |
|
2203 | |||
2189 |
|
2204 | |||
2190 | @jsonrpc_method() |
|
2205 | @jsonrpc_method() | |
2191 | def pull(request, apiuser, repoid, remote_uri=Optional(None)): |
|
2206 | def pull(request, apiuser, repoid, remote_uri=Optional(None)): | |
2192 | """ |
|
2207 | """ | |
2193 | Triggers a pull on the given repository from a remote location. You |
|
2208 | Triggers a pull on the given repository from a remote location. You | |
2194 | can use this to keep remote repositories up-to-date. |
|
2209 | can use this to keep remote repositories up-to-date. | |
2195 |
|
2210 | |||
2196 | This command can only be run using an |authtoken| with admin |
|
2211 | This command can only be run using an |authtoken| with admin | |
2197 | rights to the specified repository. For more information, |
|
2212 | rights to the specified repository. For more information, | |
2198 | see :ref:`config-token-ref`. |
|
2213 | see :ref:`config-token-ref`. | |
2199 |
|
2214 | |||
2200 | This command takes the following options: |
|
2215 | This command takes the following options: | |
2201 |
|
2216 | |||
2202 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2217 | :param apiuser: This is filled automatically from the |authtoken|. | |
2203 | :type apiuser: AuthUser |
|
2218 | :type apiuser: AuthUser | |
2204 | :param repoid: The repository name or repository ID. |
|
2219 | :param repoid: The repository name or repository ID. | |
2205 | :type repoid: str or int |
|
2220 | :type repoid: str or int | |
2206 | :param remote_uri: Optional remote URI to pass in for pull |
|
2221 | :param remote_uri: Optional remote URI to pass in for pull | |
2207 | :type remote_uri: str |
|
2222 | :type remote_uri: str | |
2208 |
|
2223 | |||
2209 | Example output: |
|
2224 | Example output: | |
2210 |
|
2225 | |||
2211 | .. code-block:: bash |
|
2226 | .. code-block:: bash | |
2212 |
|
2227 | |||
2213 | id : <id_given_in_input> |
|
2228 | id : <id_given_in_input> | |
2214 | result : { |
|
2229 | result : { | |
2215 | "msg": "Pulled from url `<remote_url>` on repo `<repository name>`" |
|
2230 | "msg": "Pulled from url `<remote_url>` on repo `<repository name>`" | |
2216 | "repository": "<repository name>" |
|
2231 | "repository": "<repository name>" | |
2217 | } |
|
2232 | } | |
2218 | error : null |
|
2233 | error : null | |
2219 |
|
2234 | |||
2220 | Example error output: |
|
2235 | Example error output: | |
2221 |
|
2236 | |||
2222 | .. code-block:: bash |
|
2237 | .. code-block:: bash | |
2223 |
|
2238 | |||
2224 | id : <id_given_in_input> |
|
2239 | id : <id_given_in_input> | |
2225 | result : null |
|
2240 | result : null | |
2226 | error : { |
|
2241 | error : { | |
2227 | "Unable to push changes from `<remote_url>`" |
|
2242 | "Unable to push changes from `<remote_url>`" | |
2228 | } |
|
2243 | } | |
2229 |
|
2244 | |||
2230 | """ |
|
2245 | """ | |
2231 |
|
2246 | |||
2232 | repo = get_repo_or_error(repoid) |
|
2247 | repo = get_repo_or_error(repoid) | |
2233 | remote_uri = Optional.extract(remote_uri) |
|
2248 | remote_uri = Optional.extract(remote_uri) | |
2234 | remote_uri_display = remote_uri or repo.clone_uri_hidden |
|
2249 | remote_uri_display = remote_uri or repo.clone_uri_hidden | |
2235 | if not has_superadmin_permission(apiuser): |
|
2250 | if not has_superadmin_permission(apiuser): | |
2236 | _perms = ('repository.admin',) |
|
2251 | _perms = ('repository.admin',) | |
2237 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2252 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
2238 |
|
2253 | |||
2239 | try: |
|
2254 | try: | |
2240 | ScmModel().pull_changes( |
|
2255 | ScmModel().pull_changes( | |
2241 | repo.repo_name, apiuser.username, remote_uri=remote_uri) |
|
2256 | repo.repo_name, apiuser.username, remote_uri=remote_uri) | |
2242 | return { |
|
2257 | return { | |
2243 | 'msg': 'Pulled from url `%s` on repo `%s`' % ( |
|
2258 | 'msg': 'Pulled from url `%s` on repo `%s`' % ( | |
2244 | remote_uri_display, repo.repo_name), |
|
2259 | remote_uri_display, repo.repo_name), | |
2245 | 'repository': repo.repo_name |
|
2260 | 'repository': repo.repo_name | |
2246 | } |
|
2261 | } | |
2247 | except Exception: |
|
2262 | except Exception: | |
2248 | log.exception("Exception occurred while trying to " |
|
2263 | log.exception("Exception occurred while trying to " | |
2249 | "pull changes from remote location") |
|
2264 | "pull changes from remote location") | |
2250 | raise JSONRPCError( |
|
2265 | raise JSONRPCError( | |
2251 | 'Unable to pull changes from `%s`' % remote_uri_display |
|
2266 | 'Unable to pull changes from `%s`' % remote_uri_display | |
2252 | ) |
|
2267 | ) | |
2253 |
|
2268 | |||
2254 |
|
2269 | |||
2255 | @jsonrpc_method() |
|
2270 | @jsonrpc_method() | |
2256 | def strip(request, apiuser, repoid, revision, branch): |
|
2271 | def strip(request, apiuser, repoid, revision, branch): | |
2257 | """ |
|
2272 | """ | |
2258 | Strips the given revision from the specified repository. |
|
2273 | Strips the given revision from the specified repository. | |
2259 |
|
2274 | |||
2260 | * This will remove the revision and all of its decendants. |
|
2275 | * This will remove the revision and all of its decendants. | |
2261 |
|
2276 | |||
2262 | This command can only be run using an |authtoken| with admin rights to |
|
2277 | This command can only be run using an |authtoken| with admin rights to | |
2263 | the specified repository. |
|
2278 | the specified repository. | |
2264 |
|
2279 | |||
2265 | This command takes the following options: |
|
2280 | This command takes the following options: | |
2266 |
|
2281 | |||
2267 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2282 | :param apiuser: This is filled automatically from the |authtoken|. | |
2268 | :type apiuser: AuthUser |
|
2283 | :type apiuser: AuthUser | |
2269 | :param repoid: The repository name or repository ID. |
|
2284 | :param repoid: The repository name or repository ID. | |
2270 | :type repoid: str or int |
|
2285 | :type repoid: str or int | |
2271 | :param revision: The revision you wish to strip. |
|
2286 | :param revision: The revision you wish to strip. | |
2272 | :type revision: str |
|
2287 | :type revision: str | |
2273 | :param branch: The branch from which to strip the revision. |
|
2288 | :param branch: The branch from which to strip the revision. | |
2274 | :type branch: str |
|
2289 | :type branch: str | |
2275 |
|
2290 | |||
2276 | Example output: |
|
2291 | Example output: | |
2277 |
|
2292 | |||
2278 | .. code-block:: bash |
|
2293 | .. code-block:: bash | |
2279 |
|
2294 | |||
2280 | id : <id_given_in_input> |
|
2295 | id : <id_given_in_input> | |
2281 | result : { |
|
2296 | result : { | |
2282 | "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'" |
|
2297 | "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'" | |
2283 | "repository": "<repository name>" |
|
2298 | "repository": "<repository name>" | |
2284 | } |
|
2299 | } | |
2285 | error : null |
|
2300 | error : null | |
2286 |
|
2301 | |||
2287 | Example error output: |
|
2302 | Example error output: | |
2288 |
|
2303 | |||
2289 | .. code-block:: bash |
|
2304 | .. code-block:: bash | |
2290 |
|
2305 | |||
2291 | id : <id_given_in_input> |
|
2306 | id : <id_given_in_input> | |
2292 | result : null |
|
2307 | result : null | |
2293 | error : { |
|
2308 | error : { | |
2294 | "Unable to strip commit <commit_hash> from repo `<repository name>`" |
|
2309 | "Unable to strip commit <commit_hash> from repo `<repository name>`" | |
2295 | } |
|
2310 | } | |
2296 |
|
2311 | |||
2297 | """ |
|
2312 | """ | |
2298 |
|
2313 | |||
2299 | repo = get_repo_or_error(repoid) |
|
2314 | repo = get_repo_or_error(repoid) | |
2300 | if not has_superadmin_permission(apiuser): |
|
2315 | if not has_superadmin_permission(apiuser): | |
2301 | _perms = ('repository.admin',) |
|
2316 | _perms = ('repository.admin',) | |
2302 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2317 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
2303 |
|
2318 | |||
2304 | try: |
|
2319 | try: | |
2305 | ScmModel().strip(repo, revision, branch) |
|
2320 | ScmModel().strip(repo, revision, branch) | |
2306 | audit_logger.store_api( |
|
2321 | audit_logger.store_api( | |
2307 | 'repo.commit.strip', action_data={'commit_id': revision}, |
|
2322 | 'repo.commit.strip', action_data={'commit_id': revision}, | |
2308 | repo=repo, |
|
2323 | repo=repo, | |
2309 | user=apiuser, commit=True) |
|
2324 | user=apiuser, commit=True) | |
2310 |
|
2325 | |||
2311 | return { |
|
2326 | return { | |
2312 | 'msg': 'Stripped commit %s from repo `%s`' % ( |
|
2327 | 'msg': 'Stripped commit %s from repo `%s`' % ( | |
2313 | revision, repo.repo_name), |
|
2328 | revision, repo.repo_name), | |
2314 | 'repository': repo.repo_name |
|
2329 | 'repository': repo.repo_name | |
2315 | } |
|
2330 | } | |
2316 | except Exception: |
|
2331 | except Exception: | |
2317 | log.exception("Exception while trying to strip") |
|
2332 | log.exception("Exception while trying to strip") | |
2318 | raise JSONRPCError( |
|
2333 | raise JSONRPCError( | |
2319 | 'Unable to strip commit %s from repo `%s`' % ( |
|
2334 | 'Unable to strip commit %s from repo `%s`' % ( | |
2320 | revision, repo.repo_name) |
|
2335 | revision, repo.repo_name) | |
2321 | ) |
|
2336 | ) | |
2322 |
|
2337 | |||
2323 |
|
2338 | |||
2324 | @jsonrpc_method() |
|
2339 | @jsonrpc_method() | |
2325 | def get_repo_settings(request, apiuser, repoid, key=Optional(None)): |
|
2340 | def get_repo_settings(request, apiuser, repoid, key=Optional(None)): | |
2326 | """ |
|
2341 | """ | |
2327 | Returns all settings for a repository. If key is given it only returns the |
|
2342 | Returns all settings for a repository. If key is given it only returns the | |
2328 | setting identified by the key or null. |
|
2343 | setting identified by the key or null. | |
2329 |
|
2344 | |||
2330 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2345 | :param apiuser: This is filled automatically from the |authtoken|. | |
2331 | :type apiuser: AuthUser |
|
2346 | :type apiuser: AuthUser | |
2332 | :param repoid: The repository name or repository id. |
|
2347 | :param repoid: The repository name or repository id. | |
2333 | :type repoid: str or int |
|
2348 | :type repoid: str or int | |
2334 | :param key: Key of the setting to return. |
|
2349 | :param key: Key of the setting to return. | |
2335 | :type: key: Optional(str) |
|
2350 | :type: key: Optional(str) | |
2336 |
|
2351 | |||
2337 | Example output: |
|
2352 | Example output: | |
2338 |
|
2353 | |||
2339 | .. code-block:: bash |
|
2354 | .. code-block:: bash | |
2340 |
|
2355 | |||
2341 | { |
|
2356 | { | |
2342 | "error": null, |
|
2357 | "error": null, | |
2343 | "id": 237, |
|
2358 | "id": 237, | |
2344 | "result": { |
|
2359 | "result": { | |
2345 | "extensions_largefiles": true, |
|
2360 | "extensions_largefiles": true, | |
2346 | "extensions_evolve": true, |
|
2361 | "extensions_evolve": true, | |
2347 | "hooks_changegroup_push_logger": true, |
|
2362 | "hooks_changegroup_push_logger": true, | |
2348 | "hooks_changegroup_repo_size": false, |
|
2363 | "hooks_changegroup_repo_size": false, | |
2349 | "hooks_outgoing_pull_logger": true, |
|
2364 | "hooks_outgoing_pull_logger": true, | |
2350 | "phases_publish": "True", |
|
2365 | "phases_publish": "True", | |
2351 | "rhodecode_hg_use_rebase_for_merging": true, |
|
2366 | "rhodecode_hg_use_rebase_for_merging": true, | |
2352 | "rhodecode_pr_merge_enabled": true, |
|
2367 | "rhodecode_pr_merge_enabled": true, | |
2353 | "rhodecode_use_outdated_comments": true |
|
2368 | "rhodecode_use_outdated_comments": true | |
2354 | } |
|
2369 | } | |
2355 | } |
|
2370 | } | |
2356 | """ |
|
2371 | """ | |
2357 |
|
2372 | |||
2358 | # Restrict access to this api method to admins only. |
|
2373 | # Restrict access to this api method to admins only. | |
2359 | if not has_superadmin_permission(apiuser): |
|
2374 | if not has_superadmin_permission(apiuser): | |
2360 | raise JSONRPCForbidden() |
|
2375 | raise JSONRPCForbidden() | |
2361 |
|
2376 | |||
2362 | try: |
|
2377 | try: | |
2363 | repo = get_repo_or_error(repoid) |
|
2378 | repo = get_repo_or_error(repoid) | |
2364 | settings_model = VcsSettingsModel(repo=repo) |
|
2379 | settings_model = VcsSettingsModel(repo=repo) | |
2365 | settings = settings_model.get_global_settings() |
|
2380 | settings = settings_model.get_global_settings() | |
2366 | settings.update(settings_model.get_repo_settings()) |
|
2381 | settings.update(settings_model.get_repo_settings()) | |
2367 |
|
2382 | |||
2368 | # If only a single setting is requested fetch it from all settings. |
|
2383 | # If only a single setting is requested fetch it from all settings. | |
2369 | key = Optional.extract(key) |
|
2384 | key = Optional.extract(key) | |
2370 | if key is not None: |
|
2385 | if key is not None: | |
2371 | settings = settings.get(key, None) |
|
2386 | settings = settings.get(key, None) | |
2372 | except Exception: |
|
2387 | except Exception: | |
2373 | msg = 'Failed to fetch settings for repository `{}`'.format(repoid) |
|
2388 | msg = 'Failed to fetch settings for repository `{}`'.format(repoid) | |
2374 | log.exception(msg) |
|
2389 | log.exception(msg) | |
2375 | raise JSONRPCError(msg) |
|
2390 | raise JSONRPCError(msg) | |
2376 |
|
2391 | |||
2377 | return settings |
|
2392 | return settings | |
2378 |
|
2393 | |||
2379 |
|
2394 | |||
2380 | @jsonrpc_method() |
|
2395 | @jsonrpc_method() | |
2381 | def set_repo_settings(request, apiuser, repoid, settings): |
|
2396 | def set_repo_settings(request, apiuser, repoid, settings): | |
2382 | """ |
|
2397 | """ | |
2383 | Update repository settings. Returns true on success. |
|
2398 | Update repository settings. Returns true on success. | |
2384 |
|
2399 | |||
2385 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2400 | :param apiuser: This is filled automatically from the |authtoken|. | |
2386 | :type apiuser: AuthUser |
|
2401 | :type apiuser: AuthUser | |
2387 | :param repoid: The repository name or repository id. |
|
2402 | :param repoid: The repository name or repository id. | |
2388 | :type repoid: str or int |
|
2403 | :type repoid: str or int | |
2389 | :param settings: The new settings for the repository. |
|
2404 | :param settings: The new settings for the repository. | |
2390 | :type: settings: dict |
|
2405 | :type: settings: dict | |
2391 |
|
2406 | |||
2392 | Example output: |
|
2407 | Example output: | |
2393 |
|
2408 | |||
2394 | .. code-block:: bash |
|
2409 | .. code-block:: bash | |
2395 |
|
2410 | |||
2396 | { |
|
2411 | { | |
2397 | "error": null, |
|
2412 | "error": null, | |
2398 | "id": 237, |
|
2413 | "id": 237, | |
2399 | "result": true |
|
2414 | "result": true | |
2400 | } |
|
2415 | } | |
2401 | """ |
|
2416 | """ | |
2402 | # Restrict access to this api method to admins only. |
|
2417 | # Restrict access to this api method to admins only. | |
2403 | if not has_superadmin_permission(apiuser): |
|
2418 | if not has_superadmin_permission(apiuser): | |
2404 | raise JSONRPCForbidden() |
|
2419 | raise JSONRPCForbidden() | |
2405 |
|
2420 | |||
2406 | if type(settings) is not dict: |
|
2421 | if type(settings) is not dict: | |
2407 | raise JSONRPCError('Settings have to be a JSON Object.') |
|
2422 | raise JSONRPCError('Settings have to be a JSON Object.') | |
2408 |
|
2423 | |||
2409 | try: |
|
2424 | try: | |
2410 | settings_model = VcsSettingsModel(repo=repoid) |
|
2425 | settings_model = VcsSettingsModel(repo=repoid) | |
2411 |
|
2426 | |||
2412 | # Merge global, repo and incoming settings. |
|
2427 | # Merge global, repo and incoming settings. | |
2413 | new_settings = settings_model.get_global_settings() |
|
2428 | new_settings = settings_model.get_global_settings() | |
2414 | new_settings.update(settings_model.get_repo_settings()) |
|
2429 | new_settings.update(settings_model.get_repo_settings()) | |
2415 | new_settings.update(settings) |
|
2430 | new_settings.update(settings) | |
2416 |
|
2431 | |||
2417 | # Update the settings. |
|
2432 | # Update the settings. | |
2418 | inherit_global_settings = new_settings.get( |
|
2433 | inherit_global_settings = new_settings.get( | |
2419 | 'inherit_global_settings', False) |
|
2434 | 'inherit_global_settings', False) | |
2420 | settings_model.create_or_update_repo_settings( |
|
2435 | settings_model.create_or_update_repo_settings( | |
2421 | new_settings, inherit_global_settings=inherit_global_settings) |
|
2436 | new_settings, inherit_global_settings=inherit_global_settings) | |
2422 | Session().commit() |
|
2437 | Session().commit() | |
2423 | except Exception: |
|
2438 | except Exception: | |
2424 | msg = 'Failed to update settings for repository `{}`'.format(repoid) |
|
2439 | msg = 'Failed to update settings for repository `{}`'.format(repoid) | |
2425 | log.exception(msg) |
|
2440 | log.exception(msg) | |
2426 | raise JSONRPCError(msg) |
|
2441 | raise JSONRPCError(msg) | |
2427 |
|
2442 | |||
2428 | # Indicate success. |
|
2443 | # Indicate success. | |
2429 | return True |
|
2444 | return True | |
2430 |
|
2445 | |||
2431 |
|
2446 | |||
2432 | @jsonrpc_method() |
|
2447 | @jsonrpc_method() | |
2433 | def maintenance(request, apiuser, repoid): |
|
2448 | def maintenance(request, apiuser, repoid): | |
2434 | """ |
|
2449 | """ | |
2435 | Triggers a maintenance on the given repository. |
|
2450 | Triggers a maintenance on the given repository. | |
2436 |
|
2451 | |||
2437 | This command can only be run using an |authtoken| with admin |
|
2452 | This command can only be run using an |authtoken| with admin | |
2438 | rights to the specified repository. For more information, |
|
2453 | rights to the specified repository. For more information, | |
2439 | see :ref:`config-token-ref`. |
|
2454 | see :ref:`config-token-ref`. | |
2440 |
|
2455 | |||
2441 | This command takes the following options: |
|
2456 | This command takes the following options: | |
2442 |
|
2457 | |||
2443 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2458 | :param apiuser: This is filled automatically from the |authtoken|. | |
2444 | :type apiuser: AuthUser |
|
2459 | :type apiuser: AuthUser | |
2445 | :param repoid: The repository name or repository ID. |
|
2460 | :param repoid: The repository name or repository ID. | |
2446 | :type repoid: str or int |
|
2461 | :type repoid: str or int | |
2447 |
|
2462 | |||
2448 | Example output: |
|
2463 | Example output: | |
2449 |
|
2464 | |||
2450 | .. code-block:: bash |
|
2465 | .. code-block:: bash | |
2451 |
|
2466 | |||
2452 | id : <id_given_in_input> |
|
2467 | id : <id_given_in_input> | |
2453 | result : { |
|
2468 | result : { | |
2454 | "msg": "executed maintenance command", |
|
2469 | "msg": "executed maintenance command", | |
2455 | "executed_actions": [ |
|
2470 | "executed_actions": [ | |
2456 | <action_message>, <action_message2>... |
|
2471 | <action_message>, <action_message2>... | |
2457 | ], |
|
2472 | ], | |
2458 | "repository": "<repository name>" |
|
2473 | "repository": "<repository name>" | |
2459 | } |
|
2474 | } | |
2460 | error : null |
|
2475 | error : null | |
2461 |
|
2476 | |||
2462 | Example error output: |
|
2477 | Example error output: | |
2463 |
|
2478 | |||
2464 | .. code-block:: bash |
|
2479 | .. code-block:: bash | |
2465 |
|
2480 | |||
2466 | id : <id_given_in_input> |
|
2481 | id : <id_given_in_input> | |
2467 | result : null |
|
2482 | result : null | |
2468 | error : { |
|
2483 | error : { | |
2469 | "Unable to execute maintenance on `<reponame>`" |
|
2484 | "Unable to execute maintenance on `<reponame>`" | |
2470 | } |
|
2485 | } | |
2471 |
|
2486 | |||
2472 | """ |
|
2487 | """ | |
2473 |
|
2488 | |||
2474 | repo = get_repo_or_error(repoid) |
|
2489 | repo = get_repo_or_error(repoid) | |
2475 | if not has_superadmin_permission(apiuser): |
|
2490 | if not has_superadmin_permission(apiuser): | |
2476 | _perms = ('repository.admin',) |
|
2491 | _perms = ('repository.admin',) | |
2477 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2492 | validate_repo_permissions(apiuser, repoid, repo, _perms) | |
2478 |
|
2493 | |||
2479 | try: |
|
2494 | try: | |
2480 | maintenance = repo_maintenance.RepoMaintenance() |
|
2495 | maintenance = repo_maintenance.RepoMaintenance() | |
2481 | executed_actions = maintenance.execute(repo) |
|
2496 | executed_actions = maintenance.execute(repo) | |
2482 |
|
2497 | |||
2483 | return { |
|
2498 | return { | |
2484 | 'msg': 'executed maintenance command', |
|
2499 | 'msg': 'executed maintenance command', | |
2485 | 'executed_actions': executed_actions, |
|
2500 | 'executed_actions': executed_actions, | |
2486 | 'repository': repo.repo_name |
|
2501 | 'repository': repo.repo_name | |
2487 | } |
|
2502 | } | |
2488 | except Exception: |
|
2503 | except Exception: | |
2489 | log.exception("Exception occurred while trying to run maintenance") |
|
2504 | log.exception("Exception occurred while trying to run maintenance") | |
2490 | raise JSONRPCError( |
|
2505 | raise JSONRPCError( | |
2491 | 'Unable to execute maintenance on `%s`' % repo.repo_name) |
|
2506 | 'Unable to execute maintenance on `%s`' % repo.repo_name) |
@@ -1,717 +1,723 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | import logging |
|
22 | import logging | |
23 |
|
23 | |||
24 | from pyramid.httpexceptions import ( |
|
24 | from pyramid.httpexceptions import ( | |
25 | HTTPNotFound, HTTPBadRequest, HTTPFound, HTTPForbidden, HTTPConflict) |
|
25 | HTTPNotFound, HTTPBadRequest, HTTPFound, HTTPForbidden, HTTPConflict) | |
26 | from pyramid.view import view_config |
|
26 | from pyramid.view import view_config | |
27 | from pyramid.renderers import render |
|
27 | from pyramid.renderers import render | |
28 | from pyramid.response import Response |
|
28 | from pyramid.response import Response | |
29 |
|
29 | |||
30 | from rhodecode.apps._base import RepoAppView |
|
30 | from rhodecode.apps._base import RepoAppView | |
31 | from rhodecode.apps.file_store import utils as store_utils |
|
31 | from rhodecode.apps.file_store import utils as store_utils | |
32 | from rhodecode.apps.file_store.exceptions import FileNotAllowedException, FileOverSizeException |
|
32 | from rhodecode.apps.file_store.exceptions import FileNotAllowedException, FileOverSizeException | |
33 |
|
33 | |||
34 | from rhodecode.lib import diffs, codeblocks |
|
34 | from rhodecode.lib import diffs, codeblocks | |
35 | from rhodecode.lib.auth import ( |
|
35 | from rhodecode.lib.auth import ( | |
36 | LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired) |
|
36 | LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired) | |
37 |
|
37 | |||
38 | from rhodecode.lib.compat import OrderedDict |
|
38 | from rhodecode.lib.compat import OrderedDict | |
39 | from rhodecode.lib.diffs import ( |
|
39 | from rhodecode.lib.diffs import ( | |
40 | cache_diff, load_cached_diff, diff_cache_exist, get_diff_context, |
|
40 | cache_diff, load_cached_diff, diff_cache_exist, get_diff_context, | |
41 | get_diff_whitespace_flag) |
|
41 | get_diff_whitespace_flag) | |
42 | from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError, CommentVersionMismatch |
|
42 | from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError, CommentVersionMismatch | |
43 | import rhodecode.lib.helpers as h |
|
43 | import rhodecode.lib.helpers as h | |
44 | from rhodecode.lib.utils2 import safe_unicode, str2bool |
|
44 | from rhodecode.lib.utils2 import safe_unicode, str2bool | |
45 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
45 | from rhodecode.lib.vcs.backends.base import EmptyCommit | |
46 | from rhodecode.lib.vcs.exceptions import ( |
|
46 | from rhodecode.lib.vcs.exceptions import ( | |
47 | RepositoryError, CommitDoesNotExistError) |
|
47 | RepositoryError, CommitDoesNotExistError) | |
48 | from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore, \ |
|
48 | from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore, \ | |
49 | ChangesetCommentHistory |
|
49 | ChangesetCommentHistory | |
50 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
50 | from rhodecode.model.changeset_status import ChangesetStatusModel | |
51 | from rhodecode.model.comment import CommentsModel |
|
51 | from rhodecode.model.comment import CommentsModel | |
52 | from rhodecode.model.meta import Session |
|
52 | from rhodecode.model.meta import Session | |
53 | from rhodecode.model.settings import VcsSettingsModel |
|
53 | from rhodecode.model.settings import VcsSettingsModel | |
54 |
|
54 | |||
55 | log = logging.getLogger(__name__) |
|
55 | log = logging.getLogger(__name__) | |
56 |
|
56 | |||
57 |
|
57 | |||
58 | def _update_with_GET(params, request): |
|
58 | def _update_with_GET(params, request): | |
59 | for k in ['diff1', 'diff2', 'diff']: |
|
59 | for k in ['diff1', 'diff2', 'diff']: | |
60 | params[k] += request.GET.getall(k) |
|
60 | params[k] += request.GET.getall(k) | |
61 |
|
61 | |||
62 |
|
62 | |||
63 | class RepoCommitsView(RepoAppView): |
|
63 | class RepoCommitsView(RepoAppView): | |
64 | def load_default_context(self): |
|
64 | def load_default_context(self): | |
65 | c = self._get_local_tmpl_context(include_app_defaults=True) |
|
65 | c = self._get_local_tmpl_context(include_app_defaults=True) | |
66 | c.rhodecode_repo = self.rhodecode_vcs_repo |
|
66 | c.rhodecode_repo = self.rhodecode_vcs_repo | |
67 |
|
67 | |||
68 | return c |
|
68 | return c | |
69 |
|
69 | |||
70 | def _is_diff_cache_enabled(self, target_repo): |
|
70 | def _is_diff_cache_enabled(self, target_repo): | |
71 | caching_enabled = self._get_general_setting( |
|
71 | caching_enabled = self._get_general_setting( | |
72 | target_repo, 'rhodecode_diff_cache') |
|
72 | target_repo, 'rhodecode_diff_cache') | |
73 | log.debug('Diff caching enabled: %s', caching_enabled) |
|
73 | log.debug('Diff caching enabled: %s', caching_enabled) | |
74 | return caching_enabled |
|
74 | return caching_enabled | |
75 |
|
75 | |||
76 | def _commit(self, commit_id_range, method): |
|
76 | def _commit(self, commit_id_range, method): | |
77 | _ = self.request.translate |
|
77 | _ = self.request.translate | |
78 | c = self.load_default_context() |
|
78 | c = self.load_default_context() | |
79 | c.fulldiff = self.request.GET.get('fulldiff') |
|
79 | c.fulldiff = self.request.GET.get('fulldiff') | |
80 |
|
80 | |||
81 | # fetch global flags of ignore ws or context lines |
|
81 | # fetch global flags of ignore ws or context lines | |
82 | diff_context = get_diff_context(self.request) |
|
82 | diff_context = get_diff_context(self.request) | |
83 | hide_whitespace_changes = get_diff_whitespace_flag(self.request) |
|
83 | hide_whitespace_changes = get_diff_whitespace_flag(self.request) | |
84 |
|
84 | |||
85 | # diff_limit will cut off the whole diff if the limit is applied |
|
85 | # diff_limit will cut off the whole diff if the limit is applied | |
86 | # otherwise it will just hide the big files from the front-end |
|
86 | # otherwise it will just hide the big files from the front-end | |
87 | diff_limit = c.visual.cut_off_limit_diff |
|
87 | diff_limit = c.visual.cut_off_limit_diff | |
88 | file_limit = c.visual.cut_off_limit_file |
|
88 | file_limit = c.visual.cut_off_limit_file | |
89 |
|
89 | |||
90 | # get ranges of commit ids if preset |
|
90 | # get ranges of commit ids if preset | |
91 | commit_range = commit_id_range.split('...')[:2] |
|
91 | commit_range = commit_id_range.split('...')[:2] | |
92 |
|
92 | |||
93 | try: |
|
93 | try: | |
94 | pre_load = ['affected_files', 'author', 'branch', 'date', |
|
94 | pre_load = ['affected_files', 'author', 'branch', 'date', | |
95 | 'message', 'parents'] |
|
95 | 'message', 'parents'] | |
96 | if self.rhodecode_vcs_repo.alias == 'hg': |
|
96 | if self.rhodecode_vcs_repo.alias == 'hg': | |
97 | pre_load += ['hidden', 'obsolete', 'phase'] |
|
97 | pre_load += ['hidden', 'obsolete', 'phase'] | |
98 |
|
98 | |||
99 | if len(commit_range) == 2: |
|
99 | if len(commit_range) == 2: | |
100 | commits = self.rhodecode_vcs_repo.get_commits( |
|
100 | commits = self.rhodecode_vcs_repo.get_commits( | |
101 | start_id=commit_range[0], end_id=commit_range[1], |
|
101 | start_id=commit_range[0], end_id=commit_range[1], | |
102 | pre_load=pre_load, translate_tags=False) |
|
102 | pre_load=pre_load, translate_tags=False) | |
103 | commits = list(commits) |
|
103 | commits = list(commits) | |
104 | else: |
|
104 | else: | |
105 | commits = [self.rhodecode_vcs_repo.get_commit( |
|
105 | commits = [self.rhodecode_vcs_repo.get_commit( | |
106 | commit_id=commit_id_range, pre_load=pre_load)] |
|
106 | commit_id=commit_id_range, pre_load=pre_load)] | |
107 |
|
107 | |||
108 | c.commit_ranges = commits |
|
108 | c.commit_ranges = commits | |
109 | if not c.commit_ranges: |
|
109 | if not c.commit_ranges: | |
110 | raise RepositoryError('The commit range returned an empty result') |
|
110 | raise RepositoryError('The commit range returned an empty result') | |
111 | except CommitDoesNotExistError as e: |
|
111 | except CommitDoesNotExistError as e: | |
112 | msg = _('No such commit exists. Org exception: `{}`').format(e) |
|
112 | msg = _('No such commit exists. Org exception: `{}`').format(e) | |
113 | h.flash(msg, category='error') |
|
113 | h.flash(msg, category='error') | |
114 | raise HTTPNotFound() |
|
114 | raise HTTPNotFound() | |
115 | except Exception: |
|
115 | except Exception: | |
116 | log.exception("General failure") |
|
116 | log.exception("General failure") | |
117 | raise HTTPNotFound() |
|
117 | raise HTTPNotFound() | |
118 |
|
118 | |||
119 | c.changes = OrderedDict() |
|
119 | c.changes = OrderedDict() | |
120 | c.lines_added = 0 |
|
120 | c.lines_added = 0 | |
121 | c.lines_deleted = 0 |
|
121 | c.lines_deleted = 0 | |
122 |
|
122 | |||
123 | # auto collapse if we have more than limit |
|
123 | # auto collapse if we have more than limit | |
124 | collapse_limit = diffs.DiffProcessor._collapse_commits_over |
|
124 | collapse_limit = diffs.DiffProcessor._collapse_commits_over | |
125 | c.collapse_all_commits = len(c.commit_ranges) > collapse_limit |
|
125 | c.collapse_all_commits = len(c.commit_ranges) > collapse_limit | |
126 |
|
126 | |||
127 | c.commit_statuses = ChangesetStatus.STATUSES |
|
127 | c.commit_statuses = ChangesetStatus.STATUSES | |
128 | c.inline_comments = [] |
|
128 | c.inline_comments = [] | |
129 | c.files = [] |
|
129 | c.files = [] | |
130 |
|
130 | |||
131 | c.statuses = [] |
|
131 | c.statuses = [] | |
132 | c.comments = [] |
|
132 | c.comments = [] | |
133 | c.unresolved_comments = [] |
|
133 | c.unresolved_comments = [] | |
134 | c.resolved_comments = [] |
|
134 | c.resolved_comments = [] | |
135 | if len(c.commit_ranges) == 1: |
|
135 | if len(c.commit_ranges) == 1: | |
136 | commit = c.commit_ranges[0] |
|
136 | commit = c.commit_ranges[0] | |
137 | c.comments = CommentsModel().get_comments( |
|
137 | c.comments = CommentsModel().get_comments( | |
138 | self.db_repo.repo_id, |
|
138 | self.db_repo.repo_id, | |
139 | revision=commit.raw_id) |
|
139 | revision=commit.raw_id) | |
140 | c.statuses.append(ChangesetStatusModel().get_status( |
|
140 | c.statuses.append(ChangesetStatusModel().get_status( | |
141 | self.db_repo.repo_id, commit.raw_id)) |
|
141 | self.db_repo.repo_id, commit.raw_id)) | |
142 | # comments from PR |
|
142 | # comments from PR | |
143 | statuses = ChangesetStatusModel().get_statuses( |
|
143 | statuses = ChangesetStatusModel().get_statuses( | |
144 | self.db_repo.repo_id, commit.raw_id, |
|
144 | self.db_repo.repo_id, commit.raw_id, | |
145 | with_revisions=True) |
|
145 | with_revisions=True) | |
146 | prs = set(st.pull_request for st in statuses |
|
146 | prs = set(st.pull_request for st in statuses | |
147 | if st.pull_request is not None) |
|
147 | if st.pull_request is not None) | |
148 | # from associated statuses, check the pull requests, and |
|
148 | # from associated statuses, check the pull requests, and | |
149 | # show comments from them |
|
149 | # show comments from them | |
150 | for pr in prs: |
|
150 | for pr in prs: | |
151 | c.comments.extend(pr.comments) |
|
151 | c.comments.extend(pr.comments) | |
152 |
|
152 | |||
153 | c.unresolved_comments = CommentsModel()\ |
|
153 | c.unresolved_comments = CommentsModel()\ | |
154 | .get_commit_unresolved_todos(commit.raw_id) |
|
154 | .get_commit_unresolved_todos(commit.raw_id) | |
155 | c.resolved_comments = CommentsModel()\ |
|
155 | c.resolved_comments = CommentsModel()\ | |
156 | .get_commit_resolved_todos(commit.raw_id) |
|
156 | .get_commit_resolved_todos(commit.raw_id) | |
157 |
|
157 | |||
158 | diff = None |
|
158 | diff = None | |
159 | # Iterate over ranges (default commit view is always one commit) |
|
159 | # Iterate over ranges (default commit view is always one commit) | |
160 | for commit in c.commit_ranges: |
|
160 | for commit in c.commit_ranges: | |
161 | c.changes[commit.raw_id] = [] |
|
161 | c.changes[commit.raw_id] = [] | |
162 |
|
162 | |||
163 | commit2 = commit |
|
163 | commit2 = commit | |
164 | commit1 = commit.first_parent |
|
164 | commit1 = commit.first_parent | |
165 |
|
165 | |||
166 | if method == 'show': |
|
166 | if method == 'show': | |
167 | inline_comments = CommentsModel().get_inline_comments( |
|
167 | inline_comments = CommentsModel().get_inline_comments( | |
168 | self.db_repo.repo_id, revision=commit.raw_id) |
|
168 | self.db_repo.repo_id, revision=commit.raw_id) | |
169 | c.inline_cnt = CommentsModel().get_inline_comments_count( |
|
169 | c.inline_cnt = CommentsModel().get_inline_comments_count( | |
170 | inline_comments) |
|
170 | inline_comments) | |
171 | c.inline_comments = inline_comments |
|
171 | c.inline_comments = inline_comments | |
172 |
|
172 | |||
173 | cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path( |
|
173 | cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path( | |
174 | self.db_repo) |
|
174 | self.db_repo) | |
175 | cache_file_path = diff_cache_exist( |
|
175 | cache_file_path = diff_cache_exist( | |
176 | cache_path, 'diff', commit.raw_id, |
|
176 | cache_path, 'diff', commit.raw_id, | |
177 | hide_whitespace_changes, diff_context, c.fulldiff) |
|
177 | hide_whitespace_changes, diff_context, c.fulldiff) | |
178 |
|
178 | |||
179 | caching_enabled = self._is_diff_cache_enabled(self.db_repo) |
|
179 | caching_enabled = self._is_diff_cache_enabled(self.db_repo) | |
180 | force_recache = str2bool(self.request.GET.get('force_recache')) |
|
180 | force_recache = str2bool(self.request.GET.get('force_recache')) | |
181 |
|
181 | |||
182 | cached_diff = None |
|
182 | cached_diff = None | |
183 | if caching_enabled: |
|
183 | if caching_enabled: | |
184 | cached_diff = load_cached_diff(cache_file_path) |
|
184 | cached_diff = load_cached_diff(cache_file_path) | |
185 |
|
185 | |||
186 | has_proper_diff_cache = cached_diff and cached_diff.get('diff') |
|
186 | has_proper_diff_cache = cached_diff and cached_diff.get('diff') | |
187 | if not force_recache and has_proper_diff_cache: |
|
187 | if not force_recache and has_proper_diff_cache: | |
188 | diffset = cached_diff['diff'] |
|
188 | diffset = cached_diff['diff'] | |
189 | else: |
|
189 | else: | |
190 | vcs_diff = self.rhodecode_vcs_repo.get_diff( |
|
190 | vcs_diff = self.rhodecode_vcs_repo.get_diff( | |
191 | commit1, commit2, |
|
191 | commit1, commit2, | |
192 | ignore_whitespace=hide_whitespace_changes, |
|
192 | ignore_whitespace=hide_whitespace_changes, | |
193 | context=diff_context) |
|
193 | context=diff_context) | |
194 |
|
194 | |||
195 | diff_processor = diffs.DiffProcessor( |
|
195 | diff_processor = diffs.DiffProcessor( | |
196 | vcs_diff, format='newdiff', diff_limit=diff_limit, |
|
196 | vcs_diff, format='newdiff', diff_limit=diff_limit, | |
197 | file_limit=file_limit, show_full_diff=c.fulldiff) |
|
197 | file_limit=file_limit, show_full_diff=c.fulldiff) | |
198 |
|
198 | |||
199 | _parsed = diff_processor.prepare() |
|
199 | _parsed = diff_processor.prepare() | |
200 |
|
200 | |||
201 | diffset = codeblocks.DiffSet( |
|
201 | diffset = codeblocks.DiffSet( | |
202 | repo_name=self.db_repo_name, |
|
202 | repo_name=self.db_repo_name, | |
203 | source_node_getter=codeblocks.diffset_node_getter(commit1), |
|
203 | source_node_getter=codeblocks.diffset_node_getter(commit1), | |
204 | target_node_getter=codeblocks.diffset_node_getter(commit2)) |
|
204 | target_node_getter=codeblocks.diffset_node_getter(commit2)) | |
205 |
|
205 | |||
206 | diffset = self.path_filter.render_patchset_filtered( |
|
206 | diffset = self.path_filter.render_patchset_filtered( | |
207 | diffset, _parsed, commit1.raw_id, commit2.raw_id) |
|
207 | diffset, _parsed, commit1.raw_id, commit2.raw_id) | |
208 |
|
208 | |||
209 | # save cached diff |
|
209 | # save cached diff | |
210 | if caching_enabled: |
|
210 | if caching_enabled: | |
211 | cache_diff(cache_file_path, diffset, None) |
|
211 | cache_diff(cache_file_path, diffset, None) | |
212 |
|
212 | |||
213 | c.limited_diff = diffset.limited_diff |
|
213 | c.limited_diff = diffset.limited_diff | |
214 | c.changes[commit.raw_id] = diffset |
|
214 | c.changes[commit.raw_id] = diffset | |
215 | else: |
|
215 | else: | |
216 | # TODO(marcink): no cache usage here... |
|
216 | # TODO(marcink): no cache usage here... | |
217 | _diff = self.rhodecode_vcs_repo.get_diff( |
|
217 | _diff = self.rhodecode_vcs_repo.get_diff( | |
218 | commit1, commit2, |
|
218 | commit1, commit2, | |
219 | ignore_whitespace=hide_whitespace_changes, context=diff_context) |
|
219 | ignore_whitespace=hide_whitespace_changes, context=diff_context) | |
220 | diff_processor = diffs.DiffProcessor( |
|
220 | diff_processor = diffs.DiffProcessor( | |
221 | _diff, format='newdiff', diff_limit=diff_limit, |
|
221 | _diff, format='newdiff', diff_limit=diff_limit, | |
222 | file_limit=file_limit, show_full_diff=c.fulldiff) |
|
222 | file_limit=file_limit, show_full_diff=c.fulldiff) | |
223 | # downloads/raw we only need RAW diff nothing else |
|
223 | # downloads/raw we only need RAW diff nothing else | |
224 | diff = self.path_filter.get_raw_patch(diff_processor) |
|
224 | diff = self.path_filter.get_raw_patch(diff_processor) | |
225 | c.changes[commit.raw_id] = [None, None, None, None, diff, None, None] |
|
225 | c.changes[commit.raw_id] = [None, None, None, None, diff, None, None] | |
226 |
|
226 | |||
227 | # sort comments by how they were generated |
|
227 | # sort comments by how they were generated | |
228 | c.comments = sorted(c.comments, key=lambda x: x.comment_id) |
|
228 | c.comments = sorted(c.comments, key=lambda x: x.comment_id) | |
229 |
|
229 | |||
230 | if len(c.commit_ranges) == 1: |
|
230 | if len(c.commit_ranges) == 1: | |
231 | c.commit = c.commit_ranges[0] |
|
231 | c.commit = c.commit_ranges[0] | |
232 | c.parent_tmpl = ''.join( |
|
232 | c.parent_tmpl = ''.join( | |
233 | '# Parent %s\n' % x.raw_id for x in c.commit.parents) |
|
233 | '# Parent %s\n' % x.raw_id for x in c.commit.parents) | |
234 |
|
234 | |||
235 | if method == 'download': |
|
235 | if method == 'download': | |
236 | response = Response(diff) |
|
236 | response = Response(diff) | |
237 | response.content_type = 'text/plain' |
|
237 | response.content_type = 'text/plain' | |
238 | response.content_disposition = ( |
|
238 | response.content_disposition = ( | |
239 | 'attachment; filename=%s.diff' % commit_id_range[:12]) |
|
239 | 'attachment; filename=%s.diff' % commit_id_range[:12]) | |
240 | return response |
|
240 | return response | |
241 | elif method == 'patch': |
|
241 | elif method == 'patch': | |
242 | c.diff = safe_unicode(diff) |
|
242 | c.diff = safe_unicode(diff) | |
243 | patch = render( |
|
243 | patch = render( | |
244 | 'rhodecode:templates/changeset/patch_changeset.mako', |
|
244 | 'rhodecode:templates/changeset/patch_changeset.mako', | |
245 | self._get_template_context(c), self.request) |
|
245 | self._get_template_context(c), self.request) | |
246 | response = Response(patch) |
|
246 | response = Response(patch) | |
247 | response.content_type = 'text/plain' |
|
247 | response.content_type = 'text/plain' | |
248 | return response |
|
248 | return response | |
249 | elif method == 'raw': |
|
249 | elif method == 'raw': | |
250 | response = Response(diff) |
|
250 | response = Response(diff) | |
251 | response.content_type = 'text/plain' |
|
251 | response.content_type = 'text/plain' | |
252 | return response |
|
252 | return response | |
253 | elif method == 'show': |
|
253 | elif method == 'show': | |
254 | if len(c.commit_ranges) == 1: |
|
254 | if len(c.commit_ranges) == 1: | |
255 | html = render( |
|
255 | html = render( | |
256 | 'rhodecode:templates/changeset/changeset.mako', |
|
256 | 'rhodecode:templates/changeset/changeset.mako', | |
257 | self._get_template_context(c), self.request) |
|
257 | self._get_template_context(c), self.request) | |
258 | return Response(html) |
|
258 | return Response(html) | |
259 | else: |
|
259 | else: | |
260 | c.ancestor = None |
|
260 | c.ancestor = None | |
261 | c.target_repo = self.db_repo |
|
261 | c.target_repo = self.db_repo | |
262 | html = render( |
|
262 | html = render( | |
263 | 'rhodecode:templates/changeset/changeset_range.mako', |
|
263 | 'rhodecode:templates/changeset/changeset_range.mako', | |
264 | self._get_template_context(c), self.request) |
|
264 | self._get_template_context(c), self.request) | |
265 | return Response(html) |
|
265 | return Response(html) | |
266 |
|
266 | |||
267 | raise HTTPBadRequest() |
|
267 | raise HTTPBadRequest() | |
268 |
|
268 | |||
269 | @LoginRequired() |
|
269 | @LoginRequired() | |
270 | @HasRepoPermissionAnyDecorator( |
|
270 | @HasRepoPermissionAnyDecorator( | |
271 | 'repository.read', 'repository.write', 'repository.admin') |
|
271 | 'repository.read', 'repository.write', 'repository.admin') | |
272 | @view_config( |
|
272 | @view_config( | |
273 | route_name='repo_commit', request_method='GET', |
|
273 | route_name='repo_commit', request_method='GET', | |
274 | renderer=None) |
|
274 | renderer=None) | |
275 | def repo_commit_show(self): |
|
275 | def repo_commit_show(self): | |
276 | commit_id = self.request.matchdict['commit_id'] |
|
276 | commit_id = self.request.matchdict['commit_id'] | |
277 | return self._commit(commit_id, method='show') |
|
277 | return self._commit(commit_id, method='show') | |
278 |
|
278 | |||
279 | @LoginRequired() |
|
279 | @LoginRequired() | |
280 | @HasRepoPermissionAnyDecorator( |
|
280 | @HasRepoPermissionAnyDecorator( | |
281 | 'repository.read', 'repository.write', 'repository.admin') |
|
281 | 'repository.read', 'repository.write', 'repository.admin') | |
282 | @view_config( |
|
282 | @view_config( | |
283 | route_name='repo_commit_raw', request_method='GET', |
|
283 | route_name='repo_commit_raw', request_method='GET', | |
284 | renderer=None) |
|
284 | renderer=None) | |
285 | @view_config( |
|
285 | @view_config( | |
286 | route_name='repo_commit_raw_deprecated', request_method='GET', |
|
286 | route_name='repo_commit_raw_deprecated', request_method='GET', | |
287 | renderer=None) |
|
287 | renderer=None) | |
288 | def repo_commit_raw(self): |
|
288 | def repo_commit_raw(self): | |
289 | commit_id = self.request.matchdict['commit_id'] |
|
289 | commit_id = self.request.matchdict['commit_id'] | |
290 | return self._commit(commit_id, method='raw') |
|
290 | return self._commit(commit_id, method='raw') | |
291 |
|
291 | |||
292 | @LoginRequired() |
|
292 | @LoginRequired() | |
293 | @HasRepoPermissionAnyDecorator( |
|
293 | @HasRepoPermissionAnyDecorator( | |
294 | 'repository.read', 'repository.write', 'repository.admin') |
|
294 | 'repository.read', 'repository.write', 'repository.admin') | |
295 | @view_config( |
|
295 | @view_config( | |
296 | route_name='repo_commit_patch', request_method='GET', |
|
296 | route_name='repo_commit_patch', request_method='GET', | |
297 | renderer=None) |
|
297 | renderer=None) | |
298 | def repo_commit_patch(self): |
|
298 | def repo_commit_patch(self): | |
299 | commit_id = self.request.matchdict['commit_id'] |
|
299 | commit_id = self.request.matchdict['commit_id'] | |
300 | return self._commit(commit_id, method='patch') |
|
300 | return self._commit(commit_id, method='patch') | |
301 |
|
301 | |||
302 | @LoginRequired() |
|
302 | @LoginRequired() | |
303 | @HasRepoPermissionAnyDecorator( |
|
303 | @HasRepoPermissionAnyDecorator( | |
304 | 'repository.read', 'repository.write', 'repository.admin') |
|
304 | 'repository.read', 'repository.write', 'repository.admin') | |
305 | @view_config( |
|
305 | @view_config( | |
306 | route_name='repo_commit_download', request_method='GET', |
|
306 | route_name='repo_commit_download', request_method='GET', | |
307 | renderer=None) |
|
307 | renderer=None) | |
308 | def repo_commit_download(self): |
|
308 | def repo_commit_download(self): | |
309 | commit_id = self.request.matchdict['commit_id'] |
|
309 | commit_id = self.request.matchdict['commit_id'] | |
310 | return self._commit(commit_id, method='download') |
|
310 | return self._commit(commit_id, method='download') | |
311 |
|
311 | |||
312 | @LoginRequired() |
|
312 | @LoginRequired() | |
313 | @NotAnonymous() |
|
313 | @NotAnonymous() | |
314 | @HasRepoPermissionAnyDecorator( |
|
314 | @HasRepoPermissionAnyDecorator( | |
315 | 'repository.read', 'repository.write', 'repository.admin') |
|
315 | 'repository.read', 'repository.write', 'repository.admin') | |
316 | @CSRFRequired() |
|
316 | @CSRFRequired() | |
317 | @view_config( |
|
317 | @view_config( | |
318 | route_name='repo_commit_comment_create', request_method='POST', |
|
318 | route_name='repo_commit_comment_create', request_method='POST', | |
319 | renderer='json_ext') |
|
319 | renderer='json_ext') | |
320 | def repo_commit_comment_create(self): |
|
320 | def repo_commit_comment_create(self): | |
321 | _ = self.request.translate |
|
321 | _ = self.request.translate | |
322 | commit_id = self.request.matchdict['commit_id'] |
|
322 | commit_id = self.request.matchdict['commit_id'] | |
323 |
|
323 | |||
324 | c = self.load_default_context() |
|
324 | c = self.load_default_context() | |
325 | status = self.request.POST.get('changeset_status', None) |
|
325 | status = self.request.POST.get('changeset_status', None) | |
326 | text = self.request.POST.get('text') |
|
326 | text = self.request.POST.get('text') | |
327 | comment_type = self.request.POST.get('comment_type') |
|
327 | comment_type = self.request.POST.get('comment_type') | |
328 | resolves_comment_id = self.request.POST.get('resolves_comment_id', None) |
|
328 | resolves_comment_id = self.request.POST.get('resolves_comment_id', None) | |
329 |
|
329 | |||
330 | if status: |
|
330 | if status: | |
331 | text = text or (_('Status change %(transition_icon)s %(status)s') |
|
331 | text = text or (_('Status change %(transition_icon)s %(status)s') | |
332 | % {'transition_icon': '>', |
|
332 | % {'transition_icon': '>', | |
333 | 'status': ChangesetStatus.get_status_lbl(status)}) |
|
333 | 'status': ChangesetStatus.get_status_lbl(status)}) | |
334 |
|
334 | |||
335 | multi_commit_ids = [] |
|
335 | multi_commit_ids = [] | |
336 | for _commit_id in self.request.POST.get('commit_ids', '').split(','): |
|
336 | for _commit_id in self.request.POST.get('commit_ids', '').split(','): | |
337 | if _commit_id not in ['', None, EmptyCommit.raw_id]: |
|
337 | if _commit_id not in ['', None, EmptyCommit.raw_id]: | |
338 | if _commit_id not in multi_commit_ids: |
|
338 | if _commit_id not in multi_commit_ids: | |
339 | multi_commit_ids.append(_commit_id) |
|
339 | multi_commit_ids.append(_commit_id) | |
340 |
|
340 | |||
341 | commit_ids = multi_commit_ids or [commit_id] |
|
341 | commit_ids = multi_commit_ids or [commit_id] | |
342 |
|
342 | |||
343 | comment = None |
|
343 | comment = None | |
344 | for current_id in filter(None, commit_ids): |
|
344 | for current_id in filter(None, commit_ids): | |
345 | comment = CommentsModel().create( |
|
345 | comment = CommentsModel().create( | |
346 | text=text, |
|
346 | text=text, | |
347 | repo=self.db_repo.repo_id, |
|
347 | repo=self.db_repo.repo_id, | |
348 | user=self._rhodecode_db_user.user_id, |
|
348 | user=self._rhodecode_db_user.user_id, | |
349 | commit_id=current_id, |
|
349 | commit_id=current_id, | |
350 | f_path=self.request.POST.get('f_path'), |
|
350 | f_path=self.request.POST.get('f_path'), | |
351 | line_no=self.request.POST.get('line'), |
|
351 | line_no=self.request.POST.get('line'), | |
352 | status_change=(ChangesetStatus.get_status_lbl(status) |
|
352 | status_change=(ChangesetStatus.get_status_lbl(status) | |
353 | if status else None), |
|
353 | if status else None), | |
354 | status_change_type=status, |
|
354 | status_change_type=status, | |
355 | comment_type=comment_type, |
|
355 | comment_type=comment_type, | |
356 | resolves_comment_id=resolves_comment_id, |
|
356 | resolves_comment_id=resolves_comment_id, | |
357 | auth_user=self._rhodecode_user |
|
357 | auth_user=self._rhodecode_user | |
358 | ) |
|
358 | ) | |
359 |
|
359 | |||
360 | # get status if set ! |
|
360 | # get status if set ! | |
361 | if status: |
|
361 | if status: | |
362 | # if latest status was from pull request and it's closed |
|
362 | # if latest status was from pull request and it's closed | |
363 | # disallow changing status ! |
|
363 | # disallow changing status ! | |
364 | # dont_allow_on_closed_pull_request = True ! |
|
364 | # dont_allow_on_closed_pull_request = True ! | |
365 |
|
365 | |||
366 | try: |
|
366 | try: | |
367 | ChangesetStatusModel().set_status( |
|
367 | ChangesetStatusModel().set_status( | |
368 | self.db_repo.repo_id, |
|
368 | self.db_repo.repo_id, | |
369 | status, |
|
369 | status, | |
370 | self._rhodecode_db_user.user_id, |
|
370 | self._rhodecode_db_user.user_id, | |
371 | comment, |
|
371 | comment, | |
372 | revision=current_id, |
|
372 | revision=current_id, | |
373 | dont_allow_on_closed_pull_request=True |
|
373 | dont_allow_on_closed_pull_request=True | |
374 | ) |
|
374 | ) | |
375 | except StatusChangeOnClosedPullRequestError: |
|
375 | except StatusChangeOnClosedPullRequestError: | |
376 | msg = _('Changing the status of a commit associated with ' |
|
376 | msg = _('Changing the status of a commit associated with ' | |
377 | 'a closed pull request is not allowed') |
|
377 | 'a closed pull request is not allowed') | |
378 | log.exception(msg) |
|
378 | log.exception(msg) | |
379 | h.flash(msg, category='warning') |
|
379 | h.flash(msg, category='warning') | |
380 | raise HTTPFound(h.route_path( |
|
380 | raise HTTPFound(h.route_path( | |
381 | 'repo_commit', repo_name=self.db_repo_name, |
|
381 | 'repo_commit', repo_name=self.db_repo_name, | |
382 | commit_id=current_id)) |
|
382 | commit_id=current_id)) | |
383 |
|
383 | |||
384 | commit = self.db_repo.get_commit(current_id) |
|
384 | commit = self.db_repo.get_commit(current_id) | |
385 | CommentsModel().trigger_commit_comment_hook( |
|
385 | CommentsModel().trigger_commit_comment_hook( | |
386 | self.db_repo, self._rhodecode_user, 'create', |
|
386 | self.db_repo, self._rhodecode_user, 'create', | |
387 | data={'comment': comment, 'commit': commit}) |
|
387 | data={'comment': comment, 'commit': commit}) | |
388 |
|
388 | |||
389 | # finalize, commit and redirect |
|
389 | # finalize, commit and redirect | |
390 | Session().commit() |
|
390 | Session().commit() | |
391 |
|
391 | |||
392 | data = { |
|
392 | data = { | |
393 | 'target_id': h.safeid(h.safe_unicode( |
|
393 | 'target_id': h.safeid(h.safe_unicode( | |
394 | self.request.POST.get('f_path'))), |
|
394 | self.request.POST.get('f_path'))), | |
395 | } |
|
395 | } | |
396 | if comment: |
|
396 | if comment: | |
397 | c.co = comment |
|
397 | c.co = comment | |
398 | rendered_comment = render( |
|
398 | rendered_comment = render( | |
399 | 'rhodecode:templates/changeset/changeset_comment_block.mako', |
|
399 | 'rhodecode:templates/changeset/changeset_comment_block.mako', | |
400 | self._get_template_context(c), self.request) |
|
400 | self._get_template_context(c), self.request) | |
401 |
|
401 | |||
402 | data.update(comment.get_dict()) |
|
402 | data.update(comment.get_dict()) | |
403 | data.update({'rendered_text': rendered_comment}) |
|
403 | data.update({'rendered_text': rendered_comment}) | |
404 |
|
404 | |||
405 | return data |
|
405 | return data | |
406 |
|
406 | |||
407 | @LoginRequired() |
|
407 | @LoginRequired() | |
408 | @NotAnonymous() |
|
408 | @NotAnonymous() | |
409 | @HasRepoPermissionAnyDecorator( |
|
409 | @HasRepoPermissionAnyDecorator( | |
410 | 'repository.read', 'repository.write', 'repository.admin') |
|
410 | 'repository.read', 'repository.write', 'repository.admin') | |
411 | @CSRFRequired() |
|
411 | @CSRFRequired() | |
412 | @view_config( |
|
412 | @view_config( | |
413 | route_name='repo_commit_comment_preview', request_method='POST', |
|
413 | route_name='repo_commit_comment_preview', request_method='POST', | |
414 | renderer='string', xhr=True) |
|
414 | renderer='string', xhr=True) | |
415 | def repo_commit_comment_preview(self): |
|
415 | def repo_commit_comment_preview(self): | |
416 | # Technically a CSRF token is not needed as no state changes with this |
|
416 | # Technically a CSRF token is not needed as no state changes with this | |
417 | # call. However, as this is a POST is better to have it, so automated |
|
417 | # call. However, as this is a POST is better to have it, so automated | |
418 | # tools don't flag it as potential CSRF. |
|
418 | # tools don't flag it as potential CSRF. | |
419 | # Post is required because the payload could be bigger than the maximum |
|
419 | # Post is required because the payload could be bigger than the maximum | |
420 | # allowed by GET. |
|
420 | # allowed by GET. | |
421 |
|
421 | |||
422 | text = self.request.POST.get('text') |
|
422 | text = self.request.POST.get('text') | |
423 | renderer = self.request.POST.get('renderer') or 'rst' |
|
423 | renderer = self.request.POST.get('renderer') or 'rst' | |
424 | if text: |
|
424 | if text: | |
425 | return h.render(text, renderer=renderer, mentions=True, |
|
425 | return h.render(text, renderer=renderer, mentions=True, | |
426 | repo_name=self.db_repo_name) |
|
426 | repo_name=self.db_repo_name) | |
427 | return '' |
|
427 | return '' | |
428 |
|
428 | |||
429 | @LoginRequired() |
|
429 | @LoginRequired() | |
430 | @NotAnonymous() |
|
430 | @NotAnonymous() | |
431 | @HasRepoPermissionAnyDecorator( |
|
431 | @HasRepoPermissionAnyDecorator( | |
432 | 'repository.read', 'repository.write', 'repository.admin') |
|
432 | 'repository.read', 'repository.write', 'repository.admin') | |
433 | @CSRFRequired() |
|
433 | @CSRFRequired() | |
434 | @view_config( |
|
434 | @view_config( | |
435 | route_name='repo_commit_comment_history_view', request_method='POST', |
|
435 | route_name='repo_commit_comment_history_view', request_method='POST', | |
436 | renderer='string', xhr=True) |
|
436 | renderer='string', xhr=True) | |
437 | def repo_commit_comment_history_view(self): |
|
437 | def repo_commit_comment_history_view(self): | |
438 | c = self.load_default_context() |
|
438 | c = self.load_default_context() | |
439 |
|
439 | |||
440 | comment_history_id = self.request.matchdict['comment_history_id'] |
|
440 | comment_history_id = self.request.matchdict['comment_history_id'] | |
441 | comment_history = ChangesetCommentHistory.get_or_404(comment_history_id) |
|
441 | comment_history = ChangesetCommentHistory.get_or_404(comment_history_id) | |
442 | is_repo_comment = comment_history.comment.repo.repo_id == self.db_repo.repo_id |
|
442 | is_repo_comment = comment_history.comment.repo.repo_id == self.db_repo.repo_id | |
443 |
|
443 | |||
444 | if is_repo_comment: |
|
444 | if is_repo_comment: | |
445 | c.comment_history = comment_history |
|
445 | c.comment_history = comment_history | |
446 |
|
446 | |||
447 | rendered_comment = render( |
|
447 | rendered_comment = render( | |
448 | 'rhodecode:templates/changeset/comment_history.mako', |
|
448 | 'rhodecode:templates/changeset/comment_history.mako', | |
449 | self._get_template_context(c) |
|
449 | self._get_template_context(c) | |
450 | , self.request) |
|
450 | , self.request) | |
451 | return rendered_comment |
|
451 | return rendered_comment | |
452 | else: |
|
452 | else: | |
453 | log.warning('No permissions for user %s to show comment_history_id: %s', |
|
453 | log.warning('No permissions for user %s to show comment_history_id: %s', | |
454 | self._rhodecode_db_user, comment_history_id) |
|
454 | self._rhodecode_db_user, comment_history_id) | |
455 | raise HTTPNotFound() |
|
455 | raise HTTPNotFound() | |
456 |
|
456 | |||
457 | @LoginRequired() |
|
457 | @LoginRequired() | |
458 | @NotAnonymous() |
|
458 | @NotAnonymous() | |
459 | @HasRepoPermissionAnyDecorator( |
|
459 | @HasRepoPermissionAnyDecorator( | |
460 | 'repository.read', 'repository.write', 'repository.admin') |
|
460 | 'repository.read', 'repository.write', 'repository.admin') | |
461 | @CSRFRequired() |
|
461 | @CSRFRequired() | |
462 | @view_config( |
|
462 | @view_config( | |
463 | route_name='repo_commit_comment_attachment_upload', request_method='POST', |
|
463 | route_name='repo_commit_comment_attachment_upload', request_method='POST', | |
464 | renderer='json_ext', xhr=True) |
|
464 | renderer='json_ext', xhr=True) | |
465 | def repo_commit_comment_attachment_upload(self): |
|
465 | def repo_commit_comment_attachment_upload(self): | |
466 | c = self.load_default_context() |
|
466 | c = self.load_default_context() | |
467 | upload_key = 'attachment' |
|
467 | upload_key = 'attachment' | |
468 |
|
468 | |||
469 | file_obj = self.request.POST.get(upload_key) |
|
469 | file_obj = self.request.POST.get(upload_key) | |
470 |
|
470 | |||
471 | if file_obj is None: |
|
471 | if file_obj is None: | |
472 | self.request.response.status = 400 |
|
472 | self.request.response.status = 400 | |
473 | return {'store_fid': None, |
|
473 | return {'store_fid': None, | |
474 | 'access_path': None, |
|
474 | 'access_path': None, | |
475 | 'error': '{} data field is missing'.format(upload_key)} |
|
475 | 'error': '{} data field is missing'.format(upload_key)} | |
476 |
|
476 | |||
477 | if not hasattr(file_obj, 'filename'): |
|
477 | if not hasattr(file_obj, 'filename'): | |
478 | self.request.response.status = 400 |
|
478 | self.request.response.status = 400 | |
479 | return {'store_fid': None, |
|
479 | return {'store_fid': None, | |
480 | 'access_path': None, |
|
480 | 'access_path': None, | |
481 | 'error': 'filename cannot be read from the data field'} |
|
481 | 'error': 'filename cannot be read from the data field'} | |
482 |
|
482 | |||
483 | filename = file_obj.filename |
|
483 | filename = file_obj.filename | |
484 | file_display_name = filename |
|
484 | file_display_name = filename | |
485 |
|
485 | |||
486 | metadata = { |
|
486 | metadata = { | |
487 | 'user_uploaded': {'username': self._rhodecode_user.username, |
|
487 | 'user_uploaded': {'username': self._rhodecode_user.username, | |
488 | 'user_id': self._rhodecode_user.user_id, |
|
488 | 'user_id': self._rhodecode_user.user_id, | |
489 | 'ip': self._rhodecode_user.ip_addr}} |
|
489 | 'ip': self._rhodecode_user.ip_addr}} | |
490 |
|
490 | |||
491 | # TODO(marcink): allow .ini configuration for allowed_extensions, and file-size |
|
491 | # TODO(marcink): allow .ini configuration for allowed_extensions, and file-size | |
492 | allowed_extensions = [ |
|
492 | allowed_extensions = [ | |
493 | 'gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf', |
|
493 | 'gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf', | |
494 | '.pptx', '.txt', '.xlsx', '.zip'] |
|
494 | '.pptx', '.txt', '.xlsx', '.zip'] | |
495 | max_file_size = 10 * 1024 * 1024 # 10MB, also validated via dropzone.js |
|
495 | max_file_size = 10 * 1024 * 1024 # 10MB, also validated via dropzone.js | |
496 |
|
496 | |||
497 | try: |
|
497 | try: | |
498 | storage = store_utils.get_file_storage(self.request.registry.settings) |
|
498 | storage = store_utils.get_file_storage(self.request.registry.settings) | |
499 | store_uid, metadata = storage.save_file( |
|
499 | store_uid, metadata = storage.save_file( | |
500 | file_obj.file, filename, extra_metadata=metadata, |
|
500 | file_obj.file, filename, extra_metadata=metadata, | |
501 | extensions=allowed_extensions, max_filesize=max_file_size) |
|
501 | extensions=allowed_extensions, max_filesize=max_file_size) | |
502 | except FileNotAllowedException: |
|
502 | except FileNotAllowedException: | |
503 | self.request.response.status = 400 |
|
503 | self.request.response.status = 400 | |
504 | permitted_extensions = ', '.join(allowed_extensions) |
|
504 | permitted_extensions = ', '.join(allowed_extensions) | |
505 | error_msg = 'File `{}` is not allowed. ' \ |
|
505 | error_msg = 'File `{}` is not allowed. ' \ | |
506 | 'Only following extensions are permitted: {}'.format( |
|
506 | 'Only following extensions are permitted: {}'.format( | |
507 | filename, permitted_extensions) |
|
507 | filename, permitted_extensions) | |
508 | return {'store_fid': None, |
|
508 | return {'store_fid': None, | |
509 | 'access_path': None, |
|
509 | 'access_path': None, | |
510 | 'error': error_msg} |
|
510 | 'error': error_msg} | |
511 | except FileOverSizeException: |
|
511 | except FileOverSizeException: | |
512 | self.request.response.status = 400 |
|
512 | self.request.response.status = 400 | |
513 | limit_mb = h.format_byte_size_binary(max_file_size) |
|
513 | limit_mb = h.format_byte_size_binary(max_file_size) | |
514 | return {'store_fid': None, |
|
514 | return {'store_fid': None, | |
515 | 'access_path': None, |
|
515 | 'access_path': None, | |
516 | 'error': 'File {} is exceeding allowed limit of {}.'.format( |
|
516 | 'error': 'File {} is exceeding allowed limit of {}.'.format( | |
517 | filename, limit_mb)} |
|
517 | filename, limit_mb)} | |
518 |
|
518 | |||
519 | try: |
|
519 | try: | |
520 | entry = FileStore.create( |
|
520 | entry = FileStore.create( | |
521 | file_uid=store_uid, filename=metadata["filename"], |
|
521 | file_uid=store_uid, filename=metadata["filename"], | |
522 | file_hash=metadata["sha256"], file_size=metadata["size"], |
|
522 | file_hash=metadata["sha256"], file_size=metadata["size"], | |
523 | file_display_name=file_display_name, |
|
523 | file_display_name=file_display_name, | |
524 | file_description=u'comment attachment `{}`'.format(safe_unicode(filename)), |
|
524 | file_description=u'comment attachment `{}`'.format(safe_unicode(filename)), | |
525 | hidden=True, check_acl=True, user_id=self._rhodecode_user.user_id, |
|
525 | hidden=True, check_acl=True, user_id=self._rhodecode_user.user_id, | |
526 | scope_repo_id=self.db_repo.repo_id |
|
526 | scope_repo_id=self.db_repo.repo_id | |
527 | ) |
|
527 | ) | |
528 | Session().add(entry) |
|
528 | Session().add(entry) | |
529 | Session().commit() |
|
529 | Session().commit() | |
530 | log.debug('Stored upload in DB as %s', entry) |
|
530 | log.debug('Stored upload in DB as %s', entry) | |
531 | except Exception: |
|
531 | except Exception: | |
532 | log.exception('Failed to store file %s', filename) |
|
532 | log.exception('Failed to store file %s', filename) | |
533 | self.request.response.status = 400 |
|
533 | self.request.response.status = 400 | |
534 | return {'store_fid': None, |
|
534 | return {'store_fid': None, | |
535 | 'access_path': None, |
|
535 | 'access_path': None, | |
536 | 'error': 'File {} failed to store in DB.'.format(filename)} |
|
536 | 'error': 'File {} failed to store in DB.'.format(filename)} | |
537 |
|
537 | |||
538 | Session().commit() |
|
538 | Session().commit() | |
539 |
|
539 | |||
540 | return { |
|
540 | return { | |
541 | 'store_fid': store_uid, |
|
541 | 'store_fid': store_uid, | |
542 | 'access_path': h.route_path( |
|
542 | 'access_path': h.route_path( | |
543 | 'download_file', fid=store_uid), |
|
543 | 'download_file', fid=store_uid), | |
544 | 'fqn_access_path': h.route_url( |
|
544 | 'fqn_access_path': h.route_url( | |
545 | 'download_file', fid=store_uid), |
|
545 | 'download_file', fid=store_uid), | |
546 | 'repo_access_path': h.route_path( |
|
546 | 'repo_access_path': h.route_path( | |
547 | 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid), |
|
547 | 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid), | |
548 | 'repo_fqn_access_path': h.route_url( |
|
548 | 'repo_fqn_access_path': h.route_url( | |
549 | 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid), |
|
549 | 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid), | |
550 | } |
|
550 | } | |
551 |
|
551 | |||
552 | @LoginRequired() |
|
552 | @LoginRequired() | |
553 | @NotAnonymous() |
|
553 | @NotAnonymous() | |
554 | @HasRepoPermissionAnyDecorator( |
|
554 | @HasRepoPermissionAnyDecorator( | |
555 | 'repository.read', 'repository.write', 'repository.admin') |
|
555 | 'repository.read', 'repository.write', 'repository.admin') | |
556 | @CSRFRequired() |
|
556 | @CSRFRequired() | |
557 | @view_config( |
|
557 | @view_config( | |
558 | route_name='repo_commit_comment_delete', request_method='POST', |
|
558 | route_name='repo_commit_comment_delete', request_method='POST', | |
559 | renderer='json_ext') |
|
559 | renderer='json_ext') | |
560 | def repo_commit_comment_delete(self): |
|
560 | def repo_commit_comment_delete(self): | |
561 | commit_id = self.request.matchdict['commit_id'] |
|
561 | commit_id = self.request.matchdict['commit_id'] | |
562 | comment_id = self.request.matchdict['comment_id'] |
|
562 | comment_id = self.request.matchdict['comment_id'] | |
563 |
|
563 | |||
564 | comment = ChangesetComment.get_or_404(comment_id) |
|
564 | comment = ChangesetComment.get_or_404(comment_id) | |
565 | if not comment: |
|
565 | if not comment: | |
566 | log.debug('Comment with id:%s not found, skipping', comment_id) |
|
566 | log.debug('Comment with id:%s not found, skipping', comment_id) | |
567 | # comment already deleted in another call probably |
|
567 | # comment already deleted in another call probably | |
568 | return True |
|
568 | return True | |
569 |
|
569 | |||
570 | if comment.immutable: |
|
570 | if comment.immutable: | |
571 | # don't allow deleting comments that are immutable |
|
571 | # don't allow deleting comments that are immutable | |
572 | raise HTTPForbidden() |
|
572 | raise HTTPForbidden() | |
573 |
|
573 | |||
574 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) |
|
574 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) | |
575 | super_admin = h.HasPermissionAny('hg.admin')() |
|
575 | super_admin = h.HasPermissionAny('hg.admin')() | |
576 | comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id) |
|
576 | comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id) | |
577 | is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id |
|
577 | is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id | |
578 | comment_repo_admin = is_repo_admin and is_repo_comment |
|
578 | comment_repo_admin = is_repo_admin and is_repo_comment | |
579 |
|
579 | |||
580 | if super_admin or comment_owner or comment_repo_admin: |
|
580 | if super_admin or comment_owner or comment_repo_admin: | |
581 | CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user) |
|
581 | CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user) | |
582 | Session().commit() |
|
582 | Session().commit() | |
583 | return True |
|
583 | return True | |
584 | else: |
|
584 | else: | |
585 | log.warning('No permissions for user %s to delete comment_id: %s', |
|
585 | log.warning('No permissions for user %s to delete comment_id: %s', | |
586 | self._rhodecode_db_user, comment_id) |
|
586 | self._rhodecode_db_user, comment_id) | |
587 | raise HTTPNotFound() |
|
587 | raise HTTPNotFound() | |
588 |
|
588 | |||
589 | @LoginRequired() |
|
589 | @LoginRequired() | |
590 | @NotAnonymous() |
|
590 | @NotAnonymous() | |
591 | @HasRepoPermissionAnyDecorator( |
|
591 | @HasRepoPermissionAnyDecorator( | |
592 | 'repository.read', 'repository.write', 'repository.admin') |
|
592 | 'repository.read', 'repository.write', 'repository.admin') | |
593 | @CSRFRequired() |
|
593 | @CSRFRequired() | |
594 | @view_config( |
|
594 | @view_config( | |
595 | route_name='repo_commit_comment_edit', request_method='POST', |
|
595 | route_name='repo_commit_comment_edit', request_method='POST', | |
596 | renderer='json_ext') |
|
596 | renderer='json_ext') | |
597 | def repo_commit_comment_edit(self): |
|
597 | def repo_commit_comment_edit(self): | |
598 | self.load_default_context() |
|
598 | self.load_default_context() | |
599 |
|
599 | |||
600 | comment_id = self.request.matchdict['comment_id'] |
|
600 | comment_id = self.request.matchdict['comment_id'] | |
601 | comment = ChangesetComment.get_or_404(comment_id) |
|
601 | comment = ChangesetComment.get_or_404(comment_id) | |
602 |
|
602 | |||
603 | if comment.immutable: |
|
603 | if comment.immutable: | |
604 | # don't allow deleting comments that are immutable |
|
604 | # don't allow deleting comments that are immutable | |
605 | raise HTTPForbidden() |
|
605 | raise HTTPForbidden() | |
606 |
|
606 | |||
607 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) |
|
607 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) | |
608 | super_admin = h.HasPermissionAny('hg.admin')() |
|
608 | super_admin = h.HasPermissionAny('hg.admin')() | |
609 | comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id) |
|
609 | comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id) | |
610 | is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id |
|
610 | is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id | |
611 | comment_repo_admin = is_repo_admin and is_repo_comment |
|
611 | comment_repo_admin = is_repo_admin and is_repo_comment | |
612 |
|
612 | |||
613 | if super_admin or comment_owner or comment_repo_admin: |
|
613 | if super_admin or comment_owner or comment_repo_admin: | |
614 | text = self.request.POST.get('text') |
|
614 | text = self.request.POST.get('text') | |
615 | version = self.request.POST.get('version') |
|
615 | version = self.request.POST.get('version') | |
616 | if text == comment.text: |
|
616 | if text == comment.text: | |
617 | log.warning( |
|
617 | log.warning( | |
618 | 'Comment(repo): ' |
|
618 | 'Comment(repo): ' | |
619 | 'Trying to create new version ' |
|
619 | 'Trying to create new version ' | |
620 | 'with the same comment body {}'.format( |
|
620 | 'with the same comment body {}'.format( | |
621 | comment_id, |
|
621 | comment_id, | |
622 | ) |
|
622 | ) | |
623 | ) |
|
623 | ) | |
624 | raise HTTPNotFound() |
|
624 | raise HTTPNotFound() | |
625 |
|
625 | |||
626 | if version.isdigit(): |
|
626 | if version.isdigit(): | |
627 | version = int(version) |
|
627 | version = int(version) | |
628 | else: |
|
628 | else: | |
629 | log.warning( |
|
629 | log.warning( | |
630 | 'Comment(repo): Wrong version type {} {} ' |
|
630 | 'Comment(repo): Wrong version type {} {} ' | |
631 | 'for comment {}'.format( |
|
631 | 'for comment {}'.format( | |
632 | version, |
|
632 | version, | |
633 | type(version), |
|
633 | type(version), | |
634 | comment_id, |
|
634 | comment_id, | |
635 | ) |
|
635 | ) | |
636 | ) |
|
636 | ) | |
637 | raise HTTPNotFound() |
|
637 | raise HTTPNotFound() | |
638 |
|
638 | |||
639 | try: |
|
639 | try: | |
640 | comment_history = CommentsModel().edit( |
|
640 | comment_history = CommentsModel().edit( | |
641 | comment_id=comment_id, |
|
641 | comment_id=comment_id, | |
642 | text=text, |
|
642 | text=text, | |
643 | auth_user=self._rhodecode_user, |
|
643 | auth_user=self._rhodecode_user, | |
644 | version=version, |
|
644 | version=version, | |
645 | ) |
|
645 | ) | |
646 | except CommentVersionMismatch: |
|
646 | except CommentVersionMismatch: | |
647 | raise HTTPConflict() |
|
647 | raise HTTPConflict() | |
648 |
|
648 | |||
649 | if not comment_history: |
|
649 | if not comment_history: | |
650 | raise HTTPNotFound() |
|
650 | raise HTTPNotFound() | |
651 |
|
651 | |||
|
652 | commit_id = self.request.matchdict['commit_id'] | |||
|
653 | commit = self.db_repo.get_commit(commit_id) | |||
|
654 | CommentsModel().trigger_commit_comment_hook( | |||
|
655 | self.db_repo, self._rhodecode_user, 'edit', | |||
|
656 | data={'comment': comment, 'commit': commit}) | |||
|
657 | ||||
652 | Session().commit() |
|
658 | Session().commit() | |
653 | return { |
|
659 | return { | |
654 | 'comment_history_id': comment_history.comment_history_id, |
|
660 | 'comment_history_id': comment_history.comment_history_id, | |
655 | 'comment_id': comment.comment_id, |
|
661 | 'comment_id': comment.comment_id, | |
656 | 'comment_version': comment_history.version, |
|
662 | 'comment_version': comment_history.version, | |
657 | 'comment_author_username': comment_history.author.username, |
|
663 | 'comment_author_username': comment_history.author.username, | |
658 | 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16), |
|
664 | 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16), | |
659 | 'comment_created_on': h.age_component(comment_history.created_on, |
|
665 | 'comment_created_on': h.age_component(comment_history.created_on, | |
660 | time_is_local=True), |
|
666 | time_is_local=True), | |
661 | } |
|
667 | } | |
662 | else: |
|
668 | else: | |
663 | log.warning('No permissions for user %s to edit comment_id: %s', |
|
669 | log.warning('No permissions for user %s to edit comment_id: %s', | |
664 | self._rhodecode_db_user, comment_id) |
|
670 | self._rhodecode_db_user, comment_id) | |
665 | raise HTTPNotFound() |
|
671 | raise HTTPNotFound() | |
666 |
|
672 | |||
667 | @LoginRequired() |
|
673 | @LoginRequired() | |
668 | @HasRepoPermissionAnyDecorator( |
|
674 | @HasRepoPermissionAnyDecorator( | |
669 | 'repository.read', 'repository.write', 'repository.admin') |
|
675 | 'repository.read', 'repository.write', 'repository.admin') | |
670 | @view_config( |
|
676 | @view_config( | |
671 | route_name='repo_commit_data', request_method='GET', |
|
677 | route_name='repo_commit_data', request_method='GET', | |
672 | renderer='json_ext', xhr=True) |
|
678 | renderer='json_ext', xhr=True) | |
673 | def repo_commit_data(self): |
|
679 | def repo_commit_data(self): | |
674 | commit_id = self.request.matchdict['commit_id'] |
|
680 | commit_id = self.request.matchdict['commit_id'] | |
675 | self.load_default_context() |
|
681 | self.load_default_context() | |
676 |
|
682 | |||
677 | try: |
|
683 | try: | |
678 | return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) |
|
684 | return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) | |
679 | except CommitDoesNotExistError as e: |
|
685 | except CommitDoesNotExistError as e: | |
680 | return EmptyCommit(message=str(e)) |
|
686 | return EmptyCommit(message=str(e)) | |
681 |
|
687 | |||
682 | @LoginRequired() |
|
688 | @LoginRequired() | |
683 | @HasRepoPermissionAnyDecorator( |
|
689 | @HasRepoPermissionAnyDecorator( | |
684 | 'repository.read', 'repository.write', 'repository.admin') |
|
690 | 'repository.read', 'repository.write', 'repository.admin') | |
685 | @view_config( |
|
691 | @view_config( | |
686 | route_name='repo_commit_children', request_method='GET', |
|
692 | route_name='repo_commit_children', request_method='GET', | |
687 | renderer='json_ext', xhr=True) |
|
693 | renderer='json_ext', xhr=True) | |
688 | def repo_commit_children(self): |
|
694 | def repo_commit_children(self): | |
689 | commit_id = self.request.matchdict['commit_id'] |
|
695 | commit_id = self.request.matchdict['commit_id'] | |
690 | self.load_default_context() |
|
696 | self.load_default_context() | |
691 |
|
697 | |||
692 | try: |
|
698 | try: | |
693 | commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) |
|
699 | commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) | |
694 | children = commit.children |
|
700 | children = commit.children | |
695 | except CommitDoesNotExistError: |
|
701 | except CommitDoesNotExistError: | |
696 | children = [] |
|
702 | children = [] | |
697 |
|
703 | |||
698 | result = {"results": children} |
|
704 | result = {"results": children} | |
699 | return result |
|
705 | return result | |
700 |
|
706 | |||
701 | @LoginRequired() |
|
707 | @LoginRequired() | |
702 | @HasRepoPermissionAnyDecorator( |
|
708 | @HasRepoPermissionAnyDecorator( | |
703 | 'repository.read', 'repository.write', 'repository.admin') |
|
709 | 'repository.read', 'repository.write', 'repository.admin') | |
704 | @view_config( |
|
710 | @view_config( | |
705 | route_name='repo_commit_parents', request_method='GET', |
|
711 | route_name='repo_commit_parents', request_method='GET', | |
706 | renderer='json_ext') |
|
712 | renderer='json_ext') | |
707 | def repo_commit_parents(self): |
|
713 | def repo_commit_parents(self): | |
708 | commit_id = self.request.matchdict['commit_id'] |
|
714 | commit_id = self.request.matchdict['commit_id'] | |
709 | self.load_default_context() |
|
715 | self.load_default_context() | |
710 |
|
716 | |||
711 | try: |
|
717 | try: | |
712 | commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) |
|
718 | commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) | |
713 | parents = commit.parents |
|
719 | parents = commit.parents | |
714 | except CommitDoesNotExistError: |
|
720 | except CommitDoesNotExistError: | |
715 | parents = [] |
|
721 | parents = [] | |
716 | result = {"results": parents} |
|
722 | result = {"results": parents} | |
717 | return result |
|
723 | return result |
@@ -1,1626 +1,1631 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2011-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import logging |
|
21 | import logging | |
22 | import collections |
|
22 | import collections | |
23 |
|
23 | |||
24 | import formencode |
|
24 | import formencode | |
25 | import formencode.htmlfill |
|
25 | import formencode.htmlfill | |
26 | import peppercorn |
|
26 | import peppercorn | |
27 | from pyramid.httpexceptions import ( |
|
27 | from pyramid.httpexceptions import ( | |
28 | HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest, HTTPConflict) |
|
28 | HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest, HTTPConflict) | |
29 | from pyramid.view import view_config |
|
29 | from pyramid.view import view_config | |
30 | from pyramid.renderers import render |
|
30 | from pyramid.renderers import render | |
31 |
|
31 | |||
32 | from rhodecode.apps._base import RepoAppView, DataGridAppView |
|
32 | from rhodecode.apps._base import RepoAppView, DataGridAppView | |
33 |
|
33 | |||
34 | from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream |
|
34 | from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream | |
35 | from rhodecode.lib.base import vcs_operation_context |
|
35 | from rhodecode.lib.base import vcs_operation_context | |
36 | from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist |
|
36 | from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist | |
37 | from rhodecode.lib.exceptions import CommentVersionMismatch |
|
37 | from rhodecode.lib.exceptions import CommentVersionMismatch | |
38 | from rhodecode.lib.ext_json import json |
|
38 | from rhodecode.lib.ext_json import json | |
39 | from rhodecode.lib.auth import ( |
|
39 | from rhodecode.lib.auth import ( | |
40 | LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator, |
|
40 | LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator, | |
41 | NotAnonymous, CSRFRequired) |
|
41 | NotAnonymous, CSRFRequired) | |
42 | from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode |
|
42 | from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode | |
43 | from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason |
|
43 | from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason | |
44 | from rhodecode.lib.vcs.exceptions import ( |
|
44 | from rhodecode.lib.vcs.exceptions import ( | |
45 | CommitDoesNotExistError, RepositoryRequirementError, EmptyRepositoryError) |
|
45 | CommitDoesNotExistError, RepositoryRequirementError, EmptyRepositoryError) | |
46 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
46 | from rhodecode.model.changeset_status import ChangesetStatusModel | |
47 | from rhodecode.model.comment import CommentsModel |
|
47 | from rhodecode.model.comment import CommentsModel | |
48 | from rhodecode.model.db import ( |
|
48 | from rhodecode.model.db import ( | |
49 | func, or_, PullRequest, ChangesetComment, ChangesetStatus, Repository) |
|
49 | func, or_, PullRequest, ChangesetComment, ChangesetStatus, Repository) | |
50 | from rhodecode.model.forms import PullRequestForm |
|
50 | from rhodecode.model.forms import PullRequestForm | |
51 | from rhodecode.model.meta import Session |
|
51 | from rhodecode.model.meta import Session | |
52 | from rhodecode.model.pull_request import PullRequestModel, MergeCheck |
|
52 | from rhodecode.model.pull_request import PullRequestModel, MergeCheck | |
53 | from rhodecode.model.scm import ScmModel |
|
53 | from rhodecode.model.scm import ScmModel | |
54 |
|
54 | |||
55 | log = logging.getLogger(__name__) |
|
55 | log = logging.getLogger(__name__) | |
56 |
|
56 | |||
57 |
|
57 | |||
58 | class RepoPullRequestsView(RepoAppView, DataGridAppView): |
|
58 | class RepoPullRequestsView(RepoAppView, DataGridAppView): | |
59 |
|
59 | |||
60 | def load_default_context(self): |
|
60 | def load_default_context(self): | |
61 | c = self._get_local_tmpl_context(include_app_defaults=True) |
|
61 | c = self._get_local_tmpl_context(include_app_defaults=True) | |
62 | c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED |
|
62 | c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED | |
63 | c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED |
|
63 | c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED | |
64 | # backward compat., we use for OLD PRs a plain renderer |
|
64 | # backward compat., we use for OLD PRs a plain renderer | |
65 | c.renderer = 'plain' |
|
65 | c.renderer = 'plain' | |
66 | return c |
|
66 | return c | |
67 |
|
67 | |||
68 | def _get_pull_requests_list( |
|
68 | def _get_pull_requests_list( | |
69 | self, repo_name, source, filter_type, opened_by, statuses): |
|
69 | self, repo_name, source, filter_type, opened_by, statuses): | |
70 |
|
70 | |||
71 | draw, start, limit = self._extract_chunk(self.request) |
|
71 | draw, start, limit = self._extract_chunk(self.request) | |
72 | search_q, order_by, order_dir = self._extract_ordering(self.request) |
|
72 | search_q, order_by, order_dir = self._extract_ordering(self.request) | |
73 | _render = self.request.get_partial_renderer( |
|
73 | _render = self.request.get_partial_renderer( | |
74 | 'rhodecode:templates/data_table/_dt_elements.mako') |
|
74 | 'rhodecode:templates/data_table/_dt_elements.mako') | |
75 |
|
75 | |||
76 | # pagination |
|
76 | # pagination | |
77 |
|
77 | |||
78 | if filter_type == 'awaiting_review': |
|
78 | if filter_type == 'awaiting_review': | |
79 | pull_requests = PullRequestModel().get_awaiting_review( |
|
79 | pull_requests = PullRequestModel().get_awaiting_review( | |
80 | repo_name, search_q=search_q, source=source, opened_by=opened_by, |
|
80 | repo_name, search_q=search_q, source=source, opened_by=opened_by, | |
81 | statuses=statuses, offset=start, length=limit, |
|
81 | statuses=statuses, offset=start, length=limit, | |
82 | order_by=order_by, order_dir=order_dir) |
|
82 | order_by=order_by, order_dir=order_dir) | |
83 | pull_requests_total_count = PullRequestModel().count_awaiting_review( |
|
83 | pull_requests_total_count = PullRequestModel().count_awaiting_review( | |
84 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
84 | repo_name, search_q=search_q, source=source, statuses=statuses, | |
85 | opened_by=opened_by) |
|
85 | opened_by=opened_by) | |
86 | elif filter_type == 'awaiting_my_review': |
|
86 | elif filter_type == 'awaiting_my_review': | |
87 | pull_requests = PullRequestModel().get_awaiting_my_review( |
|
87 | pull_requests = PullRequestModel().get_awaiting_my_review( | |
88 | repo_name, search_q=search_q, source=source, opened_by=opened_by, |
|
88 | repo_name, search_q=search_q, source=source, opened_by=opened_by, | |
89 | user_id=self._rhodecode_user.user_id, statuses=statuses, |
|
89 | user_id=self._rhodecode_user.user_id, statuses=statuses, | |
90 | offset=start, length=limit, order_by=order_by, |
|
90 | offset=start, length=limit, order_by=order_by, | |
91 | order_dir=order_dir) |
|
91 | order_dir=order_dir) | |
92 | pull_requests_total_count = PullRequestModel().count_awaiting_my_review( |
|
92 | pull_requests_total_count = PullRequestModel().count_awaiting_my_review( | |
93 | repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id, |
|
93 | repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id, | |
94 | statuses=statuses, opened_by=opened_by) |
|
94 | statuses=statuses, opened_by=opened_by) | |
95 | else: |
|
95 | else: | |
96 | pull_requests = PullRequestModel().get_all( |
|
96 | pull_requests = PullRequestModel().get_all( | |
97 | repo_name, search_q=search_q, source=source, opened_by=opened_by, |
|
97 | repo_name, search_q=search_q, source=source, opened_by=opened_by, | |
98 | statuses=statuses, offset=start, length=limit, |
|
98 | statuses=statuses, offset=start, length=limit, | |
99 | order_by=order_by, order_dir=order_dir) |
|
99 | order_by=order_by, order_dir=order_dir) | |
100 | pull_requests_total_count = PullRequestModel().count_all( |
|
100 | pull_requests_total_count = PullRequestModel().count_all( | |
101 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
101 | repo_name, search_q=search_q, source=source, statuses=statuses, | |
102 | opened_by=opened_by) |
|
102 | opened_by=opened_by) | |
103 |
|
103 | |||
104 | data = [] |
|
104 | data = [] | |
105 | comments_model = CommentsModel() |
|
105 | comments_model = CommentsModel() | |
106 | for pr in pull_requests: |
|
106 | for pr in pull_requests: | |
107 | comments = comments_model.get_all_comments( |
|
107 | comments = comments_model.get_all_comments( | |
108 | self.db_repo.repo_id, pull_request=pr) |
|
108 | self.db_repo.repo_id, pull_request=pr) | |
109 |
|
109 | |||
110 | data.append({ |
|
110 | data.append({ | |
111 | 'name': _render('pullrequest_name', |
|
111 | 'name': _render('pullrequest_name', | |
112 | pr.pull_request_id, pr.pull_request_state, |
|
112 | pr.pull_request_id, pr.pull_request_state, | |
113 | pr.work_in_progress, pr.target_repo.repo_name), |
|
113 | pr.work_in_progress, pr.target_repo.repo_name), | |
114 | 'name_raw': pr.pull_request_id, |
|
114 | 'name_raw': pr.pull_request_id, | |
115 | 'status': _render('pullrequest_status', |
|
115 | 'status': _render('pullrequest_status', | |
116 | pr.calculated_review_status()), |
|
116 | pr.calculated_review_status()), | |
117 | 'title': _render('pullrequest_title', pr.title, pr.description), |
|
117 | 'title': _render('pullrequest_title', pr.title, pr.description), | |
118 | 'description': h.escape(pr.description), |
|
118 | 'description': h.escape(pr.description), | |
119 | 'updated_on': _render('pullrequest_updated_on', |
|
119 | 'updated_on': _render('pullrequest_updated_on', | |
120 | h.datetime_to_time(pr.updated_on)), |
|
120 | h.datetime_to_time(pr.updated_on)), | |
121 | 'updated_on_raw': h.datetime_to_time(pr.updated_on), |
|
121 | 'updated_on_raw': h.datetime_to_time(pr.updated_on), | |
122 | 'created_on': _render('pullrequest_updated_on', |
|
122 | 'created_on': _render('pullrequest_updated_on', | |
123 | h.datetime_to_time(pr.created_on)), |
|
123 | h.datetime_to_time(pr.created_on)), | |
124 | 'created_on_raw': h.datetime_to_time(pr.created_on), |
|
124 | 'created_on_raw': h.datetime_to_time(pr.created_on), | |
125 | 'state': pr.pull_request_state, |
|
125 | 'state': pr.pull_request_state, | |
126 | 'author': _render('pullrequest_author', |
|
126 | 'author': _render('pullrequest_author', | |
127 | pr.author.full_contact, ), |
|
127 | pr.author.full_contact, ), | |
128 | 'author_raw': pr.author.full_name, |
|
128 | 'author_raw': pr.author.full_name, | |
129 | 'comments': _render('pullrequest_comments', len(comments)), |
|
129 | 'comments': _render('pullrequest_comments', len(comments)), | |
130 | 'comments_raw': len(comments), |
|
130 | 'comments_raw': len(comments), | |
131 | 'closed': pr.is_closed(), |
|
131 | 'closed': pr.is_closed(), | |
132 | }) |
|
132 | }) | |
133 |
|
133 | |||
134 | data = ({ |
|
134 | data = ({ | |
135 | 'draw': draw, |
|
135 | 'draw': draw, | |
136 | 'data': data, |
|
136 | 'data': data, | |
137 | 'recordsTotal': pull_requests_total_count, |
|
137 | 'recordsTotal': pull_requests_total_count, | |
138 | 'recordsFiltered': pull_requests_total_count, |
|
138 | 'recordsFiltered': pull_requests_total_count, | |
139 | }) |
|
139 | }) | |
140 | return data |
|
140 | return data | |
141 |
|
141 | |||
142 | @LoginRequired() |
|
142 | @LoginRequired() | |
143 | @HasRepoPermissionAnyDecorator( |
|
143 | @HasRepoPermissionAnyDecorator( | |
144 | 'repository.read', 'repository.write', 'repository.admin') |
|
144 | 'repository.read', 'repository.write', 'repository.admin') | |
145 | @view_config( |
|
145 | @view_config( | |
146 | route_name='pullrequest_show_all', request_method='GET', |
|
146 | route_name='pullrequest_show_all', request_method='GET', | |
147 | renderer='rhodecode:templates/pullrequests/pullrequests.mako') |
|
147 | renderer='rhodecode:templates/pullrequests/pullrequests.mako') | |
148 | def pull_request_list(self): |
|
148 | def pull_request_list(self): | |
149 | c = self.load_default_context() |
|
149 | c = self.load_default_context() | |
150 |
|
150 | |||
151 | req_get = self.request.GET |
|
151 | req_get = self.request.GET | |
152 | c.source = str2bool(req_get.get('source')) |
|
152 | c.source = str2bool(req_get.get('source')) | |
153 | c.closed = str2bool(req_get.get('closed')) |
|
153 | c.closed = str2bool(req_get.get('closed')) | |
154 | c.my = str2bool(req_get.get('my')) |
|
154 | c.my = str2bool(req_get.get('my')) | |
155 | c.awaiting_review = str2bool(req_get.get('awaiting_review')) |
|
155 | c.awaiting_review = str2bool(req_get.get('awaiting_review')) | |
156 | c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review')) |
|
156 | c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review')) | |
157 |
|
157 | |||
158 | c.active = 'open' |
|
158 | c.active = 'open' | |
159 | if c.my: |
|
159 | if c.my: | |
160 | c.active = 'my' |
|
160 | c.active = 'my' | |
161 | if c.closed: |
|
161 | if c.closed: | |
162 | c.active = 'closed' |
|
162 | c.active = 'closed' | |
163 | if c.awaiting_review and not c.source: |
|
163 | if c.awaiting_review and not c.source: | |
164 | c.active = 'awaiting' |
|
164 | c.active = 'awaiting' | |
165 | if c.source and not c.awaiting_review: |
|
165 | if c.source and not c.awaiting_review: | |
166 | c.active = 'source' |
|
166 | c.active = 'source' | |
167 | if c.awaiting_my_review: |
|
167 | if c.awaiting_my_review: | |
168 | c.active = 'awaiting_my' |
|
168 | c.active = 'awaiting_my' | |
169 |
|
169 | |||
170 | return self._get_template_context(c) |
|
170 | return self._get_template_context(c) | |
171 |
|
171 | |||
172 | @LoginRequired() |
|
172 | @LoginRequired() | |
173 | @HasRepoPermissionAnyDecorator( |
|
173 | @HasRepoPermissionAnyDecorator( | |
174 | 'repository.read', 'repository.write', 'repository.admin') |
|
174 | 'repository.read', 'repository.write', 'repository.admin') | |
175 | @view_config( |
|
175 | @view_config( | |
176 | route_name='pullrequest_show_all_data', request_method='GET', |
|
176 | route_name='pullrequest_show_all_data', request_method='GET', | |
177 | renderer='json_ext', xhr=True) |
|
177 | renderer='json_ext', xhr=True) | |
178 | def pull_request_list_data(self): |
|
178 | def pull_request_list_data(self): | |
179 | self.load_default_context() |
|
179 | self.load_default_context() | |
180 |
|
180 | |||
181 | # additional filters |
|
181 | # additional filters | |
182 | req_get = self.request.GET |
|
182 | req_get = self.request.GET | |
183 | source = str2bool(req_get.get('source')) |
|
183 | source = str2bool(req_get.get('source')) | |
184 | closed = str2bool(req_get.get('closed')) |
|
184 | closed = str2bool(req_get.get('closed')) | |
185 | my = str2bool(req_get.get('my')) |
|
185 | my = str2bool(req_get.get('my')) | |
186 | awaiting_review = str2bool(req_get.get('awaiting_review')) |
|
186 | awaiting_review = str2bool(req_get.get('awaiting_review')) | |
187 | awaiting_my_review = str2bool(req_get.get('awaiting_my_review')) |
|
187 | awaiting_my_review = str2bool(req_get.get('awaiting_my_review')) | |
188 |
|
188 | |||
189 | filter_type = 'awaiting_review' if awaiting_review \ |
|
189 | filter_type = 'awaiting_review' if awaiting_review \ | |
190 | else 'awaiting_my_review' if awaiting_my_review \ |
|
190 | else 'awaiting_my_review' if awaiting_my_review \ | |
191 | else None |
|
191 | else None | |
192 |
|
192 | |||
193 | opened_by = None |
|
193 | opened_by = None | |
194 | if my: |
|
194 | if my: | |
195 | opened_by = [self._rhodecode_user.user_id] |
|
195 | opened_by = [self._rhodecode_user.user_id] | |
196 |
|
196 | |||
197 | statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN] |
|
197 | statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN] | |
198 | if closed: |
|
198 | if closed: | |
199 | statuses = [PullRequest.STATUS_CLOSED] |
|
199 | statuses = [PullRequest.STATUS_CLOSED] | |
200 |
|
200 | |||
201 | data = self._get_pull_requests_list( |
|
201 | data = self._get_pull_requests_list( | |
202 | repo_name=self.db_repo_name, source=source, |
|
202 | repo_name=self.db_repo_name, source=source, | |
203 | filter_type=filter_type, opened_by=opened_by, statuses=statuses) |
|
203 | filter_type=filter_type, opened_by=opened_by, statuses=statuses) | |
204 |
|
204 | |||
205 | return data |
|
205 | return data | |
206 |
|
206 | |||
207 | def _is_diff_cache_enabled(self, target_repo): |
|
207 | def _is_diff_cache_enabled(self, target_repo): | |
208 | caching_enabled = self._get_general_setting( |
|
208 | caching_enabled = self._get_general_setting( | |
209 | target_repo, 'rhodecode_diff_cache') |
|
209 | target_repo, 'rhodecode_diff_cache') | |
210 | log.debug('Diff caching enabled: %s', caching_enabled) |
|
210 | log.debug('Diff caching enabled: %s', caching_enabled) | |
211 | return caching_enabled |
|
211 | return caching_enabled | |
212 |
|
212 | |||
213 | def _get_diffset(self, source_repo_name, source_repo, |
|
213 | def _get_diffset(self, source_repo_name, source_repo, | |
214 | ancestor_commit, |
|
214 | ancestor_commit, | |
215 | source_ref_id, target_ref_id, |
|
215 | source_ref_id, target_ref_id, | |
216 | target_commit, source_commit, diff_limit, file_limit, |
|
216 | target_commit, source_commit, diff_limit, file_limit, | |
217 | fulldiff, hide_whitespace_changes, diff_context, use_ancestor=True): |
|
217 | fulldiff, hide_whitespace_changes, diff_context, use_ancestor=True): | |
218 |
|
218 | |||
219 | if use_ancestor: |
|
219 | if use_ancestor: | |
220 | # we might want to not use it for versions |
|
220 | # we might want to not use it for versions | |
221 | target_ref_id = ancestor_commit.raw_id |
|
221 | target_ref_id = ancestor_commit.raw_id | |
222 |
|
222 | |||
223 | vcs_diff = PullRequestModel().get_diff( |
|
223 | vcs_diff = PullRequestModel().get_diff( | |
224 | source_repo, source_ref_id, target_ref_id, |
|
224 | source_repo, source_ref_id, target_ref_id, | |
225 | hide_whitespace_changes, diff_context) |
|
225 | hide_whitespace_changes, diff_context) | |
226 |
|
226 | |||
227 | diff_processor = diffs.DiffProcessor( |
|
227 | diff_processor = diffs.DiffProcessor( | |
228 | vcs_diff, format='newdiff', diff_limit=diff_limit, |
|
228 | vcs_diff, format='newdiff', diff_limit=diff_limit, | |
229 | file_limit=file_limit, show_full_diff=fulldiff) |
|
229 | file_limit=file_limit, show_full_diff=fulldiff) | |
230 |
|
230 | |||
231 | _parsed = diff_processor.prepare() |
|
231 | _parsed = diff_processor.prepare() | |
232 |
|
232 | |||
233 | diffset = codeblocks.DiffSet( |
|
233 | diffset = codeblocks.DiffSet( | |
234 | repo_name=self.db_repo_name, |
|
234 | repo_name=self.db_repo_name, | |
235 | source_repo_name=source_repo_name, |
|
235 | source_repo_name=source_repo_name, | |
236 | source_node_getter=codeblocks.diffset_node_getter(target_commit), |
|
236 | source_node_getter=codeblocks.diffset_node_getter(target_commit), | |
237 | target_node_getter=codeblocks.diffset_node_getter(source_commit), |
|
237 | target_node_getter=codeblocks.diffset_node_getter(source_commit), | |
238 | ) |
|
238 | ) | |
239 | diffset = self.path_filter.render_patchset_filtered( |
|
239 | diffset = self.path_filter.render_patchset_filtered( | |
240 | diffset, _parsed, target_commit.raw_id, source_commit.raw_id) |
|
240 | diffset, _parsed, target_commit.raw_id, source_commit.raw_id) | |
241 |
|
241 | |||
242 | return diffset |
|
242 | return diffset | |
243 |
|
243 | |||
244 | def _get_range_diffset(self, source_scm, source_repo, |
|
244 | def _get_range_diffset(self, source_scm, source_repo, | |
245 | commit1, commit2, diff_limit, file_limit, |
|
245 | commit1, commit2, diff_limit, file_limit, | |
246 | fulldiff, hide_whitespace_changes, diff_context): |
|
246 | fulldiff, hide_whitespace_changes, diff_context): | |
247 | vcs_diff = source_scm.get_diff( |
|
247 | vcs_diff = source_scm.get_diff( | |
248 | commit1, commit2, |
|
248 | commit1, commit2, | |
249 | ignore_whitespace=hide_whitespace_changes, |
|
249 | ignore_whitespace=hide_whitespace_changes, | |
250 | context=diff_context) |
|
250 | context=diff_context) | |
251 |
|
251 | |||
252 | diff_processor = diffs.DiffProcessor( |
|
252 | diff_processor = diffs.DiffProcessor( | |
253 | vcs_diff, format='newdiff', diff_limit=diff_limit, |
|
253 | vcs_diff, format='newdiff', diff_limit=diff_limit, | |
254 | file_limit=file_limit, show_full_diff=fulldiff) |
|
254 | file_limit=file_limit, show_full_diff=fulldiff) | |
255 |
|
255 | |||
256 | _parsed = diff_processor.prepare() |
|
256 | _parsed = diff_processor.prepare() | |
257 |
|
257 | |||
258 | diffset = codeblocks.DiffSet( |
|
258 | diffset = codeblocks.DiffSet( | |
259 | repo_name=source_repo.repo_name, |
|
259 | repo_name=source_repo.repo_name, | |
260 | source_node_getter=codeblocks.diffset_node_getter(commit1), |
|
260 | source_node_getter=codeblocks.diffset_node_getter(commit1), | |
261 | target_node_getter=codeblocks.diffset_node_getter(commit2)) |
|
261 | target_node_getter=codeblocks.diffset_node_getter(commit2)) | |
262 |
|
262 | |||
263 | diffset = self.path_filter.render_patchset_filtered( |
|
263 | diffset = self.path_filter.render_patchset_filtered( | |
264 | diffset, _parsed, commit1.raw_id, commit2.raw_id) |
|
264 | diffset, _parsed, commit1.raw_id, commit2.raw_id) | |
265 |
|
265 | |||
266 | return diffset |
|
266 | return diffset | |
267 |
|
267 | |||
268 | @LoginRequired() |
|
268 | @LoginRequired() | |
269 | @HasRepoPermissionAnyDecorator( |
|
269 | @HasRepoPermissionAnyDecorator( | |
270 | 'repository.read', 'repository.write', 'repository.admin') |
|
270 | 'repository.read', 'repository.write', 'repository.admin') | |
271 | @view_config( |
|
271 | @view_config( | |
272 | route_name='pullrequest_show', request_method='GET', |
|
272 | route_name='pullrequest_show', request_method='GET', | |
273 | renderer='rhodecode:templates/pullrequests/pullrequest_show.mako') |
|
273 | renderer='rhodecode:templates/pullrequests/pullrequest_show.mako') | |
274 | def pull_request_show(self): |
|
274 | def pull_request_show(self): | |
275 | _ = self.request.translate |
|
275 | _ = self.request.translate | |
276 | c = self.load_default_context() |
|
276 | c = self.load_default_context() | |
277 |
|
277 | |||
278 | pull_request = PullRequest.get_or_404( |
|
278 | pull_request = PullRequest.get_or_404( | |
279 | self.request.matchdict['pull_request_id']) |
|
279 | self.request.matchdict['pull_request_id']) | |
280 | pull_request_id = pull_request.pull_request_id |
|
280 | pull_request_id = pull_request.pull_request_id | |
281 |
|
281 | |||
282 | c.state_progressing = pull_request.is_state_changing() |
|
282 | c.state_progressing = pull_request.is_state_changing() | |
283 |
|
283 | |||
284 | _new_state = { |
|
284 | _new_state = { | |
285 | 'created': PullRequest.STATE_CREATED, |
|
285 | 'created': PullRequest.STATE_CREATED, | |
286 | }.get(self.request.GET.get('force_state')) |
|
286 | }.get(self.request.GET.get('force_state')) | |
287 |
|
287 | |||
288 | if c.is_super_admin and _new_state: |
|
288 | if c.is_super_admin and _new_state: | |
289 | with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state): |
|
289 | with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state): | |
290 | h.flash( |
|
290 | h.flash( | |
291 | _('Pull Request state was force changed to `{}`').format(_new_state), |
|
291 | _('Pull Request state was force changed to `{}`').format(_new_state), | |
292 | category='success') |
|
292 | category='success') | |
293 | Session().commit() |
|
293 | Session().commit() | |
294 |
|
294 | |||
295 | raise HTTPFound(h.route_path( |
|
295 | raise HTTPFound(h.route_path( | |
296 | 'pullrequest_show', repo_name=self.db_repo_name, |
|
296 | 'pullrequest_show', repo_name=self.db_repo_name, | |
297 | pull_request_id=pull_request_id)) |
|
297 | pull_request_id=pull_request_id)) | |
298 |
|
298 | |||
299 | version = self.request.GET.get('version') |
|
299 | version = self.request.GET.get('version') | |
300 | from_version = self.request.GET.get('from_version') or version |
|
300 | from_version = self.request.GET.get('from_version') or version | |
301 | merge_checks = self.request.GET.get('merge_checks') |
|
301 | merge_checks = self.request.GET.get('merge_checks') | |
302 | c.fulldiff = str2bool(self.request.GET.get('fulldiff')) |
|
302 | c.fulldiff = str2bool(self.request.GET.get('fulldiff')) | |
303 |
|
303 | |||
304 | # fetch global flags of ignore ws or context lines |
|
304 | # fetch global flags of ignore ws or context lines | |
305 | diff_context = diffs.get_diff_context(self.request) |
|
305 | diff_context = diffs.get_diff_context(self.request) | |
306 | hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request) |
|
306 | hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request) | |
307 |
|
307 | |||
308 | force_refresh = str2bool(self.request.GET.get('force_refresh')) |
|
308 | force_refresh = str2bool(self.request.GET.get('force_refresh')) | |
309 |
|
309 | |||
310 | (pull_request_latest, |
|
310 | (pull_request_latest, | |
311 | pull_request_at_ver, |
|
311 | pull_request_at_ver, | |
312 | pull_request_display_obj, |
|
312 | pull_request_display_obj, | |
313 | at_version) = PullRequestModel().get_pr_version( |
|
313 | at_version) = PullRequestModel().get_pr_version( | |
314 | pull_request_id, version=version) |
|
314 | pull_request_id, version=version) | |
315 | pr_closed = pull_request_latest.is_closed() |
|
315 | pr_closed = pull_request_latest.is_closed() | |
316 |
|
316 | |||
317 | if pr_closed and (version or from_version): |
|
317 | if pr_closed and (version or from_version): | |
318 | # not allow to browse versions |
|
318 | # not allow to browse versions | |
319 | raise HTTPFound(h.route_path( |
|
319 | raise HTTPFound(h.route_path( | |
320 | 'pullrequest_show', repo_name=self.db_repo_name, |
|
320 | 'pullrequest_show', repo_name=self.db_repo_name, | |
321 | pull_request_id=pull_request_id)) |
|
321 | pull_request_id=pull_request_id)) | |
322 |
|
322 | |||
323 | versions = pull_request_display_obj.versions() |
|
323 | versions = pull_request_display_obj.versions() | |
324 | # used to store per-commit range diffs |
|
324 | # used to store per-commit range diffs | |
325 | c.changes = collections.OrderedDict() |
|
325 | c.changes = collections.OrderedDict() | |
326 | c.range_diff_on = self.request.GET.get('range-diff') == "1" |
|
326 | c.range_diff_on = self.request.GET.get('range-diff') == "1" | |
327 |
|
327 | |||
328 | c.at_version = at_version |
|
328 | c.at_version = at_version | |
329 | c.at_version_num = (at_version |
|
329 | c.at_version_num = (at_version | |
330 | if at_version and at_version != 'latest' |
|
330 | if at_version and at_version != 'latest' | |
331 | else None) |
|
331 | else None) | |
332 | c.at_version_pos = ChangesetComment.get_index_from_version( |
|
332 | c.at_version_pos = ChangesetComment.get_index_from_version( | |
333 | c.at_version_num, versions) |
|
333 | c.at_version_num, versions) | |
334 |
|
334 | |||
335 | (prev_pull_request_latest, |
|
335 | (prev_pull_request_latest, | |
336 | prev_pull_request_at_ver, |
|
336 | prev_pull_request_at_ver, | |
337 | prev_pull_request_display_obj, |
|
337 | prev_pull_request_display_obj, | |
338 | prev_at_version) = PullRequestModel().get_pr_version( |
|
338 | prev_at_version) = PullRequestModel().get_pr_version( | |
339 | pull_request_id, version=from_version) |
|
339 | pull_request_id, version=from_version) | |
340 |
|
340 | |||
341 | c.from_version = prev_at_version |
|
341 | c.from_version = prev_at_version | |
342 | c.from_version_num = (prev_at_version |
|
342 | c.from_version_num = (prev_at_version | |
343 | if prev_at_version and prev_at_version != 'latest' |
|
343 | if prev_at_version and prev_at_version != 'latest' | |
344 | else None) |
|
344 | else None) | |
345 | c.from_version_pos = ChangesetComment.get_index_from_version( |
|
345 | c.from_version_pos = ChangesetComment.get_index_from_version( | |
346 | c.from_version_num, versions) |
|
346 | c.from_version_num, versions) | |
347 |
|
347 | |||
348 | # define if we're in COMPARE mode or VIEW at version mode |
|
348 | # define if we're in COMPARE mode or VIEW at version mode | |
349 | compare = at_version != prev_at_version |
|
349 | compare = at_version != prev_at_version | |
350 |
|
350 | |||
351 | # pull_requests repo_name we opened it against |
|
351 | # pull_requests repo_name we opened it against | |
352 | # ie. target_repo must match |
|
352 | # ie. target_repo must match | |
353 | if self.db_repo_name != pull_request_at_ver.target_repo.repo_name: |
|
353 | if self.db_repo_name != pull_request_at_ver.target_repo.repo_name: | |
354 | raise HTTPNotFound() |
|
354 | raise HTTPNotFound() | |
355 |
|
355 | |||
356 | c.shadow_clone_url = PullRequestModel().get_shadow_clone_url( |
|
356 | c.shadow_clone_url = PullRequestModel().get_shadow_clone_url( | |
357 | pull_request_at_ver) |
|
357 | pull_request_at_ver) | |
358 |
|
358 | |||
359 | c.pull_request = pull_request_display_obj |
|
359 | c.pull_request = pull_request_display_obj | |
360 | c.renderer = pull_request_at_ver.description_renderer or c.renderer |
|
360 | c.renderer = pull_request_at_ver.description_renderer or c.renderer | |
361 | c.pull_request_latest = pull_request_latest |
|
361 | c.pull_request_latest = pull_request_latest | |
362 |
|
362 | |||
363 | if compare or (at_version and not at_version == 'latest'): |
|
363 | if compare or (at_version and not at_version == 'latest'): | |
364 | c.allowed_to_change_status = False |
|
364 | c.allowed_to_change_status = False | |
365 | c.allowed_to_update = False |
|
365 | c.allowed_to_update = False | |
366 | c.allowed_to_merge = False |
|
366 | c.allowed_to_merge = False | |
367 | c.allowed_to_delete = False |
|
367 | c.allowed_to_delete = False | |
368 | c.allowed_to_comment = False |
|
368 | c.allowed_to_comment = False | |
369 | c.allowed_to_close = False |
|
369 | c.allowed_to_close = False | |
370 | else: |
|
370 | else: | |
371 | can_change_status = PullRequestModel().check_user_change_status( |
|
371 | can_change_status = PullRequestModel().check_user_change_status( | |
372 | pull_request_at_ver, self._rhodecode_user) |
|
372 | pull_request_at_ver, self._rhodecode_user) | |
373 | c.allowed_to_change_status = can_change_status and not pr_closed |
|
373 | c.allowed_to_change_status = can_change_status and not pr_closed | |
374 |
|
374 | |||
375 | c.allowed_to_update = PullRequestModel().check_user_update( |
|
375 | c.allowed_to_update = PullRequestModel().check_user_update( | |
376 | pull_request_latest, self._rhodecode_user) and not pr_closed |
|
376 | pull_request_latest, self._rhodecode_user) and not pr_closed | |
377 | c.allowed_to_merge = PullRequestModel().check_user_merge( |
|
377 | c.allowed_to_merge = PullRequestModel().check_user_merge( | |
378 | pull_request_latest, self._rhodecode_user) and not pr_closed |
|
378 | pull_request_latest, self._rhodecode_user) and not pr_closed | |
379 | c.allowed_to_delete = PullRequestModel().check_user_delete( |
|
379 | c.allowed_to_delete = PullRequestModel().check_user_delete( | |
380 | pull_request_latest, self._rhodecode_user) and not pr_closed |
|
380 | pull_request_latest, self._rhodecode_user) and not pr_closed | |
381 | c.allowed_to_comment = not pr_closed |
|
381 | c.allowed_to_comment = not pr_closed | |
382 | c.allowed_to_close = c.allowed_to_merge and not pr_closed |
|
382 | c.allowed_to_close = c.allowed_to_merge and not pr_closed | |
383 |
|
383 | |||
384 | c.forbid_adding_reviewers = False |
|
384 | c.forbid_adding_reviewers = False | |
385 | c.forbid_author_to_review = False |
|
385 | c.forbid_author_to_review = False | |
386 | c.forbid_commit_author_to_review = False |
|
386 | c.forbid_commit_author_to_review = False | |
387 |
|
387 | |||
388 | if pull_request_latest.reviewer_data and \ |
|
388 | if pull_request_latest.reviewer_data and \ | |
389 | 'rules' in pull_request_latest.reviewer_data: |
|
389 | 'rules' in pull_request_latest.reviewer_data: | |
390 | rules = pull_request_latest.reviewer_data['rules'] or {} |
|
390 | rules = pull_request_latest.reviewer_data['rules'] or {} | |
391 | try: |
|
391 | try: | |
392 | c.forbid_adding_reviewers = rules.get( |
|
392 | c.forbid_adding_reviewers = rules.get( | |
393 | 'forbid_adding_reviewers') |
|
393 | 'forbid_adding_reviewers') | |
394 | c.forbid_author_to_review = rules.get( |
|
394 | c.forbid_author_to_review = rules.get( | |
395 | 'forbid_author_to_review') |
|
395 | 'forbid_author_to_review') | |
396 | c.forbid_commit_author_to_review = rules.get( |
|
396 | c.forbid_commit_author_to_review = rules.get( | |
397 | 'forbid_commit_author_to_review') |
|
397 | 'forbid_commit_author_to_review') | |
398 | except Exception: |
|
398 | except Exception: | |
399 | pass |
|
399 | pass | |
400 |
|
400 | |||
401 | # check merge capabilities |
|
401 | # check merge capabilities | |
402 | _merge_check = MergeCheck.validate( |
|
402 | _merge_check = MergeCheck.validate( | |
403 | pull_request_latest, auth_user=self._rhodecode_user, |
|
403 | pull_request_latest, auth_user=self._rhodecode_user, | |
404 | translator=self.request.translate, |
|
404 | translator=self.request.translate, | |
405 | force_shadow_repo_refresh=force_refresh) |
|
405 | force_shadow_repo_refresh=force_refresh) | |
406 |
|
406 | |||
407 | c.pr_merge_errors = _merge_check.error_details |
|
407 | c.pr_merge_errors = _merge_check.error_details | |
408 | c.pr_merge_possible = not _merge_check.failed |
|
408 | c.pr_merge_possible = not _merge_check.failed | |
409 | c.pr_merge_message = _merge_check.merge_msg |
|
409 | c.pr_merge_message = _merge_check.merge_msg | |
410 | c.pr_merge_source_commit = _merge_check.source_commit |
|
410 | c.pr_merge_source_commit = _merge_check.source_commit | |
411 | c.pr_merge_target_commit = _merge_check.target_commit |
|
411 | c.pr_merge_target_commit = _merge_check.target_commit | |
412 |
|
412 | |||
413 | c.pr_merge_info = MergeCheck.get_merge_conditions( |
|
413 | c.pr_merge_info = MergeCheck.get_merge_conditions( | |
414 | pull_request_latest, translator=self.request.translate) |
|
414 | pull_request_latest, translator=self.request.translate) | |
415 |
|
415 | |||
416 | c.pull_request_review_status = _merge_check.review_status |
|
416 | c.pull_request_review_status = _merge_check.review_status | |
417 | if merge_checks: |
|
417 | if merge_checks: | |
418 | self.request.override_renderer = \ |
|
418 | self.request.override_renderer = \ | |
419 | 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako' |
|
419 | 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako' | |
420 | return self._get_template_context(c) |
|
420 | return self._get_template_context(c) | |
421 |
|
421 | |||
422 | comments_model = CommentsModel() |
|
422 | comments_model = CommentsModel() | |
423 |
|
423 | |||
424 | # reviewers and statuses |
|
424 | # reviewers and statuses | |
425 | c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses() |
|
425 | c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses() | |
426 | allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers] |
|
426 | allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers] | |
427 |
|
427 | |||
428 | # GENERAL COMMENTS with versions # |
|
428 | # GENERAL COMMENTS with versions # | |
429 | q = comments_model._all_general_comments_of_pull_request(pull_request_latest) |
|
429 | q = comments_model._all_general_comments_of_pull_request(pull_request_latest) | |
430 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
430 | q = q.order_by(ChangesetComment.comment_id.asc()) | |
431 | general_comments = q |
|
431 | general_comments = q | |
432 |
|
432 | |||
433 | # pick comments we want to render at current version |
|
433 | # pick comments we want to render at current version | |
434 | c.comment_versions = comments_model.aggregate_comments( |
|
434 | c.comment_versions = comments_model.aggregate_comments( | |
435 | general_comments, versions, c.at_version_num) |
|
435 | general_comments, versions, c.at_version_num) | |
436 | c.comments = c.comment_versions[c.at_version_num]['until'] |
|
436 | c.comments = c.comment_versions[c.at_version_num]['until'] | |
437 |
|
437 | |||
438 | # INLINE COMMENTS with versions # |
|
438 | # INLINE COMMENTS with versions # | |
439 | q = comments_model._all_inline_comments_of_pull_request(pull_request_latest) |
|
439 | q = comments_model._all_inline_comments_of_pull_request(pull_request_latest) | |
440 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
440 | q = q.order_by(ChangesetComment.comment_id.asc()) | |
441 | inline_comments = q |
|
441 | inline_comments = q | |
442 |
|
442 | |||
443 | c.inline_versions = comments_model.aggregate_comments( |
|
443 | c.inline_versions = comments_model.aggregate_comments( | |
444 | inline_comments, versions, c.at_version_num, inline=True) |
|
444 | inline_comments, versions, c.at_version_num, inline=True) | |
445 |
|
445 | |||
446 | # TODOs |
|
446 | # TODOs | |
447 | c.unresolved_comments = CommentsModel() \ |
|
447 | c.unresolved_comments = CommentsModel() \ | |
448 | .get_pull_request_unresolved_todos(pull_request) |
|
448 | .get_pull_request_unresolved_todos(pull_request) | |
449 | c.resolved_comments = CommentsModel() \ |
|
449 | c.resolved_comments = CommentsModel() \ | |
450 | .get_pull_request_resolved_todos(pull_request) |
|
450 | .get_pull_request_resolved_todos(pull_request) | |
451 |
|
451 | |||
452 | # inject latest version |
|
452 | # inject latest version | |
453 | latest_ver = PullRequest.get_pr_display_object( |
|
453 | latest_ver = PullRequest.get_pr_display_object( | |
454 | pull_request_latest, pull_request_latest) |
|
454 | pull_request_latest, pull_request_latest) | |
455 |
|
455 | |||
456 | c.versions = versions + [latest_ver] |
|
456 | c.versions = versions + [latest_ver] | |
457 |
|
457 | |||
458 | # if we use version, then do not show later comments |
|
458 | # if we use version, then do not show later comments | |
459 | # than current version |
|
459 | # than current version | |
460 | display_inline_comments = collections.defaultdict( |
|
460 | display_inline_comments = collections.defaultdict( | |
461 | lambda: collections.defaultdict(list)) |
|
461 | lambda: collections.defaultdict(list)) | |
462 | for co in inline_comments: |
|
462 | for co in inline_comments: | |
463 | if c.at_version_num: |
|
463 | if c.at_version_num: | |
464 | # pick comments that are at least UPTO given version, so we |
|
464 | # pick comments that are at least UPTO given version, so we | |
465 | # don't render comments for higher version |
|
465 | # don't render comments for higher version | |
466 | should_render = co.pull_request_version_id and \ |
|
466 | should_render = co.pull_request_version_id and \ | |
467 | co.pull_request_version_id <= c.at_version_num |
|
467 | co.pull_request_version_id <= c.at_version_num | |
468 | else: |
|
468 | else: | |
469 | # showing all, for 'latest' |
|
469 | # showing all, for 'latest' | |
470 | should_render = True |
|
470 | should_render = True | |
471 |
|
471 | |||
472 | if should_render: |
|
472 | if should_render: | |
473 | display_inline_comments[co.f_path][co.line_no].append(co) |
|
473 | display_inline_comments[co.f_path][co.line_no].append(co) | |
474 |
|
474 | |||
475 | # load diff data into template context, if we use compare mode then |
|
475 | # load diff data into template context, if we use compare mode then | |
476 | # diff is calculated based on changes between versions of PR |
|
476 | # diff is calculated based on changes between versions of PR | |
477 |
|
477 | |||
478 | source_repo = pull_request_at_ver.source_repo |
|
478 | source_repo = pull_request_at_ver.source_repo | |
479 | source_ref_id = pull_request_at_ver.source_ref_parts.commit_id |
|
479 | source_ref_id = pull_request_at_ver.source_ref_parts.commit_id | |
480 |
|
480 | |||
481 | target_repo = pull_request_at_ver.target_repo |
|
481 | target_repo = pull_request_at_ver.target_repo | |
482 | target_ref_id = pull_request_at_ver.target_ref_parts.commit_id |
|
482 | target_ref_id = pull_request_at_ver.target_ref_parts.commit_id | |
483 |
|
483 | |||
484 | if compare: |
|
484 | if compare: | |
485 | # in compare switch the diff base to latest commit from prev version |
|
485 | # in compare switch the diff base to latest commit from prev version | |
486 | target_ref_id = prev_pull_request_display_obj.revisions[0] |
|
486 | target_ref_id = prev_pull_request_display_obj.revisions[0] | |
487 |
|
487 | |||
488 | # despite opening commits for bookmarks/branches/tags, we always |
|
488 | # despite opening commits for bookmarks/branches/tags, we always | |
489 | # convert this to rev to prevent changes after bookmark or branch change |
|
489 | # convert this to rev to prevent changes after bookmark or branch change | |
490 | c.source_ref_type = 'rev' |
|
490 | c.source_ref_type = 'rev' | |
491 | c.source_ref = source_ref_id |
|
491 | c.source_ref = source_ref_id | |
492 |
|
492 | |||
493 | c.target_ref_type = 'rev' |
|
493 | c.target_ref_type = 'rev' | |
494 | c.target_ref = target_ref_id |
|
494 | c.target_ref = target_ref_id | |
495 |
|
495 | |||
496 | c.source_repo = source_repo |
|
496 | c.source_repo = source_repo | |
497 | c.target_repo = target_repo |
|
497 | c.target_repo = target_repo | |
498 |
|
498 | |||
499 | c.commit_ranges = [] |
|
499 | c.commit_ranges = [] | |
500 | source_commit = EmptyCommit() |
|
500 | source_commit = EmptyCommit() | |
501 | target_commit = EmptyCommit() |
|
501 | target_commit = EmptyCommit() | |
502 | c.missing_requirements = False |
|
502 | c.missing_requirements = False | |
503 |
|
503 | |||
504 | source_scm = source_repo.scm_instance() |
|
504 | source_scm = source_repo.scm_instance() | |
505 | target_scm = target_repo.scm_instance() |
|
505 | target_scm = target_repo.scm_instance() | |
506 |
|
506 | |||
507 | shadow_scm = None |
|
507 | shadow_scm = None | |
508 | try: |
|
508 | try: | |
509 | shadow_scm = pull_request_latest.get_shadow_repo() |
|
509 | shadow_scm = pull_request_latest.get_shadow_repo() | |
510 | except Exception: |
|
510 | except Exception: | |
511 | log.debug('Failed to get shadow repo', exc_info=True) |
|
511 | log.debug('Failed to get shadow repo', exc_info=True) | |
512 | # try first the existing source_repo, and then shadow |
|
512 | # try first the existing source_repo, and then shadow | |
513 | # repo if we can obtain one |
|
513 | # repo if we can obtain one | |
514 | commits_source_repo = source_scm |
|
514 | commits_source_repo = source_scm | |
515 | if shadow_scm: |
|
515 | if shadow_scm: | |
516 | commits_source_repo = shadow_scm |
|
516 | commits_source_repo = shadow_scm | |
517 |
|
517 | |||
518 | c.commits_source_repo = commits_source_repo |
|
518 | c.commits_source_repo = commits_source_repo | |
519 | c.ancestor = None # set it to None, to hide it from PR view |
|
519 | c.ancestor = None # set it to None, to hide it from PR view | |
520 |
|
520 | |||
521 | # empty version means latest, so we keep this to prevent |
|
521 | # empty version means latest, so we keep this to prevent | |
522 | # double caching |
|
522 | # double caching | |
523 | version_normalized = version or 'latest' |
|
523 | version_normalized = version or 'latest' | |
524 | from_version_normalized = from_version or 'latest' |
|
524 | from_version_normalized = from_version or 'latest' | |
525 |
|
525 | |||
526 | cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo) |
|
526 | cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo) | |
527 | cache_file_path = diff_cache_exist( |
|
527 | cache_file_path = diff_cache_exist( | |
528 | cache_path, 'pull_request', pull_request_id, version_normalized, |
|
528 | cache_path, 'pull_request', pull_request_id, version_normalized, | |
529 | from_version_normalized, source_ref_id, target_ref_id, |
|
529 | from_version_normalized, source_ref_id, target_ref_id, | |
530 | hide_whitespace_changes, diff_context, c.fulldiff) |
|
530 | hide_whitespace_changes, diff_context, c.fulldiff) | |
531 |
|
531 | |||
532 | caching_enabled = self._is_diff_cache_enabled(c.target_repo) |
|
532 | caching_enabled = self._is_diff_cache_enabled(c.target_repo) | |
533 | force_recache = self.get_recache_flag() |
|
533 | force_recache = self.get_recache_flag() | |
534 |
|
534 | |||
535 | cached_diff = None |
|
535 | cached_diff = None | |
536 | if caching_enabled: |
|
536 | if caching_enabled: | |
537 | cached_diff = load_cached_diff(cache_file_path) |
|
537 | cached_diff = load_cached_diff(cache_file_path) | |
538 |
|
538 | |||
539 | has_proper_commit_cache = ( |
|
539 | has_proper_commit_cache = ( | |
540 | cached_diff and cached_diff.get('commits') |
|
540 | cached_diff and cached_diff.get('commits') | |
541 | and len(cached_diff.get('commits', [])) == 5 |
|
541 | and len(cached_diff.get('commits', [])) == 5 | |
542 | and cached_diff.get('commits')[0] |
|
542 | and cached_diff.get('commits')[0] | |
543 | and cached_diff.get('commits')[3]) |
|
543 | and cached_diff.get('commits')[3]) | |
544 |
|
544 | |||
545 | if not force_recache and not c.range_diff_on and has_proper_commit_cache: |
|
545 | if not force_recache and not c.range_diff_on and has_proper_commit_cache: | |
546 | diff_commit_cache = \ |
|
546 | diff_commit_cache = \ | |
547 | (ancestor_commit, commit_cache, missing_requirements, |
|
547 | (ancestor_commit, commit_cache, missing_requirements, | |
548 | source_commit, target_commit) = cached_diff['commits'] |
|
548 | source_commit, target_commit) = cached_diff['commits'] | |
549 | else: |
|
549 | else: | |
550 | # NOTE(marcink): we reach potentially unreachable errors when a PR has |
|
550 | # NOTE(marcink): we reach potentially unreachable errors when a PR has | |
551 | # merge errors resulting in potentially hidden commits in the shadow repo. |
|
551 | # merge errors resulting in potentially hidden commits in the shadow repo. | |
552 | maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \ |
|
552 | maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \ | |
553 | and _merge_check.merge_response |
|
553 | and _merge_check.merge_response | |
554 | maybe_unreachable = maybe_unreachable \ |
|
554 | maybe_unreachable = maybe_unreachable \ | |
555 | and _merge_check.merge_response.metadata.get('unresolved_files') |
|
555 | and _merge_check.merge_response.metadata.get('unresolved_files') | |
556 | log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation") |
|
556 | log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation") | |
557 | diff_commit_cache = \ |
|
557 | diff_commit_cache = \ | |
558 | (ancestor_commit, commit_cache, missing_requirements, |
|
558 | (ancestor_commit, commit_cache, missing_requirements, | |
559 | source_commit, target_commit) = self.get_commits( |
|
559 | source_commit, target_commit) = self.get_commits( | |
560 | commits_source_repo, |
|
560 | commits_source_repo, | |
561 | pull_request_at_ver, |
|
561 | pull_request_at_ver, | |
562 | source_commit, |
|
562 | source_commit, | |
563 | source_ref_id, |
|
563 | source_ref_id, | |
564 | source_scm, |
|
564 | source_scm, | |
565 | target_commit, |
|
565 | target_commit, | |
566 | target_ref_id, |
|
566 | target_ref_id, | |
567 | target_scm, |
|
567 | target_scm, | |
568 | maybe_unreachable=maybe_unreachable) |
|
568 | maybe_unreachable=maybe_unreachable) | |
569 |
|
569 | |||
570 | # register our commit range |
|
570 | # register our commit range | |
571 | for comm in commit_cache.values(): |
|
571 | for comm in commit_cache.values(): | |
572 | c.commit_ranges.append(comm) |
|
572 | c.commit_ranges.append(comm) | |
573 |
|
573 | |||
574 | c.missing_requirements = missing_requirements |
|
574 | c.missing_requirements = missing_requirements | |
575 | c.ancestor_commit = ancestor_commit |
|
575 | c.ancestor_commit = ancestor_commit | |
576 | c.statuses = source_repo.statuses( |
|
576 | c.statuses = source_repo.statuses( | |
577 | [x.raw_id for x in c.commit_ranges]) |
|
577 | [x.raw_id for x in c.commit_ranges]) | |
578 |
|
578 | |||
579 | # auto collapse if we have more than limit |
|
579 | # auto collapse if we have more than limit | |
580 | collapse_limit = diffs.DiffProcessor._collapse_commits_over |
|
580 | collapse_limit = diffs.DiffProcessor._collapse_commits_over | |
581 | c.collapse_all_commits = len(c.commit_ranges) > collapse_limit |
|
581 | c.collapse_all_commits = len(c.commit_ranges) > collapse_limit | |
582 | c.compare_mode = compare |
|
582 | c.compare_mode = compare | |
583 |
|
583 | |||
584 | # diff_limit is the old behavior, will cut off the whole diff |
|
584 | # diff_limit is the old behavior, will cut off the whole diff | |
585 | # if the limit is applied otherwise will just hide the |
|
585 | # if the limit is applied otherwise will just hide the | |
586 | # big files from the front-end |
|
586 | # big files from the front-end | |
587 | diff_limit = c.visual.cut_off_limit_diff |
|
587 | diff_limit = c.visual.cut_off_limit_diff | |
588 | file_limit = c.visual.cut_off_limit_file |
|
588 | file_limit = c.visual.cut_off_limit_file | |
589 |
|
589 | |||
590 | c.missing_commits = False |
|
590 | c.missing_commits = False | |
591 | if (c.missing_requirements |
|
591 | if (c.missing_requirements | |
592 | or isinstance(source_commit, EmptyCommit) |
|
592 | or isinstance(source_commit, EmptyCommit) | |
593 | or source_commit == target_commit): |
|
593 | or source_commit == target_commit): | |
594 |
|
594 | |||
595 | c.missing_commits = True |
|
595 | c.missing_commits = True | |
596 | else: |
|
596 | else: | |
597 | c.inline_comments = display_inline_comments |
|
597 | c.inline_comments = display_inline_comments | |
598 |
|
598 | |||
599 | use_ancestor = True |
|
599 | use_ancestor = True | |
600 | if from_version_normalized != version_normalized: |
|
600 | if from_version_normalized != version_normalized: | |
601 | use_ancestor = False |
|
601 | use_ancestor = False | |
602 |
|
602 | |||
603 | has_proper_diff_cache = cached_diff and cached_diff.get('commits') |
|
603 | has_proper_diff_cache = cached_diff and cached_diff.get('commits') | |
604 | if not force_recache and has_proper_diff_cache: |
|
604 | if not force_recache and has_proper_diff_cache: | |
605 | c.diffset = cached_diff['diff'] |
|
605 | c.diffset = cached_diff['diff'] | |
606 | else: |
|
606 | else: | |
607 | c.diffset = self._get_diffset( |
|
607 | c.diffset = self._get_diffset( | |
608 | c.source_repo.repo_name, commits_source_repo, |
|
608 | c.source_repo.repo_name, commits_source_repo, | |
609 | c.ancestor_commit, |
|
609 | c.ancestor_commit, | |
610 | source_ref_id, target_ref_id, |
|
610 | source_ref_id, target_ref_id, | |
611 | target_commit, source_commit, |
|
611 | target_commit, source_commit, | |
612 | diff_limit, file_limit, c.fulldiff, |
|
612 | diff_limit, file_limit, c.fulldiff, | |
613 | hide_whitespace_changes, diff_context, |
|
613 | hide_whitespace_changes, diff_context, | |
614 | use_ancestor=use_ancestor |
|
614 | use_ancestor=use_ancestor | |
615 | ) |
|
615 | ) | |
616 |
|
616 | |||
617 | # save cached diff |
|
617 | # save cached diff | |
618 | if caching_enabled: |
|
618 | if caching_enabled: | |
619 | cache_diff(cache_file_path, c.diffset, diff_commit_cache) |
|
619 | cache_diff(cache_file_path, c.diffset, diff_commit_cache) | |
620 |
|
620 | |||
621 | c.limited_diff = c.diffset.limited_diff |
|
621 | c.limited_diff = c.diffset.limited_diff | |
622 |
|
622 | |||
623 | # calculate removed files that are bound to comments |
|
623 | # calculate removed files that are bound to comments | |
624 | comment_deleted_files = [ |
|
624 | comment_deleted_files = [ | |
625 | fname for fname in display_inline_comments |
|
625 | fname for fname in display_inline_comments | |
626 | if fname not in c.diffset.file_stats] |
|
626 | if fname not in c.diffset.file_stats] | |
627 |
|
627 | |||
628 | c.deleted_files_comments = collections.defaultdict(dict) |
|
628 | c.deleted_files_comments = collections.defaultdict(dict) | |
629 | for fname, per_line_comments in display_inline_comments.items(): |
|
629 | for fname, per_line_comments in display_inline_comments.items(): | |
630 | if fname in comment_deleted_files: |
|
630 | if fname in comment_deleted_files: | |
631 | c.deleted_files_comments[fname]['stats'] = 0 |
|
631 | c.deleted_files_comments[fname]['stats'] = 0 | |
632 | c.deleted_files_comments[fname]['comments'] = list() |
|
632 | c.deleted_files_comments[fname]['comments'] = list() | |
633 | for lno, comments in per_line_comments.items(): |
|
633 | for lno, comments in per_line_comments.items(): | |
634 | c.deleted_files_comments[fname]['comments'].extend(comments) |
|
634 | c.deleted_files_comments[fname]['comments'].extend(comments) | |
635 |
|
635 | |||
636 | # maybe calculate the range diff |
|
636 | # maybe calculate the range diff | |
637 | if c.range_diff_on: |
|
637 | if c.range_diff_on: | |
638 | # TODO(marcink): set whitespace/context |
|
638 | # TODO(marcink): set whitespace/context | |
639 | context_lcl = 3 |
|
639 | context_lcl = 3 | |
640 | ign_whitespace_lcl = False |
|
640 | ign_whitespace_lcl = False | |
641 |
|
641 | |||
642 | for commit in c.commit_ranges: |
|
642 | for commit in c.commit_ranges: | |
643 | commit2 = commit |
|
643 | commit2 = commit | |
644 | commit1 = commit.first_parent |
|
644 | commit1 = commit.first_parent | |
645 |
|
645 | |||
646 | range_diff_cache_file_path = diff_cache_exist( |
|
646 | range_diff_cache_file_path = diff_cache_exist( | |
647 | cache_path, 'diff', commit.raw_id, |
|
647 | cache_path, 'diff', commit.raw_id, | |
648 | ign_whitespace_lcl, context_lcl, c.fulldiff) |
|
648 | ign_whitespace_lcl, context_lcl, c.fulldiff) | |
649 |
|
649 | |||
650 | cached_diff = None |
|
650 | cached_diff = None | |
651 | if caching_enabled: |
|
651 | if caching_enabled: | |
652 | cached_diff = load_cached_diff(range_diff_cache_file_path) |
|
652 | cached_diff = load_cached_diff(range_diff_cache_file_path) | |
653 |
|
653 | |||
654 | has_proper_diff_cache = cached_diff and cached_diff.get('diff') |
|
654 | has_proper_diff_cache = cached_diff and cached_diff.get('diff') | |
655 | if not force_recache and has_proper_diff_cache: |
|
655 | if not force_recache and has_proper_diff_cache: | |
656 | diffset = cached_diff['diff'] |
|
656 | diffset = cached_diff['diff'] | |
657 | else: |
|
657 | else: | |
658 | diffset = self._get_range_diffset( |
|
658 | diffset = self._get_range_diffset( | |
659 | commits_source_repo, source_repo, |
|
659 | commits_source_repo, source_repo, | |
660 | commit1, commit2, diff_limit, file_limit, |
|
660 | commit1, commit2, diff_limit, file_limit, | |
661 | c.fulldiff, ign_whitespace_lcl, context_lcl |
|
661 | c.fulldiff, ign_whitespace_lcl, context_lcl | |
662 | ) |
|
662 | ) | |
663 |
|
663 | |||
664 | # save cached diff |
|
664 | # save cached diff | |
665 | if caching_enabled: |
|
665 | if caching_enabled: | |
666 | cache_diff(range_diff_cache_file_path, diffset, None) |
|
666 | cache_diff(range_diff_cache_file_path, diffset, None) | |
667 |
|
667 | |||
668 | c.changes[commit.raw_id] = diffset |
|
668 | c.changes[commit.raw_id] = diffset | |
669 |
|
669 | |||
670 | # this is a hack to properly display links, when creating PR, the |
|
670 | # this is a hack to properly display links, when creating PR, the | |
671 | # compare view and others uses different notation, and |
|
671 | # compare view and others uses different notation, and | |
672 | # compare_commits.mako renders links based on the target_repo. |
|
672 | # compare_commits.mako renders links based on the target_repo. | |
673 | # We need to swap that here to generate it properly on the html side |
|
673 | # We need to swap that here to generate it properly on the html side | |
674 | c.target_repo = c.source_repo |
|
674 | c.target_repo = c.source_repo | |
675 |
|
675 | |||
676 | c.commit_statuses = ChangesetStatus.STATUSES |
|
676 | c.commit_statuses = ChangesetStatus.STATUSES | |
677 |
|
677 | |||
678 | c.show_version_changes = not pr_closed |
|
678 | c.show_version_changes = not pr_closed | |
679 | if c.show_version_changes: |
|
679 | if c.show_version_changes: | |
680 | cur_obj = pull_request_at_ver |
|
680 | cur_obj = pull_request_at_ver | |
681 | prev_obj = prev_pull_request_at_ver |
|
681 | prev_obj = prev_pull_request_at_ver | |
682 |
|
682 | |||
683 | old_commit_ids = prev_obj.revisions |
|
683 | old_commit_ids = prev_obj.revisions | |
684 | new_commit_ids = cur_obj.revisions |
|
684 | new_commit_ids = cur_obj.revisions | |
685 | commit_changes = PullRequestModel()._calculate_commit_id_changes( |
|
685 | commit_changes = PullRequestModel()._calculate_commit_id_changes( | |
686 | old_commit_ids, new_commit_ids) |
|
686 | old_commit_ids, new_commit_ids) | |
687 | c.commit_changes_summary = commit_changes |
|
687 | c.commit_changes_summary = commit_changes | |
688 |
|
688 | |||
689 | # calculate the diff for commits between versions |
|
689 | # calculate the diff for commits between versions | |
690 | c.commit_changes = [] |
|
690 | c.commit_changes = [] | |
691 |
|
691 | |||
692 | def mark(cs, fw): |
|
692 | def mark(cs, fw): | |
693 | return list(h.itertools.izip_longest([], cs, fillvalue=fw)) |
|
693 | return list(h.itertools.izip_longest([], cs, fillvalue=fw)) | |
694 |
|
694 | |||
695 | for c_type, raw_id in mark(commit_changes.added, 'a') \ |
|
695 | for c_type, raw_id in mark(commit_changes.added, 'a') \ | |
696 | + mark(commit_changes.removed, 'r') \ |
|
696 | + mark(commit_changes.removed, 'r') \ | |
697 | + mark(commit_changes.common, 'c'): |
|
697 | + mark(commit_changes.common, 'c'): | |
698 |
|
698 | |||
699 | if raw_id in commit_cache: |
|
699 | if raw_id in commit_cache: | |
700 | commit = commit_cache[raw_id] |
|
700 | commit = commit_cache[raw_id] | |
701 | else: |
|
701 | else: | |
702 | try: |
|
702 | try: | |
703 | commit = commits_source_repo.get_commit(raw_id) |
|
703 | commit = commits_source_repo.get_commit(raw_id) | |
704 | except CommitDoesNotExistError: |
|
704 | except CommitDoesNotExistError: | |
705 | # in case we fail extracting still use "dummy" commit |
|
705 | # in case we fail extracting still use "dummy" commit | |
706 | # for display in commit diff |
|
706 | # for display in commit diff | |
707 | commit = h.AttributeDict( |
|
707 | commit = h.AttributeDict( | |
708 | {'raw_id': raw_id, |
|
708 | {'raw_id': raw_id, | |
709 | 'message': 'EMPTY or MISSING COMMIT'}) |
|
709 | 'message': 'EMPTY or MISSING COMMIT'}) | |
710 | c.commit_changes.append([c_type, commit]) |
|
710 | c.commit_changes.append([c_type, commit]) | |
711 |
|
711 | |||
712 | # current user review statuses for each version |
|
712 | # current user review statuses for each version | |
713 | c.review_versions = {} |
|
713 | c.review_versions = {} | |
714 | if self._rhodecode_user.user_id in allowed_reviewers: |
|
714 | if self._rhodecode_user.user_id in allowed_reviewers: | |
715 | for co in general_comments: |
|
715 | for co in general_comments: | |
716 | if co.author.user_id == self._rhodecode_user.user_id: |
|
716 | if co.author.user_id == self._rhodecode_user.user_id: | |
717 | status = co.status_change |
|
717 | status = co.status_change | |
718 | if status: |
|
718 | if status: | |
719 | _ver_pr = status[0].comment.pull_request_version_id |
|
719 | _ver_pr = status[0].comment.pull_request_version_id | |
720 | c.review_versions[_ver_pr] = status[0] |
|
720 | c.review_versions[_ver_pr] = status[0] | |
721 |
|
721 | |||
722 | return self._get_template_context(c) |
|
722 | return self._get_template_context(c) | |
723 |
|
723 | |||
724 | def get_commits( |
|
724 | def get_commits( | |
725 | self, commits_source_repo, pull_request_at_ver, source_commit, |
|
725 | self, commits_source_repo, pull_request_at_ver, source_commit, | |
726 | source_ref_id, source_scm, target_commit, target_ref_id, target_scm, |
|
726 | source_ref_id, source_scm, target_commit, target_ref_id, target_scm, | |
727 | maybe_unreachable=False): |
|
727 | maybe_unreachable=False): | |
728 |
|
728 | |||
729 | commit_cache = collections.OrderedDict() |
|
729 | commit_cache = collections.OrderedDict() | |
730 | missing_requirements = False |
|
730 | missing_requirements = False | |
731 |
|
731 | |||
732 | try: |
|
732 | try: | |
733 | pre_load = ["author", "date", "message", "branch", "parents"] |
|
733 | pre_load = ["author", "date", "message", "branch", "parents"] | |
734 |
|
734 | |||
735 | pull_request_commits = pull_request_at_ver.revisions |
|
735 | pull_request_commits = pull_request_at_ver.revisions | |
736 | log.debug('Loading %s commits from %s', |
|
736 | log.debug('Loading %s commits from %s', | |
737 | len(pull_request_commits), commits_source_repo) |
|
737 | len(pull_request_commits), commits_source_repo) | |
738 |
|
738 | |||
739 | for rev in pull_request_commits: |
|
739 | for rev in pull_request_commits: | |
740 | comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load, |
|
740 | comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load, | |
741 | maybe_unreachable=maybe_unreachable) |
|
741 | maybe_unreachable=maybe_unreachable) | |
742 | commit_cache[comm.raw_id] = comm |
|
742 | commit_cache[comm.raw_id] = comm | |
743 |
|
743 | |||
744 | # Order here matters, we first need to get target, and then |
|
744 | # Order here matters, we first need to get target, and then | |
745 | # the source |
|
745 | # the source | |
746 | target_commit = commits_source_repo.get_commit( |
|
746 | target_commit = commits_source_repo.get_commit( | |
747 | commit_id=safe_str(target_ref_id)) |
|
747 | commit_id=safe_str(target_ref_id)) | |
748 |
|
748 | |||
749 | source_commit = commits_source_repo.get_commit( |
|
749 | source_commit = commits_source_repo.get_commit( | |
750 | commit_id=safe_str(source_ref_id), maybe_unreachable=True) |
|
750 | commit_id=safe_str(source_ref_id), maybe_unreachable=True) | |
751 | except CommitDoesNotExistError: |
|
751 | except CommitDoesNotExistError: | |
752 | log.warning('Failed to get commit from `{}` repo'.format( |
|
752 | log.warning('Failed to get commit from `{}` repo'.format( | |
753 | commits_source_repo), exc_info=True) |
|
753 | commits_source_repo), exc_info=True) | |
754 | except RepositoryRequirementError: |
|
754 | except RepositoryRequirementError: | |
755 | log.warning('Failed to get all required data from repo', exc_info=True) |
|
755 | log.warning('Failed to get all required data from repo', exc_info=True) | |
756 | missing_requirements = True |
|
756 | missing_requirements = True | |
757 |
|
757 | |||
758 | pr_ancestor_id = pull_request_at_ver.common_ancestor_id |
|
758 | pr_ancestor_id = pull_request_at_ver.common_ancestor_id | |
759 |
|
759 | |||
760 | try: |
|
760 | try: | |
761 | ancestor_commit = source_scm.get_commit(pr_ancestor_id) |
|
761 | ancestor_commit = source_scm.get_commit(pr_ancestor_id) | |
762 | except Exception: |
|
762 | except Exception: | |
763 | ancestor_commit = None |
|
763 | ancestor_commit = None | |
764 |
|
764 | |||
765 | return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit |
|
765 | return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit | |
766 |
|
766 | |||
767 | def assure_not_empty_repo(self): |
|
767 | def assure_not_empty_repo(self): | |
768 | _ = self.request.translate |
|
768 | _ = self.request.translate | |
769 |
|
769 | |||
770 | try: |
|
770 | try: | |
771 | self.db_repo.scm_instance().get_commit() |
|
771 | self.db_repo.scm_instance().get_commit() | |
772 | except EmptyRepositoryError: |
|
772 | except EmptyRepositoryError: | |
773 | h.flash(h.literal(_('There are no commits yet')), |
|
773 | h.flash(h.literal(_('There are no commits yet')), | |
774 | category='warning') |
|
774 | category='warning') | |
775 | raise HTTPFound( |
|
775 | raise HTTPFound( | |
776 | h.route_path('repo_summary', repo_name=self.db_repo.repo_name)) |
|
776 | h.route_path('repo_summary', repo_name=self.db_repo.repo_name)) | |
777 |
|
777 | |||
778 | @LoginRequired() |
|
778 | @LoginRequired() | |
779 | @NotAnonymous() |
|
779 | @NotAnonymous() | |
780 | @HasRepoPermissionAnyDecorator( |
|
780 | @HasRepoPermissionAnyDecorator( | |
781 | 'repository.read', 'repository.write', 'repository.admin') |
|
781 | 'repository.read', 'repository.write', 'repository.admin') | |
782 | @view_config( |
|
782 | @view_config( | |
783 | route_name='pullrequest_new', request_method='GET', |
|
783 | route_name='pullrequest_new', request_method='GET', | |
784 | renderer='rhodecode:templates/pullrequests/pullrequest.mako') |
|
784 | renderer='rhodecode:templates/pullrequests/pullrequest.mako') | |
785 | def pull_request_new(self): |
|
785 | def pull_request_new(self): | |
786 | _ = self.request.translate |
|
786 | _ = self.request.translate | |
787 | c = self.load_default_context() |
|
787 | c = self.load_default_context() | |
788 |
|
788 | |||
789 | self.assure_not_empty_repo() |
|
789 | self.assure_not_empty_repo() | |
790 | source_repo = self.db_repo |
|
790 | source_repo = self.db_repo | |
791 |
|
791 | |||
792 | commit_id = self.request.GET.get('commit') |
|
792 | commit_id = self.request.GET.get('commit') | |
793 | branch_ref = self.request.GET.get('branch') |
|
793 | branch_ref = self.request.GET.get('branch') | |
794 | bookmark_ref = self.request.GET.get('bookmark') |
|
794 | bookmark_ref = self.request.GET.get('bookmark') | |
795 |
|
795 | |||
796 | try: |
|
796 | try: | |
797 | source_repo_data = PullRequestModel().generate_repo_data( |
|
797 | source_repo_data = PullRequestModel().generate_repo_data( | |
798 | source_repo, commit_id=commit_id, |
|
798 | source_repo, commit_id=commit_id, | |
799 | branch=branch_ref, bookmark=bookmark_ref, |
|
799 | branch=branch_ref, bookmark=bookmark_ref, | |
800 | translator=self.request.translate) |
|
800 | translator=self.request.translate) | |
801 | except CommitDoesNotExistError as e: |
|
801 | except CommitDoesNotExistError as e: | |
802 | log.exception(e) |
|
802 | log.exception(e) | |
803 | h.flash(_('Commit does not exist'), 'error') |
|
803 | h.flash(_('Commit does not exist'), 'error') | |
804 | raise HTTPFound( |
|
804 | raise HTTPFound( | |
805 | h.route_path('pullrequest_new', repo_name=source_repo.repo_name)) |
|
805 | h.route_path('pullrequest_new', repo_name=source_repo.repo_name)) | |
806 |
|
806 | |||
807 | default_target_repo = source_repo |
|
807 | default_target_repo = source_repo | |
808 |
|
808 | |||
809 | if source_repo.parent and c.has_origin_repo_read_perm: |
|
809 | if source_repo.parent and c.has_origin_repo_read_perm: | |
810 | parent_vcs_obj = source_repo.parent.scm_instance() |
|
810 | parent_vcs_obj = source_repo.parent.scm_instance() | |
811 | if parent_vcs_obj and not parent_vcs_obj.is_empty(): |
|
811 | if parent_vcs_obj and not parent_vcs_obj.is_empty(): | |
812 | # change default if we have a parent repo |
|
812 | # change default if we have a parent repo | |
813 | default_target_repo = source_repo.parent |
|
813 | default_target_repo = source_repo.parent | |
814 |
|
814 | |||
815 | target_repo_data = PullRequestModel().generate_repo_data( |
|
815 | target_repo_data = PullRequestModel().generate_repo_data( | |
816 | default_target_repo, translator=self.request.translate) |
|
816 | default_target_repo, translator=self.request.translate) | |
817 |
|
817 | |||
818 | selected_source_ref = source_repo_data['refs']['selected_ref'] |
|
818 | selected_source_ref = source_repo_data['refs']['selected_ref'] | |
819 | title_source_ref = '' |
|
819 | title_source_ref = '' | |
820 | if selected_source_ref: |
|
820 | if selected_source_ref: | |
821 | title_source_ref = selected_source_ref.split(':', 2)[1] |
|
821 | title_source_ref = selected_source_ref.split(':', 2)[1] | |
822 | c.default_title = PullRequestModel().generate_pullrequest_title( |
|
822 | c.default_title = PullRequestModel().generate_pullrequest_title( | |
823 | source=source_repo.repo_name, |
|
823 | source=source_repo.repo_name, | |
824 | source_ref=title_source_ref, |
|
824 | source_ref=title_source_ref, | |
825 | target=default_target_repo.repo_name |
|
825 | target=default_target_repo.repo_name | |
826 | ) |
|
826 | ) | |
827 |
|
827 | |||
828 | c.default_repo_data = { |
|
828 | c.default_repo_data = { | |
829 | 'source_repo_name': source_repo.repo_name, |
|
829 | 'source_repo_name': source_repo.repo_name, | |
830 | 'source_refs_json': json.dumps(source_repo_data), |
|
830 | 'source_refs_json': json.dumps(source_repo_data), | |
831 | 'target_repo_name': default_target_repo.repo_name, |
|
831 | 'target_repo_name': default_target_repo.repo_name, | |
832 | 'target_refs_json': json.dumps(target_repo_data), |
|
832 | 'target_refs_json': json.dumps(target_repo_data), | |
833 | } |
|
833 | } | |
834 | c.default_source_ref = selected_source_ref |
|
834 | c.default_source_ref = selected_source_ref | |
835 |
|
835 | |||
836 | return self._get_template_context(c) |
|
836 | return self._get_template_context(c) | |
837 |
|
837 | |||
838 | @LoginRequired() |
|
838 | @LoginRequired() | |
839 | @NotAnonymous() |
|
839 | @NotAnonymous() | |
840 | @HasRepoPermissionAnyDecorator( |
|
840 | @HasRepoPermissionAnyDecorator( | |
841 | 'repository.read', 'repository.write', 'repository.admin') |
|
841 | 'repository.read', 'repository.write', 'repository.admin') | |
842 | @view_config( |
|
842 | @view_config( | |
843 | route_name='pullrequest_repo_refs', request_method='GET', |
|
843 | route_name='pullrequest_repo_refs', request_method='GET', | |
844 | renderer='json_ext', xhr=True) |
|
844 | renderer='json_ext', xhr=True) | |
845 | def pull_request_repo_refs(self): |
|
845 | def pull_request_repo_refs(self): | |
846 | self.load_default_context() |
|
846 | self.load_default_context() | |
847 | target_repo_name = self.request.matchdict['target_repo_name'] |
|
847 | target_repo_name = self.request.matchdict['target_repo_name'] | |
848 | repo = Repository.get_by_repo_name(target_repo_name) |
|
848 | repo = Repository.get_by_repo_name(target_repo_name) | |
849 | if not repo: |
|
849 | if not repo: | |
850 | raise HTTPNotFound() |
|
850 | raise HTTPNotFound() | |
851 |
|
851 | |||
852 | target_perm = HasRepoPermissionAny( |
|
852 | target_perm = HasRepoPermissionAny( | |
853 | 'repository.read', 'repository.write', 'repository.admin')( |
|
853 | 'repository.read', 'repository.write', 'repository.admin')( | |
854 | target_repo_name) |
|
854 | target_repo_name) | |
855 | if not target_perm: |
|
855 | if not target_perm: | |
856 | raise HTTPNotFound() |
|
856 | raise HTTPNotFound() | |
857 |
|
857 | |||
858 | return PullRequestModel().generate_repo_data( |
|
858 | return PullRequestModel().generate_repo_data( | |
859 | repo, translator=self.request.translate) |
|
859 | repo, translator=self.request.translate) | |
860 |
|
860 | |||
861 | @LoginRequired() |
|
861 | @LoginRequired() | |
862 | @NotAnonymous() |
|
862 | @NotAnonymous() | |
863 | @HasRepoPermissionAnyDecorator( |
|
863 | @HasRepoPermissionAnyDecorator( | |
864 | 'repository.read', 'repository.write', 'repository.admin') |
|
864 | 'repository.read', 'repository.write', 'repository.admin') | |
865 | @view_config( |
|
865 | @view_config( | |
866 | route_name='pullrequest_repo_targets', request_method='GET', |
|
866 | route_name='pullrequest_repo_targets', request_method='GET', | |
867 | renderer='json_ext', xhr=True) |
|
867 | renderer='json_ext', xhr=True) | |
868 | def pullrequest_repo_targets(self): |
|
868 | def pullrequest_repo_targets(self): | |
869 | _ = self.request.translate |
|
869 | _ = self.request.translate | |
870 | filter_query = self.request.GET.get('query') |
|
870 | filter_query = self.request.GET.get('query') | |
871 |
|
871 | |||
872 | # get the parents |
|
872 | # get the parents | |
873 | parent_target_repos = [] |
|
873 | parent_target_repos = [] | |
874 | if self.db_repo.parent: |
|
874 | if self.db_repo.parent: | |
875 | parents_query = Repository.query() \ |
|
875 | parents_query = Repository.query() \ | |
876 | .order_by(func.length(Repository.repo_name)) \ |
|
876 | .order_by(func.length(Repository.repo_name)) \ | |
877 | .filter(Repository.fork_id == self.db_repo.parent.repo_id) |
|
877 | .filter(Repository.fork_id == self.db_repo.parent.repo_id) | |
878 |
|
878 | |||
879 | if filter_query: |
|
879 | if filter_query: | |
880 | ilike_expression = u'%{}%'.format(safe_unicode(filter_query)) |
|
880 | ilike_expression = u'%{}%'.format(safe_unicode(filter_query)) | |
881 | parents_query = parents_query.filter( |
|
881 | parents_query = parents_query.filter( | |
882 | Repository.repo_name.ilike(ilike_expression)) |
|
882 | Repository.repo_name.ilike(ilike_expression)) | |
883 | parents = parents_query.limit(20).all() |
|
883 | parents = parents_query.limit(20).all() | |
884 |
|
884 | |||
885 | for parent in parents: |
|
885 | for parent in parents: | |
886 | parent_vcs_obj = parent.scm_instance() |
|
886 | parent_vcs_obj = parent.scm_instance() | |
887 | if parent_vcs_obj and not parent_vcs_obj.is_empty(): |
|
887 | if parent_vcs_obj and not parent_vcs_obj.is_empty(): | |
888 | parent_target_repos.append(parent) |
|
888 | parent_target_repos.append(parent) | |
889 |
|
889 | |||
890 | # get other forks, and repo itself |
|
890 | # get other forks, and repo itself | |
891 | query = Repository.query() \ |
|
891 | query = Repository.query() \ | |
892 | .order_by(func.length(Repository.repo_name)) \ |
|
892 | .order_by(func.length(Repository.repo_name)) \ | |
893 | .filter( |
|
893 | .filter( | |
894 | or_(Repository.repo_id == self.db_repo.repo_id, # repo itself |
|
894 | or_(Repository.repo_id == self.db_repo.repo_id, # repo itself | |
895 | Repository.fork_id == self.db_repo.repo_id) # forks of this repo |
|
895 | Repository.fork_id == self.db_repo.repo_id) # forks of this repo | |
896 | ) \ |
|
896 | ) \ | |
897 | .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos])) |
|
897 | .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos])) | |
898 |
|
898 | |||
899 | if filter_query: |
|
899 | if filter_query: | |
900 | ilike_expression = u'%{}%'.format(safe_unicode(filter_query)) |
|
900 | ilike_expression = u'%{}%'.format(safe_unicode(filter_query)) | |
901 | query = query.filter(Repository.repo_name.ilike(ilike_expression)) |
|
901 | query = query.filter(Repository.repo_name.ilike(ilike_expression)) | |
902 |
|
902 | |||
903 | limit = max(20 - len(parent_target_repos), 5) # not less then 5 |
|
903 | limit = max(20 - len(parent_target_repos), 5) # not less then 5 | |
904 | target_repos = query.limit(limit).all() |
|
904 | target_repos = query.limit(limit).all() | |
905 |
|
905 | |||
906 | all_target_repos = target_repos + parent_target_repos |
|
906 | all_target_repos = target_repos + parent_target_repos | |
907 |
|
907 | |||
908 | repos = [] |
|
908 | repos = [] | |
909 | # This checks permissions to the repositories |
|
909 | # This checks permissions to the repositories | |
910 | for obj in ScmModel().get_repos(all_target_repos): |
|
910 | for obj in ScmModel().get_repos(all_target_repos): | |
911 | repos.append({ |
|
911 | repos.append({ | |
912 | 'id': obj['name'], |
|
912 | 'id': obj['name'], | |
913 | 'text': obj['name'], |
|
913 | 'text': obj['name'], | |
914 | 'type': 'repo', |
|
914 | 'type': 'repo', | |
915 | 'repo_id': obj['dbrepo']['repo_id'], |
|
915 | 'repo_id': obj['dbrepo']['repo_id'], | |
916 | 'repo_type': obj['dbrepo']['repo_type'], |
|
916 | 'repo_type': obj['dbrepo']['repo_type'], | |
917 | 'private': obj['dbrepo']['private'], |
|
917 | 'private': obj['dbrepo']['private'], | |
918 |
|
918 | |||
919 | }) |
|
919 | }) | |
920 |
|
920 | |||
921 | data = { |
|
921 | data = { | |
922 | 'more': False, |
|
922 | 'more': False, | |
923 | 'results': [{ |
|
923 | 'results': [{ | |
924 | 'text': _('Repositories'), |
|
924 | 'text': _('Repositories'), | |
925 | 'children': repos |
|
925 | 'children': repos | |
926 | }] if repos else [] |
|
926 | }] if repos else [] | |
927 | } |
|
927 | } | |
928 | return data |
|
928 | return data | |
929 |
|
929 | |||
930 | @LoginRequired() |
|
930 | @LoginRequired() | |
931 | @NotAnonymous() |
|
931 | @NotAnonymous() | |
932 | @HasRepoPermissionAnyDecorator( |
|
932 | @HasRepoPermissionAnyDecorator( | |
933 | 'repository.read', 'repository.write', 'repository.admin') |
|
933 | 'repository.read', 'repository.write', 'repository.admin') | |
934 | @CSRFRequired() |
|
934 | @CSRFRequired() | |
935 | @view_config( |
|
935 | @view_config( | |
936 | route_name='pullrequest_create', request_method='POST', |
|
936 | route_name='pullrequest_create', request_method='POST', | |
937 | renderer=None) |
|
937 | renderer=None) | |
938 | def pull_request_create(self): |
|
938 | def pull_request_create(self): | |
939 | _ = self.request.translate |
|
939 | _ = self.request.translate | |
940 | self.assure_not_empty_repo() |
|
940 | self.assure_not_empty_repo() | |
941 | self.load_default_context() |
|
941 | self.load_default_context() | |
942 |
|
942 | |||
943 | controls = peppercorn.parse(self.request.POST.items()) |
|
943 | controls = peppercorn.parse(self.request.POST.items()) | |
944 |
|
944 | |||
945 | try: |
|
945 | try: | |
946 | form = PullRequestForm( |
|
946 | form = PullRequestForm( | |
947 | self.request.translate, self.db_repo.repo_id)() |
|
947 | self.request.translate, self.db_repo.repo_id)() | |
948 | _form = form.to_python(controls) |
|
948 | _form = form.to_python(controls) | |
949 | except formencode.Invalid as errors: |
|
949 | except formencode.Invalid as errors: | |
950 | if errors.error_dict.get('revisions'): |
|
950 | if errors.error_dict.get('revisions'): | |
951 | msg = 'Revisions: %s' % errors.error_dict['revisions'] |
|
951 | msg = 'Revisions: %s' % errors.error_dict['revisions'] | |
952 | elif errors.error_dict.get('pullrequest_title'): |
|
952 | elif errors.error_dict.get('pullrequest_title'): | |
953 | msg = errors.error_dict.get('pullrequest_title') |
|
953 | msg = errors.error_dict.get('pullrequest_title') | |
954 | else: |
|
954 | else: | |
955 | msg = _('Error creating pull request: {}').format(errors) |
|
955 | msg = _('Error creating pull request: {}').format(errors) | |
956 | log.exception(msg) |
|
956 | log.exception(msg) | |
957 | h.flash(msg, 'error') |
|
957 | h.flash(msg, 'error') | |
958 |
|
958 | |||
959 | # would rather just go back to form ... |
|
959 | # would rather just go back to form ... | |
960 | raise HTTPFound( |
|
960 | raise HTTPFound( | |
961 | h.route_path('pullrequest_new', repo_name=self.db_repo_name)) |
|
961 | h.route_path('pullrequest_new', repo_name=self.db_repo_name)) | |
962 |
|
962 | |||
963 | source_repo = _form['source_repo'] |
|
963 | source_repo = _form['source_repo'] | |
964 | source_ref = _form['source_ref'] |
|
964 | source_ref = _form['source_ref'] | |
965 | target_repo = _form['target_repo'] |
|
965 | target_repo = _form['target_repo'] | |
966 | target_ref = _form['target_ref'] |
|
966 | target_ref = _form['target_ref'] | |
967 | commit_ids = _form['revisions'][::-1] |
|
967 | commit_ids = _form['revisions'][::-1] | |
968 | common_ancestor_id = _form['common_ancestor'] |
|
968 | common_ancestor_id = _form['common_ancestor'] | |
969 |
|
969 | |||
970 | # find the ancestor for this pr |
|
970 | # find the ancestor for this pr | |
971 | source_db_repo = Repository.get_by_repo_name(_form['source_repo']) |
|
971 | source_db_repo = Repository.get_by_repo_name(_form['source_repo']) | |
972 | target_db_repo = Repository.get_by_repo_name(_form['target_repo']) |
|
972 | target_db_repo = Repository.get_by_repo_name(_form['target_repo']) | |
973 |
|
973 | |||
974 | if not (source_db_repo or target_db_repo): |
|
974 | if not (source_db_repo or target_db_repo): | |
975 | h.flash(_('source_repo or target repo not found'), category='error') |
|
975 | h.flash(_('source_repo or target repo not found'), category='error') | |
976 | raise HTTPFound( |
|
976 | raise HTTPFound( | |
977 | h.route_path('pullrequest_new', repo_name=self.db_repo_name)) |
|
977 | h.route_path('pullrequest_new', repo_name=self.db_repo_name)) | |
978 |
|
978 | |||
979 | # re-check permissions again here |
|
979 | # re-check permissions again here | |
980 | # source_repo we must have read permissions |
|
980 | # source_repo we must have read permissions | |
981 |
|
981 | |||
982 | source_perm = HasRepoPermissionAny( |
|
982 | source_perm = HasRepoPermissionAny( | |
983 | 'repository.read', 'repository.write', 'repository.admin')( |
|
983 | 'repository.read', 'repository.write', 'repository.admin')( | |
984 | source_db_repo.repo_name) |
|
984 | source_db_repo.repo_name) | |
985 | if not source_perm: |
|
985 | if not source_perm: | |
986 | msg = _('Not Enough permissions to source repo `{}`.'.format( |
|
986 | msg = _('Not Enough permissions to source repo `{}`.'.format( | |
987 | source_db_repo.repo_name)) |
|
987 | source_db_repo.repo_name)) | |
988 | h.flash(msg, category='error') |
|
988 | h.flash(msg, category='error') | |
989 | # copy the args back to redirect |
|
989 | # copy the args back to redirect | |
990 | org_query = self.request.GET.mixed() |
|
990 | org_query = self.request.GET.mixed() | |
991 | raise HTTPFound( |
|
991 | raise HTTPFound( | |
992 | h.route_path('pullrequest_new', repo_name=self.db_repo_name, |
|
992 | h.route_path('pullrequest_new', repo_name=self.db_repo_name, | |
993 | _query=org_query)) |
|
993 | _query=org_query)) | |
994 |
|
994 | |||
995 | # target repo we must have read permissions, and also later on |
|
995 | # target repo we must have read permissions, and also later on | |
996 | # we want to check branch permissions here |
|
996 | # we want to check branch permissions here | |
997 | target_perm = HasRepoPermissionAny( |
|
997 | target_perm = HasRepoPermissionAny( | |
998 | 'repository.read', 'repository.write', 'repository.admin')( |
|
998 | 'repository.read', 'repository.write', 'repository.admin')( | |
999 | target_db_repo.repo_name) |
|
999 | target_db_repo.repo_name) | |
1000 | if not target_perm: |
|
1000 | if not target_perm: | |
1001 | msg = _('Not Enough permissions to target repo `{}`.'.format( |
|
1001 | msg = _('Not Enough permissions to target repo `{}`.'.format( | |
1002 | target_db_repo.repo_name)) |
|
1002 | target_db_repo.repo_name)) | |
1003 | h.flash(msg, category='error') |
|
1003 | h.flash(msg, category='error') | |
1004 | # copy the args back to redirect |
|
1004 | # copy the args back to redirect | |
1005 | org_query = self.request.GET.mixed() |
|
1005 | org_query = self.request.GET.mixed() | |
1006 | raise HTTPFound( |
|
1006 | raise HTTPFound( | |
1007 | h.route_path('pullrequest_new', repo_name=self.db_repo_name, |
|
1007 | h.route_path('pullrequest_new', repo_name=self.db_repo_name, | |
1008 | _query=org_query)) |
|
1008 | _query=org_query)) | |
1009 |
|
1009 | |||
1010 | source_scm = source_db_repo.scm_instance() |
|
1010 | source_scm = source_db_repo.scm_instance() | |
1011 | target_scm = target_db_repo.scm_instance() |
|
1011 | target_scm = target_db_repo.scm_instance() | |
1012 |
|
1012 | |||
1013 | source_commit = source_scm.get_commit(source_ref.split(':')[-1]) |
|
1013 | source_commit = source_scm.get_commit(source_ref.split(':')[-1]) | |
1014 | target_commit = target_scm.get_commit(target_ref.split(':')[-1]) |
|
1014 | target_commit = target_scm.get_commit(target_ref.split(':')[-1]) | |
1015 |
|
1015 | |||
1016 | ancestor = source_scm.get_common_ancestor( |
|
1016 | ancestor = source_scm.get_common_ancestor( | |
1017 | source_commit.raw_id, target_commit.raw_id, target_scm) |
|
1017 | source_commit.raw_id, target_commit.raw_id, target_scm) | |
1018 |
|
1018 | |||
1019 | # recalculate target ref based on ancestor |
|
1019 | # recalculate target ref based on ancestor | |
1020 | target_ref_type, target_ref_name, __ = _form['target_ref'].split(':') |
|
1020 | target_ref_type, target_ref_name, __ = _form['target_ref'].split(':') | |
1021 | target_ref = ':'.join((target_ref_type, target_ref_name, ancestor)) |
|
1021 | target_ref = ':'.join((target_ref_type, target_ref_name, ancestor)) | |
1022 |
|
1022 | |||
1023 | get_default_reviewers_data, validate_default_reviewers = \ |
|
1023 | get_default_reviewers_data, validate_default_reviewers = \ | |
1024 | PullRequestModel().get_reviewer_functions() |
|
1024 | PullRequestModel().get_reviewer_functions() | |
1025 |
|
1025 | |||
1026 | # recalculate reviewers logic, to make sure we can validate this |
|
1026 | # recalculate reviewers logic, to make sure we can validate this | |
1027 | reviewer_rules = get_default_reviewers_data( |
|
1027 | reviewer_rules = get_default_reviewers_data( | |
1028 | self._rhodecode_db_user, source_db_repo, |
|
1028 | self._rhodecode_db_user, source_db_repo, | |
1029 | source_commit, target_db_repo, target_commit) |
|
1029 | source_commit, target_db_repo, target_commit) | |
1030 |
|
1030 | |||
1031 | given_reviewers = _form['review_members'] |
|
1031 | given_reviewers = _form['review_members'] | |
1032 | reviewers = validate_default_reviewers( |
|
1032 | reviewers = validate_default_reviewers( | |
1033 | given_reviewers, reviewer_rules) |
|
1033 | given_reviewers, reviewer_rules) | |
1034 |
|
1034 | |||
1035 | pullrequest_title = _form['pullrequest_title'] |
|
1035 | pullrequest_title = _form['pullrequest_title'] | |
1036 | title_source_ref = source_ref.split(':', 2)[1] |
|
1036 | title_source_ref = source_ref.split(':', 2)[1] | |
1037 | if not pullrequest_title: |
|
1037 | if not pullrequest_title: | |
1038 | pullrequest_title = PullRequestModel().generate_pullrequest_title( |
|
1038 | pullrequest_title = PullRequestModel().generate_pullrequest_title( | |
1039 | source=source_repo, |
|
1039 | source=source_repo, | |
1040 | source_ref=title_source_ref, |
|
1040 | source_ref=title_source_ref, | |
1041 | target=target_repo |
|
1041 | target=target_repo | |
1042 | ) |
|
1042 | ) | |
1043 |
|
1043 | |||
1044 | description = _form['pullrequest_desc'] |
|
1044 | description = _form['pullrequest_desc'] | |
1045 | description_renderer = _form['description_renderer'] |
|
1045 | description_renderer = _form['description_renderer'] | |
1046 |
|
1046 | |||
1047 | try: |
|
1047 | try: | |
1048 | pull_request = PullRequestModel().create( |
|
1048 | pull_request = PullRequestModel().create( | |
1049 | created_by=self._rhodecode_user.user_id, |
|
1049 | created_by=self._rhodecode_user.user_id, | |
1050 | source_repo=source_repo, |
|
1050 | source_repo=source_repo, | |
1051 | source_ref=source_ref, |
|
1051 | source_ref=source_ref, | |
1052 | target_repo=target_repo, |
|
1052 | target_repo=target_repo, | |
1053 | target_ref=target_ref, |
|
1053 | target_ref=target_ref, | |
1054 | revisions=commit_ids, |
|
1054 | revisions=commit_ids, | |
1055 | common_ancestor_id=common_ancestor_id, |
|
1055 | common_ancestor_id=common_ancestor_id, | |
1056 | reviewers=reviewers, |
|
1056 | reviewers=reviewers, | |
1057 | title=pullrequest_title, |
|
1057 | title=pullrequest_title, | |
1058 | description=description, |
|
1058 | description=description, | |
1059 | description_renderer=description_renderer, |
|
1059 | description_renderer=description_renderer, | |
1060 | reviewer_data=reviewer_rules, |
|
1060 | reviewer_data=reviewer_rules, | |
1061 | auth_user=self._rhodecode_user |
|
1061 | auth_user=self._rhodecode_user | |
1062 | ) |
|
1062 | ) | |
1063 | Session().commit() |
|
1063 | Session().commit() | |
1064 |
|
1064 | |||
1065 | h.flash(_('Successfully opened new pull request'), |
|
1065 | h.flash(_('Successfully opened new pull request'), | |
1066 | category='success') |
|
1066 | category='success') | |
1067 | except Exception: |
|
1067 | except Exception: | |
1068 | msg = _('Error occurred during creation of this pull request.') |
|
1068 | msg = _('Error occurred during creation of this pull request.') | |
1069 | log.exception(msg) |
|
1069 | log.exception(msg) | |
1070 | h.flash(msg, category='error') |
|
1070 | h.flash(msg, category='error') | |
1071 |
|
1071 | |||
1072 | # copy the args back to redirect |
|
1072 | # copy the args back to redirect | |
1073 | org_query = self.request.GET.mixed() |
|
1073 | org_query = self.request.GET.mixed() | |
1074 | raise HTTPFound( |
|
1074 | raise HTTPFound( | |
1075 | h.route_path('pullrequest_new', repo_name=self.db_repo_name, |
|
1075 | h.route_path('pullrequest_new', repo_name=self.db_repo_name, | |
1076 | _query=org_query)) |
|
1076 | _query=org_query)) | |
1077 |
|
1077 | |||
1078 | raise HTTPFound( |
|
1078 | raise HTTPFound( | |
1079 | h.route_path('pullrequest_show', repo_name=target_repo, |
|
1079 | h.route_path('pullrequest_show', repo_name=target_repo, | |
1080 | pull_request_id=pull_request.pull_request_id)) |
|
1080 | pull_request_id=pull_request.pull_request_id)) | |
1081 |
|
1081 | |||
1082 | @LoginRequired() |
|
1082 | @LoginRequired() | |
1083 | @NotAnonymous() |
|
1083 | @NotAnonymous() | |
1084 | @HasRepoPermissionAnyDecorator( |
|
1084 | @HasRepoPermissionAnyDecorator( | |
1085 | 'repository.read', 'repository.write', 'repository.admin') |
|
1085 | 'repository.read', 'repository.write', 'repository.admin') | |
1086 | @CSRFRequired() |
|
1086 | @CSRFRequired() | |
1087 | @view_config( |
|
1087 | @view_config( | |
1088 | route_name='pullrequest_update', request_method='POST', |
|
1088 | route_name='pullrequest_update', request_method='POST', | |
1089 | renderer='json_ext') |
|
1089 | renderer='json_ext') | |
1090 | def pull_request_update(self): |
|
1090 | def pull_request_update(self): | |
1091 | pull_request = PullRequest.get_or_404( |
|
1091 | pull_request = PullRequest.get_or_404( | |
1092 | self.request.matchdict['pull_request_id']) |
|
1092 | self.request.matchdict['pull_request_id']) | |
1093 | _ = self.request.translate |
|
1093 | _ = self.request.translate | |
1094 |
|
1094 | |||
1095 | self.load_default_context() |
|
1095 | self.load_default_context() | |
1096 | redirect_url = None |
|
1096 | redirect_url = None | |
1097 |
|
1097 | |||
1098 | if pull_request.is_closed(): |
|
1098 | if pull_request.is_closed(): | |
1099 | log.debug('update: forbidden because pull request is closed') |
|
1099 | log.debug('update: forbidden because pull request is closed') | |
1100 | msg = _(u'Cannot update closed pull requests.') |
|
1100 | msg = _(u'Cannot update closed pull requests.') | |
1101 | h.flash(msg, category='error') |
|
1101 | h.flash(msg, category='error') | |
1102 | return {'response': True, |
|
1102 | return {'response': True, | |
1103 | 'redirect_url': redirect_url} |
|
1103 | 'redirect_url': redirect_url} | |
1104 |
|
1104 | |||
1105 | is_state_changing = pull_request.is_state_changing() |
|
1105 | is_state_changing = pull_request.is_state_changing() | |
1106 |
|
1106 | |||
1107 | # only owner or admin can update it |
|
1107 | # only owner or admin can update it | |
1108 | allowed_to_update = PullRequestModel().check_user_update( |
|
1108 | allowed_to_update = PullRequestModel().check_user_update( | |
1109 | pull_request, self._rhodecode_user) |
|
1109 | pull_request, self._rhodecode_user) | |
1110 | if allowed_to_update: |
|
1110 | if allowed_to_update: | |
1111 | controls = peppercorn.parse(self.request.POST.items()) |
|
1111 | controls = peppercorn.parse(self.request.POST.items()) | |
1112 | force_refresh = str2bool(self.request.POST.get('force_refresh')) |
|
1112 | force_refresh = str2bool(self.request.POST.get('force_refresh')) | |
1113 |
|
1113 | |||
1114 | if 'review_members' in controls: |
|
1114 | if 'review_members' in controls: | |
1115 | self._update_reviewers( |
|
1115 | self._update_reviewers( | |
1116 | pull_request, controls['review_members'], |
|
1116 | pull_request, controls['review_members'], | |
1117 | pull_request.reviewer_data) |
|
1117 | pull_request.reviewer_data) | |
1118 | elif str2bool(self.request.POST.get('update_commits', 'false')): |
|
1118 | elif str2bool(self.request.POST.get('update_commits', 'false')): | |
1119 | if is_state_changing: |
|
1119 | if is_state_changing: | |
1120 | log.debug('commits update: forbidden because pull request is in state %s', |
|
1120 | log.debug('commits update: forbidden because pull request is in state %s', | |
1121 | pull_request.pull_request_state) |
|
1121 | pull_request.pull_request_state) | |
1122 | msg = _(u'Cannot update pull requests commits in state other than `{}`. ' |
|
1122 | msg = _(u'Cannot update pull requests commits in state other than `{}`. ' | |
1123 | u'Current state is: `{}`').format( |
|
1123 | u'Current state is: `{}`').format( | |
1124 | PullRequest.STATE_CREATED, pull_request.pull_request_state) |
|
1124 | PullRequest.STATE_CREATED, pull_request.pull_request_state) | |
1125 | h.flash(msg, category='error') |
|
1125 | h.flash(msg, category='error') | |
1126 | return {'response': True, |
|
1126 | return {'response': True, | |
1127 | 'redirect_url': redirect_url} |
|
1127 | 'redirect_url': redirect_url} | |
1128 |
|
1128 | |||
1129 | self._update_commits(pull_request) |
|
1129 | self._update_commits(pull_request) | |
1130 | if force_refresh: |
|
1130 | if force_refresh: | |
1131 | redirect_url = h.route_path( |
|
1131 | redirect_url = h.route_path( | |
1132 | 'pullrequest_show', repo_name=self.db_repo_name, |
|
1132 | 'pullrequest_show', repo_name=self.db_repo_name, | |
1133 | pull_request_id=pull_request.pull_request_id, |
|
1133 | pull_request_id=pull_request.pull_request_id, | |
1134 | _query={"force_refresh": 1}) |
|
1134 | _query={"force_refresh": 1}) | |
1135 | elif str2bool(self.request.POST.get('edit_pull_request', 'false')): |
|
1135 | elif str2bool(self.request.POST.get('edit_pull_request', 'false')): | |
1136 | self._edit_pull_request(pull_request) |
|
1136 | self._edit_pull_request(pull_request) | |
1137 | else: |
|
1137 | else: | |
1138 | raise HTTPBadRequest() |
|
1138 | raise HTTPBadRequest() | |
1139 |
|
1139 | |||
1140 | return {'response': True, |
|
1140 | return {'response': True, | |
1141 | 'redirect_url': redirect_url} |
|
1141 | 'redirect_url': redirect_url} | |
1142 | raise HTTPForbidden() |
|
1142 | raise HTTPForbidden() | |
1143 |
|
1143 | |||
1144 | def _edit_pull_request(self, pull_request): |
|
1144 | def _edit_pull_request(self, pull_request): | |
1145 | _ = self.request.translate |
|
1145 | _ = self.request.translate | |
1146 |
|
1146 | |||
1147 | try: |
|
1147 | try: | |
1148 | PullRequestModel().edit( |
|
1148 | PullRequestModel().edit( | |
1149 | pull_request, |
|
1149 | pull_request, | |
1150 | self.request.POST.get('title'), |
|
1150 | self.request.POST.get('title'), | |
1151 | self.request.POST.get('description'), |
|
1151 | self.request.POST.get('description'), | |
1152 | self.request.POST.get('description_renderer'), |
|
1152 | self.request.POST.get('description_renderer'), | |
1153 | self._rhodecode_user) |
|
1153 | self._rhodecode_user) | |
1154 | except ValueError: |
|
1154 | except ValueError: | |
1155 | msg = _(u'Cannot update closed pull requests.') |
|
1155 | msg = _(u'Cannot update closed pull requests.') | |
1156 | h.flash(msg, category='error') |
|
1156 | h.flash(msg, category='error') | |
1157 | return |
|
1157 | return | |
1158 | else: |
|
1158 | else: | |
1159 | Session().commit() |
|
1159 | Session().commit() | |
1160 |
|
1160 | |||
1161 | msg = _(u'Pull request title & description updated.') |
|
1161 | msg = _(u'Pull request title & description updated.') | |
1162 | h.flash(msg, category='success') |
|
1162 | h.flash(msg, category='success') | |
1163 | return |
|
1163 | return | |
1164 |
|
1164 | |||
1165 | def _update_commits(self, pull_request): |
|
1165 | def _update_commits(self, pull_request): | |
1166 | _ = self.request.translate |
|
1166 | _ = self.request.translate | |
1167 |
|
1167 | |||
1168 | with pull_request.set_state(PullRequest.STATE_UPDATING): |
|
1168 | with pull_request.set_state(PullRequest.STATE_UPDATING): | |
1169 | resp = PullRequestModel().update_commits( |
|
1169 | resp = PullRequestModel().update_commits( | |
1170 | pull_request, self._rhodecode_db_user) |
|
1170 | pull_request, self._rhodecode_db_user) | |
1171 |
|
1171 | |||
1172 | if resp.executed: |
|
1172 | if resp.executed: | |
1173 |
|
1173 | |||
1174 | if resp.target_changed and resp.source_changed: |
|
1174 | if resp.target_changed and resp.source_changed: | |
1175 | changed = 'target and source repositories' |
|
1175 | changed = 'target and source repositories' | |
1176 | elif resp.target_changed and not resp.source_changed: |
|
1176 | elif resp.target_changed and not resp.source_changed: | |
1177 | changed = 'target repository' |
|
1177 | changed = 'target repository' | |
1178 | elif not resp.target_changed and resp.source_changed: |
|
1178 | elif not resp.target_changed and resp.source_changed: | |
1179 | changed = 'source repository' |
|
1179 | changed = 'source repository' | |
1180 | else: |
|
1180 | else: | |
1181 | changed = 'nothing' |
|
1181 | changed = 'nothing' | |
1182 |
|
1182 | |||
1183 | msg = _(u'Pull request updated to "{source_commit_id}" with ' |
|
1183 | msg = _(u'Pull request updated to "{source_commit_id}" with ' | |
1184 | u'{count_added} added, {count_removed} removed commits. ' |
|
1184 | u'{count_added} added, {count_removed} removed commits. ' | |
1185 | u'Source of changes: {change_source}') |
|
1185 | u'Source of changes: {change_source}') | |
1186 | msg = msg.format( |
|
1186 | msg = msg.format( | |
1187 | source_commit_id=pull_request.source_ref_parts.commit_id, |
|
1187 | source_commit_id=pull_request.source_ref_parts.commit_id, | |
1188 | count_added=len(resp.changes.added), |
|
1188 | count_added=len(resp.changes.added), | |
1189 | count_removed=len(resp.changes.removed), |
|
1189 | count_removed=len(resp.changes.removed), | |
1190 | change_source=changed) |
|
1190 | change_source=changed) | |
1191 | h.flash(msg, category='success') |
|
1191 | h.flash(msg, category='success') | |
1192 |
|
1192 | |||
1193 | channel = '/repo${}$/pr/{}'.format( |
|
1193 | channel = '/repo${}$/pr/{}'.format( | |
1194 | pull_request.target_repo.repo_name, pull_request.pull_request_id) |
|
1194 | pull_request.target_repo.repo_name, pull_request.pull_request_id) | |
1195 | message = msg + ( |
|
1195 | message = msg + ( | |
1196 | ' - <a onclick="window.location.reload()">' |
|
1196 | ' - <a onclick="window.location.reload()">' | |
1197 | '<strong>{}</strong></a>'.format(_('Reload page'))) |
|
1197 | '<strong>{}</strong></a>'.format(_('Reload page'))) | |
1198 | channelstream.post_message( |
|
1198 | channelstream.post_message( | |
1199 | channel, message, self._rhodecode_user.username, |
|
1199 | channel, message, self._rhodecode_user.username, | |
1200 | registry=self.request.registry) |
|
1200 | registry=self.request.registry) | |
1201 | else: |
|
1201 | else: | |
1202 | msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason] |
|
1202 | msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason] | |
1203 | warning_reasons = [ |
|
1203 | warning_reasons = [ | |
1204 | UpdateFailureReason.NO_CHANGE, |
|
1204 | UpdateFailureReason.NO_CHANGE, | |
1205 | UpdateFailureReason.WRONG_REF_TYPE, |
|
1205 | UpdateFailureReason.WRONG_REF_TYPE, | |
1206 | ] |
|
1206 | ] | |
1207 | category = 'warning' if resp.reason in warning_reasons else 'error' |
|
1207 | category = 'warning' if resp.reason in warning_reasons else 'error' | |
1208 | h.flash(msg, category=category) |
|
1208 | h.flash(msg, category=category) | |
1209 |
|
1209 | |||
1210 | @LoginRequired() |
|
1210 | @LoginRequired() | |
1211 | @NotAnonymous() |
|
1211 | @NotAnonymous() | |
1212 | @HasRepoPermissionAnyDecorator( |
|
1212 | @HasRepoPermissionAnyDecorator( | |
1213 | 'repository.read', 'repository.write', 'repository.admin') |
|
1213 | 'repository.read', 'repository.write', 'repository.admin') | |
1214 | @CSRFRequired() |
|
1214 | @CSRFRequired() | |
1215 | @view_config( |
|
1215 | @view_config( | |
1216 | route_name='pullrequest_merge', request_method='POST', |
|
1216 | route_name='pullrequest_merge', request_method='POST', | |
1217 | renderer='json_ext') |
|
1217 | renderer='json_ext') | |
1218 | def pull_request_merge(self): |
|
1218 | def pull_request_merge(self): | |
1219 | """ |
|
1219 | """ | |
1220 | Merge will perform a server-side merge of the specified |
|
1220 | Merge will perform a server-side merge of the specified | |
1221 | pull request, if the pull request is approved and mergeable. |
|
1221 | pull request, if the pull request is approved and mergeable. | |
1222 | After successful merging, the pull request is automatically |
|
1222 | After successful merging, the pull request is automatically | |
1223 | closed, with a relevant comment. |
|
1223 | closed, with a relevant comment. | |
1224 | """ |
|
1224 | """ | |
1225 | pull_request = PullRequest.get_or_404( |
|
1225 | pull_request = PullRequest.get_or_404( | |
1226 | self.request.matchdict['pull_request_id']) |
|
1226 | self.request.matchdict['pull_request_id']) | |
1227 | _ = self.request.translate |
|
1227 | _ = self.request.translate | |
1228 |
|
1228 | |||
1229 | if pull_request.is_state_changing(): |
|
1229 | if pull_request.is_state_changing(): | |
1230 | log.debug('show: forbidden because pull request is in state %s', |
|
1230 | log.debug('show: forbidden because pull request is in state %s', | |
1231 | pull_request.pull_request_state) |
|
1231 | pull_request.pull_request_state) | |
1232 | msg = _(u'Cannot merge pull requests in state other than `{}`. ' |
|
1232 | msg = _(u'Cannot merge pull requests in state other than `{}`. ' | |
1233 | u'Current state is: `{}`').format(PullRequest.STATE_CREATED, |
|
1233 | u'Current state is: `{}`').format(PullRequest.STATE_CREATED, | |
1234 | pull_request.pull_request_state) |
|
1234 | pull_request.pull_request_state) | |
1235 | h.flash(msg, category='error') |
|
1235 | h.flash(msg, category='error') | |
1236 | raise HTTPFound( |
|
1236 | raise HTTPFound( | |
1237 | h.route_path('pullrequest_show', |
|
1237 | h.route_path('pullrequest_show', | |
1238 | repo_name=pull_request.target_repo.repo_name, |
|
1238 | repo_name=pull_request.target_repo.repo_name, | |
1239 | pull_request_id=pull_request.pull_request_id)) |
|
1239 | pull_request_id=pull_request.pull_request_id)) | |
1240 |
|
1240 | |||
1241 | self.load_default_context() |
|
1241 | self.load_default_context() | |
1242 |
|
1242 | |||
1243 | with pull_request.set_state(PullRequest.STATE_UPDATING): |
|
1243 | with pull_request.set_state(PullRequest.STATE_UPDATING): | |
1244 | check = MergeCheck.validate( |
|
1244 | check = MergeCheck.validate( | |
1245 | pull_request, auth_user=self._rhodecode_user, |
|
1245 | pull_request, auth_user=self._rhodecode_user, | |
1246 | translator=self.request.translate) |
|
1246 | translator=self.request.translate) | |
1247 | merge_possible = not check.failed |
|
1247 | merge_possible = not check.failed | |
1248 |
|
1248 | |||
1249 | for err_type, error_msg in check.errors: |
|
1249 | for err_type, error_msg in check.errors: | |
1250 | h.flash(error_msg, category=err_type) |
|
1250 | h.flash(error_msg, category=err_type) | |
1251 |
|
1251 | |||
1252 | if merge_possible: |
|
1252 | if merge_possible: | |
1253 | log.debug("Pre-conditions checked, trying to merge.") |
|
1253 | log.debug("Pre-conditions checked, trying to merge.") | |
1254 | extras = vcs_operation_context( |
|
1254 | extras = vcs_operation_context( | |
1255 | self.request.environ, repo_name=pull_request.target_repo.repo_name, |
|
1255 | self.request.environ, repo_name=pull_request.target_repo.repo_name, | |
1256 | username=self._rhodecode_db_user.username, action='push', |
|
1256 | username=self._rhodecode_db_user.username, action='push', | |
1257 | scm=pull_request.target_repo.repo_type) |
|
1257 | scm=pull_request.target_repo.repo_type) | |
1258 | with pull_request.set_state(PullRequest.STATE_UPDATING): |
|
1258 | with pull_request.set_state(PullRequest.STATE_UPDATING): | |
1259 | self._merge_pull_request( |
|
1259 | self._merge_pull_request( | |
1260 | pull_request, self._rhodecode_db_user, extras) |
|
1260 | pull_request, self._rhodecode_db_user, extras) | |
1261 | else: |
|
1261 | else: | |
1262 | log.debug("Pre-conditions failed, NOT merging.") |
|
1262 | log.debug("Pre-conditions failed, NOT merging.") | |
1263 |
|
1263 | |||
1264 | raise HTTPFound( |
|
1264 | raise HTTPFound( | |
1265 | h.route_path('pullrequest_show', |
|
1265 | h.route_path('pullrequest_show', | |
1266 | repo_name=pull_request.target_repo.repo_name, |
|
1266 | repo_name=pull_request.target_repo.repo_name, | |
1267 | pull_request_id=pull_request.pull_request_id)) |
|
1267 | pull_request_id=pull_request.pull_request_id)) | |
1268 |
|
1268 | |||
1269 | def _merge_pull_request(self, pull_request, user, extras): |
|
1269 | def _merge_pull_request(self, pull_request, user, extras): | |
1270 | _ = self.request.translate |
|
1270 | _ = self.request.translate | |
1271 | merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras) |
|
1271 | merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras) | |
1272 |
|
1272 | |||
1273 | if merge_resp.executed: |
|
1273 | if merge_resp.executed: | |
1274 | log.debug("The merge was successful, closing the pull request.") |
|
1274 | log.debug("The merge was successful, closing the pull request.") | |
1275 | PullRequestModel().close_pull_request( |
|
1275 | PullRequestModel().close_pull_request( | |
1276 | pull_request.pull_request_id, user) |
|
1276 | pull_request.pull_request_id, user) | |
1277 | Session().commit() |
|
1277 | Session().commit() | |
1278 | msg = _('Pull request was successfully merged and closed.') |
|
1278 | msg = _('Pull request was successfully merged and closed.') | |
1279 | h.flash(msg, category='success') |
|
1279 | h.flash(msg, category='success') | |
1280 | else: |
|
1280 | else: | |
1281 | log.debug( |
|
1281 | log.debug( | |
1282 | "The merge was not successful. Merge response: %s", merge_resp) |
|
1282 | "The merge was not successful. Merge response: %s", merge_resp) | |
1283 | msg = merge_resp.merge_status_message |
|
1283 | msg = merge_resp.merge_status_message | |
1284 | h.flash(msg, category='error') |
|
1284 | h.flash(msg, category='error') | |
1285 |
|
1285 | |||
1286 | def _update_reviewers(self, pull_request, review_members, reviewer_rules): |
|
1286 | def _update_reviewers(self, pull_request, review_members, reviewer_rules): | |
1287 | _ = self.request.translate |
|
1287 | _ = self.request.translate | |
1288 |
|
1288 | |||
1289 | get_default_reviewers_data, validate_default_reviewers = \ |
|
1289 | get_default_reviewers_data, validate_default_reviewers = \ | |
1290 | PullRequestModel().get_reviewer_functions() |
|
1290 | PullRequestModel().get_reviewer_functions() | |
1291 |
|
1291 | |||
1292 | try: |
|
1292 | try: | |
1293 | reviewers = validate_default_reviewers(review_members, reviewer_rules) |
|
1293 | reviewers = validate_default_reviewers(review_members, reviewer_rules) | |
1294 | except ValueError as e: |
|
1294 | except ValueError as e: | |
1295 | log.error('Reviewers Validation: {}'.format(e)) |
|
1295 | log.error('Reviewers Validation: {}'.format(e)) | |
1296 | h.flash(e, category='error') |
|
1296 | h.flash(e, category='error') | |
1297 | return |
|
1297 | return | |
1298 |
|
1298 | |||
1299 | old_calculated_status = pull_request.calculated_review_status() |
|
1299 | old_calculated_status = pull_request.calculated_review_status() | |
1300 | PullRequestModel().update_reviewers( |
|
1300 | PullRequestModel().update_reviewers( | |
1301 | pull_request, reviewers, self._rhodecode_user) |
|
1301 | pull_request, reviewers, self._rhodecode_user) | |
1302 | h.flash(_('Pull request reviewers updated.'), category='success') |
|
1302 | h.flash(_('Pull request reviewers updated.'), category='success') | |
1303 | Session().commit() |
|
1303 | Session().commit() | |
1304 |
|
1304 | |||
1305 | # trigger status changed if change in reviewers changes the status |
|
1305 | # trigger status changed if change in reviewers changes the status | |
1306 | calculated_status = pull_request.calculated_review_status() |
|
1306 | calculated_status = pull_request.calculated_review_status() | |
1307 | if old_calculated_status != calculated_status: |
|
1307 | if old_calculated_status != calculated_status: | |
1308 | PullRequestModel().trigger_pull_request_hook( |
|
1308 | PullRequestModel().trigger_pull_request_hook( | |
1309 | pull_request, self._rhodecode_user, 'review_status_change', |
|
1309 | pull_request, self._rhodecode_user, 'review_status_change', | |
1310 | data={'status': calculated_status}) |
|
1310 | data={'status': calculated_status}) | |
1311 |
|
1311 | |||
1312 | @LoginRequired() |
|
1312 | @LoginRequired() | |
1313 | @NotAnonymous() |
|
1313 | @NotAnonymous() | |
1314 | @HasRepoPermissionAnyDecorator( |
|
1314 | @HasRepoPermissionAnyDecorator( | |
1315 | 'repository.read', 'repository.write', 'repository.admin') |
|
1315 | 'repository.read', 'repository.write', 'repository.admin') | |
1316 | @CSRFRequired() |
|
1316 | @CSRFRequired() | |
1317 | @view_config( |
|
1317 | @view_config( | |
1318 | route_name='pullrequest_delete', request_method='POST', |
|
1318 | route_name='pullrequest_delete', request_method='POST', | |
1319 | renderer='json_ext') |
|
1319 | renderer='json_ext') | |
1320 | def pull_request_delete(self): |
|
1320 | def pull_request_delete(self): | |
1321 | _ = self.request.translate |
|
1321 | _ = self.request.translate | |
1322 |
|
1322 | |||
1323 | pull_request = PullRequest.get_or_404( |
|
1323 | pull_request = PullRequest.get_or_404( | |
1324 | self.request.matchdict['pull_request_id']) |
|
1324 | self.request.matchdict['pull_request_id']) | |
1325 | self.load_default_context() |
|
1325 | self.load_default_context() | |
1326 |
|
1326 | |||
1327 | pr_closed = pull_request.is_closed() |
|
1327 | pr_closed = pull_request.is_closed() | |
1328 | allowed_to_delete = PullRequestModel().check_user_delete( |
|
1328 | allowed_to_delete = PullRequestModel().check_user_delete( | |
1329 | pull_request, self._rhodecode_user) and not pr_closed |
|
1329 | pull_request, self._rhodecode_user) and not pr_closed | |
1330 |
|
1330 | |||
1331 | # only owner can delete it ! |
|
1331 | # only owner can delete it ! | |
1332 | if allowed_to_delete: |
|
1332 | if allowed_to_delete: | |
1333 | PullRequestModel().delete(pull_request, self._rhodecode_user) |
|
1333 | PullRequestModel().delete(pull_request, self._rhodecode_user) | |
1334 | Session().commit() |
|
1334 | Session().commit() | |
1335 | h.flash(_('Successfully deleted pull request'), |
|
1335 | h.flash(_('Successfully deleted pull request'), | |
1336 | category='success') |
|
1336 | category='success') | |
1337 | raise HTTPFound(h.route_path('pullrequest_show_all', |
|
1337 | raise HTTPFound(h.route_path('pullrequest_show_all', | |
1338 | repo_name=self.db_repo_name)) |
|
1338 | repo_name=self.db_repo_name)) | |
1339 |
|
1339 | |||
1340 | log.warning('user %s tried to delete pull request without access', |
|
1340 | log.warning('user %s tried to delete pull request without access', | |
1341 | self._rhodecode_user) |
|
1341 | self._rhodecode_user) | |
1342 | raise HTTPNotFound() |
|
1342 | raise HTTPNotFound() | |
1343 |
|
1343 | |||
1344 | @LoginRequired() |
|
1344 | @LoginRequired() | |
1345 | @NotAnonymous() |
|
1345 | @NotAnonymous() | |
1346 | @HasRepoPermissionAnyDecorator( |
|
1346 | @HasRepoPermissionAnyDecorator( | |
1347 | 'repository.read', 'repository.write', 'repository.admin') |
|
1347 | 'repository.read', 'repository.write', 'repository.admin') | |
1348 | @CSRFRequired() |
|
1348 | @CSRFRequired() | |
1349 | @view_config( |
|
1349 | @view_config( | |
1350 | route_name='pullrequest_comment_create', request_method='POST', |
|
1350 | route_name='pullrequest_comment_create', request_method='POST', | |
1351 | renderer='json_ext') |
|
1351 | renderer='json_ext') | |
1352 | def pull_request_comment_create(self): |
|
1352 | def pull_request_comment_create(self): | |
1353 | _ = self.request.translate |
|
1353 | _ = self.request.translate | |
1354 |
|
1354 | |||
1355 | pull_request = PullRequest.get_or_404( |
|
1355 | pull_request = PullRequest.get_or_404( | |
1356 | self.request.matchdict['pull_request_id']) |
|
1356 | self.request.matchdict['pull_request_id']) | |
1357 | pull_request_id = pull_request.pull_request_id |
|
1357 | pull_request_id = pull_request.pull_request_id | |
1358 |
|
1358 | |||
1359 | if pull_request.is_closed(): |
|
1359 | if pull_request.is_closed(): | |
1360 | log.debug('comment: forbidden because pull request is closed') |
|
1360 | log.debug('comment: forbidden because pull request is closed') | |
1361 | raise HTTPForbidden() |
|
1361 | raise HTTPForbidden() | |
1362 |
|
1362 | |||
1363 | allowed_to_comment = PullRequestModel().check_user_comment( |
|
1363 | allowed_to_comment = PullRequestModel().check_user_comment( | |
1364 | pull_request, self._rhodecode_user) |
|
1364 | pull_request, self._rhodecode_user) | |
1365 | if not allowed_to_comment: |
|
1365 | if not allowed_to_comment: | |
1366 | log.debug( |
|
1366 | log.debug( | |
1367 | 'comment: forbidden because pull request is from forbidden repo') |
|
1367 | 'comment: forbidden because pull request is from forbidden repo') | |
1368 | raise HTTPForbidden() |
|
1368 | raise HTTPForbidden() | |
1369 |
|
1369 | |||
1370 | c = self.load_default_context() |
|
1370 | c = self.load_default_context() | |
1371 |
|
1371 | |||
1372 | status = self.request.POST.get('changeset_status', None) |
|
1372 | status = self.request.POST.get('changeset_status', None) | |
1373 | text = self.request.POST.get('text') |
|
1373 | text = self.request.POST.get('text') | |
1374 | comment_type = self.request.POST.get('comment_type') |
|
1374 | comment_type = self.request.POST.get('comment_type') | |
1375 | resolves_comment_id = self.request.POST.get('resolves_comment_id', None) |
|
1375 | resolves_comment_id = self.request.POST.get('resolves_comment_id', None) | |
1376 | close_pull_request = self.request.POST.get('close_pull_request') |
|
1376 | close_pull_request = self.request.POST.get('close_pull_request') | |
1377 |
|
1377 | |||
1378 | # the logic here should work like following, if we submit close |
|
1378 | # the logic here should work like following, if we submit close | |
1379 | # pr comment, use `close_pull_request_with_comment` function |
|
1379 | # pr comment, use `close_pull_request_with_comment` function | |
1380 | # else handle regular comment logic |
|
1380 | # else handle regular comment logic | |
1381 |
|
1381 | |||
1382 | if close_pull_request: |
|
1382 | if close_pull_request: | |
1383 | # only owner or admin or person with write permissions |
|
1383 | # only owner or admin or person with write permissions | |
1384 | allowed_to_close = PullRequestModel().check_user_update( |
|
1384 | allowed_to_close = PullRequestModel().check_user_update( | |
1385 | pull_request, self._rhodecode_user) |
|
1385 | pull_request, self._rhodecode_user) | |
1386 | if not allowed_to_close: |
|
1386 | if not allowed_to_close: | |
1387 | log.debug('comment: forbidden because not allowed to close ' |
|
1387 | log.debug('comment: forbidden because not allowed to close ' | |
1388 | 'pull request %s', pull_request_id) |
|
1388 | 'pull request %s', pull_request_id) | |
1389 | raise HTTPForbidden() |
|
1389 | raise HTTPForbidden() | |
1390 |
|
1390 | |||
1391 | # This also triggers `review_status_change` |
|
1391 | # This also triggers `review_status_change` | |
1392 | comment, status = PullRequestModel().close_pull_request_with_comment( |
|
1392 | comment, status = PullRequestModel().close_pull_request_with_comment( | |
1393 | pull_request, self._rhodecode_user, self.db_repo, message=text, |
|
1393 | pull_request, self._rhodecode_user, self.db_repo, message=text, | |
1394 | auth_user=self._rhodecode_user) |
|
1394 | auth_user=self._rhodecode_user) | |
1395 | Session().flush() |
|
1395 | Session().flush() | |
1396 |
|
1396 | |||
1397 | PullRequestModel().trigger_pull_request_hook( |
|
1397 | PullRequestModel().trigger_pull_request_hook( | |
1398 | pull_request, self._rhodecode_user, 'comment', |
|
1398 | pull_request, self._rhodecode_user, 'comment', | |
1399 | data={'comment': comment}) |
|
1399 | data={'comment': comment}) | |
1400 |
|
1400 | |||
1401 | else: |
|
1401 | else: | |
1402 | # regular comment case, could be inline, or one with status. |
|
1402 | # regular comment case, could be inline, or one with status. | |
1403 | # for that one we check also permissions |
|
1403 | # for that one we check also permissions | |
1404 |
|
1404 | |||
1405 | allowed_to_change_status = PullRequestModel().check_user_change_status( |
|
1405 | allowed_to_change_status = PullRequestModel().check_user_change_status( | |
1406 | pull_request, self._rhodecode_user) |
|
1406 | pull_request, self._rhodecode_user) | |
1407 |
|
1407 | |||
1408 | if status and allowed_to_change_status: |
|
1408 | if status and allowed_to_change_status: | |
1409 | message = (_('Status change %(transition_icon)s %(status)s') |
|
1409 | message = (_('Status change %(transition_icon)s %(status)s') | |
1410 | % {'transition_icon': '>', |
|
1410 | % {'transition_icon': '>', | |
1411 | 'status': ChangesetStatus.get_status_lbl(status)}) |
|
1411 | 'status': ChangesetStatus.get_status_lbl(status)}) | |
1412 | text = text or message |
|
1412 | text = text or message | |
1413 |
|
1413 | |||
1414 | comment = CommentsModel().create( |
|
1414 | comment = CommentsModel().create( | |
1415 | text=text, |
|
1415 | text=text, | |
1416 | repo=self.db_repo.repo_id, |
|
1416 | repo=self.db_repo.repo_id, | |
1417 | user=self._rhodecode_user.user_id, |
|
1417 | user=self._rhodecode_user.user_id, | |
1418 | pull_request=pull_request, |
|
1418 | pull_request=pull_request, | |
1419 | f_path=self.request.POST.get('f_path'), |
|
1419 | f_path=self.request.POST.get('f_path'), | |
1420 | line_no=self.request.POST.get('line'), |
|
1420 | line_no=self.request.POST.get('line'), | |
1421 | status_change=(ChangesetStatus.get_status_lbl(status) |
|
1421 | status_change=(ChangesetStatus.get_status_lbl(status) | |
1422 | if status and allowed_to_change_status else None), |
|
1422 | if status and allowed_to_change_status else None), | |
1423 | status_change_type=(status |
|
1423 | status_change_type=(status | |
1424 | if status and allowed_to_change_status else None), |
|
1424 | if status and allowed_to_change_status else None), | |
1425 | comment_type=comment_type, |
|
1425 | comment_type=comment_type, | |
1426 | resolves_comment_id=resolves_comment_id, |
|
1426 | resolves_comment_id=resolves_comment_id, | |
1427 | auth_user=self._rhodecode_user |
|
1427 | auth_user=self._rhodecode_user | |
1428 | ) |
|
1428 | ) | |
1429 |
|
1429 | |||
1430 | if allowed_to_change_status: |
|
1430 | if allowed_to_change_status: | |
1431 | # calculate old status before we change it |
|
1431 | # calculate old status before we change it | |
1432 | old_calculated_status = pull_request.calculated_review_status() |
|
1432 | old_calculated_status = pull_request.calculated_review_status() | |
1433 |
|
1433 | |||
1434 | # get status if set ! |
|
1434 | # get status if set ! | |
1435 | if status: |
|
1435 | if status: | |
1436 | ChangesetStatusModel().set_status( |
|
1436 | ChangesetStatusModel().set_status( | |
1437 | self.db_repo.repo_id, |
|
1437 | self.db_repo.repo_id, | |
1438 | status, |
|
1438 | status, | |
1439 | self._rhodecode_user.user_id, |
|
1439 | self._rhodecode_user.user_id, | |
1440 | comment, |
|
1440 | comment, | |
1441 | pull_request=pull_request |
|
1441 | pull_request=pull_request | |
1442 | ) |
|
1442 | ) | |
1443 |
|
1443 | |||
1444 | Session().flush() |
|
1444 | Session().flush() | |
1445 | # this is somehow required to get access to some relationship |
|
1445 | # this is somehow required to get access to some relationship | |
1446 | # loaded on comment |
|
1446 | # loaded on comment | |
1447 | Session().refresh(comment) |
|
1447 | Session().refresh(comment) | |
1448 |
|
1448 | |||
1449 | PullRequestModel().trigger_pull_request_hook( |
|
1449 | PullRequestModel().trigger_pull_request_hook( | |
1450 | pull_request, self._rhodecode_user, 'comment', |
|
1450 | pull_request, self._rhodecode_user, 'comment', | |
1451 | data={'comment': comment}) |
|
1451 | data={'comment': comment}) | |
1452 |
|
1452 | |||
1453 | # we now calculate the status of pull request, and based on that |
|
1453 | # we now calculate the status of pull request, and based on that | |
1454 | # calculation we set the commits status |
|
1454 | # calculation we set the commits status | |
1455 | calculated_status = pull_request.calculated_review_status() |
|
1455 | calculated_status = pull_request.calculated_review_status() | |
1456 | if old_calculated_status != calculated_status: |
|
1456 | if old_calculated_status != calculated_status: | |
1457 | PullRequestModel().trigger_pull_request_hook( |
|
1457 | PullRequestModel().trigger_pull_request_hook( | |
1458 | pull_request, self._rhodecode_user, 'review_status_change', |
|
1458 | pull_request, self._rhodecode_user, 'review_status_change', | |
1459 | data={'status': calculated_status}) |
|
1459 | data={'status': calculated_status}) | |
1460 |
|
1460 | |||
1461 | Session().commit() |
|
1461 | Session().commit() | |
1462 |
|
1462 | |||
1463 | data = { |
|
1463 | data = { | |
1464 | 'target_id': h.safeid(h.safe_unicode( |
|
1464 | 'target_id': h.safeid(h.safe_unicode( | |
1465 | self.request.POST.get('f_path'))), |
|
1465 | self.request.POST.get('f_path'))), | |
1466 | } |
|
1466 | } | |
1467 | if comment: |
|
1467 | if comment: | |
1468 | c.co = comment |
|
1468 | c.co = comment | |
1469 | rendered_comment = render( |
|
1469 | rendered_comment = render( | |
1470 | 'rhodecode:templates/changeset/changeset_comment_block.mako', |
|
1470 | 'rhodecode:templates/changeset/changeset_comment_block.mako', | |
1471 | self._get_template_context(c), self.request) |
|
1471 | self._get_template_context(c), self.request) | |
1472 |
|
1472 | |||
1473 | data.update(comment.get_dict()) |
|
1473 | data.update(comment.get_dict()) | |
1474 | data.update({'rendered_text': rendered_comment}) |
|
1474 | data.update({'rendered_text': rendered_comment}) | |
1475 |
|
1475 | |||
1476 | return data |
|
1476 | return data | |
1477 |
|
1477 | |||
1478 | @LoginRequired() |
|
1478 | @LoginRequired() | |
1479 | @NotAnonymous() |
|
1479 | @NotAnonymous() | |
1480 | @HasRepoPermissionAnyDecorator( |
|
1480 | @HasRepoPermissionAnyDecorator( | |
1481 | 'repository.read', 'repository.write', 'repository.admin') |
|
1481 | 'repository.read', 'repository.write', 'repository.admin') | |
1482 | @CSRFRequired() |
|
1482 | @CSRFRequired() | |
1483 | @view_config( |
|
1483 | @view_config( | |
1484 | route_name='pullrequest_comment_delete', request_method='POST', |
|
1484 | route_name='pullrequest_comment_delete', request_method='POST', | |
1485 | renderer='json_ext') |
|
1485 | renderer='json_ext') | |
1486 | def pull_request_comment_delete(self): |
|
1486 | def pull_request_comment_delete(self): | |
1487 | pull_request = PullRequest.get_or_404( |
|
1487 | pull_request = PullRequest.get_or_404( | |
1488 | self.request.matchdict['pull_request_id']) |
|
1488 | self.request.matchdict['pull_request_id']) | |
1489 |
|
1489 | |||
1490 | comment = ChangesetComment.get_or_404( |
|
1490 | comment = ChangesetComment.get_or_404( | |
1491 | self.request.matchdict['comment_id']) |
|
1491 | self.request.matchdict['comment_id']) | |
1492 | comment_id = comment.comment_id |
|
1492 | comment_id = comment.comment_id | |
1493 |
|
1493 | |||
1494 | if comment.immutable: |
|
1494 | if comment.immutable: | |
1495 | # don't allow deleting comments that are immutable |
|
1495 | # don't allow deleting comments that are immutable | |
1496 | raise HTTPForbidden() |
|
1496 | raise HTTPForbidden() | |
1497 |
|
1497 | |||
1498 | if pull_request.is_closed(): |
|
1498 | if pull_request.is_closed(): | |
1499 | log.debug('comment: forbidden because pull request is closed') |
|
1499 | log.debug('comment: forbidden because pull request is closed') | |
1500 | raise HTTPForbidden() |
|
1500 | raise HTTPForbidden() | |
1501 |
|
1501 | |||
1502 | if not comment: |
|
1502 | if not comment: | |
1503 | log.debug('Comment with id:%s not found, skipping', comment_id) |
|
1503 | log.debug('Comment with id:%s not found, skipping', comment_id) | |
1504 | # comment already deleted in another call probably |
|
1504 | # comment already deleted in another call probably | |
1505 | return True |
|
1505 | return True | |
1506 |
|
1506 | |||
1507 | if comment.pull_request.is_closed(): |
|
1507 | if comment.pull_request.is_closed(): | |
1508 | # don't allow deleting comments on closed pull request |
|
1508 | # don't allow deleting comments on closed pull request | |
1509 | raise HTTPForbidden() |
|
1509 | raise HTTPForbidden() | |
1510 |
|
1510 | |||
1511 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) |
|
1511 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) | |
1512 | super_admin = h.HasPermissionAny('hg.admin')() |
|
1512 | super_admin = h.HasPermissionAny('hg.admin')() | |
1513 | comment_owner = comment.author.user_id == self._rhodecode_user.user_id |
|
1513 | comment_owner = comment.author.user_id == self._rhodecode_user.user_id | |
1514 | is_repo_comment = comment.repo.repo_name == self.db_repo_name |
|
1514 | is_repo_comment = comment.repo.repo_name == self.db_repo_name | |
1515 | comment_repo_admin = is_repo_admin and is_repo_comment |
|
1515 | comment_repo_admin = is_repo_admin and is_repo_comment | |
1516 |
|
1516 | |||
1517 | if super_admin or comment_owner or comment_repo_admin: |
|
1517 | if super_admin or comment_owner or comment_repo_admin: | |
1518 | old_calculated_status = comment.pull_request.calculated_review_status() |
|
1518 | old_calculated_status = comment.pull_request.calculated_review_status() | |
1519 | CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user) |
|
1519 | CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user) | |
1520 | Session().commit() |
|
1520 | Session().commit() | |
1521 | calculated_status = comment.pull_request.calculated_review_status() |
|
1521 | calculated_status = comment.pull_request.calculated_review_status() | |
1522 | if old_calculated_status != calculated_status: |
|
1522 | if old_calculated_status != calculated_status: | |
1523 | PullRequestModel().trigger_pull_request_hook( |
|
1523 | PullRequestModel().trigger_pull_request_hook( | |
1524 | comment.pull_request, self._rhodecode_user, 'review_status_change', |
|
1524 | comment.pull_request, self._rhodecode_user, 'review_status_change', | |
1525 | data={'status': calculated_status}) |
|
1525 | data={'status': calculated_status}) | |
1526 | return True |
|
1526 | return True | |
1527 | else: |
|
1527 | else: | |
1528 | log.warning('No permissions for user %s to delete comment_id: %s', |
|
1528 | log.warning('No permissions for user %s to delete comment_id: %s', | |
1529 | self._rhodecode_db_user, comment_id) |
|
1529 | self._rhodecode_db_user, comment_id) | |
1530 | raise HTTPNotFound() |
|
1530 | raise HTTPNotFound() | |
1531 |
|
1531 | |||
1532 | @LoginRequired() |
|
1532 | @LoginRequired() | |
1533 | @NotAnonymous() |
|
1533 | @NotAnonymous() | |
1534 | @HasRepoPermissionAnyDecorator( |
|
1534 | @HasRepoPermissionAnyDecorator( | |
1535 | 'repository.read', 'repository.write', 'repository.admin') |
|
1535 | 'repository.read', 'repository.write', 'repository.admin') | |
1536 | @CSRFRequired() |
|
1536 | @CSRFRequired() | |
1537 | @view_config( |
|
1537 | @view_config( | |
1538 | route_name='pullrequest_comment_edit', request_method='POST', |
|
1538 | route_name='pullrequest_comment_edit', request_method='POST', | |
1539 | renderer='json_ext') |
|
1539 | renderer='json_ext') | |
1540 | def pull_request_comment_edit(self): |
|
1540 | def pull_request_comment_edit(self): | |
1541 | self.load_default_context() |
|
1541 | self.load_default_context() | |
1542 |
|
1542 | |||
1543 | pull_request = PullRequest.get_or_404( |
|
1543 | pull_request = PullRequest.get_or_404( | |
1544 | self.request.matchdict['pull_request_id'] |
|
1544 | self.request.matchdict['pull_request_id'] | |
1545 | ) |
|
1545 | ) | |
1546 | comment = ChangesetComment.get_or_404( |
|
1546 | comment = ChangesetComment.get_or_404( | |
1547 | self.request.matchdict['comment_id'] |
|
1547 | self.request.matchdict['comment_id'] | |
1548 | ) |
|
1548 | ) | |
1549 | comment_id = comment.comment_id |
|
1549 | comment_id = comment.comment_id | |
1550 |
|
1550 | |||
1551 | if comment.immutable: |
|
1551 | if comment.immutable: | |
1552 | # don't allow deleting comments that are immutable |
|
1552 | # don't allow deleting comments that are immutable | |
1553 | raise HTTPForbidden() |
|
1553 | raise HTTPForbidden() | |
1554 |
|
1554 | |||
1555 | if pull_request.is_closed(): |
|
1555 | if pull_request.is_closed(): | |
1556 | log.debug('comment: forbidden because pull request is closed') |
|
1556 | log.debug('comment: forbidden because pull request is closed') | |
1557 | raise HTTPForbidden() |
|
1557 | raise HTTPForbidden() | |
1558 |
|
1558 | |||
1559 | if not comment: |
|
1559 | if not comment: | |
1560 | log.debug('Comment with id:%s not found, skipping', comment_id) |
|
1560 | log.debug('Comment with id:%s not found, skipping', comment_id) | |
1561 | # comment already deleted in another call probably |
|
1561 | # comment already deleted in another call probably | |
1562 | return True |
|
1562 | return True | |
1563 |
|
1563 | |||
1564 | if comment.pull_request.is_closed(): |
|
1564 | if comment.pull_request.is_closed(): | |
1565 | # don't allow deleting comments on closed pull request |
|
1565 | # don't allow deleting comments on closed pull request | |
1566 | raise HTTPForbidden() |
|
1566 | raise HTTPForbidden() | |
1567 |
|
1567 | |||
1568 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) |
|
1568 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) | |
1569 | super_admin = h.HasPermissionAny('hg.admin')() |
|
1569 | super_admin = h.HasPermissionAny('hg.admin')() | |
1570 | comment_owner = comment.author.user_id == self._rhodecode_user.user_id |
|
1570 | comment_owner = comment.author.user_id == self._rhodecode_user.user_id | |
1571 | is_repo_comment = comment.repo.repo_name == self.db_repo_name |
|
1571 | is_repo_comment = comment.repo.repo_name == self.db_repo_name | |
1572 | comment_repo_admin = is_repo_admin and is_repo_comment |
|
1572 | comment_repo_admin = is_repo_admin and is_repo_comment | |
1573 |
|
1573 | |||
1574 | if super_admin or comment_owner or comment_repo_admin: |
|
1574 | if super_admin or comment_owner or comment_repo_admin: | |
1575 | text = self.request.POST.get('text') |
|
1575 | text = self.request.POST.get('text') | |
1576 | version = self.request.POST.get('version') |
|
1576 | version = self.request.POST.get('version') | |
1577 | if text == comment.text: |
|
1577 | if text == comment.text: | |
1578 | log.warning( |
|
1578 | log.warning( | |
1579 | 'Comment(PR): ' |
|
1579 | 'Comment(PR): ' | |
1580 | 'Trying to create new version ' |
|
1580 | 'Trying to create new version ' | |
1581 | 'with the same comment body {}'.format( |
|
1581 | 'with the same comment body {}'.format( | |
1582 | comment_id, |
|
1582 | comment_id, | |
1583 | ) |
|
1583 | ) | |
1584 | ) |
|
1584 | ) | |
1585 | raise HTTPNotFound() |
|
1585 | raise HTTPNotFound() | |
1586 |
|
1586 | |||
1587 | if version.isdigit(): |
|
1587 | if version.isdigit(): | |
1588 | version = int(version) |
|
1588 | version = int(version) | |
1589 | else: |
|
1589 | else: | |
1590 | log.warning( |
|
1590 | log.warning( | |
1591 | 'Comment(PR): Wrong version type {} {} ' |
|
1591 | 'Comment(PR): Wrong version type {} {} ' | |
1592 | 'for comment {}'.format( |
|
1592 | 'for comment {}'.format( | |
1593 | version, |
|
1593 | version, | |
1594 | type(version), |
|
1594 | type(version), | |
1595 | comment_id, |
|
1595 | comment_id, | |
1596 | ) |
|
1596 | ) | |
1597 | ) |
|
1597 | ) | |
1598 | raise HTTPNotFound() |
|
1598 | raise HTTPNotFound() | |
1599 |
|
1599 | |||
1600 | try: |
|
1600 | try: | |
1601 | comment_history = CommentsModel().edit( |
|
1601 | comment_history = CommentsModel().edit( | |
1602 | comment_id=comment_id, |
|
1602 | comment_id=comment_id, | |
1603 | text=text, |
|
1603 | text=text, | |
1604 | auth_user=self._rhodecode_user, |
|
1604 | auth_user=self._rhodecode_user, | |
1605 | version=version, |
|
1605 | version=version, | |
1606 | ) |
|
1606 | ) | |
1607 | except CommentVersionMismatch: |
|
1607 | except CommentVersionMismatch: | |
1608 | raise HTTPConflict() |
|
1608 | raise HTTPConflict() | |
1609 |
|
1609 | |||
1610 | if not comment_history: |
|
1610 | if not comment_history: | |
1611 | raise HTTPNotFound() |
|
1611 | raise HTTPNotFound() | |
1612 |
|
1612 | |||
1613 | Session().commit() |
|
1613 | Session().commit() | |
|
1614 | ||||
|
1615 | PullRequestModel().trigger_pull_request_hook( | |||
|
1616 | pull_request, self._rhodecode_user, 'comment_edit', | |||
|
1617 | data={'comment': comment}) | |||
|
1618 | ||||
1614 | return { |
|
1619 | return { | |
1615 | 'comment_history_id': comment_history.comment_history_id, |
|
1620 | 'comment_history_id': comment_history.comment_history_id, | |
1616 | 'comment_id': comment.comment_id, |
|
1621 | 'comment_id': comment.comment_id, | |
1617 | 'comment_version': comment_history.version, |
|
1622 | 'comment_version': comment_history.version, | |
1618 | 'comment_author_username': comment_history.author.username, |
|
1623 | 'comment_author_username': comment_history.author.username, | |
1619 | 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16), |
|
1624 | 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16), | |
1620 | 'comment_created_on': h.age_component(comment_history.created_on, |
|
1625 | 'comment_created_on': h.age_component(comment_history.created_on, | |
1621 | time_is_local=True), |
|
1626 | time_is_local=True), | |
1622 | } |
|
1627 | } | |
1623 | else: |
|
1628 | else: | |
1624 | log.warning('No permissions for user %s to edit comment_id: %s', |
|
1629 | log.warning('No permissions for user %s to edit comment_id: %s', | |
1625 | self._rhodecode_db_user, comment_id) |
|
1630 | self._rhodecode_db_user, comment_id) | |
1626 | raise HTTPNotFound() |
|
1631 | raise HTTPNotFound() |
@@ -1,79 +1,80 b'' | |||||
1 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
1 | # Copyright (C) 2016-2020 RhodeCode GmbH | |
2 | # |
|
2 | # | |
3 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |
5 | # (only), as published by the Free Software Foundation. |
|
5 | # (only), as published by the Free Software Foundation. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU Affero General Public License |
|
12 | # You should have received a copy of the GNU Affero General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | # |
|
14 | # | |
15 | # This program is dual-licensed. If you wish to learn more about the |
|
15 | # This program is dual-licensed. If you wish to learn more about the | |
16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
18 |
|
18 | |||
19 | import logging |
|
19 | import logging | |
20 | from pyramid.threadlocal import get_current_registry |
|
20 | from pyramid.threadlocal import get_current_registry | |
21 | from rhodecode.events.base import RhodeCodeIntegrationEvent |
|
21 | from rhodecode.events.base import RhodeCodeIntegrationEvent | |
22 |
|
22 | |||
23 |
|
23 | |||
24 | log = logging.getLogger(__name__) |
|
24 | log = logging.getLogger(__name__) | |
25 |
|
25 | |||
26 |
|
26 | |||
27 | def trigger(event, registry=None): |
|
27 | def trigger(event, registry=None): | |
28 | """ |
|
28 | """ | |
29 | Helper method to send an event. This wraps the pyramid logic to send an |
|
29 | Helper method to send an event. This wraps the pyramid logic to send an | |
30 | event. |
|
30 | event. | |
31 | """ |
|
31 | """ | |
32 | # For the first step we are using pyramids thread locals here. If the |
|
32 | # For the first step we are using pyramids thread locals here. If the | |
33 | # event mechanism works out as a good solution we should think about |
|
33 | # event mechanism works out as a good solution we should think about | |
34 | # passing the registry as an argument to get rid of it. |
|
34 | # passing the registry as an argument to get rid of it. | |
35 | event_name = event.__class__ |
|
35 | event_name = event.__class__ | |
36 | log.debug('event %s sent for execution', event_name) |
|
36 | log.debug('event %s sent for execution', event_name) | |
37 | registry = registry or get_current_registry() |
|
37 | registry = registry or get_current_registry() | |
38 | registry.notify(event) |
|
38 | registry.notify(event) | |
39 | log.debug('event %s triggered using registry %s', event_name, registry) |
|
39 | log.debug('event %s triggered using registry %s', event_name, registry) | |
40 |
|
40 | |||
41 | # Send the events to integrations directly |
|
41 | # Send the events to integrations directly | |
42 | from rhodecode.integrations import integrations_event_handler |
|
42 | from rhodecode.integrations import integrations_event_handler | |
43 | if isinstance(event, RhodeCodeIntegrationEvent): |
|
43 | if isinstance(event, RhodeCodeIntegrationEvent): | |
44 | integrations_event_handler(event) |
|
44 | integrations_event_handler(event) | |
45 |
|
45 | |||
46 |
|
46 | |||
47 | from rhodecode.events.user import ( # pragma: no cover |
|
47 | from rhodecode.events.user import ( # pragma: no cover | |
48 | UserPreCreate, |
|
48 | UserPreCreate, | |
49 | UserPostCreate, |
|
49 | UserPostCreate, | |
50 | UserPreUpdate, |
|
50 | UserPreUpdate, | |
51 | UserRegistered, |
|
51 | UserRegistered, | |
52 | UserPermissionsChange, |
|
52 | UserPermissionsChange, | |
53 | ) |
|
53 | ) | |
54 |
|
54 | |||
55 | from rhodecode.events.repo import ( # pragma: no cover |
|
55 | from rhodecode.events.repo import ( # pragma: no cover | |
56 | RepoEvent, RepoCommitCommentEvent, |
|
56 | RepoEvent, | |
|
57 | RepoCommitCommentEvent, RepoCommitCommentEditEvent, | |||
57 | RepoPreCreateEvent, RepoCreateEvent, |
|
58 | RepoPreCreateEvent, RepoCreateEvent, | |
58 | RepoPreDeleteEvent, RepoDeleteEvent, |
|
59 | RepoPreDeleteEvent, RepoDeleteEvent, | |
59 | RepoPrePushEvent, RepoPushEvent, |
|
60 | RepoPrePushEvent, RepoPushEvent, | |
60 | RepoPrePullEvent, RepoPullEvent, |
|
61 | RepoPrePullEvent, RepoPullEvent, | |
61 | ) |
|
62 | ) | |
62 |
|
63 | |||
63 | from rhodecode.events.repo_group import ( # pragma: no cover |
|
64 | from rhodecode.events.repo_group import ( # pragma: no cover | |
64 | RepoGroupEvent, |
|
65 | RepoGroupEvent, | |
65 | RepoGroupCreateEvent, |
|
66 | RepoGroupCreateEvent, | |
66 | RepoGroupUpdateEvent, |
|
67 | RepoGroupUpdateEvent, | |
67 | RepoGroupDeleteEvent, |
|
68 | RepoGroupDeleteEvent, | |
68 | ) |
|
69 | ) | |
69 |
|
70 | |||
70 | from rhodecode.events.pullrequest import ( # pragma: no cover |
|
71 | from rhodecode.events.pullrequest import ( # pragma: no cover | |
71 | PullRequestEvent, |
|
72 | PullRequestEvent, | |
72 | PullRequestCreateEvent, |
|
73 | PullRequestCreateEvent, | |
73 | PullRequestUpdateEvent, |
|
74 | PullRequestUpdateEvent, | |
74 | PullRequestCommentEvent, |
|
75 | PullRequestCommentEvent, | |
|
76 | PullRequestCommentEditEvent, | |||
75 | PullRequestReviewEvent, |
|
77 | PullRequestReviewEvent, | |
76 | PullRequestMergeEvent, |
|
78 | PullRequestMergeEvent, | |
77 | PullRequestCloseEvent, |
|
79 | PullRequestCloseEvent, | |
78 | PullRequestCommentEvent, |
|
|||
79 | ) |
|
80 | ) |
@@ -1,164 +1,203 b'' | |||||
1 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
1 | # Copyright (C) 2016-2020 RhodeCode GmbH | |
2 | # |
|
2 | # | |
3 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |
5 | # (only), as published by the Free Software Foundation. |
|
5 | # (only), as published by the Free Software Foundation. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU Affero General Public License |
|
12 | # You should have received a copy of the GNU Affero General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | # |
|
14 | # | |
15 | # This program is dual-licensed. If you wish to learn more about the |
|
15 | # This program is dual-licensed. If you wish to learn more about the | |
16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
18 |
|
18 | |||
19 | import logging |
|
19 | import logging | |
20 |
|
20 | |||
21 | from rhodecode.translation import lazy_ugettext |
|
21 | from rhodecode.translation import lazy_ugettext | |
22 | from rhodecode.events.repo import ( |
|
22 | from rhodecode.events.repo import (RepoEvent, _commits_as_dict, _issues_as_dict) | |
23 | RepoEvent, _commits_as_dict, _issues_as_dict) |
|
|||
24 |
|
23 | |||
25 | log = logging.getLogger(__name__) |
|
24 | log = logging.getLogger(__name__) | |
26 |
|
25 | |||
27 |
|
26 | |||
28 | class PullRequestEvent(RepoEvent): |
|
27 | class PullRequestEvent(RepoEvent): | |
29 | """ |
|
28 | """ | |
30 | Base class for pull request events. |
|
29 | Base class for pull request events. | |
31 |
|
30 | |||
32 | :param pullrequest: a :class:`PullRequest` instance |
|
31 | :param pullrequest: a :class:`PullRequest` instance | |
33 | """ |
|
32 | """ | |
34 |
|
33 | |||
35 | def __init__(self, pullrequest): |
|
34 | def __init__(self, pullrequest): | |
36 | super(PullRequestEvent, self).__init__(pullrequest.target_repo) |
|
35 | super(PullRequestEvent, self).__init__(pullrequest.target_repo) | |
37 | self.pullrequest = pullrequest |
|
36 | self.pullrequest = pullrequest | |
38 |
|
37 | |||
39 | def as_dict(self): |
|
38 | def as_dict(self): | |
40 | from rhodecode.lib.utils2 import md5_safe |
|
39 | from rhodecode.lib.utils2 import md5_safe | |
41 | from rhodecode.model.pull_request import PullRequestModel |
|
40 | from rhodecode.model.pull_request import PullRequestModel | |
42 | data = super(PullRequestEvent, self).as_dict() |
|
41 | data = super(PullRequestEvent, self).as_dict() | |
43 |
|
42 | |||
44 | commits = _commits_as_dict( |
|
43 | commits = _commits_as_dict( | |
45 | self, |
|
44 | self, | |
46 | commit_ids=self.pullrequest.revisions, |
|
45 | commit_ids=self.pullrequest.revisions, | |
47 | repos=[self.pullrequest.source_repo] |
|
46 | repos=[self.pullrequest.source_repo] | |
48 | ) |
|
47 | ) | |
49 | issues = _issues_as_dict(commits) |
|
48 | issues = _issues_as_dict(commits) | |
50 | # calculate hashes of all commits for unique identifier of commits |
|
49 | # calculate hashes of all commits for unique identifier of commits | |
51 | # inside that pull request |
|
50 | # inside that pull request | |
52 | commits_hash = md5_safe(':'.join(x.get('raw_id', '') for x in commits)) |
|
51 | commits_hash = md5_safe(':'.join(x.get('raw_id', '') for x in commits)) | |
53 |
|
52 | |||
54 | data.update({ |
|
53 | data.update({ | |
55 | 'pullrequest': { |
|
54 | 'pullrequest': { | |
56 | 'title': self.pullrequest.title, |
|
55 | 'title': self.pullrequest.title, | |
57 | 'issues': issues, |
|
56 | 'issues': issues, | |
58 | 'pull_request_id': self.pullrequest.pull_request_id, |
|
57 | 'pull_request_id': self.pullrequest.pull_request_id, | |
59 | 'url': PullRequestModel().get_url( |
|
58 | 'url': PullRequestModel().get_url( | |
60 | self.pullrequest, request=self.request), |
|
59 | self.pullrequest, request=self.request), | |
61 | 'permalink_url': PullRequestModel().get_url( |
|
60 | 'permalink_url': PullRequestModel().get_url( | |
62 | self.pullrequest, request=self.request, permalink=True), |
|
61 | self.pullrequest, request=self.request, permalink=True), | |
63 | 'shadow_url': PullRequestModel().get_shadow_clone_url( |
|
62 | 'shadow_url': PullRequestModel().get_shadow_clone_url( | |
64 | self.pullrequest, request=self.request), |
|
63 | self.pullrequest, request=self.request), | |
65 | 'status': self.pullrequest.calculated_review_status(), |
|
64 | 'status': self.pullrequest.calculated_review_status(), | |
66 | 'commits_uid': commits_hash, |
|
65 | 'commits_uid': commits_hash, | |
67 | 'commits': commits, |
|
66 | 'commits': commits, | |
68 | } |
|
67 | } | |
69 | }) |
|
68 | }) | |
70 | return data |
|
69 | return data | |
71 |
|
70 | |||
72 |
|
71 | |||
73 | class PullRequestCreateEvent(PullRequestEvent): |
|
72 | class PullRequestCreateEvent(PullRequestEvent): | |
74 | """ |
|
73 | """ | |
75 | An instance of this class is emitted as an :term:`event` after a pull |
|
74 | An instance of this class is emitted as an :term:`event` after a pull | |
76 | request is created. |
|
75 | request is created. | |
77 | """ |
|
76 | """ | |
78 | name = 'pullrequest-create' |
|
77 | name = 'pullrequest-create' | |
79 | display_name = lazy_ugettext('pullrequest created') |
|
78 | display_name = lazy_ugettext('pullrequest created') | |
80 | description = lazy_ugettext('Event triggered after pull request was created') |
|
79 | description = lazy_ugettext('Event triggered after pull request was created') | |
81 |
|
80 | |||
82 |
|
81 | |||
83 | class PullRequestCloseEvent(PullRequestEvent): |
|
82 | class PullRequestCloseEvent(PullRequestEvent): | |
84 | """ |
|
83 | """ | |
85 | An instance of this class is emitted as an :term:`event` after a pull |
|
84 | An instance of this class is emitted as an :term:`event` after a pull | |
86 | request is closed. |
|
85 | request is closed. | |
87 | """ |
|
86 | """ | |
88 | name = 'pullrequest-close' |
|
87 | name = 'pullrequest-close' | |
89 | display_name = lazy_ugettext('pullrequest closed') |
|
88 | display_name = lazy_ugettext('pullrequest closed') | |
90 | description = lazy_ugettext('Event triggered after pull request was closed') |
|
89 | description = lazy_ugettext('Event triggered after pull request was closed') | |
91 |
|
90 | |||
92 |
|
91 | |||
93 | class PullRequestUpdateEvent(PullRequestEvent): |
|
92 | class PullRequestUpdateEvent(PullRequestEvent): | |
94 | """ |
|
93 | """ | |
95 | An instance of this class is emitted as an :term:`event` after a pull |
|
94 | An instance of this class is emitted as an :term:`event` after a pull | |
96 | request's commits have been updated. |
|
95 | request's commits have been updated. | |
97 | """ |
|
96 | """ | |
98 | name = 'pullrequest-update' |
|
97 | name = 'pullrequest-update' | |
99 | display_name = lazy_ugettext('pullrequest commits updated') |
|
98 | display_name = lazy_ugettext('pullrequest commits updated') | |
100 | description = lazy_ugettext('Event triggered after pull requests was updated') |
|
99 | description = lazy_ugettext('Event triggered after pull requests was updated') | |
101 |
|
100 | |||
102 |
|
101 | |||
103 | class PullRequestReviewEvent(PullRequestEvent): |
|
102 | class PullRequestReviewEvent(PullRequestEvent): | |
104 | """ |
|
103 | """ | |
105 | An instance of this class is emitted as an :term:`event` after a pull |
|
104 | An instance of this class is emitted as an :term:`event` after a pull | |
106 | request review has changed. A status defines new status of review. |
|
105 | request review has changed. A status defines new status of review. | |
107 | """ |
|
106 | """ | |
108 | name = 'pullrequest-review' |
|
107 | name = 'pullrequest-review' | |
109 | display_name = lazy_ugettext('pullrequest review changed') |
|
108 | display_name = lazy_ugettext('pullrequest review changed') | |
110 | description = lazy_ugettext('Event triggered after a review status of a ' |
|
109 | description = lazy_ugettext('Event triggered after a review status of a ' | |
111 | 'pull requests has changed to other.') |
|
110 | 'pull requests has changed to other.') | |
112 |
|
111 | |||
113 | def __init__(self, pullrequest, status): |
|
112 | def __init__(self, pullrequest, status): | |
114 | super(PullRequestReviewEvent, self).__init__(pullrequest) |
|
113 | super(PullRequestReviewEvent, self).__init__(pullrequest) | |
115 | self.status = status |
|
114 | self.status = status | |
116 |
|
115 | |||
117 |
|
116 | |||
118 | class PullRequestMergeEvent(PullRequestEvent): |
|
117 | class PullRequestMergeEvent(PullRequestEvent): | |
119 | """ |
|
118 | """ | |
120 | An instance of this class is emitted as an :term:`event` after a pull |
|
119 | An instance of this class is emitted as an :term:`event` after a pull | |
121 | request is merged. |
|
120 | request is merged. | |
122 | """ |
|
121 | """ | |
123 | name = 'pullrequest-merge' |
|
122 | name = 'pullrequest-merge' | |
124 | display_name = lazy_ugettext('pullrequest merged') |
|
123 | display_name = lazy_ugettext('pullrequest merged') | |
125 | description = lazy_ugettext('Event triggered after a successful merge operation ' |
|
124 | description = lazy_ugettext('Event triggered after a successful merge operation ' | |
126 | 'was executed on a pull request') |
|
125 | 'was executed on a pull request') | |
127 |
|
126 | |||
128 |
|
127 | |||
129 | class PullRequestCommentEvent(PullRequestEvent): |
|
128 | class PullRequestCommentEvent(PullRequestEvent): | |
130 | """ |
|
129 | """ | |
131 | An instance of this class is emitted as an :term:`event` after a pull |
|
130 | An instance of this class is emitted as an :term:`event` after a pull | |
132 | request comment is created. |
|
131 | request comment is created. | |
133 | """ |
|
132 | """ | |
134 | name = 'pullrequest-comment' |
|
133 | name = 'pullrequest-comment' | |
135 | display_name = lazy_ugettext('pullrequest commented') |
|
134 | display_name = lazy_ugettext('pullrequest commented') | |
136 | description = lazy_ugettext('Event triggered after a comment was made on a code ' |
|
135 | description = lazy_ugettext('Event triggered after a comment was made on a code ' | |
137 | 'in the pull request') |
|
136 | 'in the pull request') | |
138 |
|
137 | |||
139 | def __init__(self, pullrequest, comment): |
|
138 | def __init__(self, pullrequest, comment): | |
140 | super(PullRequestCommentEvent, self).__init__(pullrequest) |
|
139 | super(PullRequestCommentEvent, self).__init__(pullrequest) | |
141 | self.comment = comment |
|
140 | self.comment = comment | |
142 |
|
141 | |||
143 | def as_dict(self): |
|
142 | def as_dict(self): | |
144 | from rhodecode.model.comment import CommentsModel |
|
143 | from rhodecode.model.comment import CommentsModel | |
145 | data = super(PullRequestCommentEvent, self).as_dict() |
|
144 | data = super(PullRequestCommentEvent, self).as_dict() | |
146 |
|
145 | |||
147 | status = None |
|
146 | status = None | |
148 | if self.comment.status_change: |
|
147 | if self.comment.status_change: | |
149 | status = self.comment.status_change[0].status |
|
148 | status = self.comment.status_change[0].status | |
150 |
|
149 | |||
151 | data.update({ |
|
150 | data.update({ | |
152 | 'comment': { |
|
151 | 'comment': { | |
153 | 'status': status, |
|
152 | 'status': status, | |
154 | 'text': self.comment.text, |
|
153 | 'text': self.comment.text, | |
155 | 'type': self.comment.comment_type, |
|
154 | 'type': self.comment.comment_type, | |
156 | 'file': self.comment.f_path, |
|
155 | 'file': self.comment.f_path, | |
157 | 'line': self.comment.line_no, |
|
156 | 'line': self.comment.line_no, | |
|
157 | 'version': self.comment.last_version, | |||
158 | 'url': CommentsModel().get_url( |
|
158 | 'url': CommentsModel().get_url( | |
159 | self.comment, request=self.request), |
|
159 | self.comment, request=self.request), | |
160 | 'permalink_url': CommentsModel().get_url( |
|
160 | 'permalink_url': CommentsModel().get_url( | |
161 | self.comment, request=self.request, permalink=True), |
|
161 | self.comment, request=self.request, permalink=True), | |
162 | } |
|
162 | } | |
163 | }) |
|
163 | }) | |
164 | return data |
|
164 | return data | |
|
165 | ||||
|
166 | ||||
|
167 | class PullRequestCommentEditEvent(PullRequestEvent): | |||
|
168 | """ | |||
|
169 | An instance of this class is emitted as an :term:`event` after a pull | |||
|
170 | request comment is edited. | |||
|
171 | """ | |||
|
172 | name = 'pullrequest-comment-edit' | |||
|
173 | display_name = lazy_ugettext('pullrequest comment edited') | |||
|
174 | description = lazy_ugettext('Event triggered after a comment was edited on a code ' | |||
|
175 | 'in the pull request') | |||
|
176 | ||||
|
177 | def __init__(self, pullrequest, comment): | |||
|
178 | super(PullRequestCommentEditEvent, self).__init__(pullrequest) | |||
|
179 | self.comment = comment | |||
|
180 | ||||
|
181 | def as_dict(self): | |||
|
182 | from rhodecode.model.comment import CommentsModel | |||
|
183 | data = super(PullRequestCommentEditEvent, self).as_dict() | |||
|
184 | ||||
|
185 | status = None | |||
|
186 | if self.comment.status_change: | |||
|
187 | status = self.comment.status_change[0].status | |||
|
188 | ||||
|
189 | data.update({ | |||
|
190 | 'comment': { | |||
|
191 | 'status': status, | |||
|
192 | 'text': self.comment.text, | |||
|
193 | 'type': self.comment.comment_type, | |||
|
194 | 'file': self.comment.f_path, | |||
|
195 | 'line': self.comment.line_no, | |||
|
196 | 'version': self.comment.last_version, | |||
|
197 | 'url': CommentsModel().get_url( | |||
|
198 | self.comment, request=self.request), | |||
|
199 | 'permalink_url': CommentsModel().get_url( | |||
|
200 | self.comment, request=self.request, permalink=True), | |||
|
201 | } | |||
|
202 | }) | |||
|
203 | return data |
@@ -1,400 +1,436 b'' | |||||
1 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
1 | # Copyright (C) 2016-2020 RhodeCode GmbH | |
2 | # |
|
2 | # | |
3 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
4 | # it under the terms of the GNU Affero General Public License, version 3 | |
5 | # (only), as published by the Free Software Foundation. |
|
5 | # (only), as published by the Free Software Foundation. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU Affero General Public License |
|
12 | # You should have received a copy of the GNU Affero General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | # |
|
14 | # | |
15 | # This program is dual-licensed. If you wish to learn more about the |
|
15 | # This program is dual-licensed. If you wish to learn more about the | |
16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
16 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
18 |
|
18 | |||
19 | import collections |
|
19 | import collections | |
20 | import logging |
|
20 | import logging | |
21 | import datetime |
|
21 | import datetime | |
22 |
|
22 | |||
23 | from rhodecode.translation import lazy_ugettext |
|
23 | from rhodecode.translation import lazy_ugettext | |
24 | from rhodecode.model.db import User, Repository, Session |
|
24 | from rhodecode.model.db import User, Repository, Session | |
25 | from rhodecode.events.base import RhodeCodeIntegrationEvent |
|
25 | from rhodecode.events.base import RhodeCodeIntegrationEvent | |
26 | from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError |
|
26 | from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError | |
27 |
|
27 | |||
28 | log = logging.getLogger(__name__) |
|
28 | log = logging.getLogger(__name__) | |
29 |
|
29 | |||
30 |
|
30 | |||
31 | def _commits_as_dict(event, commit_ids, repos): |
|
31 | def _commits_as_dict(event, commit_ids, repos): | |
32 | """ |
|
32 | """ | |
33 | Helper function to serialize commit_ids |
|
33 | Helper function to serialize commit_ids | |
34 |
|
34 | |||
35 | :param event: class calling this method |
|
35 | :param event: class calling this method | |
36 | :param commit_ids: commits to get |
|
36 | :param commit_ids: commits to get | |
37 | :param repos: list of repos to check |
|
37 | :param repos: list of repos to check | |
38 | """ |
|
38 | """ | |
39 | from rhodecode.lib.utils2 import extract_mentioned_users |
|
39 | from rhodecode.lib.utils2 import extract_mentioned_users | |
40 | from rhodecode.lib.helpers import ( |
|
40 | from rhodecode.lib.helpers import ( | |
41 | urlify_commit_message, process_patterns, chop_at_smart) |
|
41 | urlify_commit_message, process_patterns, chop_at_smart) | |
42 | from rhodecode.model.repo import RepoModel |
|
42 | from rhodecode.model.repo import RepoModel | |
43 |
|
43 | |||
44 | if not repos: |
|
44 | if not repos: | |
45 | raise Exception('no repo defined') |
|
45 | raise Exception('no repo defined') | |
46 |
|
46 | |||
47 | if not isinstance(repos, (tuple, list)): |
|
47 | if not isinstance(repos, (tuple, list)): | |
48 | repos = [repos] |
|
48 | repos = [repos] | |
49 |
|
49 | |||
50 | if not commit_ids: |
|
50 | if not commit_ids: | |
51 | return [] |
|
51 | return [] | |
52 |
|
52 | |||
53 | needed_commits = list(commit_ids) |
|
53 | needed_commits = list(commit_ids) | |
54 |
|
54 | |||
55 | commits = [] |
|
55 | commits = [] | |
56 | reviewers = [] |
|
56 | reviewers = [] | |
57 | for repo in repos: |
|
57 | for repo in repos: | |
58 | if not needed_commits: |
|
58 | if not needed_commits: | |
59 | return commits # return early if we have the commits we need |
|
59 | return commits # return early if we have the commits we need | |
60 |
|
60 | |||
61 | vcs_repo = repo.scm_instance(cache=False) |
|
61 | vcs_repo = repo.scm_instance(cache=False) | |
62 |
|
62 | |||
63 | try: |
|
63 | try: | |
64 | # use copy of needed_commits since we modify it while iterating |
|
64 | # use copy of needed_commits since we modify it while iterating | |
65 | for commit_id in list(needed_commits): |
|
65 | for commit_id in list(needed_commits): | |
66 | if commit_id.startswith('tag=>'): |
|
66 | if commit_id.startswith('tag=>'): | |
67 | raw_id = commit_id[5:] |
|
67 | raw_id = commit_id[5:] | |
68 | cs_data = { |
|
68 | cs_data = { | |
69 | 'raw_id': commit_id, 'short_id': commit_id, |
|
69 | 'raw_id': commit_id, 'short_id': commit_id, | |
70 | 'branch': None, |
|
70 | 'branch': None, | |
71 | 'git_ref_change': 'tag_add', |
|
71 | 'git_ref_change': 'tag_add', | |
72 | 'message': 'Added new tag {}'.format(raw_id), |
|
72 | 'message': 'Added new tag {}'.format(raw_id), | |
73 | 'author': event.actor.full_contact, |
|
73 | 'author': event.actor.full_contact, | |
74 | 'date': datetime.datetime.now(), |
|
74 | 'date': datetime.datetime.now(), | |
75 | 'refs': { |
|
75 | 'refs': { | |
76 | 'branches': [], |
|
76 | 'branches': [], | |
77 | 'bookmarks': [], |
|
77 | 'bookmarks': [], | |
78 | 'tags': [] |
|
78 | 'tags': [] | |
79 | } |
|
79 | } | |
80 | } |
|
80 | } | |
81 | commits.append(cs_data) |
|
81 | commits.append(cs_data) | |
82 |
|
82 | |||
83 | elif commit_id.startswith('delete_branch=>'): |
|
83 | elif commit_id.startswith('delete_branch=>'): | |
84 | raw_id = commit_id[15:] |
|
84 | raw_id = commit_id[15:] | |
85 | cs_data = { |
|
85 | cs_data = { | |
86 | 'raw_id': commit_id, 'short_id': commit_id, |
|
86 | 'raw_id': commit_id, 'short_id': commit_id, | |
87 | 'branch': None, |
|
87 | 'branch': None, | |
88 | 'git_ref_change': 'branch_delete', |
|
88 | 'git_ref_change': 'branch_delete', | |
89 | 'message': 'Deleted branch {}'.format(raw_id), |
|
89 | 'message': 'Deleted branch {}'.format(raw_id), | |
90 | 'author': event.actor.full_contact, |
|
90 | 'author': event.actor.full_contact, | |
91 | 'date': datetime.datetime.now(), |
|
91 | 'date': datetime.datetime.now(), | |
92 | 'refs': { |
|
92 | 'refs': { | |
93 | 'branches': [], |
|
93 | 'branches': [], | |
94 | 'bookmarks': [], |
|
94 | 'bookmarks': [], | |
95 | 'tags': [] |
|
95 | 'tags': [] | |
96 | } |
|
96 | } | |
97 | } |
|
97 | } | |
98 | commits.append(cs_data) |
|
98 | commits.append(cs_data) | |
99 |
|
99 | |||
100 | else: |
|
100 | else: | |
101 | try: |
|
101 | try: | |
102 | cs = vcs_repo.get_commit(commit_id) |
|
102 | cs = vcs_repo.get_commit(commit_id) | |
103 | except CommitDoesNotExistError: |
|
103 | except CommitDoesNotExistError: | |
104 | continue # maybe its in next repo |
|
104 | continue # maybe its in next repo | |
105 |
|
105 | |||
106 | cs_data = cs.__json__() |
|
106 | cs_data = cs.__json__() | |
107 | cs_data['refs'] = cs._get_refs() |
|
107 | cs_data['refs'] = cs._get_refs() | |
108 |
|
108 | |||
109 | cs_data['mentions'] = extract_mentioned_users(cs_data['message']) |
|
109 | cs_data['mentions'] = extract_mentioned_users(cs_data['message']) | |
110 | cs_data['reviewers'] = reviewers |
|
110 | cs_data['reviewers'] = reviewers | |
111 | cs_data['url'] = RepoModel().get_commit_url( |
|
111 | cs_data['url'] = RepoModel().get_commit_url( | |
112 | repo, cs_data['raw_id'], request=event.request) |
|
112 | repo, cs_data['raw_id'], request=event.request) | |
113 | cs_data['permalink_url'] = RepoModel().get_commit_url( |
|
113 | cs_data['permalink_url'] = RepoModel().get_commit_url( | |
114 | repo, cs_data['raw_id'], request=event.request, |
|
114 | repo, cs_data['raw_id'], request=event.request, | |
115 | permalink=True) |
|
115 | permalink=True) | |
116 | urlified_message, issues_data = process_patterns( |
|
116 | urlified_message, issues_data = process_patterns( | |
117 | cs_data['message'], repo.repo_name) |
|
117 | cs_data['message'], repo.repo_name) | |
118 | cs_data['issues'] = issues_data |
|
118 | cs_data['issues'] = issues_data | |
119 | cs_data['message_html'] = urlify_commit_message( |
|
119 | cs_data['message_html'] = urlify_commit_message( | |
120 | cs_data['message'], repo.repo_name) |
|
120 | cs_data['message'], repo.repo_name) | |
121 | cs_data['message_html_title'] = chop_at_smart( |
|
121 | cs_data['message_html_title'] = chop_at_smart( | |
122 | cs_data['message'], '\n', suffix_if_chopped='...') |
|
122 | cs_data['message'], '\n', suffix_if_chopped='...') | |
123 | commits.append(cs_data) |
|
123 | commits.append(cs_data) | |
124 |
|
124 | |||
125 | needed_commits.remove(commit_id) |
|
125 | needed_commits.remove(commit_id) | |
126 |
|
126 | |||
127 | except Exception: |
|
127 | except Exception: | |
128 | log.exception('Failed to extract commits data') |
|
128 | log.exception('Failed to extract commits data') | |
129 | # we don't send any commits when crash happens, only full list |
|
129 | # we don't send any commits when crash happens, only full list | |
130 | # matters we short circuit then. |
|
130 | # matters we short circuit then. | |
131 | return [] |
|
131 | return [] | |
132 |
|
132 | |||
133 | missing_commits = set(commit_ids) - set(c['raw_id'] for c in commits) |
|
133 | missing_commits = set(commit_ids) - set(c['raw_id'] for c in commits) | |
134 | if missing_commits: |
|
134 | if missing_commits: | |
135 | log.error('Inconsistent repository state. ' |
|
135 | log.error('Inconsistent repository state. ' | |
136 | 'Missing commits: %s', ', '.join(missing_commits)) |
|
136 | 'Missing commits: %s', ', '.join(missing_commits)) | |
137 |
|
137 | |||
138 | return commits |
|
138 | return commits | |
139 |
|
139 | |||
140 |
|
140 | |||
141 | def _issues_as_dict(commits): |
|
141 | def _issues_as_dict(commits): | |
142 | """ Helper function to serialize issues from commits """ |
|
142 | """ Helper function to serialize issues from commits """ | |
143 | issues = {} |
|
143 | issues = {} | |
144 | for commit in commits: |
|
144 | for commit in commits: | |
145 | for issue in commit['issues']: |
|
145 | for issue in commit['issues']: | |
146 | issues[issue['id']] = issue |
|
146 | issues[issue['id']] = issue | |
147 | return issues |
|
147 | return issues | |
148 |
|
148 | |||
149 |
|
149 | |||
150 | class RepoEvent(RhodeCodeIntegrationEvent): |
|
150 | class RepoEvent(RhodeCodeIntegrationEvent): | |
151 | """ |
|
151 | """ | |
152 | Base class for events acting on a repository. |
|
152 | Base class for events acting on a repository. | |
153 |
|
153 | |||
154 | :param repo: a :class:`Repository` instance |
|
154 | :param repo: a :class:`Repository` instance | |
155 | """ |
|
155 | """ | |
156 |
|
156 | |||
157 | def __init__(self, repo): |
|
157 | def __init__(self, repo): | |
158 | super(RepoEvent, self).__init__() |
|
158 | super(RepoEvent, self).__init__() | |
159 | self.repo = repo |
|
159 | self.repo = repo | |
160 |
|
160 | |||
161 | def as_dict(self): |
|
161 | def as_dict(self): | |
162 | from rhodecode.model.repo import RepoModel |
|
162 | from rhodecode.model.repo import RepoModel | |
163 | data = super(RepoEvent, self).as_dict() |
|
163 | data = super(RepoEvent, self).as_dict() | |
164 |
|
164 | |||
165 | extra_fields = collections.OrderedDict() |
|
165 | extra_fields = collections.OrderedDict() | |
166 | for field in self.repo.extra_fields: |
|
166 | for field in self.repo.extra_fields: | |
167 | extra_fields[field.field_key] = field.field_value |
|
167 | extra_fields[field.field_key] = field.field_value | |
168 |
|
168 | |||
169 | data.update({ |
|
169 | data.update({ | |
170 | 'repo': { |
|
170 | 'repo': { | |
171 | 'repo_id': self.repo.repo_id, |
|
171 | 'repo_id': self.repo.repo_id, | |
172 | 'repo_name': self.repo.repo_name, |
|
172 | 'repo_name': self.repo.repo_name, | |
173 | 'repo_type': self.repo.repo_type, |
|
173 | 'repo_type': self.repo.repo_type, | |
174 | 'url': RepoModel().get_url( |
|
174 | 'url': RepoModel().get_url( | |
175 | self.repo, request=self.request), |
|
175 | self.repo, request=self.request), | |
176 | 'permalink_url': RepoModel().get_url( |
|
176 | 'permalink_url': RepoModel().get_url( | |
177 | self.repo, request=self.request, permalink=True), |
|
177 | self.repo, request=self.request, permalink=True), | |
178 | 'extra_fields': extra_fields |
|
178 | 'extra_fields': extra_fields | |
179 | } |
|
179 | } | |
180 | }) |
|
180 | }) | |
181 | return data |
|
181 | return data | |
182 |
|
182 | |||
183 |
|
183 | |||
184 | class RepoCommitCommentEvent(RepoEvent): |
|
184 | class RepoCommitCommentEvent(RepoEvent): | |
185 | """ |
|
185 | """ | |
186 | An instance of this class is emitted as an :term:`event` after a comment is made |
|
186 | An instance of this class is emitted as an :term:`event` after a comment is made | |
187 | on repository commit. |
|
187 | on repository commit. | |
188 | """ |
|
188 | """ | |
189 |
|
189 | |||
190 | name = 'repo-commit-comment' |
|
190 | name = 'repo-commit-comment' | |
191 | display_name = lazy_ugettext('repository commit comment') |
|
191 | display_name = lazy_ugettext('repository commit comment') | |
192 | description = lazy_ugettext('Event triggered after a comment was made ' |
|
192 | description = lazy_ugettext('Event triggered after a comment was made ' | |
193 | 'on commit inside a repository') |
|
193 | 'on commit inside a repository') | |
194 |
|
194 | |||
195 | def __init__(self, repo, commit, comment): |
|
195 | def __init__(self, repo, commit, comment): | |
196 | super(RepoCommitCommentEvent, self).__init__(repo) |
|
196 | super(RepoCommitCommentEvent, self).__init__(repo) | |
197 | self.commit = commit |
|
197 | self.commit = commit | |
198 | self.comment = comment |
|
198 | self.comment = comment | |
199 |
|
199 | |||
200 | def as_dict(self): |
|
200 | def as_dict(self): | |
201 | data = super(RepoCommitCommentEvent, self).as_dict() |
|
201 | data = super(RepoCommitCommentEvent, self).as_dict() | |
202 | data['commit'] = { |
|
202 | data['commit'] = { | |
203 | 'commit_id': self.commit.raw_id, |
|
203 | 'commit_id': self.commit.raw_id, | |
204 | 'commit_message': self.commit.message, |
|
204 | 'commit_message': self.commit.message, | |
205 | 'commit_branch': self.commit.branch, |
|
205 | 'commit_branch': self.commit.branch, | |
206 | } |
|
206 | } | |
207 |
|
207 | |||
208 | data['comment'] = { |
|
208 | data['comment'] = { | |
209 | 'comment_id': self.comment.comment_id, |
|
209 | 'comment_id': self.comment.comment_id, | |
210 | 'comment_text': self.comment.text, |
|
210 | 'comment_text': self.comment.text, | |
211 | 'comment_type': self.comment.comment_type, |
|
211 | 'comment_type': self.comment.comment_type, | |
212 | 'comment_f_path': self.comment.f_path, |
|
212 | 'comment_f_path': self.comment.f_path, | |
213 | 'comment_line_no': self.comment.line_no, |
|
213 | 'comment_line_no': self.comment.line_no, | |
|
214 | 'comment_version': self.comment.last_version, | |||
|
215 | } | |||
|
216 | return data | |||
|
217 | ||||
|
218 | ||||
|
219 | class RepoCommitCommentEditEvent(RepoEvent): | |||
|
220 | """ | |||
|
221 | An instance of this class is emitted as an :term:`event` after a comment is edited | |||
|
222 | on repository commit. | |||
|
223 | """ | |||
|
224 | ||||
|
225 | name = 'repo-commit-edit-comment' | |||
|
226 | display_name = lazy_ugettext('repository commit edit comment') | |||
|
227 | description = lazy_ugettext('Event triggered after a comment was edited ' | |||
|
228 | 'on commit inside a repository') | |||
|
229 | ||||
|
230 | def __init__(self, repo, commit, comment): | |||
|
231 | super(RepoCommitCommentEditEvent, self).__init__(repo) | |||
|
232 | self.commit = commit | |||
|
233 | self.comment = comment | |||
|
234 | ||||
|
235 | def as_dict(self): | |||
|
236 | data = super(RepoCommitCommentEditEvent, self).as_dict() | |||
|
237 | data['commit'] = { | |||
|
238 | 'commit_id': self.commit.raw_id, | |||
|
239 | 'commit_message': self.commit.message, | |||
|
240 | 'commit_branch': self.commit.branch, | |||
|
241 | } | |||
|
242 | ||||
|
243 | data['comment'] = { | |||
|
244 | 'comment_id': self.comment.comment_id, | |||
|
245 | 'comment_text': self.comment.text, | |||
|
246 | 'comment_type': self.comment.comment_type, | |||
|
247 | 'comment_f_path': self.comment.f_path, | |||
|
248 | 'comment_line_no': self.comment.line_no, | |||
|
249 | 'comment_version': self.comment.last_version, | |||
214 | } |
|
250 | } | |
215 | return data |
|
251 | return data | |
216 |
|
252 | |||
217 |
|
253 | |||
218 | class RepoPreCreateEvent(RepoEvent): |
|
254 | class RepoPreCreateEvent(RepoEvent): | |
219 | """ |
|
255 | """ | |
220 | An instance of this class is emitted as an :term:`event` before a repo is |
|
256 | An instance of this class is emitted as an :term:`event` before a repo is | |
221 | created. |
|
257 | created. | |
222 | """ |
|
258 | """ | |
223 | name = 'repo-pre-create' |
|
259 | name = 'repo-pre-create' | |
224 | display_name = lazy_ugettext('repository pre create') |
|
260 | display_name = lazy_ugettext('repository pre create') | |
225 | description = lazy_ugettext('Event triggered before repository is created') |
|
261 | description = lazy_ugettext('Event triggered before repository is created') | |
226 |
|
262 | |||
227 |
|
263 | |||
228 | class RepoCreateEvent(RepoEvent): |
|
264 | class RepoCreateEvent(RepoEvent): | |
229 | """ |
|
265 | """ | |
230 | An instance of this class is emitted as an :term:`event` whenever a repo is |
|
266 | An instance of this class is emitted as an :term:`event` whenever a repo is | |
231 | created. |
|
267 | created. | |
232 | """ |
|
268 | """ | |
233 | name = 'repo-create' |
|
269 | name = 'repo-create' | |
234 | display_name = lazy_ugettext('repository created') |
|
270 | display_name = lazy_ugettext('repository created') | |
235 | description = lazy_ugettext('Event triggered after repository was created') |
|
271 | description = lazy_ugettext('Event triggered after repository was created') | |
236 |
|
272 | |||
237 |
|
273 | |||
238 | class RepoPreDeleteEvent(RepoEvent): |
|
274 | class RepoPreDeleteEvent(RepoEvent): | |
239 | """ |
|
275 | """ | |
240 | An instance of this class is emitted as an :term:`event` whenever a repo is |
|
276 | An instance of this class is emitted as an :term:`event` whenever a repo is | |
241 | created. |
|
277 | created. | |
242 | """ |
|
278 | """ | |
243 | name = 'repo-pre-delete' |
|
279 | name = 'repo-pre-delete' | |
244 | display_name = lazy_ugettext('repository pre delete') |
|
280 | display_name = lazy_ugettext('repository pre delete') | |
245 | description = lazy_ugettext('Event triggered before a repository is deleted') |
|
281 | description = lazy_ugettext('Event triggered before a repository is deleted') | |
246 |
|
282 | |||
247 |
|
283 | |||
248 | class RepoDeleteEvent(RepoEvent): |
|
284 | class RepoDeleteEvent(RepoEvent): | |
249 | """ |
|
285 | """ | |
250 | An instance of this class is emitted as an :term:`event` whenever a repo is |
|
286 | An instance of this class is emitted as an :term:`event` whenever a repo is | |
251 | created. |
|
287 | created. | |
252 | """ |
|
288 | """ | |
253 | name = 'repo-delete' |
|
289 | name = 'repo-delete' | |
254 | display_name = lazy_ugettext('repository deleted') |
|
290 | display_name = lazy_ugettext('repository deleted') | |
255 | description = lazy_ugettext('Event triggered after repository was deleted') |
|
291 | description = lazy_ugettext('Event triggered after repository was deleted') | |
256 |
|
292 | |||
257 |
|
293 | |||
258 | class RepoVCSEvent(RepoEvent): |
|
294 | class RepoVCSEvent(RepoEvent): | |
259 | """ |
|
295 | """ | |
260 | Base class for events triggered by the VCS |
|
296 | Base class for events triggered by the VCS | |
261 | """ |
|
297 | """ | |
262 | def __init__(self, repo_name, extras): |
|
298 | def __init__(self, repo_name, extras): | |
263 | self.repo = Repository.get_by_repo_name(repo_name) |
|
299 | self.repo = Repository.get_by_repo_name(repo_name) | |
264 | if not self.repo: |
|
300 | if not self.repo: | |
265 | raise Exception('repo by this name %s does not exist' % repo_name) |
|
301 | raise Exception('repo by this name %s does not exist' % repo_name) | |
266 | self.extras = extras |
|
302 | self.extras = extras | |
267 | super(RepoVCSEvent, self).__init__(self.repo) |
|
303 | super(RepoVCSEvent, self).__init__(self.repo) | |
268 |
|
304 | |||
269 | @property |
|
305 | @property | |
270 | def actor(self): |
|
306 | def actor(self): | |
271 | if self.extras.get('username'): |
|
307 | if self.extras.get('username'): | |
272 | return User.get_by_username(self.extras['username']) |
|
308 | return User.get_by_username(self.extras['username']) | |
273 |
|
309 | |||
274 | @property |
|
310 | @property | |
275 | def actor_ip(self): |
|
311 | def actor_ip(self): | |
276 | if self.extras.get('ip'): |
|
312 | if self.extras.get('ip'): | |
277 | return self.extras['ip'] |
|
313 | return self.extras['ip'] | |
278 |
|
314 | |||
279 | @property |
|
315 | @property | |
280 | def server_url(self): |
|
316 | def server_url(self): | |
281 | if self.extras.get('server_url'): |
|
317 | if self.extras.get('server_url'): | |
282 | return self.extras['server_url'] |
|
318 | return self.extras['server_url'] | |
283 |
|
319 | |||
284 | @property |
|
320 | @property | |
285 | def request(self): |
|
321 | def request(self): | |
286 | return self.extras.get('request') or self.get_request() |
|
322 | return self.extras.get('request') or self.get_request() | |
287 |
|
323 | |||
288 |
|
324 | |||
289 | class RepoPrePullEvent(RepoVCSEvent): |
|
325 | class RepoPrePullEvent(RepoVCSEvent): | |
290 | """ |
|
326 | """ | |
291 | An instance of this class is emitted as an :term:`event` before commits |
|
327 | An instance of this class is emitted as an :term:`event` before commits | |
292 | are pulled from a repo. |
|
328 | are pulled from a repo. | |
293 | """ |
|
329 | """ | |
294 | name = 'repo-pre-pull' |
|
330 | name = 'repo-pre-pull' | |
295 | display_name = lazy_ugettext('repository pre pull') |
|
331 | display_name = lazy_ugettext('repository pre pull') | |
296 | description = lazy_ugettext('Event triggered before repository code is pulled') |
|
332 | description = lazy_ugettext('Event triggered before repository code is pulled') | |
297 |
|
333 | |||
298 |
|
334 | |||
299 | class RepoPullEvent(RepoVCSEvent): |
|
335 | class RepoPullEvent(RepoVCSEvent): | |
300 | """ |
|
336 | """ | |
301 | An instance of this class is emitted as an :term:`event` after commits |
|
337 | An instance of this class is emitted as an :term:`event` after commits | |
302 | are pulled from a repo. |
|
338 | are pulled from a repo. | |
303 | """ |
|
339 | """ | |
304 | name = 'repo-pull' |
|
340 | name = 'repo-pull' | |
305 | display_name = lazy_ugettext('repository pull') |
|
341 | display_name = lazy_ugettext('repository pull') | |
306 | description = lazy_ugettext('Event triggered after repository code was pulled') |
|
342 | description = lazy_ugettext('Event triggered after repository code was pulled') | |
307 |
|
343 | |||
308 |
|
344 | |||
309 | class RepoPrePushEvent(RepoVCSEvent): |
|
345 | class RepoPrePushEvent(RepoVCSEvent): | |
310 | """ |
|
346 | """ | |
311 | An instance of this class is emitted as an :term:`event` before commits |
|
347 | An instance of this class is emitted as an :term:`event` before commits | |
312 | are pushed to a repo. |
|
348 | are pushed to a repo. | |
313 | """ |
|
349 | """ | |
314 | name = 'repo-pre-push' |
|
350 | name = 'repo-pre-push' | |
315 | display_name = lazy_ugettext('repository pre push') |
|
351 | display_name = lazy_ugettext('repository pre push') | |
316 | description = lazy_ugettext('Event triggered before the code is ' |
|
352 | description = lazy_ugettext('Event triggered before the code is ' | |
317 | 'pushed to a repository') |
|
353 | 'pushed to a repository') | |
318 |
|
354 | |||
319 |
|
355 | |||
320 | class RepoPushEvent(RepoVCSEvent): |
|
356 | class RepoPushEvent(RepoVCSEvent): | |
321 | """ |
|
357 | """ | |
322 | An instance of this class is emitted as an :term:`event` after commits |
|
358 | An instance of this class is emitted as an :term:`event` after commits | |
323 | are pushed to a repo. |
|
359 | are pushed to a repo. | |
324 |
|
360 | |||
325 | :param extras: (optional) dict of data from proxied VCS actions |
|
361 | :param extras: (optional) dict of data from proxied VCS actions | |
326 | """ |
|
362 | """ | |
327 | name = 'repo-push' |
|
363 | name = 'repo-push' | |
328 | display_name = lazy_ugettext('repository push') |
|
364 | display_name = lazy_ugettext('repository push') | |
329 | description = lazy_ugettext('Event triggered after the code was ' |
|
365 | description = lazy_ugettext('Event triggered after the code was ' | |
330 | 'pushed to a repository') |
|
366 | 'pushed to a repository') | |
331 |
|
367 | |||
332 | def __init__(self, repo_name, pushed_commit_ids, extras): |
|
368 | def __init__(self, repo_name, pushed_commit_ids, extras): | |
333 | super(RepoPushEvent, self).__init__(repo_name, extras) |
|
369 | super(RepoPushEvent, self).__init__(repo_name, extras) | |
334 | self.pushed_commit_ids = pushed_commit_ids |
|
370 | self.pushed_commit_ids = pushed_commit_ids | |
335 | self.new_refs = extras.new_refs |
|
371 | self.new_refs = extras.new_refs | |
336 |
|
372 | |||
337 | def as_dict(self): |
|
373 | def as_dict(self): | |
338 | data = super(RepoPushEvent, self).as_dict() |
|
374 | data = super(RepoPushEvent, self).as_dict() | |
339 |
|
375 | |||
340 | def branch_url(branch_name): |
|
376 | def branch_url(branch_name): | |
341 | return '{}/changelog?branch={}'.format( |
|
377 | return '{}/changelog?branch={}'.format( | |
342 | data['repo']['url'], branch_name) |
|
378 | data['repo']['url'], branch_name) | |
343 |
|
379 | |||
344 | def tag_url(tag_name): |
|
380 | def tag_url(tag_name): | |
345 | return '{}/files/{}/'.format( |
|
381 | return '{}/files/{}/'.format( | |
346 | data['repo']['url'], tag_name) |
|
382 | data['repo']['url'], tag_name) | |
347 |
|
383 | |||
348 | commits = _commits_as_dict( |
|
384 | commits = _commits_as_dict( | |
349 | self, commit_ids=self.pushed_commit_ids, repos=[self.repo]) |
|
385 | self, commit_ids=self.pushed_commit_ids, repos=[self.repo]) | |
350 |
|
386 | |||
351 | last_branch = None |
|
387 | last_branch = None | |
352 | for commit in reversed(commits): |
|
388 | for commit in reversed(commits): | |
353 | commit['branch'] = commit['branch'] or last_branch |
|
389 | commit['branch'] = commit['branch'] or last_branch | |
354 | last_branch = commit['branch'] |
|
390 | last_branch = commit['branch'] | |
355 | issues = _issues_as_dict(commits) |
|
391 | issues = _issues_as_dict(commits) | |
356 |
|
392 | |||
357 | branches = set() |
|
393 | branches = set() | |
358 | tags = set() |
|
394 | tags = set() | |
359 | for commit in commits: |
|
395 | for commit in commits: | |
360 | if commit['refs']['tags']: |
|
396 | if commit['refs']['tags']: | |
361 | for tag in commit['refs']['tags']: |
|
397 | for tag in commit['refs']['tags']: | |
362 | tags.add(tag) |
|
398 | tags.add(tag) | |
363 | if commit['branch']: |
|
399 | if commit['branch']: | |
364 | branches.add(commit['branch']) |
|
400 | branches.add(commit['branch']) | |
365 |
|
401 | |||
366 | # maybe we have branches in new_refs ? |
|
402 | # maybe we have branches in new_refs ? | |
367 | try: |
|
403 | try: | |
368 | branches = branches.union(set(self.new_refs['branches'])) |
|
404 | branches = branches.union(set(self.new_refs['branches'])) | |
369 | except Exception: |
|
405 | except Exception: | |
370 | pass |
|
406 | pass | |
371 |
|
407 | |||
372 | branches = [ |
|
408 | branches = [ | |
373 | { |
|
409 | { | |
374 | 'name': branch, |
|
410 | 'name': branch, | |
375 | 'url': branch_url(branch) |
|
411 | 'url': branch_url(branch) | |
376 | } |
|
412 | } | |
377 | for branch in branches |
|
413 | for branch in branches | |
378 | ] |
|
414 | ] | |
379 |
|
415 | |||
380 | # maybe we have branches in new_refs ? |
|
416 | # maybe we have branches in new_refs ? | |
381 | try: |
|
417 | try: | |
382 | tags = tags.union(set(self.new_refs['tags'])) |
|
418 | tags = tags.union(set(self.new_refs['tags'])) | |
383 | except Exception: |
|
419 | except Exception: | |
384 | pass |
|
420 | pass | |
385 |
|
421 | |||
386 | tags = [ |
|
422 | tags = [ | |
387 | { |
|
423 | { | |
388 | 'name': tag, |
|
424 | 'name': tag, | |
389 | 'url': tag_url(tag) |
|
425 | 'url': tag_url(tag) | |
390 | } |
|
426 | } | |
391 | for tag in tags |
|
427 | for tag in tags | |
392 | ] |
|
428 | ] | |
393 |
|
429 | |||
394 | data['push'] = { |
|
430 | data['push'] = { | |
395 | 'commits': commits, |
|
431 | 'commits': commits, | |
396 | 'issues': issues, |
|
432 | 'issues': issues, | |
397 | 'branches': branches, |
|
433 | 'branches': branches, | |
398 | 'tags': tags, |
|
434 | 'tags': tags, | |
399 | } |
|
435 | } | |
400 | return data |
|
436 | return data |
@@ -1,428 +1,450 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2012-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2012-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import colander |
|
21 | import colander | |
22 | import string |
|
22 | import string | |
23 | import collections |
|
23 | import collections | |
24 | import logging |
|
24 | import logging | |
25 | import requests |
|
25 | import requests | |
26 | import urllib |
|
26 | import urllib | |
27 | from requests.adapters import HTTPAdapter |
|
27 | from requests.adapters import HTTPAdapter | |
28 | from requests.packages.urllib3.util.retry import Retry |
|
28 | from requests.packages.urllib3.util.retry import Retry | |
29 |
|
29 | |||
30 | from mako import exceptions |
|
30 | from mako import exceptions | |
31 |
|
31 | |||
32 | from rhodecode.lib.utils2 import safe_str |
|
32 | from rhodecode.lib.utils2 import safe_str | |
33 | from rhodecode.translation import _ |
|
33 | from rhodecode.translation import _ | |
34 |
|
34 | |||
35 |
|
35 | |||
36 | log = logging.getLogger(__name__) |
|
36 | log = logging.getLogger(__name__) | |
37 |
|
37 | |||
38 |
|
38 | |||
39 | class UrlTmpl(string.Template): |
|
39 | class UrlTmpl(string.Template): | |
40 |
|
40 | |||
41 | def safe_substitute(self, **kws): |
|
41 | def safe_substitute(self, **kws): | |
42 | # url encode the kw for usage in url |
|
42 | # url encode the kw for usage in url | |
43 | kws = {k: urllib.quote(safe_str(v)) for k, v in kws.items()} |
|
43 | kws = {k: urllib.quote(safe_str(v)) for k, v in kws.items()} | |
44 | return super(UrlTmpl, self).safe_substitute(**kws) |
|
44 | return super(UrlTmpl, self).safe_substitute(**kws) | |
45 |
|
45 | |||
46 |
|
46 | |||
47 | class IntegrationTypeBase(object): |
|
47 | class IntegrationTypeBase(object): | |
48 | """ Base class for IntegrationType plugins """ |
|
48 | """ Base class for IntegrationType plugins """ | |
49 | is_dummy = False |
|
49 | is_dummy = False | |
50 | description = '' |
|
50 | description = '' | |
51 |
|
51 | |||
52 | @classmethod |
|
52 | @classmethod | |
53 | def icon(cls): |
|
53 | def icon(cls): | |
54 | return ''' |
|
54 | return ''' | |
55 | <?xml version="1.0" encoding="UTF-8" standalone="no"?> |
|
55 | <?xml version="1.0" encoding="UTF-8" standalone="no"?> | |
56 | <svg |
|
56 | <svg | |
57 | xmlns:dc="http://purl.org/dc/elements/1.1/" |
|
57 | xmlns:dc="http://purl.org/dc/elements/1.1/" | |
58 | xmlns:cc="http://creativecommons.org/ns#" |
|
58 | xmlns:cc="http://creativecommons.org/ns#" | |
59 | xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" |
|
59 | xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" | |
60 | xmlns:svg="http://www.w3.org/2000/svg" |
|
60 | xmlns:svg="http://www.w3.org/2000/svg" | |
61 | xmlns="http://www.w3.org/2000/svg" |
|
61 | xmlns="http://www.w3.org/2000/svg" | |
62 | xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" |
|
62 | xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" | |
63 | xmlns:inkscape="http://setwww.inkscape.org/namespaces/inkscape" |
|
63 | xmlns:inkscape="http://setwww.inkscape.org/namespaces/inkscape" | |
64 | viewBox="0 -256 1792 1792" |
|
64 | viewBox="0 -256 1792 1792" | |
65 | id="svg3025" |
|
65 | id="svg3025" | |
66 | version="1.1" |
|
66 | version="1.1" | |
67 | inkscape:version="0.48.3.1 r9886" |
|
67 | inkscape:version="0.48.3.1 r9886" | |
68 | width="100%" |
|
68 | width="100%" | |
69 | height="100%" |
|
69 | height="100%" | |
70 | sodipodi:docname="cog_font_awesome.svg"> |
|
70 | sodipodi:docname="cog_font_awesome.svg"> | |
71 | <metadata |
|
71 | <metadata | |
72 | id="metadata3035"> |
|
72 | id="metadata3035"> | |
73 | <rdf:RDF> |
|
73 | <rdf:RDF> | |
74 | <cc:Work |
|
74 | <cc:Work | |
75 | rdf:about=""> |
|
75 | rdf:about=""> | |
76 | <dc:format>image/svg+xml</dc:format> |
|
76 | <dc:format>image/svg+xml</dc:format> | |
77 | <dc:type |
|
77 | <dc:type | |
78 | rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> |
|
78 | rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> | |
79 | </cc:Work> |
|
79 | </cc:Work> | |
80 | </rdf:RDF> |
|
80 | </rdf:RDF> | |
81 | </metadata> |
|
81 | </metadata> | |
82 | <defs |
|
82 | <defs | |
83 | id="defs3033" /> |
|
83 | id="defs3033" /> | |
84 | <sodipodi:namedview |
|
84 | <sodipodi:namedview | |
85 | pagecolor="#ffffff" |
|
85 | pagecolor="#ffffff" | |
86 | bordercolor="#666666" |
|
86 | bordercolor="#666666" | |
87 | borderopacity="1" |
|
87 | borderopacity="1" | |
88 | objecttolerance="10" |
|
88 | objecttolerance="10" | |
89 | gridtolerance="10" |
|
89 | gridtolerance="10" | |
90 | guidetolerance="10" |
|
90 | guidetolerance="10" | |
91 | inkscape:pageopacity="0" |
|
91 | inkscape:pageopacity="0" | |
92 | inkscape:pageshadow="2" |
|
92 | inkscape:pageshadow="2" | |
93 | inkscape:window-width="640" |
|
93 | inkscape:window-width="640" | |
94 | inkscape:window-height="480" |
|
94 | inkscape:window-height="480" | |
95 | id="namedview3031" |
|
95 | id="namedview3031" | |
96 | showgrid="false" |
|
96 | showgrid="false" | |
97 | inkscape:zoom="0.13169643" |
|
97 | inkscape:zoom="0.13169643" | |
98 | inkscape:cx="896" |
|
98 | inkscape:cx="896" | |
99 | inkscape:cy="896" |
|
99 | inkscape:cy="896" | |
100 | inkscape:window-x="0" |
|
100 | inkscape:window-x="0" | |
101 | inkscape:window-y="25" |
|
101 | inkscape:window-y="25" | |
102 | inkscape:window-maximized="0" |
|
102 | inkscape:window-maximized="0" | |
103 | inkscape:current-layer="svg3025" /> |
|
103 | inkscape:current-layer="svg3025" /> | |
104 | <g |
|
104 | <g | |
105 | transform="matrix(1,0,0,-1,121.49153,1285.4237)" |
|
105 | transform="matrix(1,0,0,-1,121.49153,1285.4237)" | |
106 | id="g3027"> |
|
106 | id="g3027"> | |
107 | <path |
|
107 | <path | |
108 | d="m 1024,640 q 0,106 -75,181 -75,75 -181,75 -106,0 -181,-75 -75,-75 -75,-181 0,-106 75,-181 75,-75 181,-75 106,0 181,75 75,75 75,181 z m 512,109 V 527 q 0,-12 -8,-23 -8,-11 -20,-13 l -185,-28 q -19,-54 -39,-91 35,-50 107,-138 10,-12 10,-25 0,-13 -9,-23 -27,-37 -99,-108 -72,-71 -94,-71 -12,0 -26,9 l -138,108 q -44,-23 -91,-38 -16,-136 -29,-186 -7,-28 -36,-28 H 657 q -14,0 -24.5,8.5 Q 622,-111 621,-98 L 593,86 q -49,16 -90,37 L 362,16 Q 352,7 337,7 323,7 312,18 186,132 147,186 q -7,10 -7,23 0,12 8,23 15,21 51,66.5 36,45.5 54,70.5 -27,50 -41,99 L 29,495 Q 16,497 8,507.5 0,518 0,531 v 222 q 0,12 8,23 8,11 19,13 l 186,28 q 14,46 39,92 -40,57 -107,138 -10,12 -10,24 0,10 9,23 26,36 98.5,107.5 72.5,71.5 94.5,71.5 13,0 26,-10 l 138,-107 q 44,23 91,38 16,136 29,186 7,28 36,28 h 222 q 14,0 24.5,-8.5 Q 914,1391 915,1378 l 28,-184 q 49,-16 90,-37 l 142,107 q 9,9 24,9 13,0 25,-10 129,-119 165,-170 7,-8 7,-22 0,-12 -8,-23 -15,-21 -51,-66.5 -36,-45.5 -54,-70.5 26,-50 41,-98 l 183,-28 q 13,-2 21,-12.5 8,-10.5 8,-23.5 z" |
|
108 | d="m 1024,640 q 0,106 -75,181 -75,75 -181,75 -106,0 -181,-75 -75,-75 -75,-181 0,-106 75,-181 75,-75 181,-75 106,0 181,75 75,75 75,181 z m 512,109 V 527 q 0,-12 -8,-23 -8,-11 -20,-13 l -185,-28 q -19,-54 -39,-91 35,-50 107,-138 10,-12 10,-25 0,-13 -9,-23 -27,-37 -99,-108 -72,-71 -94,-71 -12,0 -26,9 l -138,108 q -44,-23 -91,-38 -16,-136 -29,-186 -7,-28 -36,-28 H 657 q -14,0 -24.5,8.5 Q 622,-111 621,-98 L 593,86 q -49,16 -90,37 L 362,16 Q 352,7 337,7 323,7 312,18 186,132 147,186 q -7,10 -7,23 0,12 8,23 15,21 51,66.5 36,45.5 54,70.5 -27,50 -41,99 L 29,495 Q 16,497 8,507.5 0,518 0,531 v 222 q 0,12 8,23 8,11 19,13 l 186,28 q 14,46 39,92 -40,57 -107,138 -10,12 -10,24 0,10 9,23 26,36 98.5,107.5 72.5,71.5 94.5,71.5 13,0 26,-10 l 138,-107 q 44,23 91,38 16,136 29,186 7,28 36,28 h 222 q 14,0 24.5,-8.5 Q 914,1391 915,1378 l 28,-184 q 49,-16 90,-37 l 142,107 q 9,9 24,9 13,0 25,-10 129,-119 165,-170 7,-8 7,-22 0,-12 -8,-23 -15,-21 -51,-66.5 -36,-45.5 -54,-70.5 26,-50 41,-98 l 183,-28 q 13,-2 21,-12.5 8,-10.5 8,-23.5 z" | |
109 | id="path3029" |
|
109 | id="path3029" | |
110 | inkscape:connector-curvature="0" |
|
110 | inkscape:connector-curvature="0" | |
111 | style="fill:currentColor" /> |
|
111 | style="fill:currentColor" /> | |
112 | </g> |
|
112 | </g> | |
113 | </svg> |
|
113 | </svg> | |
114 | ''' |
|
114 | ''' | |
115 |
|
115 | |||
116 | def __init__(self, settings): |
|
116 | def __init__(self, settings): | |
117 | """ |
|
117 | """ | |
118 | :param settings: dict of settings to be used for the integration |
|
118 | :param settings: dict of settings to be used for the integration | |
119 | """ |
|
119 | """ | |
120 | self.settings = settings |
|
120 | self.settings = settings | |
121 |
|
121 | |||
122 | def settings_schema(self): |
|
122 | def settings_schema(self): | |
123 | """ |
|
123 | """ | |
124 | A colander schema of settings for the integration type |
|
124 | A colander schema of settings for the integration type | |
125 | """ |
|
125 | """ | |
126 | return colander.Schema() |
|
126 | return colander.Schema() | |
127 |
|
127 | |||
128 | def event_enabled(self, event): |
|
128 | def event_enabled(self, event): | |
129 | """ |
|
129 | """ | |
130 | Checks if submitted event is enabled based on the plugin settings |
|
130 | Checks if submitted event is enabled based on the plugin settings | |
131 | :param event: |
|
131 | :param event: | |
132 | :return: bool |
|
132 | :return: bool | |
133 | """ |
|
133 | """ | |
134 | allowed_events = self.settings['events'] |
|
134 | allowed_events = self.settings['events'] | |
135 | if event.name not in allowed_events: |
|
135 | if event.name not in allowed_events: | |
136 | log.debug('event ignored: %r event %s not in allowed set of events %s', |
|
136 | log.debug('event ignored: %r event %s not in allowed set of events %s', | |
137 | event, event.name, allowed_events) |
|
137 | event, event.name, allowed_events) | |
138 | return False |
|
138 | return False | |
139 | return True |
|
139 | return True | |
140 |
|
140 | |||
141 |
|
141 | |||
142 | class EEIntegration(IntegrationTypeBase): |
|
142 | class EEIntegration(IntegrationTypeBase): | |
143 | description = 'Integration available in RhodeCode EE edition.' |
|
143 | description = 'Integration available in RhodeCode EE edition.' | |
144 | is_dummy = True |
|
144 | is_dummy = True | |
145 |
|
145 | |||
146 | def __init__(self, name, key, settings=None): |
|
146 | def __init__(self, name, key, settings=None): | |
147 | self.display_name = name |
|
147 | self.display_name = name | |
148 | self.key = key |
|
148 | self.key = key | |
149 | super(EEIntegration, self).__init__(settings) |
|
149 | super(EEIntegration, self).__init__(settings) | |
150 |
|
150 | |||
151 |
|
151 | |||
152 | # Helpers # |
|
152 | # Helpers # | |
153 | # updating this required to update the `common_vars` as well. |
|
153 | # updating this required to update the `common_vars` as well. | |
154 | WEBHOOK_URL_VARS = [ |
|
154 | WEBHOOK_URL_VARS = [ | |
155 | # GENERAL |
|
155 | # GENERAL | |
156 | ('General', [ |
|
156 | ('General', [ | |
157 | ('event_name', 'Unique name of the event type, e.g pullrequest-update'), |
|
157 | ('event_name', 'Unique name of the event type, e.g pullrequest-update'), | |
158 | ('repo_name', 'Full name of the repository'), |
|
158 | ('repo_name', 'Full name of the repository'), | |
159 | ('repo_type', 'VCS type of repository'), |
|
159 | ('repo_type', 'VCS type of repository'), | |
160 | ('repo_id', 'Unique id of repository'), |
|
160 | ('repo_id', 'Unique id of repository'), | |
161 | ('repo_url', 'Repository url'), |
|
161 | ('repo_url', 'Repository url'), | |
162 | ] |
|
162 | ] | |
163 | ), |
|
163 | ), | |
164 | # extra repo fields |
|
164 | # extra repo fields | |
165 | ('Repository', [ |
|
165 | ('Repository', [ | |
166 | ('extra:<extra_key_name>', 'Extra repo variables, read from its settings.'), |
|
166 | ('extra:<extra_key_name>', 'Extra repo variables, read from its settings.'), | |
167 | ] |
|
167 | ] | |
168 | ), |
|
168 | ), | |
169 | # special attrs below that we handle, using multi-call |
|
169 | # special attrs below that we handle, using multi-call | |
170 | ('Commit push - Multicalls', [ |
|
170 | ('Commit push - Multicalls', [ | |
171 | ('branch', 'Name of each branch submitted, if any.'), |
|
171 | ('branch', 'Name of each branch submitted, if any.'), | |
172 | ('branch_head', 'Head ID of pushed branch (full sha of last commit), if any.'), |
|
172 | ('branch_head', 'Head ID of pushed branch (full sha of last commit), if any.'), | |
173 | ('commit_id', 'ID (full sha) of each commit submitted, if any.'), |
|
173 | ('commit_id', 'ID (full sha) of each commit submitted, if any.'), | |
174 | ] |
|
174 | ] | |
175 | ), |
|
175 | ), | |
176 | # pr events vars |
|
176 | # pr events vars | |
177 | ('Pull request', [ |
|
177 | ('Pull request', [ | |
178 | ('pull_request_id', 'Unique ID of the pull request.'), |
|
178 | ('pull_request_id', 'Unique ID of the pull request.'), | |
179 | ('pull_request_title', 'Title of the pull request.'), |
|
179 | ('pull_request_title', 'Title of the pull request.'), | |
180 | ('pull_request_url', 'Pull request url.'), |
|
180 | ('pull_request_url', 'Pull request url.'), | |
181 | ('pull_request_shadow_url', 'Pull request shadow repo clone url.'), |
|
181 | ('pull_request_shadow_url', 'Pull request shadow repo clone url.'), | |
182 | ('pull_request_commits_uid', 'Calculated UID of all commits inside the PR. ' |
|
182 | ('pull_request_commits_uid', 'Calculated UID of all commits inside the PR. ' | |
183 | 'Changes after PR update'), |
|
183 | 'Changes after PR update'), | |
184 | ] |
|
184 | ] | |
185 | ), |
|
185 | ), | |
186 | # commit comment event vars |
|
186 | # commit comment event vars | |
187 | ('Commit comment', [ |
|
187 | ('Commit comment', [ | |
188 | ('commit_comment_id', 'Unique ID of the comment made on a commit.'), |
|
188 | ('commit_comment_id', 'Unique ID of the comment made on a commit.'), | |
189 | ('commit_comment_text', 'Text of commit comment.'), |
|
189 | ('commit_comment_text', 'Text of commit comment.'), | |
190 | ('commit_comment_type', 'Type of comment, e.g note/todo.'), |
|
190 | ('commit_comment_type', 'Type of comment, e.g note/todo.'), | |
191 |
|
191 | |||
192 | ('commit_comment_f_path', 'Optionally path of file for inline comments.'), |
|
192 | ('commit_comment_f_path', 'Optionally path of file for inline comments.'), | |
193 | ('commit_comment_line_no', 'Line number of the file: eg o10, or n200'), |
|
193 | ('commit_comment_line_no', 'Line number of the file: eg o10, or n200'), | |
194 |
|
194 | |||
195 | ('commit_comment_commit_id', 'Commit id that comment was left at.'), |
|
195 | ('commit_comment_commit_id', 'Commit id that comment was left at.'), | |
196 | ('commit_comment_commit_branch', 'Commit branch that comment was left at'), |
|
196 | ('commit_comment_commit_branch', 'Commit branch that comment was left at'), | |
197 | ('commit_comment_commit_message', 'Commit message that comment was left at'), |
|
197 | ('commit_comment_commit_message', 'Commit message that comment was left at'), | |
198 | ] |
|
198 | ] | |
199 | ), |
|
199 | ), | |
200 | # user who triggers the call |
|
200 | # user who triggers the call | |
201 | ('Caller', [ |
|
201 | ('Caller', [ | |
202 | ('username', 'User who triggered the call.'), |
|
202 | ('username', 'User who triggered the call.'), | |
203 | ('user_id', 'User id who triggered the call.'), |
|
203 | ('user_id', 'User id who triggered the call.'), | |
204 | ] |
|
204 | ] | |
205 | ), |
|
205 | ), | |
206 | ] |
|
206 | ] | |
207 |
|
207 | |||
208 | # common vars for url template used for CI plugins. Shared with webhook |
|
208 | # common vars for url template used for CI plugins. Shared with webhook | |
209 | CI_URL_VARS = WEBHOOK_URL_VARS |
|
209 | CI_URL_VARS = WEBHOOK_URL_VARS | |
210 |
|
210 | |||
211 |
|
211 | |||
212 | class CommitParsingDataHandler(object): |
|
212 | class CommitParsingDataHandler(object): | |
213 |
|
213 | |||
214 | def aggregate_branch_data(self, branches, commits): |
|
214 | def aggregate_branch_data(self, branches, commits): | |
215 | branch_data = collections.OrderedDict() |
|
215 | branch_data = collections.OrderedDict() | |
216 | for obj in branches: |
|
216 | for obj in branches: | |
217 | branch_data[obj['name']] = obj |
|
217 | branch_data[obj['name']] = obj | |
218 |
|
218 | |||
219 | branches_commits = collections.OrderedDict() |
|
219 | branches_commits = collections.OrderedDict() | |
220 | for commit in commits: |
|
220 | for commit in commits: | |
221 | if commit.get('git_ref_change'): |
|
221 | if commit.get('git_ref_change'): | |
222 | # special case for GIT that allows creating tags, |
|
222 | # special case for GIT that allows creating tags, | |
223 | # deleting branches without associated commit |
|
223 | # deleting branches without associated commit | |
224 | continue |
|
224 | continue | |
225 | commit_branch = commit['branch'] |
|
225 | commit_branch = commit['branch'] | |
226 |
|
226 | |||
227 | if commit_branch not in branches_commits: |
|
227 | if commit_branch not in branches_commits: | |
228 | _branch = branch_data[commit_branch] \ |
|
228 | _branch = branch_data[commit_branch] \ | |
229 | if commit_branch else commit_branch |
|
229 | if commit_branch else commit_branch | |
230 | branch_commits = {'branch': _branch, |
|
230 | branch_commits = {'branch': _branch, | |
231 | 'branch_head': '', |
|
231 | 'branch_head': '', | |
232 | 'commits': []} |
|
232 | 'commits': []} | |
233 | branches_commits[commit_branch] = branch_commits |
|
233 | branches_commits[commit_branch] = branch_commits | |
234 |
|
234 | |||
235 | branch_commits = branches_commits[commit_branch] |
|
235 | branch_commits = branches_commits[commit_branch] | |
236 | branch_commits['commits'].append(commit) |
|
236 | branch_commits['commits'].append(commit) | |
237 | branch_commits['branch_head'] = commit['raw_id'] |
|
237 | branch_commits['branch_head'] = commit['raw_id'] | |
238 | return branches_commits |
|
238 | return branches_commits | |
239 |
|
239 | |||
240 |
|
240 | |||
241 | class WebhookDataHandler(CommitParsingDataHandler): |
|
241 | class WebhookDataHandler(CommitParsingDataHandler): | |
242 | name = 'webhook' |
|
242 | name = 'webhook' | |
243 |
|
243 | |||
244 | def __init__(self, template_url, headers): |
|
244 | def __init__(self, template_url, headers): | |
245 | self.template_url = template_url |
|
245 | self.template_url = template_url | |
246 | self.headers = headers |
|
246 | self.headers = headers | |
247 |
|
247 | |||
248 | def get_base_parsed_template(self, data): |
|
248 | def get_base_parsed_template(self, data): | |
249 | """ |
|
249 | """ | |
250 | initially parses the passed in template with some common variables |
|
250 | initially parses the passed in template with some common variables | |
251 | available on ALL calls |
|
251 | available on ALL calls | |
252 | """ |
|
252 | """ | |
253 | # note: make sure to update the `WEBHOOK_URL_VARS` if this changes |
|
253 | # note: make sure to update the `WEBHOOK_URL_VARS` if this changes | |
254 | common_vars = { |
|
254 | common_vars = { | |
255 | 'repo_name': data['repo']['repo_name'], |
|
255 | 'repo_name': data['repo']['repo_name'], | |
256 | 'repo_type': data['repo']['repo_type'], |
|
256 | 'repo_type': data['repo']['repo_type'], | |
257 | 'repo_id': data['repo']['repo_id'], |
|
257 | 'repo_id': data['repo']['repo_id'], | |
258 | 'repo_url': data['repo']['url'], |
|
258 | 'repo_url': data['repo']['url'], | |
259 | 'username': data['actor']['username'], |
|
259 | 'username': data['actor']['username'], | |
260 | 'user_id': data['actor']['user_id'], |
|
260 | 'user_id': data['actor']['user_id'], | |
261 | 'event_name': data['name'] |
|
261 | 'event_name': data['name'] | |
262 | } |
|
262 | } | |
263 |
|
263 | |||
264 | extra_vars = {} |
|
264 | extra_vars = {} | |
265 | for extra_key, extra_val in data['repo']['extra_fields'].items(): |
|
265 | for extra_key, extra_val in data['repo']['extra_fields'].items(): | |
266 | extra_vars['extra__{}'.format(extra_key)] = extra_val |
|
266 | extra_vars['extra__{}'.format(extra_key)] = extra_val | |
267 | common_vars.update(extra_vars) |
|
267 | common_vars.update(extra_vars) | |
268 |
|
268 | |||
269 | template_url = self.template_url.replace('${extra:', '${extra__') |
|
269 | template_url = self.template_url.replace('${extra:', '${extra__') | |
270 | for k, v in common_vars.items(): |
|
270 | for k, v in common_vars.items(): | |
271 | template_url = UrlTmpl(template_url).safe_substitute(**{k: v}) |
|
271 | template_url = UrlTmpl(template_url).safe_substitute(**{k: v}) | |
272 | return template_url |
|
272 | return template_url | |
273 |
|
273 | |||
274 | def repo_push_event_handler(self, event, data): |
|
274 | def repo_push_event_handler(self, event, data): | |
275 | url = self.get_base_parsed_template(data) |
|
275 | url = self.get_base_parsed_template(data) | |
276 | url_calls = [] |
|
276 | url_calls = [] | |
277 |
|
277 | |||
278 | branches_commits = self.aggregate_branch_data( |
|
278 | branches_commits = self.aggregate_branch_data( | |
279 | data['push']['branches'], data['push']['commits']) |
|
279 | data['push']['branches'], data['push']['commits']) | |
280 | if '${branch}' in url or '${branch_head}' in url or '${commit_id}' in url: |
|
280 | if '${branch}' in url or '${branch_head}' in url or '${commit_id}' in url: | |
281 | # call it multiple times, for each branch if used in variables |
|
281 | # call it multiple times, for each branch if used in variables | |
282 | for branch, commit_ids in branches_commits.items(): |
|
282 | for branch, commit_ids in branches_commits.items(): | |
283 | branch_url = UrlTmpl(url).safe_substitute(branch=branch) |
|
283 | branch_url = UrlTmpl(url).safe_substitute(branch=branch) | |
284 |
|
284 | |||
285 | if '${branch_head}' in branch_url: |
|
285 | if '${branch_head}' in branch_url: | |
286 | # last commit in the aggregate is the head of the branch |
|
286 | # last commit in the aggregate is the head of the branch | |
287 | branch_head = commit_ids['branch_head'] |
|
287 | branch_head = commit_ids['branch_head'] | |
288 | branch_url = UrlTmpl(branch_url).safe_substitute(branch_head=branch_head) |
|
288 | branch_url = UrlTmpl(branch_url).safe_substitute(branch_head=branch_head) | |
289 |
|
289 | |||
290 | # call further down for each commit if used |
|
290 | # call further down for each commit if used | |
291 | if '${commit_id}' in branch_url: |
|
291 | if '${commit_id}' in branch_url: | |
292 | for commit_data in commit_ids['commits']: |
|
292 | for commit_data in commit_ids['commits']: | |
293 | commit_id = commit_data['raw_id'] |
|
293 | commit_id = commit_data['raw_id'] | |
294 | commit_url = UrlTmpl(branch_url).safe_substitute(commit_id=commit_id) |
|
294 | commit_url = UrlTmpl(branch_url).safe_substitute(commit_id=commit_id) | |
295 | # register per-commit call |
|
295 | # register per-commit call | |
296 | log.debug( |
|
296 | log.debug( | |
297 | 'register %s call(%s) to url %s', |
|
297 | 'register %s call(%s) to url %s', | |
298 | self.name, event, commit_url) |
|
298 | self.name, event, commit_url) | |
299 | url_calls.append( |
|
299 | url_calls.append( | |
300 | (commit_url, self.headers, data)) |
|
300 | (commit_url, self.headers, data)) | |
301 |
|
301 | |||
302 | else: |
|
302 | else: | |
303 | # register per-branch call |
|
303 | # register per-branch call | |
304 | log.debug('register %s call(%s) to url %s', |
|
304 | log.debug('register %s call(%s) to url %s', | |
305 | self.name, event, branch_url) |
|
305 | self.name, event, branch_url) | |
306 | url_calls.append((branch_url, self.headers, data)) |
|
306 | url_calls.append((branch_url, self.headers, data)) | |
307 |
|
307 | |||
308 | else: |
|
308 | else: | |
309 | log.debug('register %s call(%s) to url %s', self.name, event, url) |
|
309 | log.debug('register %s call(%s) to url %s', self.name, event, url) | |
310 | url_calls.append((url, self.headers, data)) |
|
310 | url_calls.append((url, self.headers, data)) | |
311 |
|
311 | |||
312 | return url_calls |
|
312 | return url_calls | |
313 |
|
313 | |||
314 | def repo_commit_comment_handler(self, event, data): |
|
314 | def repo_commit_comment_handler(self, event, data): | |
315 | url = self.get_base_parsed_template(data) |
|
315 | url = self.get_base_parsed_template(data) | |
316 | log.debug('register %s call(%s) to url %s', self.name, event, url) |
|
316 | log.debug('register %s call(%s) to url %s', self.name, event, url) | |
317 | comment_vars = [ |
|
317 | comment_vars = [ | |
318 | ('commit_comment_id', data['comment']['comment_id']), |
|
318 | ('commit_comment_id', data['comment']['comment_id']), | |
319 | ('commit_comment_text', data['comment']['comment_text']), |
|
319 | ('commit_comment_text', data['comment']['comment_text']), | |
320 | ('commit_comment_type', data['comment']['comment_type']), |
|
320 | ('commit_comment_type', data['comment']['comment_type']), | |
321 |
|
321 | |||
322 | ('commit_comment_f_path', data['comment']['comment_f_path']), |
|
322 | ('commit_comment_f_path', data['comment']['comment_f_path']), | |
323 | ('commit_comment_line_no', data['comment']['comment_line_no']), |
|
323 | ('commit_comment_line_no', data['comment']['comment_line_no']), | |
324 |
|
324 | |||
325 | ('commit_comment_commit_id', data['commit']['commit_id']), |
|
325 | ('commit_comment_commit_id', data['commit']['commit_id']), | |
326 | ('commit_comment_commit_branch', data['commit']['commit_branch']), |
|
326 | ('commit_comment_commit_branch', data['commit']['commit_branch']), | |
327 | ('commit_comment_commit_message', data['commit']['commit_message']), |
|
327 | ('commit_comment_commit_message', data['commit']['commit_message']), | |
328 | ] |
|
328 | ] | |
329 | for k, v in comment_vars: |
|
329 | for k, v in comment_vars: | |
330 | url = UrlTmpl(url).safe_substitute(**{k: v}) |
|
330 | url = UrlTmpl(url).safe_substitute(**{k: v}) | |
331 |
|
331 | |||
332 | return [(url, self.headers, data)] |
|
332 | return [(url, self.headers, data)] | |
333 |
|
333 | |||
|
334 | def repo_commit_comment_edit_handler(self, event, data): | |||
|
335 | url = self.get_base_parsed_template(data) | |||
|
336 | log.debug('register %s call(%s) to url %s', self.name, event, url) | |||
|
337 | comment_vars = [ | |||
|
338 | ('commit_comment_id', data['comment']['comment_id']), | |||
|
339 | ('commit_comment_text', data['comment']['comment_text']), | |||
|
340 | ('commit_comment_type', data['comment']['comment_type']), | |||
|
341 | ||||
|
342 | ('commit_comment_f_path', data['comment']['comment_f_path']), | |||
|
343 | ('commit_comment_line_no', data['comment']['comment_line_no']), | |||
|
344 | ||||
|
345 | ('commit_comment_commit_id', data['commit']['commit_id']), | |||
|
346 | ('commit_comment_commit_branch', data['commit']['commit_branch']), | |||
|
347 | ('commit_comment_commit_message', data['commit']['commit_message']), | |||
|
348 | ] | |||
|
349 | for k, v in comment_vars: | |||
|
350 | url = UrlTmpl(url).safe_substitute(**{k: v}) | |||
|
351 | ||||
|
352 | return [(url, self.headers, data)] | |||
|
353 | ||||
334 | def repo_create_event_handler(self, event, data): |
|
354 | def repo_create_event_handler(self, event, data): | |
335 | url = self.get_base_parsed_template(data) |
|
355 | url = self.get_base_parsed_template(data) | |
336 | log.debug('register %s call(%s) to url %s', self.name, event, url) |
|
356 | log.debug('register %s call(%s) to url %s', self.name, event, url) | |
337 | return [(url, self.headers, data)] |
|
357 | return [(url, self.headers, data)] | |
338 |
|
358 | |||
339 | def pull_request_event_handler(self, event, data): |
|
359 | def pull_request_event_handler(self, event, data): | |
340 | url = self.get_base_parsed_template(data) |
|
360 | url = self.get_base_parsed_template(data) | |
341 | log.debug('register %s call(%s) to url %s', self.name, event, url) |
|
361 | log.debug('register %s call(%s) to url %s', self.name, event, url) | |
342 | pr_vars = [ |
|
362 | pr_vars = [ | |
343 | ('pull_request_id', data['pullrequest']['pull_request_id']), |
|
363 | ('pull_request_id', data['pullrequest']['pull_request_id']), | |
344 | ('pull_request_title', data['pullrequest']['title']), |
|
364 | ('pull_request_title', data['pullrequest']['title']), | |
345 | ('pull_request_url', data['pullrequest']['url']), |
|
365 | ('pull_request_url', data['pullrequest']['url']), | |
346 | ('pull_request_shadow_url', data['pullrequest']['shadow_url']), |
|
366 | ('pull_request_shadow_url', data['pullrequest']['shadow_url']), | |
347 | ('pull_request_commits_uid', data['pullrequest']['commits_uid']), |
|
367 | ('pull_request_commits_uid', data['pullrequest']['commits_uid']), | |
348 | ] |
|
368 | ] | |
349 | for k, v in pr_vars: |
|
369 | for k, v in pr_vars: | |
350 | url = UrlTmpl(url).safe_substitute(**{k: v}) |
|
370 | url = UrlTmpl(url).safe_substitute(**{k: v}) | |
351 |
|
371 | |||
352 | return [(url, self.headers, data)] |
|
372 | return [(url, self.headers, data)] | |
353 |
|
373 | |||
354 | def __call__(self, event, data): |
|
374 | def __call__(self, event, data): | |
355 | from rhodecode import events |
|
375 | from rhodecode import events | |
356 |
|
376 | |||
357 | if isinstance(event, events.RepoPushEvent): |
|
377 | if isinstance(event, events.RepoPushEvent): | |
358 | return self.repo_push_event_handler(event, data) |
|
378 | return self.repo_push_event_handler(event, data) | |
359 | elif isinstance(event, events.RepoCreateEvent): |
|
379 | elif isinstance(event, events.RepoCreateEvent): | |
360 | return self.repo_create_event_handler(event, data) |
|
380 | return self.repo_create_event_handler(event, data) | |
361 | elif isinstance(event, events.RepoCommitCommentEvent): |
|
381 | elif isinstance(event, events.RepoCommitCommentEvent): | |
362 | return self.repo_commit_comment_handler(event, data) |
|
382 | return self.repo_commit_comment_handler(event, data) | |
|
383 | elif isinstance(event, events.RepoCommitCommentEditEvent): | |||
|
384 | return self.repo_commit_comment_edit_handler(event, data) | |||
363 | elif isinstance(event, events.PullRequestEvent): |
|
385 | elif isinstance(event, events.PullRequestEvent): | |
364 | return self.pull_request_event_handler(event, data) |
|
386 | return self.pull_request_event_handler(event, data) | |
365 | else: |
|
387 | else: | |
366 | raise ValueError( |
|
388 | raise ValueError( | |
367 | 'event type `{}` has no handler defined'.format(event.__class__)) |
|
389 | 'event type `{}` has no handler defined'.format(event.__class__)) | |
368 |
|
390 | |||
369 |
|
391 | |||
370 | def get_auth(settings): |
|
392 | def get_auth(settings): | |
371 | from requests.auth import HTTPBasicAuth |
|
393 | from requests.auth import HTTPBasicAuth | |
372 | username = settings.get('username') |
|
394 | username = settings.get('username') | |
373 | password = settings.get('password') |
|
395 | password = settings.get('password') | |
374 | if username and password: |
|
396 | if username and password: | |
375 | return HTTPBasicAuth(username, password) |
|
397 | return HTTPBasicAuth(username, password) | |
376 | return None |
|
398 | return None | |
377 |
|
399 | |||
378 |
|
400 | |||
379 | def get_web_token(settings): |
|
401 | def get_web_token(settings): | |
380 | return settings['secret_token'] |
|
402 | return settings['secret_token'] | |
381 |
|
403 | |||
382 |
|
404 | |||
383 | def get_url_vars(url_vars): |
|
405 | def get_url_vars(url_vars): | |
384 | items = [] |
|
406 | items = [] | |
385 |
|
407 | |||
386 | for section, section_items in url_vars: |
|
408 | for section, section_items in url_vars: | |
387 | items.append('\n*{}*'.format(section)) |
|
409 | items.append('\n*{}*'.format(section)) | |
388 | for key, explanation in section_items: |
|
410 | for key, explanation in section_items: | |
389 | items.append(' {} - {}'.format('${' + key + '}', explanation)) |
|
411 | items.append(' {} - {}'.format('${' + key + '}', explanation)) | |
390 | return '\n'.join(items) |
|
412 | return '\n'.join(items) | |
391 |
|
413 | |||
392 |
|
414 | |||
393 | def render_with_traceback(template, *args, **kwargs): |
|
415 | def render_with_traceback(template, *args, **kwargs): | |
394 | try: |
|
416 | try: | |
395 | return template.render(*args, **kwargs) |
|
417 | return template.render(*args, **kwargs) | |
396 | except Exception: |
|
418 | except Exception: | |
397 | log.error(exceptions.text_error_template().render()) |
|
419 | log.error(exceptions.text_error_template().render()) | |
398 | raise |
|
420 | raise | |
399 |
|
421 | |||
400 |
|
422 | |||
401 | STATUS_400 = (400, 401, 403) |
|
423 | STATUS_400 = (400, 401, 403) | |
402 | STATUS_500 = (500, 502, 504) |
|
424 | STATUS_500 = (500, 502, 504) | |
403 |
|
425 | |||
404 |
|
426 | |||
405 | def requests_retry_call( |
|
427 | def requests_retry_call( | |
406 | retries=3, backoff_factor=0.3, status_forcelist=STATUS_400+STATUS_500, |
|
428 | retries=3, backoff_factor=0.3, status_forcelist=STATUS_400+STATUS_500, | |
407 | session=None): |
|
429 | session=None): | |
408 | """ |
|
430 | """ | |
409 | session = requests_retry_session() |
|
431 | session = requests_retry_session() | |
410 | response = session.get('http://example.com') |
|
432 | response = session.get('http://example.com') | |
411 |
|
433 | |||
412 | :param retries: |
|
434 | :param retries: | |
413 | :param backoff_factor: |
|
435 | :param backoff_factor: | |
414 | :param status_forcelist: |
|
436 | :param status_forcelist: | |
415 | :param session: |
|
437 | :param session: | |
416 | """ |
|
438 | """ | |
417 | session = session or requests.Session() |
|
439 | session = session or requests.Session() | |
418 | retry = Retry( |
|
440 | retry = Retry( | |
419 | total=retries, |
|
441 | total=retries, | |
420 | read=retries, |
|
442 | read=retries, | |
421 | connect=retries, |
|
443 | connect=retries, | |
422 | backoff_factor=backoff_factor, |
|
444 | backoff_factor=backoff_factor, | |
423 | status_forcelist=status_forcelist, |
|
445 | status_forcelist=status_forcelist, | |
424 | ) |
|
446 | ) | |
425 | adapter = HTTPAdapter(max_retries=retry) |
|
447 | adapter = HTTPAdapter(max_retries=retry) | |
426 | session.mount('http://', adapter) |
|
448 | session.mount('http://', adapter) | |
427 | session.mount('https://', adapter) |
|
449 | session.mount('https://', adapter) | |
428 | return session |
|
450 | return session |
@@ -1,251 +1,253 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2012-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2012-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | from __future__ import unicode_literals |
|
21 | from __future__ import unicode_literals | |
22 | import deform |
|
22 | import deform | |
23 | import logging |
|
23 | import logging | |
24 | import requests |
|
24 | import requests | |
25 | import colander |
|
25 | import colander | |
26 | import textwrap |
|
26 | import textwrap | |
27 | from mako.template import Template |
|
27 | from mako.template import Template | |
28 | from rhodecode import events |
|
28 | from rhodecode import events | |
29 | from rhodecode.model.validation_schema.widgets import CheckboxChoiceWidgetDesc |
|
29 | from rhodecode.model.validation_schema.widgets import CheckboxChoiceWidgetDesc | |
30 | from rhodecode.translation import _ |
|
30 | from rhodecode.translation import _ | |
31 | from rhodecode.lib import helpers as h |
|
31 | from rhodecode.lib import helpers as h | |
32 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask |
|
32 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask | |
33 | from rhodecode.lib.colander_utils import strip_whitespace |
|
33 | from rhodecode.lib.colander_utils import strip_whitespace | |
34 | from rhodecode.integrations.types.base import ( |
|
34 | from rhodecode.integrations.types.base import ( | |
35 | IntegrationTypeBase, CommitParsingDataHandler, render_with_traceback, |
|
35 | IntegrationTypeBase, CommitParsingDataHandler, render_with_traceback, | |
36 | requests_retry_call) |
|
36 | requests_retry_call) | |
37 |
|
37 | |||
38 | log = logging.getLogger(__name__) |
|
38 | log = logging.getLogger(__name__) | |
39 |
|
39 | |||
40 | REPO_PUSH_TEMPLATE = Template(''' |
|
40 | REPO_PUSH_TEMPLATE = Template(''' | |
41 | <b>${data['actor']['username']}</b> pushed to repo <a href="${data['repo']['url']}">${data['repo']['repo_name']}</a>: |
|
41 | <b>${data['actor']['username']}</b> pushed to repo <a href="${data['repo']['url']}">${data['repo']['repo_name']}</a>: | |
42 | <br> |
|
42 | <br> | |
43 | <ul> |
|
43 | <ul> | |
44 | %for branch, branch_commits in branches_commits.items(): |
|
44 | %for branch, branch_commits in branches_commits.items(): | |
45 | <li> |
|
45 | <li> | |
46 | % if branch: |
|
46 | % if branch: | |
47 | <a href="${branch_commits['branch']['url']}">branch: ${branch_commits['branch']['name']}</a> |
|
47 | <a href="${branch_commits['branch']['url']}">branch: ${branch_commits['branch']['name']}</a> | |
48 | % else: |
|
48 | % else: | |
49 | to trunk |
|
49 | to trunk | |
50 | % endif |
|
50 | % endif | |
51 | <ul> |
|
51 | <ul> | |
52 | % for commit in branch_commits['commits']: |
|
52 | % for commit in branch_commits['commits']: | |
53 | <li><a href="${commit['url']}">${commit['short_id']}</a> - ${commit['message_html']}</li> |
|
53 | <li><a href="${commit['url']}">${commit['short_id']}</a> - ${commit['message_html']}</li> | |
54 | % endfor |
|
54 | % endfor | |
55 | </ul> |
|
55 | </ul> | |
56 | </li> |
|
56 | </li> | |
57 | %endfor |
|
57 | %endfor | |
58 | ''') |
|
58 | ''') | |
59 |
|
59 | |||
60 |
|
60 | |||
61 | class HipchatSettingsSchema(colander.Schema): |
|
61 | class HipchatSettingsSchema(colander.Schema): | |
62 | color_choices = [ |
|
62 | color_choices = [ | |
63 | ('yellow', _('Yellow')), |
|
63 | ('yellow', _('Yellow')), | |
64 | ('red', _('Red')), |
|
64 | ('red', _('Red')), | |
65 | ('green', _('Green')), |
|
65 | ('green', _('Green')), | |
66 | ('purple', _('Purple')), |
|
66 | ('purple', _('Purple')), | |
67 | ('gray', _('Gray')), |
|
67 | ('gray', _('Gray')), | |
68 | ] |
|
68 | ] | |
69 |
|
69 | |||
70 | server_url = colander.SchemaNode( |
|
70 | server_url = colander.SchemaNode( | |
71 | colander.String(), |
|
71 | colander.String(), | |
72 | title=_('Hipchat server URL'), |
|
72 | title=_('Hipchat server URL'), | |
73 | description=_('Hipchat integration url.'), |
|
73 | description=_('Hipchat integration url.'), | |
74 | default='', |
|
74 | default='', | |
75 | preparer=strip_whitespace, |
|
75 | preparer=strip_whitespace, | |
76 | validator=colander.url, |
|
76 | validator=colander.url, | |
77 | widget=deform.widget.TextInputWidget( |
|
77 | widget=deform.widget.TextInputWidget( | |
78 | placeholder='https://?.hipchat.com/v2/room/?/notification?auth_token=?', |
|
78 | placeholder='https://?.hipchat.com/v2/room/?/notification?auth_token=?', | |
79 | ), |
|
79 | ), | |
80 | ) |
|
80 | ) | |
81 | notify = colander.SchemaNode( |
|
81 | notify = colander.SchemaNode( | |
82 | colander.Bool(), |
|
82 | colander.Bool(), | |
83 | title=_('Notify'), |
|
83 | title=_('Notify'), | |
84 | description=_('Make a notification to the users in room.'), |
|
84 | description=_('Make a notification to the users in room.'), | |
85 | missing=False, |
|
85 | missing=False, | |
86 | default=False, |
|
86 | default=False, | |
87 | ) |
|
87 | ) | |
88 | color = colander.SchemaNode( |
|
88 | color = colander.SchemaNode( | |
89 | colander.String(), |
|
89 | colander.String(), | |
90 | title=_('Color'), |
|
90 | title=_('Color'), | |
91 | description=_('Background color of message.'), |
|
91 | description=_('Background color of message.'), | |
92 | missing='', |
|
92 | missing='', | |
93 | validator=colander.OneOf([x[0] for x in color_choices]), |
|
93 | validator=colander.OneOf([x[0] for x in color_choices]), | |
94 | widget=deform.widget.Select2Widget( |
|
94 | widget=deform.widget.Select2Widget( | |
95 | values=color_choices, |
|
95 | values=color_choices, | |
96 | ), |
|
96 | ), | |
97 | ) |
|
97 | ) | |
98 |
|
98 | |||
99 |
|
99 | |||
100 | class HipchatIntegrationType(IntegrationTypeBase, CommitParsingDataHandler): |
|
100 | class HipchatIntegrationType(IntegrationTypeBase, CommitParsingDataHandler): | |
101 | key = 'hipchat' |
|
101 | key = 'hipchat' | |
102 | display_name = _('Hipchat') |
|
102 | display_name = _('Hipchat') | |
103 | description = _('Send events such as repo pushes and pull requests to ' |
|
103 | description = _('Send events such as repo pushes and pull requests to ' | |
104 | 'your hipchat channel.') |
|
104 | 'your hipchat channel.') | |
105 |
|
105 | |||
106 | @classmethod |
|
106 | @classmethod | |
107 | def icon(cls): |
|
107 | def icon(cls): | |
108 | return '''<?xml version="1.0" encoding="utf-8"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 1000 1000" enable-background="new 0 0 1000 1000" xml:space="preserve"><g><g transform="translate(0.000000,511.000000) scale(0.100000,-0.100000)"><path fill="#205281" d="M4197.1,4662.4c-1661.5-260.4-3018-1171.6-3682.6-2473.3C219.9,1613.6,100,1120.3,100,462.6c0-1014,376.8-1918.4,1127-2699.4C2326.7-3377.6,3878.5-3898.3,5701-3730.5l486.5,44.5l208.9-123.3c637.2-373.4,1551.8-640.6,2240.4-650.9c304.9-6.9,335.7,0,417.9,75.4c185,174.7,147.3,411.1-89.1,548.1c-315.2,181.6-620,544.7-733.1,870.1l-51.4,157.6l472.7,472.7c349.4,349.4,520.7,551.5,657.7,774.2c784.5,1281.2,784.5,2788.5,0,4052.6c-236.4,376.8-794.8,966-1178.4,1236.7c-572.1,407.7-1264.1,709.1-1993.7,870.1c-267.2,58.2-479.6,75.4-1038,82.2C4714.4,4686.4,4310.2,4679.6,4197.1,4662.4z M5947.6,3740.9c1856.7-380.3,3127.6-1709.4,3127.6-3275c0-1000.3-534.4-1949.2-1466.2-2600.1c-188.4-133.6-287.8-226.1-301.5-284.4c-41.1-157.6,263.8-938.6,397.4-1020.8c20.5-10.3,34.3-44.5,34.3-75.4c0-167.8-811.9,195.3-1363.4,609.8l-181.6,137l-332.3-58.2c-445.3-78.8-1281.2-78.8-1702.6,0C2796-2569.2,1734.1-1832.6,1220.2-801.5C983.8-318.5,905,51.5,929,613.3c27.4,640.6,243.2,1192.1,685.1,1740.3c620,770.8,1661.5,1305.2,2822.8,1452.5C4806.9,3854,5553.7,3819.7,5947.6,3740.9z"/><path fill="#205281" d="M2381.5-345.9c-75.4-106.2-68.5-167.8,34.3-322c332.3-500.2,1010.6-928.4,1760.8-1120.2c417.9-106.2,1226.4-106.2,1644.3,0c712.5,181.6,1270.9,517.3,1685.4,1014C7681-561.7,7715.3-424.7,7616-325.4c-89.1,89.1-167.9,65.1-431.7-133.6c-835.8-630.3-2028-856.4-3086.5-585.8C3683.3-938.6,3142-685,2830.3-448.7C2576.8-253.4,2463.7-229.4,2381.5-345.9z"/></g></g><!-- Svg Vector Icons : http://www.onlinewebfonts.com/icon --></svg>''' |
|
108 | return '''<?xml version="1.0" encoding="utf-8"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 1000 1000" enable-background="new 0 0 1000 1000" xml:space="preserve"><g><g transform="translate(0.000000,511.000000) scale(0.100000,-0.100000)"><path fill="#205281" d="M4197.1,4662.4c-1661.5-260.4-3018-1171.6-3682.6-2473.3C219.9,1613.6,100,1120.3,100,462.6c0-1014,376.8-1918.4,1127-2699.4C2326.7-3377.6,3878.5-3898.3,5701-3730.5l486.5,44.5l208.9-123.3c637.2-373.4,1551.8-640.6,2240.4-650.9c304.9-6.9,335.7,0,417.9,75.4c185,174.7,147.3,411.1-89.1,548.1c-315.2,181.6-620,544.7-733.1,870.1l-51.4,157.6l472.7,472.7c349.4,349.4,520.7,551.5,657.7,774.2c784.5,1281.2,784.5,2788.5,0,4052.6c-236.4,376.8-794.8,966-1178.4,1236.7c-572.1,407.7-1264.1,709.1-1993.7,870.1c-267.2,58.2-479.6,75.4-1038,82.2C4714.4,4686.4,4310.2,4679.6,4197.1,4662.4z M5947.6,3740.9c1856.7-380.3,3127.6-1709.4,3127.6-3275c0-1000.3-534.4-1949.2-1466.2-2600.1c-188.4-133.6-287.8-226.1-301.5-284.4c-41.1-157.6,263.8-938.6,397.4-1020.8c20.5-10.3,34.3-44.5,34.3-75.4c0-167.8-811.9,195.3-1363.4,609.8l-181.6,137l-332.3-58.2c-445.3-78.8-1281.2-78.8-1702.6,0C2796-2569.2,1734.1-1832.6,1220.2-801.5C983.8-318.5,905,51.5,929,613.3c27.4,640.6,243.2,1192.1,685.1,1740.3c620,770.8,1661.5,1305.2,2822.8,1452.5C4806.9,3854,5553.7,3819.7,5947.6,3740.9z"/><path fill="#205281" d="M2381.5-345.9c-75.4-106.2-68.5-167.8,34.3-322c332.3-500.2,1010.6-928.4,1760.8-1120.2c417.9-106.2,1226.4-106.2,1644.3,0c712.5,181.6,1270.9,517.3,1685.4,1014C7681-561.7,7715.3-424.7,7616-325.4c-89.1,89.1-167.9,65.1-431.7-133.6c-835.8-630.3-2028-856.4-3086.5-585.8C3683.3-938.6,3142-685,2830.3-448.7C2576.8-253.4,2463.7-229.4,2381.5-345.9z"/></g></g><!-- Svg Vector Icons : http://www.onlinewebfonts.com/icon --></svg>''' | |
109 |
|
109 | |||
110 | valid_events = [ |
|
110 | valid_events = [ | |
111 | events.PullRequestCloseEvent, |
|
111 | events.PullRequestCloseEvent, | |
112 | events.PullRequestMergeEvent, |
|
112 | events.PullRequestMergeEvent, | |
113 | events.PullRequestUpdateEvent, |
|
113 | events.PullRequestUpdateEvent, | |
114 | events.PullRequestCommentEvent, |
|
114 | events.PullRequestCommentEvent, | |
115 | events.PullRequestReviewEvent, |
|
115 | events.PullRequestReviewEvent, | |
116 | events.PullRequestCreateEvent, |
|
116 | events.PullRequestCreateEvent, | |
117 | events.RepoPushEvent, |
|
117 | events.RepoPushEvent, | |
118 | events.RepoCreateEvent, |
|
118 | events.RepoCreateEvent, | |
119 | ] |
|
119 | ] | |
120 |
|
120 | |||
121 | def send_event(self, event): |
|
121 | def send_event(self, event): | |
122 | if event.__class__ not in self.valid_events: |
|
122 | if event.__class__ not in self.valid_events: | |
123 | log.debug('event %r not present in valid event list (%s)', event, self.valid_events) |
|
123 | log.debug('event %r not present in valid event list (%s)', event, self.valid_events) | |
124 | return |
|
124 | return | |
125 |
|
125 | |||
126 | if not self.event_enabled(event): |
|
126 | if not self.event_enabled(event): | |
127 | return |
|
127 | return | |
128 |
|
128 | |||
129 | data = event.as_dict() |
|
129 | data = event.as_dict() | |
130 |
|
130 | |||
131 | text = '<b>%s<b> caused a <b>%s</b> event' % ( |
|
131 | text = '<b>%s<b> caused a <b>%s</b> event' % ( | |
132 | data['actor']['username'], event.name) |
|
132 | data['actor']['username'], event.name) | |
133 |
|
133 | |||
134 | if isinstance(event, events.PullRequestCommentEvent): |
|
134 | if isinstance(event, events.PullRequestCommentEvent): | |
135 | text = self.format_pull_request_comment_event(event, data) |
|
135 | text = self.format_pull_request_comment_event(event, data) | |
|
136 | elif isinstance(event, events.PullRequestCommentEditEvent): | |||
|
137 | text = self.format_pull_request_comment_event(event, data) | |||
136 | elif isinstance(event, events.PullRequestReviewEvent): |
|
138 | elif isinstance(event, events.PullRequestReviewEvent): | |
137 | text = self.format_pull_request_review_event(event, data) |
|
139 | text = self.format_pull_request_review_event(event, data) | |
138 | elif isinstance(event, events.PullRequestEvent): |
|
140 | elif isinstance(event, events.PullRequestEvent): | |
139 | text = self.format_pull_request_event(event, data) |
|
141 | text = self.format_pull_request_event(event, data) | |
140 | elif isinstance(event, events.RepoPushEvent): |
|
142 | elif isinstance(event, events.RepoPushEvent): | |
141 | text = self.format_repo_push_event(data) |
|
143 | text = self.format_repo_push_event(data) | |
142 | elif isinstance(event, events.RepoCreateEvent): |
|
144 | elif isinstance(event, events.RepoCreateEvent): | |
143 | text = self.format_repo_create_event(data) |
|
145 | text = self.format_repo_create_event(data) | |
144 | else: |
|
146 | else: | |
145 | log.error('unhandled event type: %r', event) |
|
147 | log.error('unhandled event type: %r', event) | |
146 |
|
148 | |||
147 | run_task(post_text_to_hipchat, self.settings, text) |
|
149 | run_task(post_text_to_hipchat, self.settings, text) | |
148 |
|
150 | |||
149 | def settings_schema(self): |
|
151 | def settings_schema(self): | |
150 | schema = HipchatSettingsSchema() |
|
152 | schema = HipchatSettingsSchema() | |
151 | schema.add(colander.SchemaNode( |
|
153 | schema.add(colander.SchemaNode( | |
152 | colander.Set(), |
|
154 | colander.Set(), | |
153 | widget=CheckboxChoiceWidgetDesc( |
|
155 | widget=CheckboxChoiceWidgetDesc( | |
154 | values=sorted( |
|
156 | values=sorted( | |
155 | [(e.name, e.display_name, e.description) for e in self.valid_events] |
|
157 | [(e.name, e.display_name, e.description) for e in self.valid_events] | |
156 | ), |
|
158 | ), | |
157 | ), |
|
159 | ), | |
158 | description="List of events activated for this integration", |
|
160 | description="List of events activated for this integration", | |
159 | name='events' |
|
161 | name='events' | |
160 | )) |
|
162 | )) | |
161 |
|
163 | |||
162 | return schema |
|
164 | return schema | |
163 |
|
165 | |||
164 | def format_pull_request_comment_event(self, event, data): |
|
166 | def format_pull_request_comment_event(self, event, data): | |
165 | comment_text = data['comment']['text'] |
|
167 | comment_text = data['comment']['text'] | |
166 | if len(comment_text) > 200: |
|
168 | if len(comment_text) > 200: | |
167 | comment_text = '{comment_text}<a href="{comment_url}">...<a/>'.format( |
|
169 | comment_text = '{comment_text}<a href="{comment_url}">...<a/>'.format( | |
168 | comment_text=h.html_escape(comment_text[:200]), |
|
170 | comment_text=h.html_escape(comment_text[:200]), | |
169 | comment_url=data['comment']['url'], |
|
171 | comment_url=data['comment']['url'], | |
170 | ) |
|
172 | ) | |
171 |
|
173 | |||
172 | comment_status = '' |
|
174 | comment_status = '' | |
173 | if data['comment']['status']: |
|
175 | if data['comment']['status']: | |
174 | comment_status = '[{}]: '.format(data['comment']['status']) |
|
176 | comment_status = '[{}]: '.format(data['comment']['status']) | |
175 |
|
177 | |||
176 | return (textwrap.dedent( |
|
178 | return (textwrap.dedent( | |
177 | ''' |
|
179 | ''' | |
178 | {user} commented on pull request <a href="{pr_url}">{number}</a> - {pr_title}: |
|
180 | {user} commented on pull request <a href="{pr_url}">{number}</a> - {pr_title}: | |
179 | >>> {comment_status}{comment_text} |
|
181 | >>> {comment_status}{comment_text} | |
180 | ''').format( |
|
182 | ''').format( | |
181 | comment_status=comment_status, |
|
183 | comment_status=comment_status, | |
182 | user=data['actor']['username'], |
|
184 | user=data['actor']['username'], | |
183 | number=data['pullrequest']['pull_request_id'], |
|
185 | number=data['pullrequest']['pull_request_id'], | |
184 | pr_url=data['pullrequest']['url'], |
|
186 | pr_url=data['pullrequest']['url'], | |
185 | pr_status=data['pullrequest']['status'], |
|
187 | pr_status=data['pullrequest']['status'], | |
186 | pr_title=h.html_escape(data['pullrequest']['title']), |
|
188 | pr_title=h.html_escape(data['pullrequest']['title']), | |
187 | comment_text=h.html_escape(comment_text) |
|
189 | comment_text=h.html_escape(comment_text) | |
188 | ) |
|
190 | ) | |
189 | ) |
|
191 | ) | |
190 |
|
192 | |||
191 | def format_pull_request_review_event(self, event, data): |
|
193 | def format_pull_request_review_event(self, event, data): | |
192 | return (textwrap.dedent( |
|
194 | return (textwrap.dedent( | |
193 | ''' |
|
195 | ''' | |
194 | Status changed to {pr_status} for pull request <a href="{pr_url}">#{number}</a> - {pr_title} |
|
196 | Status changed to {pr_status} for pull request <a href="{pr_url}">#{number}</a> - {pr_title} | |
195 | ''').format( |
|
197 | ''').format( | |
196 | user=data['actor']['username'], |
|
198 | user=data['actor']['username'], | |
197 | number=data['pullrequest']['pull_request_id'], |
|
199 | number=data['pullrequest']['pull_request_id'], | |
198 | pr_url=data['pullrequest']['url'], |
|
200 | pr_url=data['pullrequest']['url'], | |
199 | pr_status=data['pullrequest']['status'], |
|
201 | pr_status=data['pullrequest']['status'], | |
200 | pr_title=h.html_escape(data['pullrequest']['title']), |
|
202 | pr_title=h.html_escape(data['pullrequest']['title']), | |
201 | ) |
|
203 | ) | |
202 | ) |
|
204 | ) | |
203 |
|
205 | |||
204 | def format_pull_request_event(self, event, data): |
|
206 | def format_pull_request_event(self, event, data): | |
205 | action = { |
|
207 | action = { | |
206 | events.PullRequestCloseEvent: 'closed', |
|
208 | events.PullRequestCloseEvent: 'closed', | |
207 | events.PullRequestMergeEvent: 'merged', |
|
209 | events.PullRequestMergeEvent: 'merged', | |
208 | events.PullRequestUpdateEvent: 'updated', |
|
210 | events.PullRequestUpdateEvent: 'updated', | |
209 | events.PullRequestCreateEvent: 'created', |
|
211 | events.PullRequestCreateEvent: 'created', | |
210 | }.get(event.__class__, str(event.__class__)) |
|
212 | }.get(event.__class__, str(event.__class__)) | |
211 |
|
213 | |||
212 | return ('Pull request <a href="{url}">#{number}</a> - {title} ' |
|
214 | return ('Pull request <a href="{url}">#{number}</a> - {title} ' | |
213 | '{action} by <b>{user}</b>').format( |
|
215 | '{action} by <b>{user}</b>').format( | |
214 | user=data['actor']['username'], |
|
216 | user=data['actor']['username'], | |
215 | number=data['pullrequest']['pull_request_id'], |
|
217 | number=data['pullrequest']['pull_request_id'], | |
216 | url=data['pullrequest']['url'], |
|
218 | url=data['pullrequest']['url'], | |
217 | title=h.html_escape(data['pullrequest']['title']), |
|
219 | title=h.html_escape(data['pullrequest']['title']), | |
218 | action=action |
|
220 | action=action | |
219 | ) |
|
221 | ) | |
220 |
|
222 | |||
221 | def format_repo_push_event(self, data): |
|
223 | def format_repo_push_event(self, data): | |
222 | branches_commits = self.aggregate_branch_data( |
|
224 | branches_commits = self.aggregate_branch_data( | |
223 | data['push']['branches'], data['push']['commits']) |
|
225 | data['push']['branches'], data['push']['commits']) | |
224 |
|
226 | |||
225 | result = render_with_traceback( |
|
227 | result = render_with_traceback( | |
226 | REPO_PUSH_TEMPLATE, |
|
228 | REPO_PUSH_TEMPLATE, | |
227 | data=data, |
|
229 | data=data, | |
228 | branches_commits=branches_commits, |
|
230 | branches_commits=branches_commits, | |
229 | ) |
|
231 | ) | |
230 | return result |
|
232 | return result | |
231 |
|
233 | |||
232 | def format_repo_create_event(self, data): |
|
234 | def format_repo_create_event(self, data): | |
233 | return '<a href="{}">{}</a> ({}) repository created by <b>{}</b>'.format( |
|
235 | return '<a href="{}">{}</a> ({}) repository created by <b>{}</b>'.format( | |
234 | data['repo']['url'], |
|
236 | data['repo']['url'], | |
235 | h.html_escape(data['repo']['repo_name']), |
|
237 | h.html_escape(data['repo']['repo_name']), | |
236 | data['repo']['repo_type'], |
|
238 | data['repo']['repo_type'], | |
237 | data['actor']['username'], |
|
239 | data['actor']['username'], | |
238 | ) |
|
240 | ) | |
239 |
|
241 | |||
240 |
|
242 | |||
241 | @async_task(ignore_result=True, base=RequestContextTask) |
|
243 | @async_task(ignore_result=True, base=RequestContextTask) | |
242 | def post_text_to_hipchat(settings, text): |
|
244 | def post_text_to_hipchat(settings, text): | |
243 | log.debug('sending %s to hipchat %s', text, settings['server_url']) |
|
245 | log.debug('sending %s to hipchat %s', text, settings['server_url']) | |
244 | json_message = { |
|
246 | json_message = { | |
245 | "message": text, |
|
247 | "message": text, | |
246 | "color": settings.get('color', 'yellow'), |
|
248 | "color": settings.get('color', 'yellow'), | |
247 | "notify": settings.get('notify', False), |
|
249 | "notify": settings.get('notify', False), | |
248 | } |
|
250 | } | |
249 | req_session = requests_retry_call() |
|
251 | req_session = requests_retry_call() | |
250 | resp = req_session.post(settings['server_url'], json=json_message, timeout=60) |
|
252 | resp = req_session.post(settings['server_url'], json=json_message, timeout=60) | |
251 | resp.raise_for_status() # raise exception on a failed request |
|
253 | resp.raise_for_status() # raise exception on a failed request |
@@ -1,351 +1,354 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2012-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2012-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | from __future__ import unicode_literals |
|
21 | from __future__ import unicode_literals | |
22 | import re |
|
22 | import re | |
23 | import time |
|
23 | import time | |
24 | import textwrap |
|
24 | import textwrap | |
25 | import logging |
|
25 | import logging | |
26 |
|
26 | |||
27 | import deform |
|
27 | import deform | |
28 | import requests |
|
28 | import requests | |
29 | import colander |
|
29 | import colander | |
30 | from mako.template import Template |
|
30 | from mako.template import Template | |
31 |
|
31 | |||
32 | from rhodecode import events |
|
32 | from rhodecode import events | |
33 | from rhodecode.model.validation_schema.widgets import CheckboxChoiceWidgetDesc |
|
33 | from rhodecode.model.validation_schema.widgets import CheckboxChoiceWidgetDesc | |
34 | from rhodecode.translation import _ |
|
34 | from rhodecode.translation import _ | |
35 | from rhodecode.lib import helpers as h |
|
35 | from rhodecode.lib import helpers as h | |
36 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask |
|
36 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask | |
37 | from rhodecode.lib.colander_utils import strip_whitespace |
|
37 | from rhodecode.lib.colander_utils import strip_whitespace | |
38 | from rhodecode.integrations.types.base import ( |
|
38 | from rhodecode.integrations.types.base import ( | |
39 | IntegrationTypeBase, CommitParsingDataHandler, render_with_traceback, |
|
39 | IntegrationTypeBase, CommitParsingDataHandler, render_with_traceback, | |
40 | requests_retry_call) |
|
40 | requests_retry_call) | |
41 |
|
41 | |||
42 | log = logging.getLogger(__name__) |
|
42 | log = logging.getLogger(__name__) | |
43 |
|
43 | |||
44 |
|
44 | |||
45 | def html_to_slack_links(message): |
|
45 | def html_to_slack_links(message): | |
46 | return re.compile(r'<a .*?href=["\'](.+?)".*?>(.+?)</a>').sub( |
|
46 | return re.compile(r'<a .*?href=["\'](.+?)".*?>(.+?)</a>').sub( | |
47 | r'<\1|\2>', message) |
|
47 | r'<\1|\2>', message) | |
48 |
|
48 | |||
49 |
|
49 | |||
50 | REPO_PUSH_TEMPLATE = Template(''' |
|
50 | REPO_PUSH_TEMPLATE = Template(''' | |
51 | <% |
|
51 | <% | |
52 | def branch_text(branch): |
|
52 | def branch_text(branch): | |
53 | if branch: |
|
53 | if branch: | |
54 | return 'on branch: <{}|{}>'.format(branch_commits['branch']['url'], branch_commits['branch']['name']) |
|
54 | return 'on branch: <{}|{}>'.format(branch_commits['branch']['url'], branch_commits['branch']['name']) | |
55 | else: |
|
55 | else: | |
56 | ## case for SVN no branch push... |
|
56 | ## case for SVN no branch push... | |
57 | return 'to trunk' |
|
57 | return 'to trunk' | |
58 | %> \ |
|
58 | %> \ | |
59 |
|
59 | |||
60 | % for branch, branch_commits in branches_commits.items(): |
|
60 | % for branch, branch_commits in branches_commits.items(): | |
61 | ${len(branch_commits['commits'])} ${'commit' if len(branch_commits['commits']) == 1 else 'commits'} ${branch_text(branch)} |
|
61 | ${len(branch_commits['commits'])} ${'commit' if len(branch_commits['commits']) == 1 else 'commits'} ${branch_text(branch)} | |
62 | % for commit in branch_commits['commits']: |
|
62 | % for commit in branch_commits['commits']: | |
63 | `<${commit['url']}|${commit['short_id']}>` - ${commit['message_html']|html_to_slack_links} |
|
63 | `<${commit['url']}|${commit['short_id']}>` - ${commit['message_html']|html_to_slack_links} | |
64 | % endfor |
|
64 | % endfor | |
65 | % endfor |
|
65 | % endfor | |
66 | ''') |
|
66 | ''') | |
67 |
|
67 | |||
68 |
|
68 | |||
69 | class SlackSettingsSchema(colander.Schema): |
|
69 | class SlackSettingsSchema(colander.Schema): | |
70 | service = colander.SchemaNode( |
|
70 | service = colander.SchemaNode( | |
71 | colander.String(), |
|
71 | colander.String(), | |
72 | title=_('Slack service URL'), |
|
72 | title=_('Slack service URL'), | |
73 | description=h.literal(_( |
|
73 | description=h.literal(_( | |
74 | 'This can be setup at the ' |
|
74 | 'This can be setup at the ' | |
75 | '<a href="https://my.slack.com/services/new/incoming-webhook/">' |
|
75 | '<a href="https://my.slack.com/services/new/incoming-webhook/">' | |
76 | 'slack app manager</a>')), |
|
76 | 'slack app manager</a>')), | |
77 | default='', |
|
77 | default='', | |
78 | preparer=strip_whitespace, |
|
78 | preparer=strip_whitespace, | |
79 | validator=colander.url, |
|
79 | validator=colander.url, | |
80 | widget=deform.widget.TextInputWidget( |
|
80 | widget=deform.widget.TextInputWidget( | |
81 | placeholder='https://hooks.slack.com/services/...', |
|
81 | placeholder='https://hooks.slack.com/services/...', | |
82 | ), |
|
82 | ), | |
83 | ) |
|
83 | ) | |
84 | username = colander.SchemaNode( |
|
84 | username = colander.SchemaNode( | |
85 | colander.String(), |
|
85 | colander.String(), | |
86 | title=_('Username'), |
|
86 | title=_('Username'), | |
87 | description=_('Username to show notifications coming from.'), |
|
87 | description=_('Username to show notifications coming from.'), | |
88 | missing='Rhodecode', |
|
88 | missing='Rhodecode', | |
89 | preparer=strip_whitespace, |
|
89 | preparer=strip_whitespace, | |
90 | widget=deform.widget.TextInputWidget( |
|
90 | widget=deform.widget.TextInputWidget( | |
91 | placeholder='Rhodecode' |
|
91 | placeholder='Rhodecode' | |
92 | ), |
|
92 | ), | |
93 | ) |
|
93 | ) | |
94 | channel = colander.SchemaNode( |
|
94 | channel = colander.SchemaNode( | |
95 | colander.String(), |
|
95 | colander.String(), | |
96 | title=_('Channel'), |
|
96 | title=_('Channel'), | |
97 | description=_('Channel to send notifications to.'), |
|
97 | description=_('Channel to send notifications to.'), | |
98 | missing='', |
|
98 | missing='', | |
99 | preparer=strip_whitespace, |
|
99 | preparer=strip_whitespace, | |
100 | widget=deform.widget.TextInputWidget( |
|
100 | widget=deform.widget.TextInputWidget( | |
101 | placeholder='#general' |
|
101 | placeholder='#general' | |
102 | ), |
|
102 | ), | |
103 | ) |
|
103 | ) | |
104 | icon_emoji = colander.SchemaNode( |
|
104 | icon_emoji = colander.SchemaNode( | |
105 | colander.String(), |
|
105 | colander.String(), | |
106 | title=_('Emoji'), |
|
106 | title=_('Emoji'), | |
107 | description=_('Emoji to use eg. :studio_microphone:'), |
|
107 | description=_('Emoji to use eg. :studio_microphone:'), | |
108 | missing='', |
|
108 | missing='', | |
109 | preparer=strip_whitespace, |
|
109 | preparer=strip_whitespace, | |
110 | widget=deform.widget.TextInputWidget( |
|
110 | widget=deform.widget.TextInputWidget( | |
111 | placeholder=':studio_microphone:' |
|
111 | placeholder=':studio_microphone:' | |
112 | ), |
|
112 | ), | |
113 | ) |
|
113 | ) | |
114 |
|
114 | |||
115 |
|
115 | |||
116 | class SlackIntegrationType(IntegrationTypeBase, CommitParsingDataHandler): |
|
116 | class SlackIntegrationType(IntegrationTypeBase, CommitParsingDataHandler): | |
117 | key = 'slack' |
|
117 | key = 'slack' | |
118 | display_name = _('Slack') |
|
118 | display_name = _('Slack') | |
119 | description = _('Send events such as repo pushes and pull requests to ' |
|
119 | description = _('Send events such as repo pushes and pull requests to ' | |
120 | 'your slack channel.') |
|
120 | 'your slack channel.') | |
121 |
|
121 | |||
122 | @classmethod |
|
122 | @classmethod | |
123 | def icon(cls): |
|
123 | def icon(cls): | |
124 | return '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 256" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M165.963541,15.8384262 C162.07318,3.86308197 149.212328,-2.69009836 137.239082,1.20236066 C125.263738,5.09272131 118.710557,17.9535738 122.603016,29.9268197 L181.550164,211.292328 C185.597902,222.478689 197.682361,228.765377 209.282098,225.426885 C221.381246,221.943607 228.756984,209.093246 224.896,197.21023 C224.749115,196.756984 165.963541,15.8384262 165.963541,15.8384262" fill="#DFA22F"></path><path d="M74.6260984,45.515541 C70.7336393,33.5422951 57.8727869,26.9891148 45.899541,30.8794754 C33.9241967,34.7698361 27.3710164,47.6306885 31.2634754,59.6060328 L90.210623,240.971541 C94.2583607,252.157902 106.34282,258.44459 117.942557,255.104 C130.041705,251.62282 137.417443,238.772459 133.556459,226.887344 C133.409574,226.436197 74.6260984,45.515541 74.6260984,45.515541" fill="#3CB187"></path><path d="M240.161574,166.045377 C252.136918,162.155016 258.688,149.294164 254.797639,137.31882 C250.907279,125.345574 238.046426,118.792393 226.07318,122.682754 L44.7076721,181.632 C33.5213115,185.677639 27.234623,197.762098 30.5731148,209.361836 C34.0563934,221.460984 46.9067541,228.836721 58.7897705,224.975738 C59.2430164,224.828852 240.161574,166.045377 240.161574,166.045377" fill="#CE1E5B"></path><path d="M82.507541,217.270557 C94.312918,213.434754 109.528131,208.491016 125.855475,203.186361 C122.019672,191.380984 117.075934,176.163672 111.76918,159.83423 L68.4191475,173.924721 L82.507541,217.270557" fill="#392538"></path><path d="M173.847082,187.591344 C190.235279,182.267803 205.467279,177.31777 217.195016,173.507148 C213.359213,161.70177 208.413377,146.480262 203.106623,130.146623 L159.75659,144.237115 L173.847082,187.591344" fill="#BB242A"></path><path d="M210.484459,74.7058361 C222.457705,70.8154754 229.010885,57.954623 225.120525,45.9792787 
C221.230164,34.0060328 208.369311,27.4528525 196.393967,31.3432131 L15.028459,90.292459 C3.84209836,94.3380984 -2.44459016,106.422557 0.896,118.022295 C4.37718033,130.121443 17.227541,137.49718 29.1126557,133.636197 C29.5638033,133.489311 210.484459,74.7058361 210.484459,74.7058361" fill="#72C5CD"></path><path d="M52.8220328,125.933115 C64.6274098,122.097311 79.8468197,117.151475 96.1762623,111.84682 C90.8527213,95.4565246 85.9026885,80.2245246 82.0920656,68.4946885 L38.731541,82.5872787 L52.8220328,125.933115" fill="#248C73"></path><path d="M144.159475,96.256 C160.551869,90.9303607 175.785967,85.9803279 187.515803,82.1676066 C182.190164,65.7752131 177.240131,50.5390164 173.42741,38.807082 L130.068984,52.8996721 L144.159475,96.256" fill="#62803A"></path></g></svg>''' |
|
124 | return '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 256" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M165.963541,15.8384262 C162.07318,3.86308197 149.212328,-2.69009836 137.239082,1.20236066 C125.263738,5.09272131 118.710557,17.9535738 122.603016,29.9268197 L181.550164,211.292328 C185.597902,222.478689 197.682361,228.765377 209.282098,225.426885 C221.381246,221.943607 228.756984,209.093246 224.896,197.21023 C224.749115,196.756984 165.963541,15.8384262 165.963541,15.8384262" fill="#DFA22F"></path><path d="M74.6260984,45.515541 C70.7336393,33.5422951 57.8727869,26.9891148 45.899541,30.8794754 C33.9241967,34.7698361 27.3710164,47.6306885 31.2634754,59.6060328 L90.210623,240.971541 C94.2583607,252.157902 106.34282,258.44459 117.942557,255.104 C130.041705,251.62282 137.417443,238.772459 133.556459,226.887344 C133.409574,226.436197 74.6260984,45.515541 74.6260984,45.515541" fill="#3CB187"></path><path d="M240.161574,166.045377 C252.136918,162.155016 258.688,149.294164 254.797639,137.31882 C250.907279,125.345574 238.046426,118.792393 226.07318,122.682754 L44.7076721,181.632 C33.5213115,185.677639 27.234623,197.762098 30.5731148,209.361836 C34.0563934,221.460984 46.9067541,228.836721 58.7897705,224.975738 C59.2430164,224.828852 240.161574,166.045377 240.161574,166.045377" fill="#CE1E5B"></path><path d="M82.507541,217.270557 C94.312918,213.434754 109.528131,208.491016 125.855475,203.186361 C122.019672,191.380984 117.075934,176.163672 111.76918,159.83423 L68.4191475,173.924721 L82.507541,217.270557" fill="#392538"></path><path d="M173.847082,187.591344 C190.235279,182.267803 205.467279,177.31777 217.195016,173.507148 C213.359213,161.70177 208.413377,146.480262 203.106623,130.146623 L159.75659,144.237115 L173.847082,187.591344" fill="#BB242A"></path><path d="M210.484459,74.7058361 C222.457705,70.8154754 229.010885,57.954623 225.120525,45.9792787 
C221.230164,34.0060328 208.369311,27.4528525 196.393967,31.3432131 L15.028459,90.292459 C3.84209836,94.3380984 -2.44459016,106.422557 0.896,118.022295 C4.37718033,130.121443 17.227541,137.49718 29.1126557,133.636197 C29.5638033,133.489311 210.484459,74.7058361 210.484459,74.7058361" fill="#72C5CD"></path><path d="M52.8220328,125.933115 C64.6274098,122.097311 79.8468197,117.151475 96.1762623,111.84682 C90.8527213,95.4565246 85.9026885,80.2245246 82.0920656,68.4946885 L38.731541,82.5872787 L52.8220328,125.933115" fill="#248C73"></path><path d="M144.159475,96.256 C160.551869,90.9303607 175.785967,85.9803279 187.515803,82.1676066 C182.190164,65.7752131 177.240131,50.5390164 173.42741,38.807082 L130.068984,52.8996721 L144.159475,96.256" fill="#62803A"></path></g></svg>''' | |
125 |
|
125 | |||
126 | valid_events = [ |
|
126 | valid_events = [ | |
127 | events.PullRequestCloseEvent, |
|
127 | events.PullRequestCloseEvent, | |
128 | events.PullRequestMergeEvent, |
|
128 | events.PullRequestMergeEvent, | |
129 | events.PullRequestUpdateEvent, |
|
129 | events.PullRequestUpdateEvent, | |
130 | events.PullRequestCommentEvent, |
|
130 | events.PullRequestCommentEvent, | |
131 | events.PullRequestReviewEvent, |
|
131 | events.PullRequestReviewEvent, | |
132 | events.PullRequestCreateEvent, |
|
132 | events.PullRequestCreateEvent, | |
133 | events.RepoPushEvent, |
|
133 | events.RepoPushEvent, | |
134 | events.RepoCreateEvent, |
|
134 | events.RepoCreateEvent, | |
135 | ] |
|
135 | ] | |
136 |
|
136 | |||
137 | def send_event(self, event): |
|
137 | def send_event(self, event): | |
138 | log.debug('handling event %s with integration %s', event.name, self) |
|
138 | log.debug('handling event %s with integration %s', event.name, self) | |
139 |
|
139 | |||
140 | if event.__class__ not in self.valid_events: |
|
140 | if event.__class__ not in self.valid_events: | |
141 | log.debug('event %r not present in valid event list (%s)', event, self.valid_events) |
|
141 | log.debug('event %r not present in valid event list (%s)', event, self.valid_events) | |
142 | return |
|
142 | return | |
143 |
|
143 | |||
144 | if not self.event_enabled(event): |
|
144 | if not self.event_enabled(event): | |
145 | return |
|
145 | return | |
146 |
|
146 | |||
147 | data = event.as_dict() |
|
147 | data = event.as_dict() | |
148 |
|
148 | |||
149 | # defaults |
|
149 | # defaults | |
150 | title = '*%s* caused a *%s* event' % ( |
|
150 | title = '*%s* caused a *%s* event' % ( | |
151 | data['actor']['username'], event.name) |
|
151 | data['actor']['username'], event.name) | |
152 | text = '*%s* caused a *%s* event' % ( |
|
152 | text = '*%s* caused a *%s* event' % ( | |
153 | data['actor']['username'], event.name) |
|
153 | data['actor']['username'], event.name) | |
154 | fields = None |
|
154 | fields = None | |
155 | overrides = None |
|
155 | overrides = None | |
156 |
|
156 | |||
157 | if isinstance(event, events.PullRequestCommentEvent): |
|
157 | if isinstance(event, events.PullRequestCommentEvent): | |
158 | (title, text, fields, overrides) \ |
|
158 | (title, text, fields, overrides) \ | |
159 | = self.format_pull_request_comment_event(event, data) |
|
159 | = self.format_pull_request_comment_event(event, data) | |
|
160 | elif isinstance(event, events.PullRequestCommentEditEvent): | |||
|
161 | (title, text, fields, overrides) \ | |||
|
162 | = self.format_pull_request_comment_event(event, data) | |||
160 | elif isinstance(event, events.PullRequestReviewEvent): |
|
163 | elif isinstance(event, events.PullRequestReviewEvent): | |
161 | title, text = self.format_pull_request_review_event(event, data) |
|
164 | title, text = self.format_pull_request_review_event(event, data) | |
162 | elif isinstance(event, events.PullRequestEvent): |
|
165 | elif isinstance(event, events.PullRequestEvent): | |
163 | title, text = self.format_pull_request_event(event, data) |
|
166 | title, text = self.format_pull_request_event(event, data) | |
164 | elif isinstance(event, events.RepoPushEvent): |
|
167 | elif isinstance(event, events.RepoPushEvent): | |
165 | title, text = self.format_repo_push_event(data) |
|
168 | title, text = self.format_repo_push_event(data) | |
166 | elif isinstance(event, events.RepoCreateEvent): |
|
169 | elif isinstance(event, events.RepoCreateEvent): | |
167 | title, text = self.format_repo_create_event(data) |
|
170 | title, text = self.format_repo_create_event(data) | |
168 | else: |
|
171 | else: | |
169 | log.error('unhandled event type: %r', event) |
|
172 | log.error('unhandled event type: %r', event) | |
170 |
|
173 | |||
171 | run_task(post_text_to_slack, self.settings, title, text, fields, overrides) |
|
174 | run_task(post_text_to_slack, self.settings, title, text, fields, overrides) | |
172 |
|
175 | |||
173 | def settings_schema(self): |
|
176 | def settings_schema(self): | |
174 | schema = SlackSettingsSchema() |
|
177 | schema = SlackSettingsSchema() | |
175 | schema.add(colander.SchemaNode( |
|
178 | schema.add(colander.SchemaNode( | |
176 | colander.Set(), |
|
179 | colander.Set(), | |
177 | widget=CheckboxChoiceWidgetDesc( |
|
180 | widget=CheckboxChoiceWidgetDesc( | |
178 | values=sorted( |
|
181 | values=sorted( | |
179 | [(e.name, e.display_name, e.description) for e in self.valid_events] |
|
182 | [(e.name, e.display_name, e.description) for e in self.valid_events] | |
180 | ), |
|
183 | ), | |
181 | ), |
|
184 | ), | |
182 | description="List of events activated for this integration", |
|
185 | description="List of events activated for this integration", | |
183 | name='events' |
|
186 | name='events' | |
184 | )) |
|
187 | )) | |
185 |
|
188 | |||
186 | return schema |
|
189 | return schema | |
187 |
|
190 | |||
188 | def format_pull_request_comment_event(self, event, data): |
|
191 | def format_pull_request_comment_event(self, event, data): | |
189 | comment_text = data['comment']['text'] |
|
192 | comment_text = data['comment']['text'] | |
190 | if len(comment_text) > 200: |
|
193 | if len(comment_text) > 200: | |
191 | comment_text = '<{comment_url}|{comment_text}...>'.format( |
|
194 | comment_text = '<{comment_url}|{comment_text}...>'.format( | |
192 | comment_text=comment_text[:200], |
|
195 | comment_text=comment_text[:200], | |
193 | comment_url=data['comment']['url'], |
|
196 | comment_url=data['comment']['url'], | |
194 | ) |
|
197 | ) | |
195 |
|
198 | |||
196 | fields = None |
|
199 | fields = None | |
197 | overrides = None |
|
200 | overrides = None | |
198 | status_text = None |
|
201 | status_text = None | |
199 |
|
202 | |||
200 | if data['comment']['status']: |
|
203 | if data['comment']['status']: | |
201 | status_color = { |
|
204 | status_color = { | |
202 | 'approved': '#0ac878', |
|
205 | 'approved': '#0ac878', | |
203 | 'rejected': '#e85e4d'}.get(data['comment']['status']) |
|
206 | 'rejected': '#e85e4d'}.get(data['comment']['status']) | |
204 |
|
207 | |||
205 | if status_color: |
|
208 | if status_color: | |
206 | overrides = {"color": status_color} |
|
209 | overrides = {"color": status_color} | |
207 |
|
210 | |||
208 | status_text = data['comment']['status'] |
|
211 | status_text = data['comment']['status'] | |
209 |
|
212 | |||
210 | if data['comment']['file']: |
|
213 | if data['comment']['file']: | |
211 | fields = [ |
|
214 | fields = [ | |
212 | { |
|
215 | { | |
213 | "title": "file", |
|
216 | "title": "file", | |
214 | "value": data['comment']['file'] |
|
217 | "value": data['comment']['file'] | |
215 | }, |
|
218 | }, | |
216 | { |
|
219 | { | |
217 | "title": "line", |
|
220 | "title": "line", | |
218 | "value": data['comment']['line'] |
|
221 | "value": data['comment']['line'] | |
219 | } |
|
222 | } | |
220 | ] |
|
223 | ] | |
221 |
|
224 | |||
222 | template = Template(textwrap.dedent(r''' |
|
225 | template = Template(textwrap.dedent(r''' | |
223 | *${data['actor']['username']}* left ${data['comment']['type']} on pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']}>: |
|
226 | *${data['actor']['username']}* left ${data['comment']['type']} on pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']}>: | |
224 | ''')) |
|
227 | ''')) | |
225 | title = render_with_traceback( |
|
228 | title = render_with_traceback( | |
226 | template, data=data, comment=event.comment) |
|
229 | template, data=data, comment=event.comment) | |
227 |
|
230 | |||
228 | template = Template(textwrap.dedent(r''' |
|
231 | template = Template(textwrap.dedent(r''' | |
229 | *pull request title*: ${pr_title} |
|
232 | *pull request title*: ${pr_title} | |
230 | % if status_text: |
|
233 | % if status_text: | |
231 | *submitted status*: `${status_text}` |
|
234 | *submitted status*: `${status_text}` | |
232 | % endif |
|
235 | % endif | |
233 | >>> ${comment_text} |
|
236 | >>> ${comment_text} | |
234 | ''')) |
|
237 | ''')) | |
235 | text = render_with_traceback( |
|
238 | text = render_with_traceback( | |
236 | template, |
|
239 | template, | |
237 | comment_text=comment_text, |
|
240 | comment_text=comment_text, | |
238 | pr_title=data['pullrequest']['title'], |
|
241 | pr_title=data['pullrequest']['title'], | |
239 | status_text=status_text) |
|
242 | status_text=status_text) | |
240 |
|
243 | |||
241 | return title, text, fields, overrides |
|
244 | return title, text, fields, overrides | |
242 |
|
245 | |||
243 | def format_pull_request_review_event(self, event, data): |
|
246 | def format_pull_request_review_event(self, event, data): | |
244 | template = Template(textwrap.dedent(r''' |
|
247 | template = Template(textwrap.dedent(r''' | |
245 | *${data['actor']['username']}* changed status of pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']} to `${data['pullrequest']['status']}`>: |
|
248 | *${data['actor']['username']}* changed status of pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']} to `${data['pullrequest']['status']}`>: | |
246 | ''')) |
|
249 | ''')) | |
247 | title = render_with_traceback(template, data=data) |
|
250 | title = render_with_traceback(template, data=data) | |
248 |
|
251 | |||
249 | template = Template(textwrap.dedent(r''' |
|
252 | template = Template(textwrap.dedent(r''' | |
250 | *pull request title*: ${pr_title} |
|
253 | *pull request title*: ${pr_title} | |
251 | ''')) |
|
254 | ''')) | |
252 | text = render_with_traceback( |
|
255 | text = render_with_traceback( | |
253 | template, |
|
256 | template, | |
254 | pr_title=data['pullrequest']['title']) |
|
257 | pr_title=data['pullrequest']['title']) | |
255 |
|
258 | |||
256 | return title, text |
|
259 | return title, text | |
257 |
|
260 | |||
258 | def format_pull_request_event(self, event, data): |
|
261 | def format_pull_request_event(self, event, data): | |
259 | action = { |
|
262 | action = { | |
260 | events.PullRequestCloseEvent: 'closed', |
|
263 | events.PullRequestCloseEvent: 'closed', | |
261 | events.PullRequestMergeEvent: 'merged', |
|
264 | events.PullRequestMergeEvent: 'merged', | |
262 | events.PullRequestUpdateEvent: 'updated', |
|
265 | events.PullRequestUpdateEvent: 'updated', | |
263 | events.PullRequestCreateEvent: 'created', |
|
266 | events.PullRequestCreateEvent: 'created', | |
264 | }.get(event.__class__, str(event.__class__)) |
|
267 | }.get(event.__class__, str(event.__class__)) | |
265 |
|
268 | |||
266 | template = Template(textwrap.dedent(r''' |
|
269 | template = Template(textwrap.dedent(r''' | |
267 | *${data['actor']['username']}* `${action}` pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']}>: |
|
270 | *${data['actor']['username']}* `${action}` pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']}>: | |
268 | ''')) |
|
271 | ''')) | |
269 | title = render_with_traceback(template, data=data, action=action) |
|
272 | title = render_with_traceback(template, data=data, action=action) | |
270 |
|
273 | |||
271 | template = Template(textwrap.dedent(r''' |
|
274 | template = Template(textwrap.dedent(r''' | |
272 | *pull request title*: ${pr_title} |
|
275 | *pull request title*: ${pr_title} | |
273 | %if data['pullrequest']['commits']: |
|
276 | %if data['pullrequest']['commits']: | |
274 | *commits*: ${len(data['pullrequest']['commits'])} |
|
277 | *commits*: ${len(data['pullrequest']['commits'])} | |
275 | %endif |
|
278 | %endif | |
276 | ''')) |
|
279 | ''')) | |
277 | text = render_with_traceback( |
|
280 | text = render_with_traceback( | |
278 | template, |
|
281 | template, | |
279 | pr_title=data['pullrequest']['title'], |
|
282 | pr_title=data['pullrequest']['title'], | |
280 | data=data) |
|
283 | data=data) | |
281 |
|
284 | |||
282 | return title, text |
|
285 | return title, text | |
283 |
|
286 | |||
284 | def format_repo_push_event(self, data): |
|
287 | def format_repo_push_event(self, data): | |
285 | branches_commits = self.aggregate_branch_data( |
|
288 | branches_commits = self.aggregate_branch_data( | |
286 | data['push']['branches'], data['push']['commits']) |
|
289 | data['push']['branches'], data['push']['commits']) | |
287 |
|
290 | |||
288 | template = Template(r''' |
|
291 | template = Template(r''' | |
289 | *${data['actor']['username']}* pushed to repo <${data['repo']['url']}|${data['repo']['repo_name']}>: |
|
292 | *${data['actor']['username']}* pushed to repo <${data['repo']['url']}|${data['repo']['repo_name']}>: | |
290 | ''') |
|
293 | ''') | |
291 | title = render_with_traceback(template, data=data) |
|
294 | title = render_with_traceback(template, data=data) | |
292 |
|
295 | |||
293 | text = render_with_traceback( |
|
296 | text = render_with_traceback( | |
294 | REPO_PUSH_TEMPLATE, |
|
297 | REPO_PUSH_TEMPLATE, | |
295 | data=data, |
|
298 | data=data, | |
296 | branches_commits=branches_commits, |
|
299 | branches_commits=branches_commits, | |
297 | html_to_slack_links=html_to_slack_links, |
|
300 | html_to_slack_links=html_to_slack_links, | |
298 | ) |
|
301 | ) | |
299 |
|
302 | |||
300 | return title, text |
|
303 | return title, text | |
301 |
|
304 | |||
302 | def format_repo_create_event(self, data): |
|
305 | def format_repo_create_event(self, data): | |
303 | template = Template(r''' |
|
306 | template = Template(r''' | |
304 | *${data['actor']['username']}* created new repository ${data['repo']['repo_name']}: |
|
307 | *${data['actor']['username']}* created new repository ${data['repo']['repo_name']}: | |
305 | ''') |
|
308 | ''') | |
306 | title = render_with_traceback(template, data=data) |
|
309 | title = render_with_traceback(template, data=data) | |
307 |
|
310 | |||
308 | template = Template(textwrap.dedent(r''' |
|
311 | template = Template(textwrap.dedent(r''' | |
309 | repo_url: ${data['repo']['url']} |
|
312 | repo_url: ${data['repo']['url']} | |
310 | repo_type: ${data['repo']['repo_type']} |
|
313 | repo_type: ${data['repo']['repo_type']} | |
311 | ''')) |
|
314 | ''')) | |
312 | text = render_with_traceback(template, data=data) |
|
315 | text = render_with_traceback(template, data=data) | |
313 |
|
316 | |||
314 | return title, text |
|
317 | return title, text | |
315 |
|
318 | |||
316 |
|
319 | |||
317 | @async_task(ignore_result=True, base=RequestContextTask) |
|
320 | @async_task(ignore_result=True, base=RequestContextTask) | |
318 | def post_text_to_slack(settings, title, text, fields=None, overrides=None): |
|
321 | def post_text_to_slack(settings, title, text, fields=None, overrides=None): | |
319 | log.debug('sending %s (%s) to slack %s', title, text, settings['service']) |
|
322 | log.debug('sending %s (%s) to slack %s', title, text, settings['service']) | |
320 |
|
323 | |||
321 | fields = fields or [] |
|
324 | fields = fields or [] | |
322 | overrides = overrides or {} |
|
325 | overrides = overrides or {} | |
323 |
|
326 | |||
324 | message_data = { |
|
327 | message_data = { | |
325 | "fallback": text, |
|
328 | "fallback": text, | |
326 | "color": "#427cc9", |
|
329 | "color": "#427cc9", | |
327 | "pretext": title, |
|
330 | "pretext": title, | |
328 | #"author_name": "Bobby Tables", |
|
331 | #"author_name": "Bobby Tables", | |
329 | #"author_link": "http://flickr.com/bobby/", |
|
332 | #"author_link": "http://flickr.com/bobby/", | |
330 | #"author_icon": "http://flickr.com/icons/bobby.jpg", |
|
333 | #"author_icon": "http://flickr.com/icons/bobby.jpg", | |
331 | #"title": "Slack API Documentation", |
|
334 | #"title": "Slack API Documentation", | |
332 | #"title_link": "https://api.slack.com/", |
|
335 | #"title_link": "https://api.slack.com/", | |
333 | "text": text, |
|
336 | "text": text, | |
334 | "fields": fields, |
|
337 | "fields": fields, | |
335 | #"image_url": "http://my-website.com/path/to/image.jpg", |
|
338 | #"image_url": "http://my-website.com/path/to/image.jpg", | |
336 | #"thumb_url": "http://example.com/path/to/thumb.png", |
|
339 | #"thumb_url": "http://example.com/path/to/thumb.png", | |
337 | "footer": "RhodeCode", |
|
340 | "footer": "RhodeCode", | |
338 | #"footer_icon": "", |
|
341 | #"footer_icon": "", | |
339 | "ts": time.time(), |
|
342 | "ts": time.time(), | |
340 | "mrkdwn_in": ["pretext", "text"] |
|
343 | "mrkdwn_in": ["pretext", "text"] | |
341 | } |
|
344 | } | |
342 | message_data.update(overrides) |
|
345 | message_data.update(overrides) | |
343 | json_message = { |
|
346 | json_message = { | |
344 | "icon_emoji": settings.get('icon_emoji', ':studio_microphone:'), |
|
347 | "icon_emoji": settings.get('icon_emoji', ':studio_microphone:'), | |
345 | "channel": settings.get('channel', ''), |
|
348 | "channel": settings.get('channel', ''), | |
346 | "username": settings.get('username', 'Rhodecode'), |
|
349 | "username": settings.get('username', 'Rhodecode'), | |
347 | "attachments": [message_data] |
|
350 | "attachments": [message_data] | |
348 | } |
|
351 | } | |
349 | req_session = requests_retry_call() |
|
352 | req_session = requests_retry_call() | |
350 | resp = req_session.post(settings['service'], json=json_message, timeout=60) |
|
353 | resp = req_session.post(settings['service'], json=json_message, timeout=60) | |
351 | resp.raise_for_status() # raise exception on a failed request |
|
354 | resp.raise_for_status() # raise exception on a failed request |
@@ -1,264 +1,266 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2012-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2012-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | from __future__ import unicode_literals |
|
21 | from __future__ import unicode_literals | |
22 |
|
22 | |||
23 | import deform.widget |
|
23 | import deform.widget | |
24 | import logging |
|
24 | import logging | |
25 | import colander |
|
25 | import colander | |
26 |
|
26 | |||
27 | import rhodecode |
|
27 | import rhodecode | |
28 | from rhodecode import events |
|
28 | from rhodecode import events | |
29 | from rhodecode.lib.colander_utils import strip_whitespace |
|
29 | from rhodecode.lib.colander_utils import strip_whitespace | |
30 | from rhodecode.model.validation_schema.widgets import CheckboxChoiceWidgetDesc |
|
30 | from rhodecode.model.validation_schema.widgets import CheckboxChoiceWidgetDesc | |
31 | from rhodecode.translation import _ |
|
31 | from rhodecode.translation import _ | |
32 | from rhodecode.integrations.types.base import ( |
|
32 | from rhodecode.integrations.types.base import ( | |
33 | IntegrationTypeBase, get_auth, get_web_token, get_url_vars, |
|
33 | IntegrationTypeBase, get_auth, get_web_token, get_url_vars, | |
34 | WebhookDataHandler, WEBHOOK_URL_VARS, requests_retry_call) |
|
34 | WebhookDataHandler, WEBHOOK_URL_VARS, requests_retry_call) | |
35 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask |
|
35 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask | |
36 | from rhodecode.model.validation_schema import widgets |
|
36 | from rhodecode.model.validation_schema import widgets | |
37 |
|
37 | |||
38 | log = logging.getLogger(__name__) |
|
38 | log = logging.getLogger(__name__) | |
39 |
|
39 | |||
40 |
|
40 | |||
41 | # updating this required to update the `common_vars` passed in url calling func |
|
41 | # updating this required to update the `common_vars` passed in url calling func | |
42 |
|
42 | |||
43 | URL_VARS = get_url_vars(WEBHOOK_URL_VARS) |
|
43 | URL_VARS = get_url_vars(WEBHOOK_URL_VARS) | |
44 |
|
44 | |||
45 |
|
45 | |||
46 | class WebhookSettingsSchema(colander.Schema): |
|
46 | class WebhookSettingsSchema(colander.Schema): | |
47 | url = colander.SchemaNode( |
|
47 | url = colander.SchemaNode( | |
48 | colander.String(), |
|
48 | colander.String(), | |
49 | title=_('Webhook URL'), |
|
49 | title=_('Webhook URL'), | |
50 | description= |
|
50 | description= | |
51 | _('URL to which Webhook should submit data. If used some of the ' |
|
51 | _('URL to which Webhook should submit data. If used some of the ' | |
52 | 'variables would trigger multiple calls, like ${branch} or ' |
|
52 | 'variables would trigger multiple calls, like ${branch} or ' | |
53 | '${commit_id}. Webhook will be called as many times as unique ' |
|
53 | '${commit_id}. Webhook will be called as many times as unique ' | |
54 | 'objects in data in such cases.'), |
|
54 | 'objects in data in such cases.'), | |
55 | missing=colander.required, |
|
55 | missing=colander.required, | |
56 | required=True, |
|
56 | required=True, | |
57 | preparer=strip_whitespace, |
|
57 | preparer=strip_whitespace, | |
58 | validator=colander.url, |
|
58 | validator=colander.url, | |
59 | widget=widgets.CodeMirrorWidget( |
|
59 | widget=widgets.CodeMirrorWidget( | |
60 | help_block_collapsable_name='Show url variables', |
|
60 | help_block_collapsable_name='Show url variables', | |
61 | help_block_collapsable=( |
|
61 | help_block_collapsable=( | |
62 | 'E.g http://my-serv.com/trigger_job/${{event_name}}' |
|
62 | 'E.g http://my-serv.com/trigger_job/${{event_name}}' | |
63 | '?PR_ID=${{pull_request_id}}' |
|
63 | '?PR_ID=${{pull_request_id}}' | |
64 | '\nFull list of vars:\n{}'.format(URL_VARS)), |
|
64 | '\nFull list of vars:\n{}'.format(URL_VARS)), | |
65 | codemirror_mode='text', |
|
65 | codemirror_mode='text', | |
66 | codemirror_options='{"lineNumbers": false, "lineWrapping": true}'), |
|
66 | codemirror_options='{"lineNumbers": false, "lineWrapping": true}'), | |
67 | ) |
|
67 | ) | |
68 | secret_token = colander.SchemaNode( |
|
68 | secret_token = colander.SchemaNode( | |
69 | colander.String(), |
|
69 | colander.String(), | |
70 | title=_('Secret Token'), |
|
70 | title=_('Secret Token'), | |
71 | description=_('Optional string used to validate received payloads. ' |
|
71 | description=_('Optional string used to validate received payloads. ' | |
72 | 'It will be sent together with event data in JSON'), |
|
72 | 'It will be sent together with event data in JSON'), | |
73 | default='', |
|
73 | default='', | |
74 | missing='', |
|
74 | missing='', | |
75 | widget=deform.widget.TextInputWidget( |
|
75 | widget=deform.widget.TextInputWidget( | |
76 | placeholder='e.g. secret_token' |
|
76 | placeholder='e.g. secret_token' | |
77 | ), |
|
77 | ), | |
78 | ) |
|
78 | ) | |
79 | username = colander.SchemaNode( |
|
79 | username = colander.SchemaNode( | |
80 | colander.String(), |
|
80 | colander.String(), | |
81 | title=_('Username'), |
|
81 | title=_('Username'), | |
82 | description=_('Optional username to authenticate the call.'), |
|
82 | description=_('Optional username to authenticate the call.'), | |
83 | default='', |
|
83 | default='', | |
84 | missing='', |
|
84 | missing='', | |
85 | widget=deform.widget.TextInputWidget( |
|
85 | widget=deform.widget.TextInputWidget( | |
86 | placeholder='e.g. admin' |
|
86 | placeholder='e.g. admin' | |
87 | ), |
|
87 | ), | |
88 | ) |
|
88 | ) | |
89 | password = colander.SchemaNode( |
|
89 | password = colander.SchemaNode( | |
90 | colander.String(), |
|
90 | colander.String(), | |
91 | title=_('Password'), |
|
91 | title=_('Password'), | |
92 | description=_('Optional password to authenticate the call.'), |
|
92 | description=_('Optional password to authenticate the call.'), | |
93 | default='', |
|
93 | default='', | |
94 | missing='', |
|
94 | missing='', | |
95 | widget=deform.widget.PasswordWidget( |
|
95 | widget=deform.widget.PasswordWidget( | |
96 | placeholder='e.g. secret.', |
|
96 | placeholder='e.g. secret.', | |
97 | redisplay=True, |
|
97 | redisplay=True, | |
98 | ), |
|
98 | ), | |
99 | ) |
|
99 | ) | |
100 | custom_header_key = colander.SchemaNode( |
|
100 | custom_header_key = colander.SchemaNode( | |
101 | colander.String(), |
|
101 | colander.String(), | |
102 | title=_('Custom Header Key'), |
|
102 | title=_('Custom Header Key'), | |
103 | description=_('Custom Header name to be set when calling endpoint.'), |
|
103 | description=_('Custom Header name to be set when calling endpoint.'), | |
104 | default='', |
|
104 | default='', | |
105 | missing='', |
|
105 | missing='', | |
106 | widget=deform.widget.TextInputWidget( |
|
106 | widget=deform.widget.TextInputWidget( | |
107 | placeholder='e.g: Authorization' |
|
107 | placeholder='e.g: Authorization' | |
108 | ), |
|
108 | ), | |
109 | ) |
|
109 | ) | |
110 | custom_header_val = colander.SchemaNode( |
|
110 | custom_header_val = colander.SchemaNode( | |
111 | colander.String(), |
|
111 | colander.String(), | |
112 | title=_('Custom Header Value'), |
|
112 | title=_('Custom Header Value'), | |
113 | description=_('Custom Header value to be set when calling endpoint.'), |
|
113 | description=_('Custom Header value to be set when calling endpoint.'), | |
114 | default='', |
|
114 | default='', | |
115 | missing='', |
|
115 | missing='', | |
116 | widget=deform.widget.TextInputWidget( |
|
116 | widget=deform.widget.TextInputWidget( | |
117 | placeholder='e.g. Basic XxXxXx' |
|
117 | placeholder='e.g. Basic XxXxXx' | |
118 | ), |
|
118 | ), | |
119 | ) |
|
119 | ) | |
120 | method_type = colander.SchemaNode( |
|
120 | method_type = colander.SchemaNode( | |
121 | colander.String(), |
|
121 | colander.String(), | |
122 | title=_('Call Method'), |
|
122 | title=_('Call Method'), | |
123 | description=_('Select a HTTP method to use when calling the Webhook.'), |
|
123 | description=_('Select a HTTP method to use when calling the Webhook.'), | |
124 | default='post', |
|
124 | default='post', | |
125 | missing='', |
|
125 | missing='', | |
126 | widget=deform.widget.RadioChoiceWidget( |
|
126 | widget=deform.widget.RadioChoiceWidget( | |
127 | values=[('get', 'GET'), ('post', 'POST'), ('put', 'PUT')], |
|
127 | values=[('get', 'GET'), ('post', 'POST'), ('put', 'PUT')], | |
128 | inline=True |
|
128 | inline=True | |
129 | ), |
|
129 | ), | |
130 | ) |
|
130 | ) | |
131 |
|
131 | |||
132 |
|
132 | |||
133 | class WebhookIntegrationType(IntegrationTypeBase): |
|
133 | class WebhookIntegrationType(IntegrationTypeBase): | |
134 | key = 'webhook' |
|
134 | key = 'webhook' | |
135 | display_name = _('Webhook') |
|
135 | display_name = _('Webhook') | |
136 | description = _('send JSON data to a url endpoint') |
|
136 | description = _('send JSON data to a url endpoint') | |
137 |
|
137 | |||
138 | @classmethod |
|
138 | @classmethod | |
139 | def icon(cls): |
|
139 | def icon(cls): | |
140 | return '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 239" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M119.540432,100.502743 C108.930124,118.338815 98.7646301,135.611455 88.3876025,152.753617 C85.7226696,157.154315 84.4040417,160.738531 86.5332204,166.333309 C92.4107024,181.787152 84.1193605,196.825836 68.5350381,200.908244 C53.8383677,204.759349 39.5192953,195.099955 36.6032893,179.365384 C34.0194114,165.437749 44.8274148,151.78491 60.1824106,149.608284 C61.4694072,149.424428 62.7821041,149.402681 64.944891,149.240571 C72.469175,136.623655 80.1773157,123.700312 88.3025935,110.073173 C73.611854,95.4654658 64.8677898,78.3885437 66.803227,57.2292132 C68.1712787,42.2715849 74.0527146,29.3462646 84.8033863,18.7517722 C105.393354,-1.53572199 136.805164,-4.82141828 161.048542,10.7510424 C184.333097,25.7086706 194.996783,54.8450075 185.906752,79.7822957 C179.052655,77.9239597 172.151111,76.049808 164.563565,73.9917997 C167.418285,60.1274266 165.306899,47.6765751 155.95591,37.0109123 C149.777932,29.9690049 141.850349,26.2780332 132.835442,24.9178894 C114.764113,22.1877169 97.0209573,33.7983633 91.7563309,51.5355878 C85.7800012,71.6669027 94.8245623,88.1111998 119.540432,100.502743 L119.540432,100.502743 Z" fill="#C73A63"></path><path d="M149.841194,79.4106285 C157.316054,92.5969067 164.905578,105.982857 172.427885,119.246236 C210.44865,107.483365 239.114472,128.530009 249.398582,151.063322 C261.81978,178.282014 253.328765,210.520191 228.933162,227.312431 C203.893073,244.551464 172.226236,241.605803 150.040866,219.46195 C155.694953,214.729124 161.376716,209.974552 167.44794,204.895759 C189.360489,219.088306 208.525074,218.420096 222.753207,201.614016 C234.885769,187.277151 234.622834,165.900356 222.138374,151.863988 C207.730339,135.66681 188.431321,135.172572 165.103273,150.721309 C155.426087,133.553447 145.58086,116.521995 136.210101,99.2295848 
C133.05093,93.4015266 129.561608,90.0209366 122.440622,88.7873178 C110.547271,86.7253555 102.868785,76.5124151 102.408155,65.0698097 C101.955433,53.7537294 108.621719,43.5249733 119.04224,39.5394355 C129.363912,35.5914599 141.476705,38.7783085 148.419765,47.554004 C154.093621,54.7244134 155.896602,62.7943365 152.911402,71.6372484 C152.081082,74.1025091 151.00562,76.4886916 149.841194,79.4106285 L149.841194,79.4106285 Z" fill="#4B4B4B"></path><path d="M167.706921,187.209935 L121.936499,187.209935 C117.54964,205.253587 108.074103,219.821756 91.7464461,229.085759 C79.0544063,236.285822 65.3738898,238.72736 50.8136292,236.376762 C24.0061432,232.053165 2.08568567,207.920497 0.156179306,180.745298 C-2.02835403,149.962159 19.1309765,122.599149 47.3341915,116.452801 C49.2814904,123.524363 51.2485589,130.663141 53.1958579,137.716911 C27.3195169,150.919004 18.3639187,167.553089 25.6054984,188.352614 C31.9811726,206.657224 50.0900643,216.690262 69.7528413,212.809503 C89.8327554,208.847688 99.9567329,192.160226 98.7211371,165.37844 C117.75722,165.37844 136.809118,165.180745 155.847178,165.475311 C163.280522,165.591951 169.019617,164.820939 174.620326,158.267339 C183.840836,147.48306 200.811003,148.455721 210.741239,158.640984 C220.88894,169.049642 220.402609,185.79839 209.663799,195.768166 C199.302587,205.38802 182.933414,204.874012 173.240413,194.508846 C171.247644,192.37176 169.677943,189.835329 167.706921,187.209935 L167.706921,187.209935 Z" fill="#4A4A4A"></path></g></svg>''' |
|
140 | return '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 239" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M119.540432,100.502743 C108.930124,118.338815 98.7646301,135.611455 88.3876025,152.753617 C85.7226696,157.154315 84.4040417,160.738531 86.5332204,166.333309 C92.4107024,181.787152 84.1193605,196.825836 68.5350381,200.908244 C53.8383677,204.759349 39.5192953,195.099955 36.6032893,179.365384 C34.0194114,165.437749 44.8274148,151.78491 60.1824106,149.608284 C61.4694072,149.424428 62.7821041,149.402681 64.944891,149.240571 C72.469175,136.623655 80.1773157,123.700312 88.3025935,110.073173 C73.611854,95.4654658 64.8677898,78.3885437 66.803227,57.2292132 C68.1712787,42.2715849 74.0527146,29.3462646 84.8033863,18.7517722 C105.393354,-1.53572199 136.805164,-4.82141828 161.048542,10.7510424 C184.333097,25.7086706 194.996783,54.8450075 185.906752,79.7822957 C179.052655,77.9239597 172.151111,76.049808 164.563565,73.9917997 C167.418285,60.1274266 165.306899,47.6765751 155.95591,37.0109123 C149.777932,29.9690049 141.850349,26.2780332 132.835442,24.9178894 C114.764113,22.1877169 97.0209573,33.7983633 91.7563309,51.5355878 C85.7800012,71.6669027 94.8245623,88.1111998 119.540432,100.502743 L119.540432,100.502743 Z" fill="#C73A63"></path><path d="M149.841194,79.4106285 C157.316054,92.5969067 164.905578,105.982857 172.427885,119.246236 C210.44865,107.483365 239.114472,128.530009 249.398582,151.063322 C261.81978,178.282014 253.328765,210.520191 228.933162,227.312431 C203.893073,244.551464 172.226236,241.605803 150.040866,219.46195 C155.694953,214.729124 161.376716,209.974552 167.44794,204.895759 C189.360489,219.088306 208.525074,218.420096 222.753207,201.614016 C234.885769,187.277151 234.622834,165.900356 222.138374,151.863988 C207.730339,135.66681 188.431321,135.172572 165.103273,150.721309 C155.426087,133.553447 145.58086,116.521995 136.210101,99.2295848 
C133.05093,93.4015266 129.561608,90.0209366 122.440622,88.7873178 C110.547271,86.7253555 102.868785,76.5124151 102.408155,65.0698097 C101.955433,53.7537294 108.621719,43.5249733 119.04224,39.5394355 C129.363912,35.5914599 141.476705,38.7783085 148.419765,47.554004 C154.093621,54.7244134 155.896602,62.7943365 152.911402,71.6372484 C152.081082,74.1025091 151.00562,76.4886916 149.841194,79.4106285 L149.841194,79.4106285 Z" fill="#4B4B4B"></path><path d="M167.706921,187.209935 L121.936499,187.209935 C117.54964,205.253587 108.074103,219.821756 91.7464461,229.085759 C79.0544063,236.285822 65.3738898,238.72736 50.8136292,236.376762 C24.0061432,232.053165 2.08568567,207.920497 0.156179306,180.745298 C-2.02835403,149.962159 19.1309765,122.599149 47.3341915,116.452801 C49.2814904,123.524363 51.2485589,130.663141 53.1958579,137.716911 C27.3195169,150.919004 18.3639187,167.553089 25.6054984,188.352614 C31.9811726,206.657224 50.0900643,216.690262 69.7528413,212.809503 C89.8327554,208.847688 99.9567329,192.160226 98.7211371,165.37844 C117.75722,165.37844 136.809118,165.180745 155.847178,165.475311 C163.280522,165.591951 169.019617,164.820939 174.620326,158.267339 C183.840836,147.48306 200.811003,148.455721 210.741239,158.640984 C220.88894,169.049642 220.402609,185.79839 209.663799,195.768166 C199.302587,205.38802 182.933414,204.874012 173.240413,194.508846 C171.247644,192.37176 169.677943,189.835329 167.706921,187.209935 L167.706921,187.209935 Z" fill="#4A4A4A"></path></g></svg>''' | |
141 |
|
141 | |||
142 | valid_events = [ |
|
142 | valid_events = [ | |
143 | events.PullRequestCloseEvent, |
|
143 | events.PullRequestCloseEvent, | |
144 | events.PullRequestMergeEvent, |
|
144 | events.PullRequestMergeEvent, | |
145 | events.PullRequestUpdateEvent, |
|
145 | events.PullRequestUpdateEvent, | |
146 | events.PullRequestCommentEvent, |
|
146 | events.PullRequestCommentEvent, | |
|
147 | events.PullRequestCommentEditEvent, | |||
147 | events.PullRequestReviewEvent, |
|
148 | events.PullRequestReviewEvent, | |
148 | events.PullRequestCreateEvent, |
|
149 | events.PullRequestCreateEvent, | |
149 | events.RepoPushEvent, |
|
150 | events.RepoPushEvent, | |
150 | events.RepoCreateEvent, |
|
151 | events.RepoCreateEvent, | |
151 | events.RepoCommitCommentEvent, |
|
152 | events.RepoCommitCommentEvent, | |
|
153 | events.RepoCommitCommentEditEvent, | |||
152 | ] |
|
154 | ] | |
153 |
|
155 | |||
154 | def settings_schema(self): |
|
156 | def settings_schema(self): | |
155 | schema = WebhookSettingsSchema() |
|
157 | schema = WebhookSettingsSchema() | |
156 | schema.add(colander.SchemaNode( |
|
158 | schema.add(colander.SchemaNode( | |
157 | colander.Set(), |
|
159 | colander.Set(), | |
158 | widget=CheckboxChoiceWidgetDesc( |
|
160 | widget=CheckboxChoiceWidgetDesc( | |
159 | values=sorted( |
|
161 | values=sorted( | |
160 | [(e.name, e.display_name, e.description) for e in self.valid_events] |
|
162 | [(e.name, e.display_name, e.description) for e in self.valid_events] | |
161 | ), |
|
163 | ), | |
162 | ), |
|
164 | ), | |
163 | description="List of events activated for this integration", |
|
165 | description="List of events activated for this integration", | |
164 | name='events' |
|
166 | name='events' | |
165 | )) |
|
167 | )) | |
166 | return schema |
|
168 | return schema | |
167 |
|
169 | |||
168 | def send_event(self, event): |
|
170 | def send_event(self, event): | |
169 | log.debug('handling event %s with integration %s', event.name, self) |
|
171 | log.debug('handling event %s with integration %s', event.name, self) | |
170 |
|
172 | |||
171 | if event.__class__ not in self.valid_events: |
|
173 | if event.__class__ not in self.valid_events: | |
172 | log.debug('event %r not present in valid event list (%s)', event, self.valid_events) |
|
174 | log.debug('event %r not present in valid event list (%s)', event, self.valid_events) | |
173 | return |
|
175 | return | |
174 |
|
176 | |||
175 | if not self.event_enabled(event): |
|
177 | if not self.event_enabled(event): | |
176 | return |
|
178 | return | |
177 |
|
179 | |||
178 | data = event.as_dict() |
|
180 | data = event.as_dict() | |
179 | template_url = self.settings['url'] |
|
181 | template_url = self.settings['url'] | |
180 |
|
182 | |||
181 | headers = {} |
|
183 | headers = {} | |
182 | head_key = self.settings.get('custom_header_key') |
|
184 | head_key = self.settings.get('custom_header_key') | |
183 | head_val = self.settings.get('custom_header_val') |
|
185 | head_val = self.settings.get('custom_header_val') | |
184 | if head_key and head_val: |
|
186 | if head_key and head_val: | |
185 | headers = {head_key: head_val} |
|
187 | headers = {head_key: head_val} | |
186 |
|
188 | |||
187 | handler = WebhookDataHandler(template_url, headers) |
|
189 | handler = WebhookDataHandler(template_url, headers) | |
188 |
|
190 | |||
189 | url_calls = handler(event, data) |
|
191 | url_calls = handler(event, data) | |
190 | log.debug('Webhook: calling following urls: %s', [x[0] for x in url_calls]) |
|
192 | log.debug('Webhook: calling following urls: %s', [x[0] for x in url_calls]) | |
191 |
|
193 | |||
192 | run_task(post_to_webhook, url_calls, self.settings) |
|
194 | run_task(post_to_webhook, url_calls, self.settings) | |
193 |
|
195 | |||
194 |
|
196 | |||
195 | @async_task(ignore_result=True, base=RequestContextTask) |
|
197 | @async_task(ignore_result=True, base=RequestContextTask) | |
196 | def post_to_webhook(url_calls, settings): |
|
198 | def post_to_webhook(url_calls, settings): | |
197 | """ |
|
199 | """ | |
198 | Example data:: |
|
200 | Example data:: | |
199 |
|
201 | |||
200 | {'actor': {'user_id': 2, 'username': u'admin'}, |
|
202 | {'actor': {'user_id': 2, 'username': u'admin'}, | |
201 | 'actor_ip': u'192.168.157.1', |
|
203 | 'actor_ip': u'192.168.157.1', | |
202 | 'name': 'repo-push', |
|
204 | 'name': 'repo-push', | |
203 | 'push': {'branches': [{'name': u'default', |
|
205 | 'push': {'branches': [{'name': u'default', | |
204 | 'url': 'http://rc.local:8080/hg-repo/changelog?branch=default'}], |
|
206 | 'url': 'http://rc.local:8080/hg-repo/changelog?branch=default'}], | |
205 | 'commits': [{'author': u'Marcin Kuzminski <marcin@rhodecode.com>', |
|
207 | 'commits': [{'author': u'Marcin Kuzminski <marcin@rhodecode.com>', | |
206 | 'branch': u'default', |
|
208 | 'branch': u'default', | |
207 | 'date': datetime.datetime(2017, 11, 30, 12, 59, 48), |
|
209 | 'date': datetime.datetime(2017, 11, 30, 12, 59, 48), | |
208 | 'issues': [], |
|
210 | 'issues': [], | |
209 | 'mentions': [], |
|
211 | 'mentions': [], | |
210 | 'message': u'commit Thu 30 Nov 2017 13:59:48 CET', |
|
212 | 'message': u'commit Thu 30 Nov 2017 13:59:48 CET', | |
211 | 'message_html': u'commit Thu 30 Nov 2017 13:59:48 CET', |
|
213 | 'message_html': u'commit Thu 30 Nov 2017 13:59:48 CET', | |
212 | 'message_html_title': u'commit Thu 30 Nov 2017 13:59:48 CET', |
|
214 | 'message_html_title': u'commit Thu 30 Nov 2017 13:59:48 CET', | |
213 | 'parents': [{'raw_id': '431b772a5353dad9974b810dd3707d79e3a7f6e0'}], |
|
215 | 'parents': [{'raw_id': '431b772a5353dad9974b810dd3707d79e3a7f6e0'}], | |
214 | 'permalink_url': u'http://rc.local:8080/_7/changeset/a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf', |
|
216 | 'permalink_url': u'http://rc.local:8080/_7/changeset/a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf', | |
215 | 'raw_id': 'a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf', |
|
217 | 'raw_id': 'a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf', | |
216 | 'refs': {'bookmarks': [], |
|
218 | 'refs': {'bookmarks': [], | |
217 | 'branches': [u'default'], |
|
219 | 'branches': [u'default'], | |
218 | 'tags': [u'tip']}, |
|
220 | 'tags': [u'tip']}, | |
219 | 'reviewers': [], |
|
221 | 'reviewers': [], | |
220 | 'revision': 9L, |
|
222 | 'revision': 9L, | |
221 | 'short_id': 'a815cc738b96', |
|
223 | 'short_id': 'a815cc738b96', | |
222 | 'url': u'http://rc.local:8080/hg-repo/changeset/a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf'}], |
|
224 | 'url': u'http://rc.local:8080/hg-repo/changeset/a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf'}], | |
223 | 'issues': {}}, |
|
225 | 'issues': {}}, | |
224 | 'repo': {'extra_fields': '', |
|
226 | 'repo': {'extra_fields': '', | |
225 | 'permalink_url': u'http://rc.local:8080/_7', |
|
227 | 'permalink_url': u'http://rc.local:8080/_7', | |
226 | 'repo_id': 7, |
|
228 | 'repo_id': 7, | |
227 | 'repo_name': u'hg-repo', |
|
229 | 'repo_name': u'hg-repo', | |
228 | 'repo_type': u'hg', |
|
230 | 'repo_type': u'hg', | |
229 | 'url': u'http://rc.local:8080/hg-repo'}, |
|
231 | 'url': u'http://rc.local:8080/hg-repo'}, | |
230 | 'server_url': u'http://rc.local:8080', |
|
232 | 'server_url': u'http://rc.local:8080', | |
231 | 'utc_timestamp': datetime.datetime(2017, 11, 30, 13, 0, 1, 569276) |
|
233 | 'utc_timestamp': datetime.datetime(2017, 11, 30, 13, 0, 1, 569276) | |
232 | } |
|
234 | } | |
233 | """ |
|
235 | """ | |
234 |
|
236 | |||
235 | call_headers = { |
|
237 | call_headers = { | |
236 | 'User-Agent': 'RhodeCode-webhook-caller/{}'.format(rhodecode.__version__) |
|
238 | 'User-Agent': 'RhodeCode-webhook-caller/{}'.format(rhodecode.__version__) | |
237 | } # updated below with custom ones, allows override |
|
239 | } # updated below with custom ones, allows override | |
238 |
|
240 | |||
239 | auth = get_auth(settings) |
|
241 | auth = get_auth(settings) | |
240 | token = get_web_token(settings) |
|
242 | token = get_web_token(settings) | |
241 |
|
243 | |||
242 | for url, headers, data in url_calls: |
|
244 | for url, headers, data in url_calls: | |
243 | req_session = requests_retry_call() |
|
245 | req_session = requests_retry_call() | |
244 |
|
246 | |||
245 | method = settings.get('method_type') or 'post' |
|
247 | method = settings.get('method_type') or 'post' | |
246 | call_method = getattr(req_session, method) |
|
248 | call_method = getattr(req_session, method) | |
247 |
|
249 | |||
248 | headers = headers or {} |
|
250 | headers = headers or {} | |
249 | call_headers.update(headers) |
|
251 | call_headers.update(headers) | |
250 |
|
252 | |||
251 | log.debug('calling Webhook with method: %s, and auth:%s', call_method, auth) |
|
253 | log.debug('calling Webhook with method: %s, and auth:%s', call_method, auth) | |
252 | if settings.get('log_data'): |
|
254 | if settings.get('log_data'): | |
253 | log.debug('calling webhook with data: %s', data) |
|
255 | log.debug('calling webhook with data: %s', data) | |
254 | resp = call_method(url, json={ |
|
256 | resp = call_method(url, json={ | |
255 | 'token': token, |
|
257 | 'token': token, | |
256 | 'event': data |
|
258 | 'event': data | |
257 | }, headers=call_headers, auth=auth, timeout=60) |
|
259 | }, headers=call_headers, auth=auth, timeout=60) | |
258 | log.debug('Got Webhook response: %s', resp) |
|
260 | log.debug('Got Webhook response: %s', resp) | |
259 |
|
261 | |||
260 | try: |
|
262 | try: | |
261 | resp.raise_for_status() # raise exception on a failed request |
|
263 | resp.raise_for_status() # raise exception on a failed request | |
262 | except Exception: |
|
264 | except Exception: | |
263 | log.error(resp.text) |
|
265 | log.error(resp.text) | |
264 | raise |
|
266 | raise |
@@ -1,215 +1,266 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import webob |
|
21 | import webob | |
22 | from pyramid.threadlocal import get_current_request |
|
22 | from pyramid.threadlocal import get_current_request | |
23 |
|
23 | |||
24 | from rhodecode import events |
|
24 | from rhodecode import events | |
25 | from rhodecode.lib import hooks_base |
|
25 | from rhodecode.lib import hooks_base | |
26 | from rhodecode.lib import utils2 |
|
26 | from rhodecode.lib import utils2 | |
27 |
|
27 | |||
28 |
|
28 | |||
29 | def _supports_repo_type(repo_type): |
|
29 | def _supports_repo_type(repo_type): | |
30 | if repo_type in ('hg', 'git'): |
|
30 | if repo_type in ('hg', 'git'): | |
31 | return True |
|
31 | return True | |
32 | return False |
|
32 | return False | |
33 |
|
33 | |||
34 |
|
34 | |||
35 | def _get_vcs_operation_context(username, repo_name, repo_type, action): |
|
35 | def _get_vcs_operation_context(username, repo_name, repo_type, action): | |
36 | # NOTE(dan): import loop |
|
36 | # NOTE(dan): import loop | |
37 | from rhodecode.lib.base import vcs_operation_context |
|
37 | from rhodecode.lib.base import vcs_operation_context | |
38 |
|
38 | |||
39 | check_locking = action in ('pull', 'push') |
|
39 | check_locking = action in ('pull', 'push') | |
40 |
|
40 | |||
41 | request = get_current_request() |
|
41 | request = get_current_request() | |
42 |
|
42 | |||
43 | try: |
|
43 | try: | |
44 | environ = request.environ |
|
44 | environ = request.environ | |
45 | except TypeError: |
|
45 | except TypeError: | |
46 | # we might use this outside of request context |
|
46 | # we might use this outside of request context | |
47 | environ = {} |
|
47 | environ = {} | |
48 |
|
48 | |||
49 | if not environ: |
|
49 | if not environ: | |
50 | environ = webob.Request.blank('').environ |
|
50 | environ = webob.Request.blank('').environ | |
51 |
|
51 | |||
52 | extras = vcs_operation_context(environ, repo_name, username, action, repo_type, check_locking) |
|
52 | extras = vcs_operation_context(environ, repo_name, username, action, repo_type, check_locking) | |
53 | return utils2.AttributeDict(extras) |
|
53 | return utils2.AttributeDict(extras) | |
54 |
|
54 | |||
55 |
|
55 | |||
56 | def trigger_post_push_hook(username, action, hook_type, repo_name, repo_type, commit_ids): |
|
56 | def trigger_post_push_hook(username, action, hook_type, repo_name, repo_type, commit_ids): | |
57 | """ |
|
57 | """ | |
58 | Triggers push action hooks |
|
58 | Triggers push action hooks | |
59 |
|
59 | |||
60 | :param username: username who pushes |
|
60 | :param username: username who pushes | |
61 | :param action: push/push_local/push_remote |
|
61 | :param action: push/push_local/push_remote | |
62 | :param hook_type: type of hook executed |
|
62 | :param hook_type: type of hook executed | |
63 | :param repo_name: name of repo |
|
63 | :param repo_name: name of repo | |
64 | :param repo_type: the type of SCM repo |
|
64 | :param repo_type: the type of SCM repo | |
65 | :param commit_ids: list of commit ids that we pushed |
|
65 | :param commit_ids: list of commit ids that we pushed | |
66 | """ |
|
66 | """ | |
67 | extras = _get_vcs_operation_context(username, repo_name, repo_type, action) |
|
67 | extras = _get_vcs_operation_context(username, repo_name, repo_type, action) | |
68 | extras.commit_ids = commit_ids |
|
68 | extras.commit_ids = commit_ids | |
69 | extras.hook_type = hook_type |
|
69 | extras.hook_type = hook_type | |
70 | hooks_base.post_push(extras) |
|
70 | hooks_base.post_push(extras) | |
71 |
|
71 | |||
72 |
|
72 | |||
73 | def trigger_comment_commit_hooks(username, repo_name, repo_type, repo, data=None): |
|
73 | def trigger_comment_commit_hooks(username, repo_name, repo_type, repo, data=None): | |
74 | """ |
|
74 | """ | |
75 | Triggers when a comment is made on a commit |
|
75 | Triggers when a comment is made on a commit | |
76 |
|
76 | |||
77 | :param username: username who creates the comment |
|
77 | :param username: username who creates the comment | |
78 | :param repo_name: name of target repo |
|
78 | :param repo_name: name of target repo | |
79 | :param repo_type: the type of SCM target repo |
|
79 | :param repo_type: the type of SCM target repo | |
80 | :param repo: the repo object we trigger the event for |
|
80 | :param repo: the repo object we trigger the event for | |
81 | :param data: extra data for specific events e.g {'comment': comment_obj, 'commit': commit_obj} |
|
81 | :param data: extra data for specific events e.g {'comment': comment_obj, 'commit': commit_obj} | |
82 | """ |
|
82 | """ | |
83 | if not _supports_repo_type(repo_type): |
|
83 | if not _supports_repo_type(repo_type): | |
84 | return |
|
84 | return | |
85 |
|
85 | |||
86 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_commit') |
|
86 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_commit') | |
87 |
|
87 | |||
88 | comment = data['comment'] |
|
88 | comment = data['comment'] | |
89 | commit = data['commit'] |
|
89 | commit = data['commit'] | |
90 |
|
90 | |||
91 | events.trigger(events.RepoCommitCommentEvent(repo, commit, comment)) |
|
91 | events.trigger(events.RepoCommitCommentEvent(repo, commit, comment)) | |
92 | extras.update(repo.get_dict()) |
|
92 | extras.update(repo.get_dict()) | |
93 |
|
93 | |||
94 | extras.commit = commit.serialize() |
|
94 | extras.commit = commit.serialize() | |
95 | extras.comment = comment.get_api_data() |
|
95 | extras.comment = comment.get_api_data() | |
96 | extras.created_by = username |
|
96 | extras.created_by = username | |
97 | hooks_base.log_comment_commit_repository(**extras) |
|
97 | hooks_base.log_comment_commit_repository(**extras) | |
98 |
|
98 | |||
99 |
|
99 | |||
|
100 | def trigger_comment_commit_edit_hooks(username, repo_name, repo_type, repo, data=None): | |||
|
101 | """ | |||
|
102 | Triggers when a comment is edited on a commit | |||
|
103 | ||||
|
104 | :param username: username who edits the comment | |||
|
105 | :param repo_name: name of target repo | |||
|
106 | :param repo_type: the type of SCM target repo | |||
|
107 | :param repo: the repo object we trigger the event for | |||
|
108 | :param data: extra data for specific events e.g {'comment': comment_obj, 'commit': commit_obj} | |||
|
109 | """ | |||
|
110 | if not _supports_repo_type(repo_type): | |||
|
111 | return | |||
|
112 | ||||
|
113 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_commit') | |||
|
114 | ||||
|
115 | comment = data['comment'] | |||
|
116 | commit = data['commit'] | |||
|
117 | ||||
|
118 | events.trigger(events.RepoCommitCommentEditEvent(repo, commit, comment)) | |||
|
119 | extras.update(repo.get_dict()) | |||
|
120 | ||||
|
121 | extras.commit = commit.serialize() | |||
|
122 | extras.comment = comment.get_api_data() | |||
|
123 | extras.created_by = username | |||
|
124 | # TODO(marcink): rcextensions handlers ?? | |||
|
125 | hooks_base.log_comment_commit_repository(**extras) | |||
|
126 | ||||
|
127 | ||||
100 | def trigger_create_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): |
|
128 | def trigger_create_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): | |
101 | """ |
|
129 | """ | |
102 | Triggers create pull request action hooks |
|
130 | Triggers create pull request action hooks | |
103 |
|
131 | |||
104 | :param username: username who creates the pull request |
|
132 | :param username: username who creates the pull request | |
105 | :param repo_name: name of target repo |
|
133 | :param repo_name: name of target repo | |
106 | :param repo_type: the type of SCM target repo |
|
134 | :param repo_type: the type of SCM target repo | |
107 | :param pull_request: the pull request that was created |
|
135 | :param pull_request: the pull request that was created | |
108 | :param data: extra data for specific events e.g {'comment': comment_obj} |
|
136 | :param data: extra data for specific events e.g {'comment': comment_obj} | |
109 | """ |
|
137 | """ | |
110 | if not _supports_repo_type(repo_type): |
|
138 | if not _supports_repo_type(repo_type): | |
111 | return |
|
139 | return | |
112 |
|
140 | |||
113 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'create_pull_request') |
|
141 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'create_pull_request') | |
114 | events.trigger(events.PullRequestCreateEvent(pull_request)) |
|
142 | events.trigger(events.PullRequestCreateEvent(pull_request)) | |
115 | extras.update(pull_request.get_api_data(with_merge_state=False)) |
|
143 | extras.update(pull_request.get_api_data(with_merge_state=False)) | |
116 | hooks_base.log_create_pull_request(**extras) |
|
144 | hooks_base.log_create_pull_request(**extras) | |
117 |
|
145 | |||
118 |
|
146 | |||
119 | def trigger_merge_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): |
|
147 | def trigger_merge_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): | |
120 | """ |
|
148 | """ | |
121 | Triggers merge pull request action hooks |
|
149 | Triggers merge pull request action hooks | |
122 |
|
150 | |||
123 | :param username: username who creates the pull request |
|
151 | :param username: username who creates the pull request | |
124 | :param repo_name: name of target repo |
|
152 | :param repo_name: name of target repo | |
125 | :param repo_type: the type of SCM target repo |
|
153 | :param repo_type: the type of SCM target repo | |
126 | :param pull_request: the pull request that was merged |
|
154 | :param pull_request: the pull request that was merged | |
127 | :param data: extra data for specific events e.g {'comment': comment_obj} |
|
155 | :param data: extra data for specific events e.g {'comment': comment_obj} | |
128 | """ |
|
156 | """ | |
129 | if not _supports_repo_type(repo_type): |
|
157 | if not _supports_repo_type(repo_type): | |
130 | return |
|
158 | return | |
131 |
|
159 | |||
132 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'merge_pull_request') |
|
160 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'merge_pull_request') | |
133 | events.trigger(events.PullRequestMergeEvent(pull_request)) |
|
161 | events.trigger(events.PullRequestMergeEvent(pull_request)) | |
134 | extras.update(pull_request.get_api_data()) |
|
162 | extras.update(pull_request.get_api_data()) | |
135 | hooks_base.log_merge_pull_request(**extras) |
|
163 | hooks_base.log_merge_pull_request(**extras) | |
136 |
|
164 | |||
137 |
|
165 | |||
138 | def trigger_close_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): |
|
166 | def trigger_close_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): | |
139 | """ |
|
167 | """ | |
140 | Triggers close pull request action hooks |
|
168 | Triggers close pull request action hooks | |
141 |
|
169 | |||
142 | :param username: username who creates the pull request |
|
170 | :param username: username who creates the pull request | |
143 | :param repo_name: name of target repo |
|
171 | :param repo_name: name of target repo | |
144 | :param repo_type: the type of SCM target repo |
|
172 | :param repo_type: the type of SCM target repo | |
145 | :param pull_request: the pull request that was closed |
|
173 | :param pull_request: the pull request that was closed | |
146 | :param data: extra data for specific events e.g {'comment': comment_obj} |
|
174 | :param data: extra data for specific events e.g {'comment': comment_obj} | |
147 | """ |
|
175 | """ | |
148 | if not _supports_repo_type(repo_type): |
|
176 | if not _supports_repo_type(repo_type): | |
149 | return |
|
177 | return | |
150 |
|
178 | |||
151 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'close_pull_request') |
|
179 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'close_pull_request') | |
152 | events.trigger(events.PullRequestCloseEvent(pull_request)) |
|
180 | events.trigger(events.PullRequestCloseEvent(pull_request)) | |
153 | extras.update(pull_request.get_api_data()) |
|
181 | extras.update(pull_request.get_api_data()) | |
154 | hooks_base.log_close_pull_request(**extras) |
|
182 | hooks_base.log_close_pull_request(**extras) | |
155 |
|
183 | |||
156 |
|
184 | |||
157 | def trigger_review_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): |
|
185 | def trigger_review_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): | |
158 | """ |
|
186 | """ | |
159 | Triggers review status change pull request action hooks |
|
187 | Triggers review status change pull request action hooks | |
160 |
|
188 | |||
161 | :param username: username who creates the pull request |
|
189 | :param username: username who creates the pull request | |
162 | :param repo_name: name of target repo |
|
190 | :param repo_name: name of target repo | |
163 | :param repo_type: the type of SCM target repo |
|
191 | :param repo_type: the type of SCM target repo | |
164 | :param pull_request: the pull request that review status changed |
|
192 | :param pull_request: the pull request that review status changed | |
165 | :param data: extra data for specific events e.g {'comment': comment_obj} |
|
193 | :param data: extra data for specific events e.g {'comment': comment_obj} | |
166 | """ |
|
194 | """ | |
167 | if not _supports_repo_type(repo_type): |
|
195 | if not _supports_repo_type(repo_type): | |
168 | return |
|
196 | return | |
169 |
|
197 | |||
170 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'review_pull_request') |
|
198 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'review_pull_request') | |
171 | status = data.get('status') |
|
199 | status = data.get('status') | |
172 | events.trigger(events.PullRequestReviewEvent(pull_request, status)) |
|
200 | events.trigger(events.PullRequestReviewEvent(pull_request, status)) | |
173 | extras.update(pull_request.get_api_data()) |
|
201 | extras.update(pull_request.get_api_data()) | |
174 | hooks_base.log_review_pull_request(**extras) |
|
202 | hooks_base.log_review_pull_request(**extras) | |
175 |
|
203 | |||
176 |
|
204 | |||
177 | def trigger_comment_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): |
|
205 | def trigger_comment_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): | |
178 | """ |
|
206 | """ | |
179 | Triggers when a comment is made on a pull request |
|
207 | Triggers when a comment is made on a pull request | |
180 |
|
208 | |||
181 | :param username: username who creates the pull request |
|
209 | :param username: username who creates the pull request | |
182 | :param repo_name: name of target repo |
|
210 | :param repo_name: name of target repo | |
183 | :param repo_type: the type of SCM target repo |
|
211 | :param repo_type: the type of SCM target repo | |
184 | :param pull_request: the pull request that comment was made on |
|
212 | :param pull_request: the pull request that comment was made on | |
185 | :param data: extra data for specific events e.g {'comment': comment_obj} |
|
213 | :param data: extra data for specific events e.g {'comment': comment_obj} | |
186 | """ |
|
214 | """ | |
187 | if not _supports_repo_type(repo_type): |
|
215 | if not _supports_repo_type(repo_type): | |
188 | return |
|
216 | return | |
189 |
|
217 | |||
190 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_pull_request') |
|
218 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_pull_request') | |
191 |
|
219 | |||
192 | comment = data['comment'] |
|
220 | comment = data['comment'] | |
193 | events.trigger(events.PullRequestCommentEvent(pull_request, comment)) |
|
221 | events.trigger(events.PullRequestCommentEvent(pull_request, comment)) | |
194 | extras.update(pull_request.get_api_data()) |
|
222 | extras.update(pull_request.get_api_data()) | |
195 | extras.comment = comment.get_api_data() |
|
223 | extras.comment = comment.get_api_data() | |
196 | hooks_base.log_comment_pull_request(**extras) |
|
224 | hooks_base.log_comment_pull_request(**extras) | |
197 |
|
225 | |||
198 |
|
226 | |||
|
227 | def trigger_comment_pull_request_edit_hook(username, repo_name, repo_type, pull_request, data=None): | |||
|
228 | """ | |||
|
229 | Triggers when a comment was edited on a pull request | |||
|
230 | ||||
|
231 | :param username: username who made the edit | |||
|
232 | :param repo_name: name of target repo | |||
|
233 | :param repo_type: the type of SCM target repo | |||
|
234 | :param pull_request: the pull request that comment was made on | |||
|
235 | :param data: extra data for specific events e.g {'comment': comment_obj} | |||
|
236 | """ | |||
|
237 | if not _supports_repo_type(repo_type): | |||
|
238 | return | |||
|
239 | ||||
|
240 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_pull_request') | |||
|
241 | ||||
|
242 | comment = data['comment'] | |||
|
243 | events.trigger(events.PullRequestCommentEditEvent(pull_request, comment)) | |||
|
244 | extras.update(pull_request.get_api_data()) | |||
|
245 | extras.comment = comment.get_api_data() | |||
|
246 | # TODO(marcink): handle rcextensions... | |||
|
247 | hooks_base.log_comment_pull_request(**extras) | |||
|
248 | ||||
|
249 | ||||
199 | def trigger_update_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): |
|
250 | def trigger_update_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): | |
200 | """ |
|
251 | """ | |
201 | Triggers update pull request action hooks |
|
252 | Triggers update pull request action hooks | |
202 |
|
253 | |||
203 | :param username: username who creates the pull request |
|
254 | :param username: username who creates the pull request | |
204 | :param repo_name: name of target repo |
|
255 | :param repo_name: name of target repo | |
205 | :param repo_type: the type of SCM target repo |
|
256 | :param repo_type: the type of SCM target repo | |
206 | :param pull_request: the pull request that was updated |
|
257 | :param pull_request: the pull request that was updated | |
207 | :param data: extra data for specific events e.g {'comment': comment_obj} |
|
258 | :param data: extra data for specific events e.g {'comment': comment_obj} | |
208 | """ |
|
259 | """ | |
209 | if not _supports_repo_type(repo_type): |
|
260 | if not _supports_repo_type(repo_type): | |
210 | return |
|
261 | return | |
211 |
|
262 | |||
212 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'update_pull_request') |
|
263 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'update_pull_request') | |
213 | events.trigger(events.PullRequestUpdateEvent(pull_request)) |
|
264 | events.trigger(events.PullRequestUpdateEvent(pull_request)) | |
214 | extras.update(pull_request.get_api_data()) |
|
265 | extras.update(pull_request.get_api_data()) | |
215 | hooks_base.log_update_pull_request(**extras) |
|
266 | hooks_base.log_update_pull_request(**extras) |
@@ -1,837 +1,836 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2011-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | comments model for RhodeCode |
|
22 | comments model for RhodeCode | |
23 | """ |
|
23 | """ | |
24 | import datetime |
|
24 | import datetime | |
25 |
|
25 | |||
26 | import logging |
|
26 | import logging | |
27 | import traceback |
|
27 | import traceback | |
28 | import collections |
|
28 | import collections | |
29 |
|
29 | |||
30 | from pyramid.threadlocal import get_current_registry, get_current_request |
|
30 | from pyramid.threadlocal import get_current_registry, get_current_request | |
31 | from sqlalchemy.sql.expression import null |
|
31 | from sqlalchemy.sql.expression import null | |
32 | from sqlalchemy.sql.functions import coalesce |
|
32 | from sqlalchemy.sql.functions import coalesce | |
33 |
|
33 | |||
34 | from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils |
|
34 | from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils | |
35 | from rhodecode.lib import audit_logger |
|
35 | from rhodecode.lib import audit_logger | |
36 | from rhodecode.lib.exceptions import CommentVersionMismatch |
|
36 | from rhodecode.lib.exceptions import CommentVersionMismatch | |
37 | from rhodecode.lib.utils2 import extract_mentioned_users, safe_str, safe_int |
|
37 | from rhodecode.lib.utils2 import extract_mentioned_users, safe_str, safe_int | |
38 | from rhodecode.model import BaseModel |
|
38 | from rhodecode.model import BaseModel | |
39 | from rhodecode.model.db import ( |
|
39 | from rhodecode.model.db import ( | |
40 | ChangesetComment, |
|
40 | ChangesetComment, | |
41 | User, |
|
41 | User, | |
42 | Notification, |
|
42 | Notification, | |
43 | PullRequest, |
|
43 | PullRequest, | |
44 | AttributeDict, |
|
44 | AttributeDict, | |
45 | ChangesetCommentHistory, |
|
45 | ChangesetCommentHistory, | |
46 | ) |
|
46 | ) | |
47 | from rhodecode.model.notification import NotificationModel |
|
47 | from rhodecode.model.notification import NotificationModel | |
48 | from rhodecode.model.meta import Session |
|
48 | from rhodecode.model.meta import Session | |
49 | from rhodecode.model.settings import VcsSettingsModel |
|
49 | from rhodecode.model.settings import VcsSettingsModel | |
50 | from rhodecode.model.notification import EmailNotificationModel |
|
50 | from rhodecode.model.notification import EmailNotificationModel | |
51 | from rhodecode.model.validation_schema.schemas import comment_schema |
|
51 | from rhodecode.model.validation_schema.schemas import comment_schema | |
52 |
|
52 | |||
53 |
|
53 | |||
54 | log = logging.getLogger(__name__) |
|
54 | log = logging.getLogger(__name__) | |
55 |
|
55 | |||
56 |
|
56 | |||
57 | class CommentsModel(BaseModel): |
|
57 | class CommentsModel(BaseModel): | |
58 |
|
58 | |||
59 | cls = ChangesetComment |
|
59 | cls = ChangesetComment | |
60 |
|
60 | |||
61 | DIFF_CONTEXT_BEFORE = 3 |
|
61 | DIFF_CONTEXT_BEFORE = 3 | |
62 | DIFF_CONTEXT_AFTER = 3 |
|
62 | DIFF_CONTEXT_AFTER = 3 | |
63 |
|
63 | |||
64 | def __get_commit_comment(self, changeset_comment): |
|
64 | def __get_commit_comment(self, changeset_comment): | |
65 | return self._get_instance(ChangesetComment, changeset_comment) |
|
65 | return self._get_instance(ChangesetComment, changeset_comment) | |
66 |
|
66 | |||
67 | def __get_pull_request(self, pull_request): |
|
67 | def __get_pull_request(self, pull_request): | |
68 | return self._get_instance(PullRequest, pull_request) |
|
68 | return self._get_instance(PullRequest, pull_request) | |
69 |
|
69 | |||
70 | def _extract_mentions(self, s): |
|
70 | def _extract_mentions(self, s): | |
71 | user_objects = [] |
|
71 | user_objects = [] | |
72 | for username in extract_mentioned_users(s): |
|
72 | for username in extract_mentioned_users(s): | |
73 | user_obj = User.get_by_username(username, case_insensitive=True) |
|
73 | user_obj = User.get_by_username(username, case_insensitive=True) | |
74 | if user_obj: |
|
74 | if user_obj: | |
75 | user_objects.append(user_obj) |
|
75 | user_objects.append(user_obj) | |
76 | return user_objects |
|
76 | return user_objects | |
77 |
|
77 | |||
78 | def _get_renderer(self, global_renderer='rst', request=None): |
|
78 | def _get_renderer(self, global_renderer='rst', request=None): | |
79 | request = request or get_current_request() |
|
79 | request = request or get_current_request() | |
80 |
|
80 | |||
81 | try: |
|
81 | try: | |
82 | global_renderer = request.call_context.visual.default_renderer |
|
82 | global_renderer = request.call_context.visual.default_renderer | |
83 | except AttributeError: |
|
83 | except AttributeError: | |
84 | log.debug("Renderer not set, falling back " |
|
84 | log.debug("Renderer not set, falling back " | |
85 | "to default renderer '%s'", global_renderer) |
|
85 | "to default renderer '%s'", global_renderer) | |
86 | except Exception: |
|
86 | except Exception: | |
87 | log.error(traceback.format_exc()) |
|
87 | log.error(traceback.format_exc()) | |
88 | return global_renderer |
|
88 | return global_renderer | |
89 |
|
89 | |||
90 | def aggregate_comments(self, comments, versions, show_version, inline=False): |
|
90 | def aggregate_comments(self, comments, versions, show_version, inline=False): | |
91 | # group by versions, and count until, and display objects |
|
91 | # group by versions, and count until, and display objects | |
92 |
|
92 | |||
93 | comment_groups = collections.defaultdict(list) |
|
93 | comment_groups = collections.defaultdict(list) | |
94 | [comment_groups[ |
|
94 | [comment_groups[ | |
95 | _co.pull_request_version_id].append(_co) for _co in comments] |
|
95 | _co.pull_request_version_id].append(_co) for _co in comments] | |
96 |
|
96 | |||
97 | def yield_comments(pos): |
|
97 | def yield_comments(pos): | |
98 | for co in comment_groups[pos]: |
|
98 | for co in comment_groups[pos]: | |
99 | yield co |
|
99 | yield co | |
100 |
|
100 | |||
101 | comment_versions = collections.defaultdict( |
|
101 | comment_versions = collections.defaultdict( | |
102 | lambda: collections.defaultdict(list)) |
|
102 | lambda: collections.defaultdict(list)) | |
103 | prev_prvid = -1 |
|
103 | prev_prvid = -1 | |
104 | # fake last entry with None, to aggregate on "latest" version which |
|
104 | # fake last entry with None, to aggregate on "latest" version which | |
105 | # doesn't have an pull_request_version_id |
|
105 | # doesn't have an pull_request_version_id | |
106 | for ver in versions + [AttributeDict({'pull_request_version_id': None})]: |
|
106 | for ver in versions + [AttributeDict({'pull_request_version_id': None})]: | |
107 | prvid = ver.pull_request_version_id |
|
107 | prvid = ver.pull_request_version_id | |
108 | if prev_prvid == -1: |
|
108 | if prev_prvid == -1: | |
109 | prev_prvid = prvid |
|
109 | prev_prvid = prvid | |
110 |
|
110 | |||
111 | for co in yield_comments(prvid): |
|
111 | for co in yield_comments(prvid): | |
112 | comment_versions[prvid]['at'].append(co) |
|
112 | comment_versions[prvid]['at'].append(co) | |
113 |
|
113 | |||
114 | # save until |
|
114 | # save until | |
115 | current = comment_versions[prvid]['at'] |
|
115 | current = comment_versions[prvid]['at'] | |
116 | prev_until = comment_versions[prev_prvid]['until'] |
|
116 | prev_until = comment_versions[prev_prvid]['until'] | |
117 | cur_until = prev_until + current |
|
117 | cur_until = prev_until + current | |
118 | comment_versions[prvid]['until'].extend(cur_until) |
|
118 | comment_versions[prvid]['until'].extend(cur_until) | |
119 |
|
119 | |||
120 | # save outdated |
|
120 | # save outdated | |
121 | if inline: |
|
121 | if inline: | |
122 | outdated = [x for x in cur_until |
|
122 | outdated = [x for x in cur_until | |
123 | if x.outdated_at_version(show_version)] |
|
123 | if x.outdated_at_version(show_version)] | |
124 | else: |
|
124 | else: | |
125 | outdated = [x for x in cur_until |
|
125 | outdated = [x for x in cur_until | |
126 | if x.older_than_version(show_version)] |
|
126 | if x.older_than_version(show_version)] | |
127 | display = [x for x in cur_until if x not in outdated] |
|
127 | display = [x for x in cur_until if x not in outdated] | |
128 |
|
128 | |||
129 | comment_versions[prvid]['outdated'] = outdated |
|
129 | comment_versions[prvid]['outdated'] = outdated | |
130 | comment_versions[prvid]['display'] = display |
|
130 | comment_versions[prvid]['display'] = display | |
131 |
|
131 | |||
132 | prev_prvid = prvid |
|
132 | prev_prvid = prvid | |
133 |
|
133 | |||
134 | return comment_versions |
|
134 | return comment_versions | |
135 |
|
135 | |||
136 | def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None): |
|
136 | def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None): | |
137 | qry = Session().query(ChangesetComment) \ |
|
137 | qry = Session().query(ChangesetComment) \ | |
138 | .filter(ChangesetComment.repo == repo) |
|
138 | .filter(ChangesetComment.repo == repo) | |
139 |
|
139 | |||
140 | if comment_type and comment_type in ChangesetComment.COMMENT_TYPES: |
|
140 | if comment_type and comment_type in ChangesetComment.COMMENT_TYPES: | |
141 | qry = qry.filter(ChangesetComment.comment_type == comment_type) |
|
141 | qry = qry.filter(ChangesetComment.comment_type == comment_type) | |
142 |
|
142 | |||
143 | if user: |
|
143 | if user: | |
144 | user = self._get_user(user) |
|
144 | user = self._get_user(user) | |
145 | if user: |
|
145 | if user: | |
146 | qry = qry.filter(ChangesetComment.user_id == user.user_id) |
|
146 | qry = qry.filter(ChangesetComment.user_id == user.user_id) | |
147 |
|
147 | |||
148 | if commit_id: |
|
148 | if commit_id: | |
149 | qry = qry.filter(ChangesetComment.revision == commit_id) |
|
149 | qry = qry.filter(ChangesetComment.revision == commit_id) | |
150 |
|
150 | |||
151 | qry = qry.order_by(ChangesetComment.created_on) |
|
151 | qry = qry.order_by(ChangesetComment.created_on) | |
152 | return qry.all() |
|
152 | return qry.all() | |
153 |
|
153 | |||
154 | def get_repository_unresolved_todos(self, repo): |
|
154 | def get_repository_unresolved_todos(self, repo): | |
155 | todos = Session().query(ChangesetComment) \ |
|
155 | todos = Session().query(ChangesetComment) \ | |
156 | .filter(ChangesetComment.repo == repo) \ |
|
156 | .filter(ChangesetComment.repo == repo) \ | |
157 | .filter(ChangesetComment.resolved_by == None) \ |
|
157 | .filter(ChangesetComment.resolved_by == None) \ | |
158 | .filter(ChangesetComment.comment_type |
|
158 | .filter(ChangesetComment.comment_type | |
159 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
159 | == ChangesetComment.COMMENT_TYPE_TODO) | |
160 | todos = todos.all() |
|
160 | todos = todos.all() | |
161 |
|
161 | |||
162 | return todos |
|
162 | return todos | |
163 |
|
163 | |||
164 | def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True): |
|
164 | def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True): | |
165 |
|
165 | |||
166 | todos = Session().query(ChangesetComment) \ |
|
166 | todos = Session().query(ChangesetComment) \ | |
167 | .filter(ChangesetComment.pull_request == pull_request) \ |
|
167 | .filter(ChangesetComment.pull_request == pull_request) \ | |
168 | .filter(ChangesetComment.resolved_by == None) \ |
|
168 | .filter(ChangesetComment.resolved_by == None) \ | |
169 | .filter(ChangesetComment.comment_type |
|
169 | .filter(ChangesetComment.comment_type | |
170 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
170 | == ChangesetComment.COMMENT_TYPE_TODO) | |
171 |
|
171 | |||
172 | if not show_outdated: |
|
172 | if not show_outdated: | |
173 | todos = todos.filter( |
|
173 | todos = todos.filter( | |
174 | coalesce(ChangesetComment.display_state, '') != |
|
174 | coalesce(ChangesetComment.display_state, '') != | |
175 | ChangesetComment.COMMENT_OUTDATED) |
|
175 | ChangesetComment.COMMENT_OUTDATED) | |
176 |
|
176 | |||
177 | todos = todos.all() |
|
177 | todos = todos.all() | |
178 |
|
178 | |||
179 | return todos |
|
179 | return todos | |
180 |
|
180 | |||
181 | def get_pull_request_resolved_todos(self, pull_request, show_outdated=True): |
|
181 | def get_pull_request_resolved_todos(self, pull_request, show_outdated=True): | |
182 |
|
182 | |||
183 | todos = Session().query(ChangesetComment) \ |
|
183 | todos = Session().query(ChangesetComment) \ | |
184 | .filter(ChangesetComment.pull_request == pull_request) \ |
|
184 | .filter(ChangesetComment.pull_request == pull_request) \ | |
185 | .filter(ChangesetComment.resolved_by != None) \ |
|
185 | .filter(ChangesetComment.resolved_by != None) \ | |
186 | .filter(ChangesetComment.comment_type |
|
186 | .filter(ChangesetComment.comment_type | |
187 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
187 | == ChangesetComment.COMMENT_TYPE_TODO) | |
188 |
|
188 | |||
189 | if not show_outdated: |
|
189 | if not show_outdated: | |
190 | todos = todos.filter( |
|
190 | todos = todos.filter( | |
191 | coalesce(ChangesetComment.display_state, '') != |
|
191 | coalesce(ChangesetComment.display_state, '') != | |
192 | ChangesetComment.COMMENT_OUTDATED) |
|
192 | ChangesetComment.COMMENT_OUTDATED) | |
193 |
|
193 | |||
194 | todos = todos.all() |
|
194 | todos = todos.all() | |
195 |
|
195 | |||
196 | return todos |
|
196 | return todos | |
197 |
|
197 | |||
198 | def get_commit_unresolved_todos(self, commit_id, show_outdated=True): |
|
198 | def get_commit_unresolved_todos(self, commit_id, show_outdated=True): | |
199 |
|
199 | |||
200 | todos = Session().query(ChangesetComment) \ |
|
200 | todos = Session().query(ChangesetComment) \ | |
201 | .filter(ChangesetComment.revision == commit_id) \ |
|
201 | .filter(ChangesetComment.revision == commit_id) \ | |
202 | .filter(ChangesetComment.resolved_by == None) \ |
|
202 | .filter(ChangesetComment.resolved_by == None) \ | |
203 | .filter(ChangesetComment.comment_type |
|
203 | .filter(ChangesetComment.comment_type | |
204 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
204 | == ChangesetComment.COMMENT_TYPE_TODO) | |
205 |
|
205 | |||
206 | if not show_outdated: |
|
206 | if not show_outdated: | |
207 | todos = todos.filter( |
|
207 | todos = todos.filter( | |
208 | coalesce(ChangesetComment.display_state, '') != |
|
208 | coalesce(ChangesetComment.display_state, '') != | |
209 | ChangesetComment.COMMENT_OUTDATED) |
|
209 | ChangesetComment.COMMENT_OUTDATED) | |
210 |
|
210 | |||
211 | todos = todos.all() |
|
211 | todos = todos.all() | |
212 |
|
212 | |||
213 | return todos |
|
213 | return todos | |
214 |
|
214 | |||
215 | def get_commit_resolved_todos(self, commit_id, show_outdated=True): |
|
215 | def get_commit_resolved_todos(self, commit_id, show_outdated=True): | |
216 |
|
216 | |||
217 | todos = Session().query(ChangesetComment) \ |
|
217 | todos = Session().query(ChangesetComment) \ | |
218 | .filter(ChangesetComment.revision == commit_id) \ |
|
218 | .filter(ChangesetComment.revision == commit_id) \ | |
219 | .filter(ChangesetComment.resolved_by != None) \ |
|
219 | .filter(ChangesetComment.resolved_by != None) \ | |
220 | .filter(ChangesetComment.comment_type |
|
220 | .filter(ChangesetComment.comment_type | |
221 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
221 | == ChangesetComment.COMMENT_TYPE_TODO) | |
222 |
|
222 | |||
223 | if not show_outdated: |
|
223 | if not show_outdated: | |
224 | todos = todos.filter( |
|
224 | todos = todos.filter( | |
225 | coalesce(ChangesetComment.display_state, '') != |
|
225 | coalesce(ChangesetComment.display_state, '') != | |
226 | ChangesetComment.COMMENT_OUTDATED) |
|
226 | ChangesetComment.COMMENT_OUTDATED) | |
227 |
|
227 | |||
228 | todos = todos.all() |
|
228 | todos = todos.all() | |
229 |
|
229 | |||
230 | return todos |
|
230 | return todos | |
231 |
|
231 | |||
232 | def _log_audit_action(self, action, action_data, auth_user, comment): |
|
232 | def _log_audit_action(self, action, action_data, auth_user, comment): | |
233 | audit_logger.store( |
|
233 | audit_logger.store( | |
234 | action=action, |
|
234 | action=action, | |
235 | action_data=action_data, |
|
235 | action_data=action_data, | |
236 | user=auth_user, |
|
236 | user=auth_user, | |
237 | repo=comment.repo) |
|
237 | repo=comment.repo) | |
238 |
|
238 | |||
239 | def create(self, text, repo, user, commit_id=None, pull_request=None, |
|
239 | def create(self, text, repo, user, commit_id=None, pull_request=None, | |
240 | f_path=None, line_no=None, status_change=None, |
|
240 | f_path=None, line_no=None, status_change=None, | |
241 | status_change_type=None, comment_type=None, |
|
241 | status_change_type=None, comment_type=None, | |
242 | resolves_comment_id=None, closing_pr=False, send_email=True, |
|
242 | resolves_comment_id=None, closing_pr=False, send_email=True, | |
243 | renderer=None, auth_user=None, extra_recipients=None): |
|
243 | renderer=None, auth_user=None, extra_recipients=None): | |
244 | """ |
|
244 | """ | |
245 | Creates new comment for commit or pull request. |
|
245 | Creates new comment for commit or pull request. | |
246 | IF status_change is not none this comment is associated with a |
|
246 | IF status_change is not none this comment is associated with a | |
247 | status change of commit or commit associated with pull request |
|
247 | status change of commit or commit associated with pull request | |
248 |
|
248 | |||
249 | :param text: |
|
249 | :param text: | |
250 | :param repo: |
|
250 | :param repo: | |
251 | :param user: |
|
251 | :param user: | |
252 | :param commit_id: |
|
252 | :param commit_id: | |
253 | :param pull_request: |
|
253 | :param pull_request: | |
254 | :param f_path: |
|
254 | :param f_path: | |
255 | :param line_no: |
|
255 | :param line_no: | |
256 | :param status_change: Label for status change |
|
256 | :param status_change: Label for status change | |
257 | :param comment_type: Type of comment |
|
257 | :param comment_type: Type of comment | |
258 | :param resolves_comment_id: id of comment which this one will resolve |
|
258 | :param resolves_comment_id: id of comment which this one will resolve | |
259 | :param status_change_type: type of status change |
|
259 | :param status_change_type: type of status change | |
260 | :param closing_pr: |
|
260 | :param closing_pr: | |
261 | :param send_email: |
|
261 | :param send_email: | |
262 | :param renderer: pick renderer for this comment |
|
262 | :param renderer: pick renderer for this comment | |
263 | :param auth_user: current authenticated user calling this method |
|
263 | :param auth_user: current authenticated user calling this method | |
264 | :param extra_recipients: list of extra users to be added to recipients |
|
264 | :param extra_recipients: list of extra users to be added to recipients | |
265 | """ |
|
265 | """ | |
266 |
|
266 | |||
267 | if not text: |
|
267 | if not text: | |
268 | log.warning('Missing text for comment, skipping...') |
|
268 | log.warning('Missing text for comment, skipping...') | |
269 | return |
|
269 | return | |
270 | request = get_current_request() |
|
270 | request = get_current_request() | |
271 | _ = request.translate |
|
271 | _ = request.translate | |
272 |
|
272 | |||
273 | if not renderer: |
|
273 | if not renderer: | |
274 | renderer = self._get_renderer(request=request) |
|
274 | renderer = self._get_renderer(request=request) | |
275 |
|
275 | |||
276 | repo = self._get_repo(repo) |
|
276 | repo = self._get_repo(repo) | |
277 | user = self._get_user(user) |
|
277 | user = self._get_user(user) | |
278 | auth_user = auth_user or user |
|
278 | auth_user = auth_user or user | |
279 |
|
279 | |||
280 | schema = comment_schema.CommentSchema() |
|
280 | schema = comment_schema.CommentSchema() | |
281 | validated_kwargs = schema.deserialize(dict( |
|
281 | validated_kwargs = schema.deserialize(dict( | |
282 | comment_body=text, |
|
282 | comment_body=text, | |
283 | comment_type=comment_type, |
|
283 | comment_type=comment_type, | |
284 | comment_file=f_path, |
|
284 | comment_file=f_path, | |
285 | comment_line=line_no, |
|
285 | comment_line=line_no, | |
286 | renderer_type=renderer, |
|
286 | renderer_type=renderer, | |
287 | status_change=status_change_type, |
|
287 | status_change=status_change_type, | |
288 | resolves_comment_id=resolves_comment_id, |
|
288 | resolves_comment_id=resolves_comment_id, | |
289 | repo=repo.repo_id, |
|
289 | repo=repo.repo_id, | |
290 | user=user.user_id, |
|
290 | user=user.user_id, | |
291 | )) |
|
291 | )) | |
292 |
|
292 | |||
293 | comment = ChangesetComment() |
|
293 | comment = ChangesetComment() | |
294 | comment.renderer = validated_kwargs['renderer_type'] |
|
294 | comment.renderer = validated_kwargs['renderer_type'] | |
295 | comment.text = validated_kwargs['comment_body'] |
|
295 | comment.text = validated_kwargs['comment_body'] | |
296 | comment.f_path = validated_kwargs['comment_file'] |
|
296 | comment.f_path = validated_kwargs['comment_file'] | |
297 | comment.line_no = validated_kwargs['comment_line'] |
|
297 | comment.line_no = validated_kwargs['comment_line'] | |
298 | comment.comment_type = validated_kwargs['comment_type'] |
|
298 | comment.comment_type = validated_kwargs['comment_type'] | |
299 |
|
299 | |||
300 | comment.repo = repo |
|
300 | comment.repo = repo | |
301 | comment.author = user |
|
301 | comment.author = user | |
302 | resolved_comment = self.__get_commit_comment( |
|
302 | resolved_comment = self.__get_commit_comment( | |
303 | validated_kwargs['resolves_comment_id']) |
|
303 | validated_kwargs['resolves_comment_id']) | |
304 | # check if the comment actually belongs to this PR |
|
304 | # check if the comment actually belongs to this PR | |
305 | if resolved_comment and resolved_comment.pull_request and \ |
|
305 | if resolved_comment and resolved_comment.pull_request and \ | |
306 | resolved_comment.pull_request != pull_request: |
|
306 | resolved_comment.pull_request != pull_request: | |
307 | log.warning('Comment tried to resolved unrelated todo comment: %s', |
|
307 | log.warning('Comment tried to resolved unrelated todo comment: %s', | |
308 | resolved_comment) |
|
308 | resolved_comment) | |
309 | # comment not bound to this pull request, forbid |
|
309 | # comment not bound to this pull request, forbid | |
310 | resolved_comment = None |
|
310 | resolved_comment = None | |
311 |
|
311 | |||
312 | elif resolved_comment and resolved_comment.repo and \ |
|
312 | elif resolved_comment and resolved_comment.repo and \ | |
313 | resolved_comment.repo != repo: |
|
313 | resolved_comment.repo != repo: | |
314 | log.warning('Comment tried to resolved unrelated todo comment: %s', |
|
314 | log.warning('Comment tried to resolved unrelated todo comment: %s', | |
315 | resolved_comment) |
|
315 | resolved_comment) | |
316 | # comment not bound to this repo, forbid |
|
316 | # comment not bound to this repo, forbid | |
317 | resolved_comment = None |
|
317 | resolved_comment = None | |
318 |
|
318 | |||
319 | comment.resolved_comment = resolved_comment |
|
319 | comment.resolved_comment = resolved_comment | |
320 |
|
320 | |||
321 | pull_request_id = pull_request |
|
321 | pull_request_id = pull_request | |
322 |
|
322 | |||
323 | commit_obj = None |
|
323 | commit_obj = None | |
324 | pull_request_obj = None |
|
324 | pull_request_obj = None | |
325 |
|
325 | |||
326 | if commit_id: |
|
326 | if commit_id: | |
327 | notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT |
|
327 | notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT | |
328 | # do a lookup, so we don't pass something bad here |
|
328 | # do a lookup, so we don't pass something bad here | |
329 | commit_obj = repo.scm_instance().get_commit(commit_id=commit_id) |
|
329 | commit_obj = repo.scm_instance().get_commit(commit_id=commit_id) | |
330 | comment.revision = commit_obj.raw_id |
|
330 | comment.revision = commit_obj.raw_id | |
331 |
|
331 | |||
332 | elif pull_request_id: |
|
332 | elif pull_request_id: | |
333 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT |
|
333 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT | |
334 | pull_request_obj = self.__get_pull_request(pull_request_id) |
|
334 | pull_request_obj = self.__get_pull_request(pull_request_id) | |
335 | comment.pull_request = pull_request_obj |
|
335 | comment.pull_request = pull_request_obj | |
336 | else: |
|
336 | else: | |
337 | raise Exception('Please specify commit or pull_request_id') |
|
337 | raise Exception('Please specify commit or pull_request_id') | |
338 |
|
338 | |||
339 | Session().add(comment) |
|
339 | Session().add(comment) | |
340 | Session().flush() |
|
340 | Session().flush() | |
341 | kwargs = { |
|
341 | kwargs = { | |
342 | 'user': user, |
|
342 | 'user': user, | |
343 | 'renderer_type': renderer, |
|
343 | 'renderer_type': renderer, | |
344 | 'repo_name': repo.repo_name, |
|
344 | 'repo_name': repo.repo_name, | |
345 | 'status_change': status_change, |
|
345 | 'status_change': status_change, | |
346 | 'status_change_type': status_change_type, |
|
346 | 'status_change_type': status_change_type, | |
347 | 'comment_body': text, |
|
347 | 'comment_body': text, | |
348 | 'comment_file': f_path, |
|
348 | 'comment_file': f_path, | |
349 | 'comment_line': line_no, |
|
349 | 'comment_line': line_no, | |
350 | 'comment_type': comment_type or 'note', |
|
350 | 'comment_type': comment_type or 'note', | |
351 | 'comment_id': comment.comment_id |
|
351 | 'comment_id': comment.comment_id | |
352 | } |
|
352 | } | |
353 |
|
353 | |||
354 | if commit_obj: |
|
354 | if commit_obj: | |
355 | recipients = ChangesetComment.get_users( |
|
355 | recipients = ChangesetComment.get_users( | |
356 | revision=commit_obj.raw_id) |
|
356 | revision=commit_obj.raw_id) | |
357 | # add commit author if it's in RhodeCode system |
|
357 | # add commit author if it's in RhodeCode system | |
358 | cs_author = User.get_from_cs_author(commit_obj.author) |
|
358 | cs_author = User.get_from_cs_author(commit_obj.author) | |
359 | if not cs_author: |
|
359 | if not cs_author: | |
360 | # use repo owner if we cannot extract the author correctly |
|
360 | # use repo owner if we cannot extract the author correctly | |
361 | cs_author = repo.user |
|
361 | cs_author = repo.user | |
362 | recipients += [cs_author] |
|
362 | recipients += [cs_author] | |
363 |
|
363 | |||
364 | commit_comment_url = self.get_url(comment, request=request) |
|
364 | commit_comment_url = self.get_url(comment, request=request) | |
365 | commit_comment_reply_url = self.get_url( |
|
365 | commit_comment_reply_url = self.get_url( | |
366 | comment, request=request, |
|
366 | comment, request=request, | |
367 | anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id)) |
|
367 | anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id)) | |
368 |
|
368 | |||
369 | target_repo_url = h.link_to( |
|
369 | target_repo_url = h.link_to( | |
370 | repo.repo_name, |
|
370 | repo.repo_name, | |
371 | h.route_url('repo_summary', repo_name=repo.repo_name)) |
|
371 | h.route_url('repo_summary', repo_name=repo.repo_name)) | |
372 |
|
372 | |||
373 | # commit specifics |
|
373 | # commit specifics | |
374 | kwargs.update({ |
|
374 | kwargs.update({ | |
375 | 'commit': commit_obj, |
|
375 | 'commit': commit_obj, | |
376 | 'commit_message': commit_obj.message, |
|
376 | 'commit_message': commit_obj.message, | |
377 | 'commit_target_repo_url': target_repo_url, |
|
377 | 'commit_target_repo_url': target_repo_url, | |
378 | 'commit_comment_url': commit_comment_url, |
|
378 | 'commit_comment_url': commit_comment_url, | |
379 | 'commit_comment_reply_url': commit_comment_reply_url |
|
379 | 'commit_comment_reply_url': commit_comment_reply_url | |
380 | }) |
|
380 | }) | |
381 |
|
381 | |||
382 | elif pull_request_obj: |
|
382 | elif pull_request_obj: | |
383 | # get the current participants of this pull request |
|
383 | # get the current participants of this pull request | |
384 | recipients = ChangesetComment.get_users( |
|
384 | recipients = ChangesetComment.get_users( | |
385 | pull_request_id=pull_request_obj.pull_request_id) |
|
385 | pull_request_id=pull_request_obj.pull_request_id) | |
386 | # add pull request author |
|
386 | # add pull request author | |
387 | recipients += [pull_request_obj.author] |
|
387 | recipients += [pull_request_obj.author] | |
388 |
|
388 | |||
389 | # add the reviewers to notification |
|
389 | # add the reviewers to notification | |
390 | recipients += [x.user for x in pull_request_obj.reviewers] |
|
390 | recipients += [x.user for x in pull_request_obj.reviewers] | |
391 |
|
391 | |||
392 | pr_target_repo = pull_request_obj.target_repo |
|
392 | pr_target_repo = pull_request_obj.target_repo | |
393 | pr_source_repo = pull_request_obj.source_repo |
|
393 | pr_source_repo = pull_request_obj.source_repo | |
394 |
|
394 | |||
395 | pr_comment_url = self.get_url(comment, request=request) |
|
395 | pr_comment_url = self.get_url(comment, request=request) | |
396 | pr_comment_reply_url = self.get_url( |
|
396 | pr_comment_reply_url = self.get_url( | |
397 | comment, request=request, |
|
397 | comment, request=request, | |
398 | anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id)) |
|
398 | anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id)) | |
399 |
|
399 | |||
400 | pr_url = h.route_url( |
|
400 | pr_url = h.route_url( | |
401 | 'pullrequest_show', |
|
401 | 'pullrequest_show', | |
402 | repo_name=pr_target_repo.repo_name, |
|
402 | repo_name=pr_target_repo.repo_name, | |
403 | pull_request_id=pull_request_obj.pull_request_id, ) |
|
403 | pull_request_id=pull_request_obj.pull_request_id, ) | |
404 |
|
404 | |||
405 | # set some variables for email notification |
|
405 | # set some variables for email notification | |
406 | pr_target_repo_url = h.route_url( |
|
406 | pr_target_repo_url = h.route_url( | |
407 | 'repo_summary', repo_name=pr_target_repo.repo_name) |
|
407 | 'repo_summary', repo_name=pr_target_repo.repo_name) | |
408 |
|
408 | |||
409 | pr_source_repo_url = h.route_url( |
|
409 | pr_source_repo_url = h.route_url( | |
410 | 'repo_summary', repo_name=pr_source_repo.repo_name) |
|
410 | 'repo_summary', repo_name=pr_source_repo.repo_name) | |
411 |
|
411 | |||
412 | # pull request specifics |
|
412 | # pull request specifics | |
413 | kwargs.update({ |
|
413 | kwargs.update({ | |
414 | 'pull_request': pull_request_obj, |
|
414 | 'pull_request': pull_request_obj, | |
415 | 'pr_id': pull_request_obj.pull_request_id, |
|
415 | 'pr_id': pull_request_obj.pull_request_id, | |
416 | 'pull_request_url': pr_url, |
|
416 | 'pull_request_url': pr_url, | |
417 | 'pull_request_target_repo': pr_target_repo, |
|
417 | 'pull_request_target_repo': pr_target_repo, | |
418 | 'pull_request_target_repo_url': pr_target_repo_url, |
|
418 | 'pull_request_target_repo_url': pr_target_repo_url, | |
419 | 'pull_request_source_repo': pr_source_repo, |
|
419 | 'pull_request_source_repo': pr_source_repo, | |
420 | 'pull_request_source_repo_url': pr_source_repo_url, |
|
420 | 'pull_request_source_repo_url': pr_source_repo_url, | |
421 | 'pr_comment_url': pr_comment_url, |
|
421 | 'pr_comment_url': pr_comment_url, | |
422 | 'pr_comment_reply_url': pr_comment_reply_url, |
|
422 | 'pr_comment_reply_url': pr_comment_reply_url, | |
423 | 'pr_closing': closing_pr, |
|
423 | 'pr_closing': closing_pr, | |
424 | }) |
|
424 | }) | |
425 |
|
425 | |||
426 | recipients += [self._get_user(u) for u in (extra_recipients or [])] |
|
426 | recipients += [self._get_user(u) for u in (extra_recipients or [])] | |
427 |
|
427 | |||
428 | if send_email: |
|
428 | if send_email: | |
429 | # pre-generate the subject for notification itself |
|
429 | # pre-generate the subject for notification itself | |
430 | (subject, |
|
430 | (subject, | |
431 | _h, _e, # we don't care about those |
|
431 | _h, _e, # we don't care about those | |
432 | body_plaintext) = EmailNotificationModel().render_email( |
|
432 | body_plaintext) = EmailNotificationModel().render_email( | |
433 | notification_type, **kwargs) |
|
433 | notification_type, **kwargs) | |
434 |
|
434 | |||
435 | mention_recipients = set( |
|
435 | mention_recipients = set( | |
436 | self._extract_mentions(text)).difference(recipients) |
|
436 | self._extract_mentions(text)).difference(recipients) | |
437 |
|
437 | |||
438 | # create notification objects, and emails |
|
438 | # create notification objects, and emails | |
439 | NotificationModel().create( |
|
439 | NotificationModel().create( | |
440 | created_by=user, |
|
440 | created_by=user, | |
441 | notification_subject=subject, |
|
441 | notification_subject=subject, | |
442 | notification_body=body_plaintext, |
|
442 | notification_body=body_plaintext, | |
443 | notification_type=notification_type, |
|
443 | notification_type=notification_type, | |
444 | recipients=recipients, |
|
444 | recipients=recipients, | |
445 | mention_recipients=mention_recipients, |
|
445 | mention_recipients=mention_recipients, | |
446 | email_kwargs=kwargs, |
|
446 | email_kwargs=kwargs, | |
447 | ) |
|
447 | ) | |
448 |
|
448 | |||
449 | Session().flush() |
|
449 | Session().flush() | |
450 | if comment.pull_request: |
|
450 | if comment.pull_request: | |
451 | action = 'repo.pull_request.comment.create' |
|
451 | action = 'repo.pull_request.comment.create' | |
452 | else: |
|
452 | else: | |
453 | action = 'repo.commit.comment.create' |
|
453 | action = 'repo.commit.comment.create' | |
454 |
|
454 | |||
455 | comment_data = comment.get_api_data() |
|
455 | comment_data = comment.get_api_data() | |
456 | self._log_audit_action( |
|
456 | self._log_audit_action( | |
457 | action, {'data': comment_data}, auth_user, comment) |
|
457 | action, {'data': comment_data}, auth_user, comment) | |
458 |
|
458 | |||
459 | msg_url = '' |
|
459 | msg_url = '' | |
460 | channel = None |
|
460 | channel = None | |
461 | if commit_obj: |
|
461 | if commit_obj: | |
462 | msg_url = commit_comment_url |
|
462 | msg_url = commit_comment_url | |
463 | repo_name = repo.repo_name |
|
463 | repo_name = repo.repo_name | |
464 | channel = u'/repo${}$/commit/{}'.format( |
|
464 | channel = u'/repo${}$/commit/{}'.format( | |
465 | repo_name, |
|
465 | repo_name, | |
466 | commit_obj.raw_id |
|
466 | commit_obj.raw_id | |
467 | ) |
|
467 | ) | |
468 | elif pull_request_obj: |
|
468 | elif pull_request_obj: | |
469 | msg_url = pr_comment_url |
|
469 | msg_url = pr_comment_url | |
470 | repo_name = pr_target_repo.repo_name |
|
470 | repo_name = pr_target_repo.repo_name | |
471 | channel = u'/repo${}$/pr/{}'.format( |
|
471 | channel = u'/repo${}$/pr/{}'.format( | |
472 | repo_name, |
|
472 | repo_name, | |
473 | pull_request_id |
|
473 | pull_request_id | |
474 | ) |
|
474 | ) | |
475 |
|
475 | |||
476 | message = '<strong>{}</strong> {} - ' \ |
|
476 | message = '<strong>{}</strong> {} - ' \ | |
477 | '<a onclick="window.location=\'{}\';' \ |
|
477 | '<a onclick="window.location=\'{}\';' \ | |
478 | 'window.location.reload()">' \ |
|
478 | 'window.location.reload()">' \ | |
479 | '<strong>{}</strong></a>' |
|
479 | '<strong>{}</strong></a>' | |
480 | message = message.format( |
|
480 | message = message.format( | |
481 | user.username, _('made a comment'), msg_url, |
|
481 | user.username, _('made a comment'), msg_url, | |
482 | _('Show it now')) |
|
482 | _('Show it now')) | |
483 |
|
483 | |||
484 | channelstream.post_message( |
|
484 | channelstream.post_message( | |
485 | channel, message, user.username, |
|
485 | channel, message, user.username, | |
486 | registry=get_current_registry()) |
|
486 | registry=get_current_registry()) | |
487 |
|
487 | |||
488 | return comment |
|
488 | return comment | |
489 |
|
489 | |||
490 | def edit(self, comment_id, text, auth_user, version): |
|
490 | def edit(self, comment_id, text, auth_user, version): | |
491 | """ |
|
491 | """ | |
492 | Change existing comment for commit or pull request. |
|
492 | Change existing comment for commit or pull request. | |
493 |
|
493 | |||
494 | :param comment_id: |
|
494 | :param comment_id: | |
495 | :param text: |
|
495 | :param text: | |
496 | :param auth_user: current authenticated user calling this method |
|
496 | :param auth_user: current authenticated user calling this method | |
497 | :param version: last comment version |
|
497 | :param version: last comment version | |
498 | """ |
|
498 | """ | |
499 | if not text: |
|
499 | if not text: | |
500 | log.warning('Missing text for comment, skipping...') |
|
500 | log.warning('Missing text for comment, skipping...') | |
501 | return |
|
501 | return | |
502 |
|
502 | |||
503 | comment = ChangesetComment.get(comment_id) |
|
503 | comment = ChangesetComment.get(comment_id) | |
504 | old_comment_text = comment.text |
|
504 | old_comment_text = comment.text | |
505 | comment.text = text |
|
505 | comment.text = text | |
506 | comment.modified_at = datetime.datetime.now() |
|
506 | comment.modified_at = datetime.datetime.now() | |
507 | version = safe_int(version) |
|
507 | version = safe_int(version) | |
508 |
|
508 | |||
509 | # NOTE(marcink): this returns initial comment + edits, so v2 from ui |
|
509 | # NOTE(marcink): this returns initial comment + edits, so v2 from ui | |
510 | # would return 3 here |
|
510 | # would return 3 here | |
511 | comment_version = ChangesetCommentHistory.get_version(comment_id) |
|
511 | comment_version = ChangesetCommentHistory.get_version(comment_id) | |
512 |
|
512 | |||
513 | if isinstance(version, (int, long)) and (comment_version - version) != 1: |
|
513 | if isinstance(version, (int, long)) and (comment_version - version) != 1: | |
514 | log.warning( |
|
514 | log.warning( | |
515 | 'Version mismatch comment_version {} submitted {}, skipping'.format( |
|
515 | 'Version mismatch comment_version {} submitted {}, skipping'.format( | |
516 | comment_version-1, # -1 since note above |
|
516 | comment_version-1, # -1 since note above | |
517 | version |
|
517 | version | |
518 | ) |
|
518 | ) | |
519 | ) |
|
519 | ) | |
520 | raise CommentVersionMismatch() |
|
520 | raise CommentVersionMismatch() | |
521 |
|
521 | |||
522 | comment_history = ChangesetCommentHistory() |
|
522 | comment_history = ChangesetCommentHistory() | |
523 | comment_history.comment_id = comment_id |
|
523 | comment_history.comment_id = comment_id | |
524 | comment_history.version = comment_version |
|
524 | comment_history.version = comment_version | |
525 | comment_history.created_by_user_id = auth_user.user_id |
|
525 | comment_history.created_by_user_id = auth_user.user_id | |
526 | comment_history.text = old_comment_text |
|
526 | comment_history.text = old_comment_text | |
527 | # TODO add email notification |
|
527 | # TODO add email notification | |
528 | Session().add(comment_history) |
|
528 | Session().add(comment_history) | |
529 | Session().add(comment) |
|
529 | Session().add(comment) | |
530 | Session().flush() |
|
530 | Session().flush() | |
531 |
|
531 | |||
532 | if comment.pull_request: |
|
532 | if comment.pull_request: | |
533 | action = 'repo.pull_request.comment.edit' |
|
533 | action = 'repo.pull_request.comment.edit' | |
534 | else: |
|
534 | else: | |
535 | action = 'repo.commit.comment.edit' |
|
535 | action = 'repo.commit.comment.edit' | |
536 |
|
536 | |||
537 | comment_data = comment.get_api_data() |
|
537 | comment_data = comment.get_api_data() | |
538 | comment_data['old_comment_text'] = old_comment_text |
|
538 | comment_data['old_comment_text'] = old_comment_text | |
539 | self._log_audit_action( |
|
539 | self._log_audit_action( | |
540 | action, {'data': comment_data}, auth_user, comment) |
|
540 | action, {'data': comment_data}, auth_user, comment) | |
541 |
|
541 | |||
542 | return comment_history |
|
542 | return comment_history | |
543 |
|
543 | |||
544 | def delete(self, comment, auth_user): |
|
544 | def delete(self, comment, auth_user): | |
545 | """ |
|
545 | """ | |
546 | Deletes given comment |
|
546 | Deletes given comment | |
547 | """ |
|
547 | """ | |
548 | comment = self.__get_commit_comment(comment) |
|
548 | comment = self.__get_commit_comment(comment) | |
549 | old_data = comment.get_api_data() |
|
549 | old_data = comment.get_api_data() | |
550 | Session().delete(comment) |
|
550 | Session().delete(comment) | |
551 |
|
551 | |||
552 | if comment.pull_request: |
|
552 | if comment.pull_request: | |
553 | action = 'repo.pull_request.comment.delete' |
|
553 | action = 'repo.pull_request.comment.delete' | |
554 | else: |
|
554 | else: | |
555 | action = 'repo.commit.comment.delete' |
|
555 | action = 'repo.commit.comment.delete' | |
556 |
|
556 | |||
557 | self._log_audit_action( |
|
557 | self._log_audit_action( | |
558 | action, {'old_data': old_data}, auth_user, comment) |
|
558 | action, {'old_data': old_data}, auth_user, comment) | |
559 |
|
559 | |||
560 | return comment |
|
560 | return comment | |
561 |
|
561 | |||
562 | def get_all_comments(self, repo_id, revision=None, pull_request=None): |
|
562 | def get_all_comments(self, repo_id, revision=None, pull_request=None): | |
563 | q = ChangesetComment.query()\ |
|
563 | q = ChangesetComment.query()\ | |
564 | .filter(ChangesetComment.repo_id == repo_id) |
|
564 | .filter(ChangesetComment.repo_id == repo_id) | |
565 | if revision: |
|
565 | if revision: | |
566 | q = q.filter(ChangesetComment.revision == revision) |
|
566 | q = q.filter(ChangesetComment.revision == revision) | |
567 | elif pull_request: |
|
567 | elif pull_request: | |
568 | pull_request = self.__get_pull_request(pull_request) |
|
568 | pull_request = self.__get_pull_request(pull_request) | |
569 | q = q.filter(ChangesetComment.pull_request == pull_request) |
|
569 | q = q.filter(ChangesetComment.pull_request == pull_request) | |
570 | else: |
|
570 | else: | |
571 | raise Exception('Please specify commit or pull_request') |
|
571 | raise Exception('Please specify commit or pull_request') | |
572 | q = q.order_by(ChangesetComment.created_on) |
|
572 | q = q.order_by(ChangesetComment.created_on) | |
573 | return q.all() |
|
573 | return q.all() | |
574 |
|
574 | |||
575 | def get_url(self, comment, request=None, permalink=False, anchor=None): |
|
575 | def get_url(self, comment, request=None, permalink=False, anchor=None): | |
576 | if not request: |
|
576 | if not request: | |
577 | request = get_current_request() |
|
577 | request = get_current_request() | |
578 |
|
578 | |||
579 | comment = self.__get_commit_comment(comment) |
|
579 | comment = self.__get_commit_comment(comment) | |
580 | if anchor is None: |
|
580 | if anchor is None: | |
581 | anchor = 'comment-{}'.format(comment.comment_id) |
|
581 | anchor = 'comment-{}'.format(comment.comment_id) | |
582 |
|
582 | |||
583 | if comment.pull_request: |
|
583 | if comment.pull_request: | |
584 | pull_request = comment.pull_request |
|
584 | pull_request = comment.pull_request | |
585 | if permalink: |
|
585 | if permalink: | |
586 | return request.route_url( |
|
586 | return request.route_url( | |
587 | 'pull_requests_global', |
|
587 | 'pull_requests_global', | |
588 | pull_request_id=pull_request.pull_request_id, |
|
588 | pull_request_id=pull_request.pull_request_id, | |
589 | _anchor=anchor) |
|
589 | _anchor=anchor) | |
590 | else: |
|
590 | else: | |
591 | return request.route_url( |
|
591 | return request.route_url( | |
592 | 'pullrequest_show', |
|
592 | 'pullrequest_show', | |
593 | repo_name=safe_str(pull_request.target_repo.repo_name), |
|
593 | repo_name=safe_str(pull_request.target_repo.repo_name), | |
594 | pull_request_id=pull_request.pull_request_id, |
|
594 | pull_request_id=pull_request.pull_request_id, | |
595 | _anchor=anchor) |
|
595 | _anchor=anchor) | |
596 |
|
596 | |||
597 | else: |
|
597 | else: | |
598 | repo = comment.repo |
|
598 | repo = comment.repo | |
599 | commit_id = comment.revision |
|
599 | commit_id = comment.revision | |
600 |
|
600 | |||
601 | if permalink: |
|
601 | if permalink: | |
602 | return request.route_url( |
|
602 | return request.route_url( | |
603 | 'repo_commit', repo_name=safe_str(repo.repo_id), |
|
603 | 'repo_commit', repo_name=safe_str(repo.repo_id), | |
604 | commit_id=commit_id, |
|
604 | commit_id=commit_id, | |
605 | _anchor=anchor) |
|
605 | _anchor=anchor) | |
606 |
|
606 | |||
607 | else: |
|
607 | else: | |
608 | return request.route_url( |
|
608 | return request.route_url( | |
609 | 'repo_commit', repo_name=safe_str(repo.repo_name), |
|
609 | 'repo_commit', repo_name=safe_str(repo.repo_name), | |
610 | commit_id=commit_id, |
|
610 | commit_id=commit_id, | |
611 | _anchor=anchor) |
|
611 | _anchor=anchor) | |
612 |
|
612 | |||
613 | def get_comments(self, repo_id, revision=None, pull_request=None): |
|
613 | def get_comments(self, repo_id, revision=None, pull_request=None): | |
614 | """ |
|
614 | """ | |
615 | Gets main comments based on revision or pull_request_id |
|
615 | Gets main comments based on revision or pull_request_id | |
616 |
|
616 | |||
617 | :param repo_id: |
|
617 | :param repo_id: | |
618 | :param revision: |
|
618 | :param revision: | |
619 | :param pull_request: |
|
619 | :param pull_request: | |
620 | """ |
|
620 | """ | |
621 |
|
621 | |||
622 | q = ChangesetComment.query()\ |
|
622 | q = ChangesetComment.query()\ | |
623 | .filter(ChangesetComment.repo_id == repo_id)\ |
|
623 | .filter(ChangesetComment.repo_id == repo_id)\ | |
624 | .filter(ChangesetComment.line_no == None)\ |
|
624 | .filter(ChangesetComment.line_no == None)\ | |
625 | .filter(ChangesetComment.f_path == None) |
|
625 | .filter(ChangesetComment.f_path == None) | |
626 | if revision: |
|
626 | if revision: | |
627 | q = q.filter(ChangesetComment.revision == revision) |
|
627 | q = q.filter(ChangesetComment.revision == revision) | |
628 | elif pull_request: |
|
628 | elif pull_request: | |
629 | pull_request = self.__get_pull_request(pull_request) |
|
629 | pull_request = self.__get_pull_request(pull_request) | |
630 | q = q.filter(ChangesetComment.pull_request == pull_request) |
|
630 | q = q.filter(ChangesetComment.pull_request == pull_request) | |
631 | else: |
|
631 | else: | |
632 | raise Exception('Please specify commit or pull_request') |
|
632 | raise Exception('Please specify commit or pull_request') | |
633 | q = q.order_by(ChangesetComment.created_on) |
|
633 | q = q.order_by(ChangesetComment.created_on) | |
634 | return q.all() |
|
634 | return q.all() | |
635 |
|
635 | |||
636 | def get_inline_comments(self, repo_id, revision=None, pull_request=None): |
|
636 | def get_inline_comments(self, repo_id, revision=None, pull_request=None): | |
637 | q = self._get_inline_comments_query(repo_id, revision, pull_request) |
|
637 | q = self._get_inline_comments_query(repo_id, revision, pull_request) | |
638 | return self._group_comments_by_path_and_line_number(q) |
|
638 | return self._group_comments_by_path_and_line_number(q) | |
639 |
|
639 | |||
640 | def get_inline_comments_count(self, inline_comments, skip_outdated=True, |
|
640 | def get_inline_comments_count(self, inline_comments, skip_outdated=True, | |
641 | version=None): |
|
641 | version=None): | |
642 | inline_cnt = 0 |
|
642 | inline_cnt = 0 | |
643 | for fname, per_line_comments in inline_comments.iteritems(): |
|
643 | for fname, per_line_comments in inline_comments.iteritems(): | |
644 | for lno, comments in per_line_comments.iteritems(): |
|
644 | for lno, comments in per_line_comments.iteritems(): | |
645 | for comm in comments: |
|
645 | for comm in comments: | |
646 | if not comm.outdated_at_version(version) and skip_outdated: |
|
646 | if not comm.outdated_at_version(version) and skip_outdated: | |
647 | inline_cnt += 1 |
|
647 | inline_cnt += 1 | |
648 |
|
648 | |||
649 | return inline_cnt |
|
649 | return inline_cnt | |
650 |
|
650 | |||
651 | def get_outdated_comments(self, repo_id, pull_request): |
|
651 | def get_outdated_comments(self, repo_id, pull_request): | |
652 | # TODO: johbo: Remove `repo_id`, it is not needed to find the comments |
|
652 | # TODO: johbo: Remove `repo_id`, it is not needed to find the comments | |
653 | # of a pull request. |
|
653 | # of a pull request. | |
654 | q = self._all_inline_comments_of_pull_request(pull_request) |
|
654 | q = self._all_inline_comments_of_pull_request(pull_request) | |
655 | q = q.filter( |
|
655 | q = q.filter( | |
656 | ChangesetComment.display_state == |
|
656 | ChangesetComment.display_state == | |
657 | ChangesetComment.COMMENT_OUTDATED |
|
657 | ChangesetComment.COMMENT_OUTDATED | |
658 | ).order_by(ChangesetComment.comment_id.asc()) |
|
658 | ).order_by(ChangesetComment.comment_id.asc()) | |
659 |
|
659 | |||
660 | return self._group_comments_by_path_and_line_number(q) |
|
660 | return self._group_comments_by_path_and_line_number(q) | |
661 |
|
661 | |||
662 | def _get_inline_comments_query(self, repo_id, revision, pull_request): |
|
662 | def _get_inline_comments_query(self, repo_id, revision, pull_request): | |
663 | # TODO: johbo: Split this into two methods: One for PR and one for |
|
663 | # TODO: johbo: Split this into two methods: One for PR and one for | |
664 | # commit. |
|
664 | # commit. | |
665 | if revision: |
|
665 | if revision: | |
666 | q = Session().query(ChangesetComment).filter( |
|
666 | q = Session().query(ChangesetComment).filter( | |
667 | ChangesetComment.repo_id == repo_id, |
|
667 | ChangesetComment.repo_id == repo_id, | |
668 | ChangesetComment.line_no != null(), |
|
668 | ChangesetComment.line_no != null(), | |
669 | ChangesetComment.f_path != null(), |
|
669 | ChangesetComment.f_path != null(), | |
670 | ChangesetComment.revision == revision) |
|
670 | ChangesetComment.revision == revision) | |
671 |
|
671 | |||
672 | elif pull_request: |
|
672 | elif pull_request: | |
673 | pull_request = self.__get_pull_request(pull_request) |
|
673 | pull_request = self.__get_pull_request(pull_request) | |
674 | if not CommentsModel.use_outdated_comments(pull_request): |
|
674 | if not CommentsModel.use_outdated_comments(pull_request): | |
675 | q = self._visible_inline_comments_of_pull_request(pull_request) |
|
675 | q = self._visible_inline_comments_of_pull_request(pull_request) | |
676 | else: |
|
676 | else: | |
677 | q = self._all_inline_comments_of_pull_request(pull_request) |
|
677 | q = self._all_inline_comments_of_pull_request(pull_request) | |
678 |
|
678 | |||
679 | else: |
|
679 | else: | |
680 | raise Exception('Please specify commit or pull_request_id') |
|
680 | raise Exception('Please specify commit or pull_request_id') | |
681 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
681 | q = q.order_by(ChangesetComment.comment_id.asc()) | |
682 | return q |
|
682 | return q | |
683 |
|
683 | |||
684 | def _group_comments_by_path_and_line_number(self, q): |
|
684 | def _group_comments_by_path_and_line_number(self, q): | |
685 | comments = q.all() |
|
685 | comments = q.all() | |
686 | paths = collections.defaultdict(lambda: collections.defaultdict(list)) |
|
686 | paths = collections.defaultdict(lambda: collections.defaultdict(list)) | |
687 | for co in comments: |
|
687 | for co in comments: | |
688 | paths[co.f_path][co.line_no].append(co) |
|
688 | paths[co.f_path][co.line_no].append(co) | |
689 | return paths |
|
689 | return paths | |
690 |
|
690 | |||
691 | @classmethod |
|
691 | @classmethod | |
692 | def needed_extra_diff_context(cls): |
|
692 | def needed_extra_diff_context(cls): | |
693 | return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER) |
|
693 | return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER) | |
694 |
|
694 | |||
695 | def outdate_comments(self, pull_request, old_diff_data, new_diff_data): |
|
695 | def outdate_comments(self, pull_request, old_diff_data, new_diff_data): | |
696 | if not CommentsModel.use_outdated_comments(pull_request): |
|
696 | if not CommentsModel.use_outdated_comments(pull_request): | |
697 | return |
|
697 | return | |
698 |
|
698 | |||
699 | comments = self._visible_inline_comments_of_pull_request(pull_request) |
|
699 | comments = self._visible_inline_comments_of_pull_request(pull_request) | |
700 | comments_to_outdate = comments.all() |
|
700 | comments_to_outdate = comments.all() | |
701 |
|
701 | |||
702 | for comment in comments_to_outdate: |
|
702 | for comment in comments_to_outdate: | |
703 | self._outdate_one_comment(comment, old_diff_data, new_diff_data) |
|
703 | self._outdate_one_comment(comment, old_diff_data, new_diff_data) | |
704 |
|
704 | |||
705 | def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc): |
|
705 | def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc): | |
706 | diff_line = _parse_comment_line_number(comment.line_no) |
|
706 | diff_line = _parse_comment_line_number(comment.line_no) | |
707 |
|
707 | |||
708 | try: |
|
708 | try: | |
709 | old_context = old_diff_proc.get_context_of_line( |
|
709 | old_context = old_diff_proc.get_context_of_line( | |
710 | path=comment.f_path, diff_line=diff_line) |
|
710 | path=comment.f_path, diff_line=diff_line) | |
711 | new_context = new_diff_proc.get_context_of_line( |
|
711 | new_context = new_diff_proc.get_context_of_line( | |
712 | path=comment.f_path, diff_line=diff_line) |
|
712 | path=comment.f_path, diff_line=diff_line) | |
713 | except (diffs.LineNotInDiffException, |
|
713 | except (diffs.LineNotInDiffException, | |
714 | diffs.FileNotInDiffException): |
|
714 | diffs.FileNotInDiffException): | |
715 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
715 | comment.display_state = ChangesetComment.COMMENT_OUTDATED | |
716 | return |
|
716 | return | |
717 |
|
717 | |||
718 | if old_context == new_context: |
|
718 | if old_context == new_context: | |
719 | return |
|
719 | return | |
720 |
|
720 | |||
721 | if self._should_relocate_diff_line(diff_line): |
|
721 | if self._should_relocate_diff_line(diff_line): | |
722 | new_diff_lines = new_diff_proc.find_context( |
|
722 | new_diff_lines = new_diff_proc.find_context( | |
723 | path=comment.f_path, context=old_context, |
|
723 | path=comment.f_path, context=old_context, | |
724 | offset=self.DIFF_CONTEXT_BEFORE) |
|
724 | offset=self.DIFF_CONTEXT_BEFORE) | |
725 | if not new_diff_lines: |
|
725 | if not new_diff_lines: | |
726 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
726 | comment.display_state = ChangesetComment.COMMENT_OUTDATED | |
727 | else: |
|
727 | else: | |
728 | new_diff_line = self._choose_closest_diff_line( |
|
728 | new_diff_line = self._choose_closest_diff_line( | |
729 | diff_line, new_diff_lines) |
|
729 | diff_line, new_diff_lines) | |
730 | comment.line_no = _diff_to_comment_line_number(new_diff_line) |
|
730 | comment.line_no = _diff_to_comment_line_number(new_diff_line) | |
731 | else: |
|
731 | else: | |
732 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
732 | comment.display_state = ChangesetComment.COMMENT_OUTDATED | |
733 |
|
733 | |||
734 | def _should_relocate_diff_line(self, diff_line): |
|
734 | def _should_relocate_diff_line(self, diff_line): | |
735 | """ |
|
735 | """ | |
736 | Checks if relocation shall be tried for the given `diff_line`. |
|
736 | Checks if relocation shall be tried for the given `diff_line`. | |
737 |
|
737 | |||
738 | If a comment points into the first lines, then we can have a situation |
|
738 | If a comment points into the first lines, then we can have a situation | |
739 | that after an update another line has been added on top. In this case |
|
739 | that after an update another line has been added on top. In this case | |
740 | we would find the context still and move the comment around. This |
|
740 | we would find the context still and move the comment around. This | |
741 | would be wrong. |
|
741 | would be wrong. | |
742 | """ |
|
742 | """ | |
743 | should_relocate = ( |
|
743 | should_relocate = ( | |
744 | (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or |
|
744 | (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or | |
745 | (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE)) |
|
745 | (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE)) | |
746 | return should_relocate |
|
746 | return should_relocate | |
747 |
|
747 | |||
748 | def _choose_closest_diff_line(self, diff_line, new_diff_lines): |
|
748 | def _choose_closest_diff_line(self, diff_line, new_diff_lines): | |
749 | candidate = new_diff_lines[0] |
|
749 | candidate = new_diff_lines[0] | |
750 | best_delta = _diff_line_delta(diff_line, candidate) |
|
750 | best_delta = _diff_line_delta(diff_line, candidate) | |
751 | for new_diff_line in new_diff_lines[1:]: |
|
751 | for new_diff_line in new_diff_lines[1:]: | |
752 | delta = _diff_line_delta(diff_line, new_diff_line) |
|
752 | delta = _diff_line_delta(diff_line, new_diff_line) | |
753 | if delta < best_delta: |
|
753 | if delta < best_delta: | |
754 | candidate = new_diff_line |
|
754 | candidate = new_diff_line | |
755 | best_delta = delta |
|
755 | best_delta = delta | |
756 | return candidate |
|
756 | return candidate | |
757 |
|
757 | |||
758 | def _visible_inline_comments_of_pull_request(self, pull_request): |
|
758 | def _visible_inline_comments_of_pull_request(self, pull_request): | |
759 | comments = self._all_inline_comments_of_pull_request(pull_request) |
|
759 | comments = self._all_inline_comments_of_pull_request(pull_request) | |
760 | comments = comments.filter( |
|
760 | comments = comments.filter( | |
761 | coalesce(ChangesetComment.display_state, '') != |
|
761 | coalesce(ChangesetComment.display_state, '') != | |
762 | ChangesetComment.COMMENT_OUTDATED) |
|
762 | ChangesetComment.COMMENT_OUTDATED) | |
763 | return comments |
|
763 | return comments | |
764 |
|
764 | |||
765 | def _all_inline_comments_of_pull_request(self, pull_request): |
|
765 | def _all_inline_comments_of_pull_request(self, pull_request): | |
766 | comments = Session().query(ChangesetComment)\ |
|
766 | comments = Session().query(ChangesetComment)\ | |
767 | .filter(ChangesetComment.line_no != None)\ |
|
767 | .filter(ChangesetComment.line_no != None)\ | |
768 | .filter(ChangesetComment.f_path != None)\ |
|
768 | .filter(ChangesetComment.f_path != None)\ | |
769 | .filter(ChangesetComment.pull_request == pull_request) |
|
769 | .filter(ChangesetComment.pull_request == pull_request) | |
770 | return comments |
|
770 | return comments | |
771 |
|
771 | |||
772 | def _all_general_comments_of_pull_request(self, pull_request): |
|
772 | def _all_general_comments_of_pull_request(self, pull_request): | |
773 | comments = Session().query(ChangesetComment)\ |
|
773 | comments = Session().query(ChangesetComment)\ | |
774 | .filter(ChangesetComment.line_no == None)\ |
|
774 | .filter(ChangesetComment.line_no == None)\ | |
775 | .filter(ChangesetComment.f_path == None)\ |
|
775 | .filter(ChangesetComment.f_path == None)\ | |
776 | .filter(ChangesetComment.pull_request == pull_request) |
|
776 | .filter(ChangesetComment.pull_request == pull_request) | |
777 |
|
777 | |||
778 | return comments |
|
778 | return comments | |
779 |
|
779 | |||
780 | @staticmethod |
|
780 | @staticmethod | |
781 | def use_outdated_comments(pull_request): |
|
781 | def use_outdated_comments(pull_request): | |
782 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
782 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) | |
783 | settings = settings_model.get_general_settings() |
|
783 | settings = settings_model.get_general_settings() | |
784 | return settings.get('rhodecode_use_outdated_comments', False) |
|
784 | return settings.get('rhodecode_use_outdated_comments', False) | |
785 |
|
785 | |||
786 | def trigger_commit_comment_hook(self, repo, user, action, data=None): |
|
786 | def trigger_commit_comment_hook(self, repo, user, action, data=None): | |
787 | repo = self._get_repo(repo) |
|
787 | repo = self._get_repo(repo) | |
788 | target_scm = repo.scm_instance() |
|
788 | target_scm = repo.scm_instance() | |
789 | if action == 'create': |
|
789 | if action == 'create': | |
790 | trigger_hook = hooks_utils.trigger_comment_commit_hooks |
|
790 | trigger_hook = hooks_utils.trigger_comment_commit_hooks | |
791 | elif action == 'edit': |
|
791 | elif action == 'edit': | |
792 | # TODO(dan): when this is supported we trigger edit hook too |
|
792 | trigger_hook = hooks_utils.trigger_comment_commit_edit_hooks | |
793 | return |
|
|||
794 | else: |
|
793 | else: | |
795 | return |
|
794 | return | |
796 |
|
795 | |||
797 | log.debug('Handling repo %s trigger_commit_comment_hook with action %s: %s', |
|
796 | log.debug('Handling repo %s trigger_commit_comment_hook with action %s: %s', | |
798 | repo, action, trigger_hook) |
|
797 | repo, action, trigger_hook) | |
799 | trigger_hook( |
|
798 | trigger_hook( | |
800 | username=user.username, |
|
799 | username=user.username, | |
801 | repo_name=repo.repo_name, |
|
800 | repo_name=repo.repo_name, | |
802 | repo_type=target_scm.alias, |
|
801 | repo_type=target_scm.alias, | |
803 | repo=repo, |
|
802 | repo=repo, | |
804 | data=data) |
|
803 | data=data) | |
805 |
|
804 | |||
806 |
|
805 | |||
807 | def _parse_comment_line_number(line_no): |
|
806 | def _parse_comment_line_number(line_no): | |
808 | """ |
|
807 | """ | |
809 | Parses line numbers of the form "(o|n)\d+" and returns them in a tuple. |
|
808 | Parses line numbers of the form "(o|n)\d+" and returns them in a tuple. | |
810 | """ |
|
809 | """ | |
811 | old_line = None |
|
810 | old_line = None | |
812 | new_line = None |
|
811 | new_line = None | |
813 | if line_no.startswith('o'): |
|
812 | if line_no.startswith('o'): | |
814 | old_line = int(line_no[1:]) |
|
813 | old_line = int(line_no[1:]) | |
815 | elif line_no.startswith('n'): |
|
814 | elif line_no.startswith('n'): | |
816 | new_line = int(line_no[1:]) |
|
815 | new_line = int(line_no[1:]) | |
817 | else: |
|
816 | else: | |
818 | raise ValueError("Comment lines have to start with either 'o' or 'n'.") |
|
817 | raise ValueError("Comment lines have to start with either 'o' or 'n'.") | |
819 | return diffs.DiffLineNumber(old_line, new_line) |
|
818 | return diffs.DiffLineNumber(old_line, new_line) | |
820 |
|
819 | |||
821 |
|
820 | |||
822 | def _diff_to_comment_line_number(diff_line): |
|
821 | def _diff_to_comment_line_number(diff_line): | |
823 | if diff_line.new is not None: |
|
822 | if diff_line.new is not None: | |
824 | return u'n{}'.format(diff_line.new) |
|
823 | return u'n{}'.format(diff_line.new) | |
825 | elif diff_line.old is not None: |
|
824 | elif diff_line.old is not None: | |
826 | return u'o{}'.format(diff_line.old) |
|
825 | return u'o{}'.format(diff_line.old) | |
827 | return u'' |
|
826 | return u'' | |
828 |
|
827 | |||
829 |
|
828 | |||
830 | def _diff_line_delta(a, b): |
|
829 | def _diff_line_delta(a, b): | |
831 | if None not in (a.new, b.new): |
|
830 | if None not in (a.new, b.new): | |
832 | return abs(a.new - b.new) |
|
831 | return abs(a.new - b.new) | |
833 | elif None not in (a.old, b.old): |
|
832 | elif None not in (a.old, b.old): | |
834 | return abs(a.old - b.old) |
|
833 | return abs(a.old - b.old) | |
835 | else: |
|
834 | else: | |
836 | raise ValueError( |
|
835 | raise ValueError( | |
837 | "Cannot compute delta between {} and {}".format(a, b)) |
|
836 | "Cannot compute delta between {} and {}".format(a, b)) |
@@ -1,2072 +1,2074 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2012-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2012-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | """ |
|
22 | """ | |
23 | pull request model for RhodeCode |
|
23 | pull request model for RhodeCode | |
24 | """ |
|
24 | """ | |
25 |
|
25 | |||
26 |
|
26 | |||
27 | import json |
|
27 | import json | |
28 | import logging |
|
28 | import logging | |
29 | import os |
|
29 | import os | |
30 |
|
30 | |||
31 | import datetime |
|
31 | import datetime | |
32 | import urllib |
|
32 | import urllib | |
33 | import collections |
|
33 | import collections | |
34 |
|
34 | |||
35 | from pyramid import compat |
|
35 | from pyramid import compat | |
36 | from pyramid.threadlocal import get_current_request |
|
36 | from pyramid.threadlocal import get_current_request | |
37 |
|
37 | |||
38 | from rhodecode.lib.vcs.nodes import FileNode |
|
38 | from rhodecode.lib.vcs.nodes import FileNode | |
39 | from rhodecode.translation import lazy_ugettext |
|
39 | from rhodecode.translation import lazy_ugettext | |
40 | from rhodecode.lib import helpers as h, hooks_utils, diffs |
|
40 | from rhodecode.lib import helpers as h, hooks_utils, diffs | |
41 | from rhodecode.lib import audit_logger |
|
41 | from rhodecode.lib import audit_logger | |
42 | from rhodecode.lib.compat import OrderedDict |
|
42 | from rhodecode.lib.compat import OrderedDict | |
43 | from rhodecode.lib.hooks_daemon import prepare_callback_daemon |
|
43 | from rhodecode.lib.hooks_daemon import prepare_callback_daemon | |
44 | from rhodecode.lib.markup_renderer import ( |
|
44 | from rhodecode.lib.markup_renderer import ( | |
45 | DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer) |
|
45 | DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer) | |
46 | from rhodecode.lib.utils2 import ( |
|
46 | from rhodecode.lib.utils2 import ( | |
47 | safe_unicode, safe_str, md5_safe, AttributeDict, safe_int, |
|
47 | safe_unicode, safe_str, md5_safe, AttributeDict, safe_int, | |
48 | get_current_rhodecode_user) |
|
48 | get_current_rhodecode_user) | |
49 | from rhodecode.lib.vcs.backends.base import ( |
|
49 | from rhodecode.lib.vcs.backends.base import ( | |
50 | Reference, MergeResponse, MergeFailureReason, UpdateFailureReason, |
|
50 | Reference, MergeResponse, MergeFailureReason, UpdateFailureReason, | |
51 | TargetRefMissing, SourceRefMissing) |
|
51 | TargetRefMissing, SourceRefMissing) | |
52 | from rhodecode.lib.vcs.conf import settings as vcs_settings |
|
52 | from rhodecode.lib.vcs.conf import settings as vcs_settings | |
53 | from rhodecode.lib.vcs.exceptions import ( |
|
53 | from rhodecode.lib.vcs.exceptions import ( | |
54 | CommitDoesNotExistError, EmptyRepositoryError) |
|
54 | CommitDoesNotExistError, EmptyRepositoryError) | |
55 | from rhodecode.model import BaseModel |
|
55 | from rhodecode.model import BaseModel | |
56 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
56 | from rhodecode.model.changeset_status import ChangesetStatusModel | |
57 | from rhodecode.model.comment import CommentsModel |
|
57 | from rhodecode.model.comment import CommentsModel | |
58 | from rhodecode.model.db import ( |
|
58 | from rhodecode.model.db import ( | |
59 | or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus, |
|
59 | or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus, | |
60 | PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User) |
|
60 | PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User) | |
61 | from rhodecode.model.meta import Session |
|
61 | from rhodecode.model.meta import Session | |
62 | from rhodecode.model.notification import NotificationModel, \ |
|
62 | from rhodecode.model.notification import NotificationModel, \ | |
63 | EmailNotificationModel |
|
63 | EmailNotificationModel | |
64 | from rhodecode.model.scm import ScmModel |
|
64 | from rhodecode.model.scm import ScmModel | |
65 | from rhodecode.model.settings import VcsSettingsModel |
|
65 | from rhodecode.model.settings import VcsSettingsModel | |
66 |
|
66 | |||
67 |
|
67 | |||
68 | log = logging.getLogger(__name__) |
|
68 | log = logging.getLogger(__name__) | |
69 |
|
69 | |||
70 |
|
70 | |||
71 | # Data structure to hold the response data when updating commits during a pull |
|
71 | # Data structure to hold the response data when updating commits during a pull | |
72 | # request update. |
|
72 | # request update. | |
73 | class UpdateResponse(object): |
|
73 | class UpdateResponse(object): | |
74 |
|
74 | |||
75 | def __init__(self, executed, reason, new, old, common_ancestor_id, |
|
75 | def __init__(self, executed, reason, new, old, common_ancestor_id, | |
76 | commit_changes, source_changed, target_changed): |
|
76 | commit_changes, source_changed, target_changed): | |
77 |
|
77 | |||
78 | self.executed = executed |
|
78 | self.executed = executed | |
79 | self.reason = reason |
|
79 | self.reason = reason | |
80 | self.new = new |
|
80 | self.new = new | |
81 | self.old = old |
|
81 | self.old = old | |
82 | self.common_ancestor_id = common_ancestor_id |
|
82 | self.common_ancestor_id = common_ancestor_id | |
83 | self.changes = commit_changes |
|
83 | self.changes = commit_changes | |
84 | self.source_changed = source_changed |
|
84 | self.source_changed = source_changed | |
85 | self.target_changed = target_changed |
|
85 | self.target_changed = target_changed | |
86 |
|
86 | |||
87 |
|
87 | |||
88 | def get_diff_info( |
|
88 | def get_diff_info( | |
89 | source_repo, source_ref, target_repo, target_ref, get_authors=False, |
|
89 | source_repo, source_ref, target_repo, target_ref, get_authors=False, | |
90 | get_commit_authors=True): |
|
90 | get_commit_authors=True): | |
91 | """ |
|
91 | """ | |
92 | Calculates detailed diff information for usage in preview of creation of a pull-request. |
|
92 | Calculates detailed diff information for usage in preview of creation of a pull-request. | |
93 | This is also used for default reviewers logic |
|
93 | This is also used for default reviewers logic | |
94 | """ |
|
94 | """ | |
95 |
|
95 | |||
96 | source_scm = source_repo.scm_instance() |
|
96 | source_scm = source_repo.scm_instance() | |
97 | target_scm = target_repo.scm_instance() |
|
97 | target_scm = target_repo.scm_instance() | |
98 |
|
98 | |||
99 | ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm) |
|
99 | ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm) | |
100 | if not ancestor_id: |
|
100 | if not ancestor_id: | |
101 | raise ValueError( |
|
101 | raise ValueError( | |
102 | 'cannot calculate diff info without a common ancestor. ' |
|
102 | 'cannot calculate diff info without a common ancestor. ' | |
103 | 'Make sure both repositories are related, and have a common forking commit.') |
|
103 | 'Make sure both repositories are related, and have a common forking commit.') | |
104 |
|
104 | |||
105 | # case here is that want a simple diff without incoming commits, |
|
105 | # case here is that want a simple diff without incoming commits, | |
106 | # previewing what will be merged based only on commits in the source. |
|
106 | # previewing what will be merged based only on commits in the source. | |
107 | log.debug('Using ancestor %s as source_ref instead of %s', |
|
107 | log.debug('Using ancestor %s as source_ref instead of %s', | |
108 | ancestor_id, source_ref) |
|
108 | ancestor_id, source_ref) | |
109 |
|
109 | |||
110 | # source of changes now is the common ancestor |
|
110 | # source of changes now is the common ancestor | |
111 | source_commit = source_scm.get_commit(commit_id=ancestor_id) |
|
111 | source_commit = source_scm.get_commit(commit_id=ancestor_id) | |
112 | # target commit becomes the source ref as it is the last commit |
|
112 | # target commit becomes the source ref as it is the last commit | |
113 | # for diff generation this logic gives proper diff |
|
113 | # for diff generation this logic gives proper diff | |
114 | target_commit = source_scm.get_commit(commit_id=source_ref) |
|
114 | target_commit = source_scm.get_commit(commit_id=source_ref) | |
115 |
|
115 | |||
116 | vcs_diff = \ |
|
116 | vcs_diff = \ | |
117 | source_scm.get_diff(commit1=source_commit, commit2=target_commit, |
|
117 | source_scm.get_diff(commit1=source_commit, commit2=target_commit, | |
118 | ignore_whitespace=False, context=3) |
|
118 | ignore_whitespace=False, context=3) | |
119 |
|
119 | |||
120 | diff_processor = diffs.DiffProcessor( |
|
120 | diff_processor = diffs.DiffProcessor( | |
121 | vcs_diff, format='newdiff', diff_limit=None, |
|
121 | vcs_diff, format='newdiff', diff_limit=None, | |
122 | file_limit=None, show_full_diff=True) |
|
122 | file_limit=None, show_full_diff=True) | |
123 |
|
123 | |||
124 | _parsed = diff_processor.prepare() |
|
124 | _parsed = diff_processor.prepare() | |
125 |
|
125 | |||
126 | all_files = [] |
|
126 | all_files = [] | |
127 | all_files_changes = [] |
|
127 | all_files_changes = [] | |
128 | changed_lines = {} |
|
128 | changed_lines = {} | |
129 | stats = [0, 0] |
|
129 | stats = [0, 0] | |
130 | for f in _parsed: |
|
130 | for f in _parsed: | |
131 | all_files.append(f['filename']) |
|
131 | all_files.append(f['filename']) | |
132 | all_files_changes.append({ |
|
132 | all_files_changes.append({ | |
133 | 'filename': f['filename'], |
|
133 | 'filename': f['filename'], | |
134 | 'stats': f['stats'] |
|
134 | 'stats': f['stats'] | |
135 | }) |
|
135 | }) | |
136 | stats[0] += f['stats']['added'] |
|
136 | stats[0] += f['stats']['added'] | |
137 | stats[1] += f['stats']['deleted'] |
|
137 | stats[1] += f['stats']['deleted'] | |
138 |
|
138 | |||
139 | changed_lines[f['filename']] = [] |
|
139 | changed_lines[f['filename']] = [] | |
140 | if len(f['chunks']) < 2: |
|
140 | if len(f['chunks']) < 2: | |
141 | continue |
|
141 | continue | |
142 | # first line is "context" information |
|
142 | # first line is "context" information | |
143 | for chunks in f['chunks'][1:]: |
|
143 | for chunks in f['chunks'][1:]: | |
144 | for chunk in chunks['lines']: |
|
144 | for chunk in chunks['lines']: | |
145 | if chunk['action'] not in ('del', 'mod'): |
|
145 | if chunk['action'] not in ('del', 'mod'): | |
146 | continue |
|
146 | continue | |
147 | changed_lines[f['filename']].append(chunk['old_lineno']) |
|
147 | changed_lines[f['filename']].append(chunk['old_lineno']) | |
148 |
|
148 | |||
149 | commit_authors = [] |
|
149 | commit_authors = [] | |
150 | user_counts = {} |
|
150 | user_counts = {} | |
151 | email_counts = {} |
|
151 | email_counts = {} | |
152 | author_counts = {} |
|
152 | author_counts = {} | |
153 | _commit_cache = {} |
|
153 | _commit_cache = {} | |
154 |
|
154 | |||
155 | commits = [] |
|
155 | commits = [] | |
156 | if get_commit_authors: |
|
156 | if get_commit_authors: | |
157 | commits = target_scm.compare( |
|
157 | commits = target_scm.compare( | |
158 | target_ref, source_ref, source_scm, merge=True, |
|
158 | target_ref, source_ref, source_scm, merge=True, | |
159 | pre_load=["author"]) |
|
159 | pre_load=["author"]) | |
160 |
|
160 | |||
161 | for commit in commits: |
|
161 | for commit in commits: | |
162 | user = User.get_from_cs_author(commit.author) |
|
162 | user = User.get_from_cs_author(commit.author) | |
163 | if user and user not in commit_authors: |
|
163 | if user and user not in commit_authors: | |
164 | commit_authors.append(user) |
|
164 | commit_authors.append(user) | |
165 |
|
165 | |||
166 | # lines |
|
166 | # lines | |
167 | if get_authors: |
|
167 | if get_authors: | |
168 | target_commit = source_repo.get_commit(ancestor_id) |
|
168 | target_commit = source_repo.get_commit(ancestor_id) | |
169 |
|
169 | |||
170 | for fname, lines in changed_lines.items(): |
|
170 | for fname, lines in changed_lines.items(): | |
171 | try: |
|
171 | try: | |
172 | node = target_commit.get_node(fname) |
|
172 | node = target_commit.get_node(fname) | |
173 | except Exception: |
|
173 | except Exception: | |
174 | continue |
|
174 | continue | |
175 |
|
175 | |||
176 | if not isinstance(node, FileNode): |
|
176 | if not isinstance(node, FileNode): | |
177 | continue |
|
177 | continue | |
178 |
|
178 | |||
179 | for annotation in node.annotate: |
|
179 | for annotation in node.annotate: | |
180 | line_no, commit_id, get_commit_func, line_text = annotation |
|
180 | line_no, commit_id, get_commit_func, line_text = annotation | |
181 | if line_no in lines: |
|
181 | if line_no in lines: | |
182 | if commit_id not in _commit_cache: |
|
182 | if commit_id not in _commit_cache: | |
183 | _commit_cache[commit_id] = get_commit_func() |
|
183 | _commit_cache[commit_id] = get_commit_func() | |
184 | commit = _commit_cache[commit_id] |
|
184 | commit = _commit_cache[commit_id] | |
185 | author = commit.author |
|
185 | author = commit.author | |
186 | email = commit.author_email |
|
186 | email = commit.author_email | |
187 | user = User.get_from_cs_author(author) |
|
187 | user = User.get_from_cs_author(author) | |
188 | if user: |
|
188 | if user: | |
189 | user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1 |
|
189 | user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1 | |
190 | author_counts[author] = author_counts.get(author, 0) + 1 |
|
190 | author_counts[author] = author_counts.get(author, 0) + 1 | |
191 | email_counts[email] = email_counts.get(email, 0) + 1 |
|
191 | email_counts[email] = email_counts.get(email, 0) + 1 | |
192 |
|
192 | |||
193 | return { |
|
193 | return { | |
194 | 'commits': commits, |
|
194 | 'commits': commits, | |
195 | 'files': all_files_changes, |
|
195 | 'files': all_files_changes, | |
196 | 'stats': stats, |
|
196 | 'stats': stats, | |
197 | 'ancestor': ancestor_id, |
|
197 | 'ancestor': ancestor_id, | |
198 | # original authors of modified files |
|
198 | # original authors of modified files | |
199 | 'original_authors': { |
|
199 | 'original_authors': { | |
200 | 'users': user_counts, |
|
200 | 'users': user_counts, | |
201 | 'authors': author_counts, |
|
201 | 'authors': author_counts, | |
202 | 'emails': email_counts, |
|
202 | 'emails': email_counts, | |
203 | }, |
|
203 | }, | |
204 | 'commit_authors': commit_authors |
|
204 | 'commit_authors': commit_authors | |
205 | } |
|
205 | } | |
206 |
|
206 | |||
207 |
|
207 | |||
208 | class PullRequestModel(BaseModel): |
|
208 | class PullRequestModel(BaseModel): | |
209 |
|
209 | |||
210 | cls = PullRequest |
|
210 | cls = PullRequest | |
211 |
|
211 | |||
212 | DIFF_CONTEXT = diffs.DEFAULT_CONTEXT |
|
212 | DIFF_CONTEXT = diffs.DEFAULT_CONTEXT | |
213 |
|
213 | |||
214 | UPDATE_STATUS_MESSAGES = { |
|
214 | UPDATE_STATUS_MESSAGES = { | |
215 | UpdateFailureReason.NONE: lazy_ugettext( |
|
215 | UpdateFailureReason.NONE: lazy_ugettext( | |
216 | 'Pull request update successful.'), |
|
216 | 'Pull request update successful.'), | |
217 | UpdateFailureReason.UNKNOWN: lazy_ugettext( |
|
217 | UpdateFailureReason.UNKNOWN: lazy_ugettext( | |
218 | 'Pull request update failed because of an unknown error.'), |
|
218 | 'Pull request update failed because of an unknown error.'), | |
219 | UpdateFailureReason.NO_CHANGE: lazy_ugettext( |
|
219 | UpdateFailureReason.NO_CHANGE: lazy_ugettext( | |
220 | 'No update needed because the source and target have not changed.'), |
|
220 | 'No update needed because the source and target have not changed.'), | |
221 | UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext( |
|
221 | UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext( | |
222 | 'Pull request cannot be updated because the reference type is ' |
|
222 | 'Pull request cannot be updated because the reference type is ' | |
223 | 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'), |
|
223 | 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'), | |
224 | UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext( |
|
224 | UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext( | |
225 | 'This pull request cannot be updated because the target ' |
|
225 | 'This pull request cannot be updated because the target ' | |
226 | 'reference is missing.'), |
|
226 | 'reference is missing.'), | |
227 | UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext( |
|
227 | UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext( | |
228 | 'This pull request cannot be updated because the source ' |
|
228 | 'This pull request cannot be updated because the source ' | |
229 | 'reference is missing.'), |
|
229 | 'reference is missing.'), | |
230 | } |
|
230 | } | |
231 | REF_TYPES = ['bookmark', 'book', 'tag', 'branch'] |
|
231 | REF_TYPES = ['bookmark', 'book', 'tag', 'branch'] | |
232 | UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch'] |
|
232 | UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch'] | |
233 |
|
233 | |||
234 | def __get_pull_request(self, pull_request): |
|
234 | def __get_pull_request(self, pull_request): | |
235 | return self._get_instance(( |
|
235 | return self._get_instance(( | |
236 | PullRequest, PullRequestVersion), pull_request) |
|
236 | PullRequest, PullRequestVersion), pull_request) | |
237 |
|
237 | |||
238 | def _check_perms(self, perms, pull_request, user, api=False): |
|
238 | def _check_perms(self, perms, pull_request, user, api=False): | |
239 | if not api: |
|
239 | if not api: | |
240 | return h.HasRepoPermissionAny(*perms)( |
|
240 | return h.HasRepoPermissionAny(*perms)( | |
241 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
241 | user=user, repo_name=pull_request.target_repo.repo_name) | |
242 | else: |
|
242 | else: | |
243 | return h.HasRepoPermissionAnyApi(*perms)( |
|
243 | return h.HasRepoPermissionAnyApi(*perms)( | |
244 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
244 | user=user, repo_name=pull_request.target_repo.repo_name) | |
245 |
|
245 | |||
246 | def check_user_read(self, pull_request, user, api=False): |
|
246 | def check_user_read(self, pull_request, user, api=False): | |
247 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
247 | _perms = ('repository.admin', 'repository.write', 'repository.read',) | |
248 | return self._check_perms(_perms, pull_request, user, api) |
|
248 | return self._check_perms(_perms, pull_request, user, api) | |
249 |
|
249 | |||
250 | def check_user_merge(self, pull_request, user, api=False): |
|
250 | def check_user_merge(self, pull_request, user, api=False): | |
251 | _perms = ('repository.admin', 'repository.write', 'hg.admin',) |
|
251 | _perms = ('repository.admin', 'repository.write', 'hg.admin',) | |
252 | return self._check_perms(_perms, pull_request, user, api) |
|
252 | return self._check_perms(_perms, pull_request, user, api) | |
253 |
|
253 | |||
254 | def check_user_update(self, pull_request, user, api=False): |
|
254 | def check_user_update(self, pull_request, user, api=False): | |
255 | owner = user.user_id == pull_request.user_id |
|
255 | owner = user.user_id == pull_request.user_id | |
256 | return self.check_user_merge(pull_request, user, api) or owner |
|
256 | return self.check_user_merge(pull_request, user, api) or owner | |
257 |
|
257 | |||
258 | def check_user_delete(self, pull_request, user): |
|
258 | def check_user_delete(self, pull_request, user): | |
259 | owner = user.user_id == pull_request.user_id |
|
259 | owner = user.user_id == pull_request.user_id | |
260 | _perms = ('repository.admin',) |
|
260 | _perms = ('repository.admin',) | |
261 | return self._check_perms(_perms, pull_request, user) or owner |
|
261 | return self._check_perms(_perms, pull_request, user) or owner | |
262 |
|
262 | |||
263 | def check_user_change_status(self, pull_request, user, api=False): |
|
263 | def check_user_change_status(self, pull_request, user, api=False): | |
264 | reviewer = user.user_id in [x.user_id for x in |
|
264 | reviewer = user.user_id in [x.user_id for x in | |
265 | pull_request.reviewers] |
|
265 | pull_request.reviewers] | |
266 | return self.check_user_update(pull_request, user, api) or reviewer |
|
266 | return self.check_user_update(pull_request, user, api) or reviewer | |
267 |
|
267 | |||
268 | def check_user_comment(self, pull_request, user): |
|
268 | def check_user_comment(self, pull_request, user): | |
269 | owner = user.user_id == pull_request.user_id |
|
269 | owner = user.user_id == pull_request.user_id | |
270 | return self.check_user_read(pull_request, user) or owner |
|
270 | return self.check_user_read(pull_request, user) or owner | |
271 |
|
271 | |||
272 | def get(self, pull_request): |
|
272 | def get(self, pull_request): | |
273 | return self.__get_pull_request(pull_request) |
|
273 | return self.__get_pull_request(pull_request) | |
274 |
|
274 | |||
275 | def _prepare_get_all_query(self, repo_name, search_q=None, source=False, |
|
275 | def _prepare_get_all_query(self, repo_name, search_q=None, source=False, | |
276 | statuses=None, opened_by=None, order_by=None, |
|
276 | statuses=None, opened_by=None, order_by=None, | |
277 | order_dir='desc', only_created=False): |
|
277 | order_dir='desc', only_created=False): | |
278 | repo = None |
|
278 | repo = None | |
279 | if repo_name: |
|
279 | if repo_name: | |
280 | repo = self._get_repo(repo_name) |
|
280 | repo = self._get_repo(repo_name) | |
281 |
|
281 | |||
282 | q = PullRequest.query() |
|
282 | q = PullRequest.query() | |
283 |
|
283 | |||
284 | if search_q: |
|
284 | if search_q: | |
285 | like_expression = u'%{}%'.format(safe_unicode(search_q)) |
|
285 | like_expression = u'%{}%'.format(safe_unicode(search_q)) | |
286 | q = q.join(User) |
|
286 | q = q.join(User) | |
287 | q = q.filter(or_( |
|
287 | q = q.filter(or_( | |
288 | cast(PullRequest.pull_request_id, String).ilike(like_expression), |
|
288 | cast(PullRequest.pull_request_id, String).ilike(like_expression), | |
289 | User.username.ilike(like_expression), |
|
289 | User.username.ilike(like_expression), | |
290 | PullRequest.title.ilike(like_expression), |
|
290 | PullRequest.title.ilike(like_expression), | |
291 | PullRequest.description.ilike(like_expression), |
|
291 | PullRequest.description.ilike(like_expression), | |
292 | )) |
|
292 | )) | |
293 |
|
293 | |||
294 | # source or target |
|
294 | # source or target | |
295 | if repo and source: |
|
295 | if repo and source: | |
296 | q = q.filter(PullRequest.source_repo == repo) |
|
296 | q = q.filter(PullRequest.source_repo == repo) | |
297 | elif repo: |
|
297 | elif repo: | |
298 | q = q.filter(PullRequest.target_repo == repo) |
|
298 | q = q.filter(PullRequest.target_repo == repo) | |
299 |
|
299 | |||
300 | # closed,opened |
|
300 | # closed,opened | |
301 | if statuses: |
|
301 | if statuses: | |
302 | q = q.filter(PullRequest.status.in_(statuses)) |
|
302 | q = q.filter(PullRequest.status.in_(statuses)) | |
303 |
|
303 | |||
304 | # opened by filter |
|
304 | # opened by filter | |
305 | if opened_by: |
|
305 | if opened_by: | |
306 | q = q.filter(PullRequest.user_id.in_(opened_by)) |
|
306 | q = q.filter(PullRequest.user_id.in_(opened_by)) | |
307 |
|
307 | |||
308 | # only get those that are in "created" state |
|
308 | # only get those that are in "created" state | |
309 | if only_created: |
|
309 | if only_created: | |
310 | q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED) |
|
310 | q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED) | |
311 |
|
311 | |||
312 | if order_by: |
|
312 | if order_by: | |
313 | order_map = { |
|
313 | order_map = { | |
314 | 'name_raw': PullRequest.pull_request_id, |
|
314 | 'name_raw': PullRequest.pull_request_id, | |
315 | 'id': PullRequest.pull_request_id, |
|
315 | 'id': PullRequest.pull_request_id, | |
316 | 'title': PullRequest.title, |
|
316 | 'title': PullRequest.title, | |
317 | 'updated_on_raw': PullRequest.updated_on, |
|
317 | 'updated_on_raw': PullRequest.updated_on, | |
318 | 'target_repo': PullRequest.target_repo_id |
|
318 | 'target_repo': PullRequest.target_repo_id | |
319 | } |
|
319 | } | |
320 | if order_dir == 'asc': |
|
320 | if order_dir == 'asc': | |
321 | q = q.order_by(order_map[order_by].asc()) |
|
321 | q = q.order_by(order_map[order_by].asc()) | |
322 | else: |
|
322 | else: | |
323 | q = q.order_by(order_map[order_by].desc()) |
|
323 | q = q.order_by(order_map[order_by].desc()) | |
324 |
|
324 | |||
325 | return q |
|
325 | return q | |
326 |
|
326 | |||
327 | def count_all(self, repo_name, search_q=None, source=False, statuses=None, |
|
327 | def count_all(self, repo_name, search_q=None, source=False, statuses=None, | |
328 | opened_by=None): |
|
328 | opened_by=None): | |
329 | """ |
|
329 | """ | |
330 | Count the number of pull requests for a specific repository. |
|
330 | Count the number of pull requests for a specific repository. | |
331 |
|
331 | |||
332 | :param repo_name: target or source repo |
|
332 | :param repo_name: target or source repo | |
333 | :param search_q: filter by text |
|
333 | :param search_q: filter by text | |
334 | :param source: boolean flag to specify if repo_name refers to source |
|
334 | :param source: boolean flag to specify if repo_name refers to source | |
335 | :param statuses: list of pull request statuses |
|
335 | :param statuses: list of pull request statuses | |
336 | :param opened_by: author user of the pull request |
|
336 | :param opened_by: author user of the pull request | |
337 | :returns: int number of pull requests |
|
337 | :returns: int number of pull requests | |
338 | """ |
|
338 | """ | |
339 | q = self._prepare_get_all_query( |
|
339 | q = self._prepare_get_all_query( | |
340 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
340 | repo_name, search_q=search_q, source=source, statuses=statuses, | |
341 | opened_by=opened_by) |
|
341 | opened_by=opened_by) | |
342 |
|
342 | |||
343 | return q.count() |
|
343 | return q.count() | |
344 |
|
344 | |||
345 | def get_all(self, repo_name, search_q=None, source=False, statuses=None, |
|
345 | def get_all(self, repo_name, search_q=None, source=False, statuses=None, | |
346 | opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'): |
|
346 | opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'): | |
347 | """ |
|
347 | """ | |
348 | Get all pull requests for a specific repository. |
|
348 | Get all pull requests for a specific repository. | |
349 |
|
349 | |||
350 | :param repo_name: target or source repo |
|
350 | :param repo_name: target or source repo | |
351 | :param search_q: filter by text |
|
351 | :param search_q: filter by text | |
352 | :param source: boolean flag to specify if repo_name refers to source |
|
352 | :param source: boolean flag to specify if repo_name refers to source | |
353 | :param statuses: list of pull request statuses |
|
353 | :param statuses: list of pull request statuses | |
354 | :param opened_by: author user of the pull request |
|
354 | :param opened_by: author user of the pull request | |
355 | :param offset: pagination offset |
|
355 | :param offset: pagination offset | |
356 | :param length: length of returned list |
|
356 | :param length: length of returned list | |
357 | :param order_by: order of the returned list |
|
357 | :param order_by: order of the returned list | |
358 | :param order_dir: 'asc' or 'desc' ordering direction |
|
358 | :param order_dir: 'asc' or 'desc' ordering direction | |
359 | :returns: list of pull requests |
|
359 | :returns: list of pull requests | |
360 | """ |
|
360 | """ | |
361 | q = self._prepare_get_all_query( |
|
361 | q = self._prepare_get_all_query( | |
362 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
362 | repo_name, search_q=search_q, source=source, statuses=statuses, | |
363 | opened_by=opened_by, order_by=order_by, order_dir=order_dir) |
|
363 | opened_by=opened_by, order_by=order_by, order_dir=order_dir) | |
364 |
|
364 | |||
365 | if length: |
|
365 | if length: | |
366 | pull_requests = q.limit(length).offset(offset).all() |
|
366 | pull_requests = q.limit(length).offset(offset).all() | |
367 | else: |
|
367 | else: | |
368 | pull_requests = q.all() |
|
368 | pull_requests = q.all() | |
369 |
|
369 | |||
370 | return pull_requests |
|
370 | return pull_requests | |
371 |
|
371 | |||
372 | def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None, |
|
372 | def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None, | |
373 | opened_by=None): |
|
373 | opened_by=None): | |
374 | """ |
|
374 | """ | |
375 | Count the number of pull requests for a specific repository that are |
|
375 | Count the number of pull requests for a specific repository that are | |
376 | awaiting review. |
|
376 | awaiting review. | |
377 |
|
377 | |||
378 | :param repo_name: target or source repo |
|
378 | :param repo_name: target or source repo | |
379 | :param search_q: filter by text |
|
379 | :param search_q: filter by text | |
380 | :param source: boolean flag to specify if repo_name refers to source |
|
380 | :param source: boolean flag to specify if repo_name refers to source | |
381 | :param statuses: list of pull request statuses |
|
381 | :param statuses: list of pull request statuses | |
382 | :param opened_by: author user of the pull request |
|
382 | :param opened_by: author user of the pull request | |
383 | :returns: int number of pull requests |
|
383 | :returns: int number of pull requests | |
384 | """ |
|
384 | """ | |
385 | pull_requests = self.get_awaiting_review( |
|
385 | pull_requests = self.get_awaiting_review( | |
386 | repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by) |
|
386 | repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by) | |
387 |
|
387 | |||
388 | return len(pull_requests) |
|
388 | return len(pull_requests) | |
389 |
|
389 | |||
390 | def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None, |
|
390 | def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None, | |
391 | opened_by=None, offset=0, length=None, |
|
391 | opened_by=None, offset=0, length=None, | |
392 | order_by=None, order_dir='desc'): |
|
392 | order_by=None, order_dir='desc'): | |
393 | """ |
|
393 | """ | |
394 | Get all pull requests for a specific repository that are awaiting |
|
394 | Get all pull requests for a specific repository that are awaiting | |
395 | review. |
|
395 | review. | |
396 |
|
396 | |||
397 | :param repo_name: target or source repo |
|
397 | :param repo_name: target or source repo | |
398 | :param search_q: filter by text |
|
398 | :param search_q: filter by text | |
399 | :param source: boolean flag to specify if repo_name refers to source |
|
399 | :param source: boolean flag to specify if repo_name refers to source | |
400 | :param statuses: list of pull request statuses |
|
400 | :param statuses: list of pull request statuses | |
401 | :param opened_by: author user of the pull request |
|
401 | :param opened_by: author user of the pull request | |
402 | :param offset: pagination offset |
|
402 | :param offset: pagination offset | |
403 | :param length: length of returned list |
|
403 | :param length: length of returned list | |
404 | :param order_by: order of the returned list |
|
404 | :param order_by: order of the returned list | |
405 | :param order_dir: 'asc' or 'desc' ordering direction |
|
405 | :param order_dir: 'asc' or 'desc' ordering direction | |
406 | :returns: list of pull requests |
|
406 | :returns: list of pull requests | |
407 | """ |
|
407 | """ | |
408 | pull_requests = self.get_all( |
|
408 | pull_requests = self.get_all( | |
409 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
409 | repo_name, search_q=search_q, source=source, statuses=statuses, | |
410 | opened_by=opened_by, order_by=order_by, order_dir=order_dir) |
|
410 | opened_by=opened_by, order_by=order_by, order_dir=order_dir) | |
411 |
|
411 | |||
412 | _filtered_pull_requests = [] |
|
412 | _filtered_pull_requests = [] | |
413 | for pr in pull_requests: |
|
413 | for pr in pull_requests: | |
414 | status = pr.calculated_review_status() |
|
414 | status = pr.calculated_review_status() | |
415 | if status in [ChangesetStatus.STATUS_NOT_REVIEWED, |
|
415 | if status in [ChangesetStatus.STATUS_NOT_REVIEWED, | |
416 | ChangesetStatus.STATUS_UNDER_REVIEW]: |
|
416 | ChangesetStatus.STATUS_UNDER_REVIEW]: | |
417 | _filtered_pull_requests.append(pr) |
|
417 | _filtered_pull_requests.append(pr) | |
418 | if length: |
|
418 | if length: | |
419 | return _filtered_pull_requests[offset:offset+length] |
|
419 | return _filtered_pull_requests[offset:offset+length] | |
420 | else: |
|
420 | else: | |
421 | return _filtered_pull_requests |
|
421 | return _filtered_pull_requests | |
422 |
|
422 | |||
423 | def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None, |
|
423 | def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None, | |
424 | opened_by=None, user_id=None): |
|
424 | opened_by=None, user_id=None): | |
425 | """ |
|
425 | """ | |
426 | Count the number of pull requests for a specific repository that are |
|
426 | Count the number of pull requests for a specific repository that are | |
427 | awaiting review from a specific user. |
|
427 | awaiting review from a specific user. | |
428 |
|
428 | |||
429 | :param repo_name: target or source repo |
|
429 | :param repo_name: target or source repo | |
430 | :param search_q: filter by text |
|
430 | :param search_q: filter by text | |
431 | :param source: boolean flag to specify if repo_name refers to source |
|
431 | :param source: boolean flag to specify if repo_name refers to source | |
432 | :param statuses: list of pull request statuses |
|
432 | :param statuses: list of pull request statuses | |
433 | :param opened_by: author user of the pull request |
|
433 | :param opened_by: author user of the pull request | |
434 | :param user_id: reviewer user of the pull request |
|
434 | :param user_id: reviewer user of the pull request | |
435 | :returns: int number of pull requests |
|
435 | :returns: int number of pull requests | |
436 | """ |
|
436 | """ | |
437 | pull_requests = self.get_awaiting_my_review( |
|
437 | pull_requests = self.get_awaiting_my_review( | |
438 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
438 | repo_name, search_q=search_q, source=source, statuses=statuses, | |
439 | opened_by=opened_by, user_id=user_id) |
|
439 | opened_by=opened_by, user_id=user_id) | |
440 |
|
440 | |||
441 | return len(pull_requests) |
|
441 | return len(pull_requests) | |
442 |
|
442 | |||
443 | def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None, |
|
443 | def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None, | |
444 | opened_by=None, user_id=None, offset=0, |
|
444 | opened_by=None, user_id=None, offset=0, | |
445 | length=None, order_by=None, order_dir='desc'): |
|
445 | length=None, order_by=None, order_dir='desc'): | |
446 | """ |
|
446 | """ | |
447 | Get all pull requests for a specific repository that are awaiting |
|
447 | Get all pull requests for a specific repository that are awaiting | |
448 | review from a specific user. |
|
448 | review from a specific user. | |
449 |
|
449 | |||
450 | :param repo_name: target or source repo |
|
450 | :param repo_name: target or source repo | |
451 | :param search_q: filter by text |
|
451 | :param search_q: filter by text | |
452 | :param source: boolean flag to specify if repo_name refers to source |
|
452 | :param source: boolean flag to specify if repo_name refers to source | |
453 | :param statuses: list of pull request statuses |
|
453 | :param statuses: list of pull request statuses | |
454 | :param opened_by: author user of the pull request |
|
454 | :param opened_by: author user of the pull request | |
455 | :param user_id: reviewer user of the pull request |
|
455 | :param user_id: reviewer user of the pull request | |
456 | :param offset: pagination offset |
|
456 | :param offset: pagination offset | |
457 | :param length: length of returned list |
|
457 | :param length: length of returned list | |
458 | :param order_by: order of the returned list |
|
458 | :param order_by: order of the returned list | |
459 | :param order_dir: 'asc' or 'desc' ordering direction |
|
459 | :param order_dir: 'asc' or 'desc' ordering direction | |
460 | :returns: list of pull requests |
|
460 | :returns: list of pull requests | |
461 | """ |
|
461 | """ | |
462 | pull_requests = self.get_all( |
|
462 | pull_requests = self.get_all( | |
463 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
463 | repo_name, search_q=search_q, source=source, statuses=statuses, | |
464 | opened_by=opened_by, order_by=order_by, order_dir=order_dir) |
|
464 | opened_by=opened_by, order_by=order_by, order_dir=order_dir) | |
465 |
|
465 | |||
466 | _my = PullRequestModel().get_not_reviewed(user_id) |
|
466 | _my = PullRequestModel().get_not_reviewed(user_id) | |
467 | my_participation = [] |
|
467 | my_participation = [] | |
468 | for pr in pull_requests: |
|
468 | for pr in pull_requests: | |
469 | if pr in _my: |
|
469 | if pr in _my: | |
470 | my_participation.append(pr) |
|
470 | my_participation.append(pr) | |
471 | _filtered_pull_requests = my_participation |
|
471 | _filtered_pull_requests = my_participation | |
472 | if length: |
|
472 | if length: | |
473 | return _filtered_pull_requests[offset:offset+length] |
|
473 | return _filtered_pull_requests[offset:offset+length] | |
474 | else: |
|
474 | else: | |
475 | return _filtered_pull_requests |
|
475 | return _filtered_pull_requests | |
476 |
|
476 | |||
477 | def get_not_reviewed(self, user_id): |
|
477 | def get_not_reviewed(self, user_id): | |
478 | return [ |
|
478 | return [ | |
479 | x.pull_request for x in PullRequestReviewers.query().filter( |
|
479 | x.pull_request for x in PullRequestReviewers.query().filter( | |
480 | PullRequestReviewers.user_id == user_id).all() |
|
480 | PullRequestReviewers.user_id == user_id).all() | |
481 | ] |
|
481 | ] | |
482 |
|
482 | |||
483 | def _prepare_participating_query(self, user_id=None, statuses=None, query='', |
|
483 | def _prepare_participating_query(self, user_id=None, statuses=None, query='', | |
484 | order_by=None, order_dir='desc'): |
|
484 | order_by=None, order_dir='desc'): | |
485 | q = PullRequest.query() |
|
485 | q = PullRequest.query() | |
486 | if user_id: |
|
486 | if user_id: | |
487 | reviewers_subquery = Session().query( |
|
487 | reviewers_subquery = Session().query( | |
488 | PullRequestReviewers.pull_request_id).filter( |
|
488 | PullRequestReviewers.pull_request_id).filter( | |
489 | PullRequestReviewers.user_id == user_id).subquery() |
|
489 | PullRequestReviewers.user_id == user_id).subquery() | |
490 | user_filter = or_( |
|
490 | user_filter = or_( | |
491 | PullRequest.user_id == user_id, |
|
491 | PullRequest.user_id == user_id, | |
492 | PullRequest.pull_request_id.in_(reviewers_subquery) |
|
492 | PullRequest.pull_request_id.in_(reviewers_subquery) | |
493 | ) |
|
493 | ) | |
494 | q = PullRequest.query().filter(user_filter) |
|
494 | q = PullRequest.query().filter(user_filter) | |
495 |
|
495 | |||
496 | # closed,opened |
|
496 | # closed,opened | |
497 | if statuses: |
|
497 | if statuses: | |
498 | q = q.filter(PullRequest.status.in_(statuses)) |
|
498 | q = q.filter(PullRequest.status.in_(statuses)) | |
499 |
|
499 | |||
500 | if query: |
|
500 | if query: | |
501 | like_expression = u'%{}%'.format(safe_unicode(query)) |
|
501 | like_expression = u'%{}%'.format(safe_unicode(query)) | |
502 | q = q.join(User) |
|
502 | q = q.join(User) | |
503 | q = q.filter(or_( |
|
503 | q = q.filter(or_( | |
504 | cast(PullRequest.pull_request_id, String).ilike(like_expression), |
|
504 | cast(PullRequest.pull_request_id, String).ilike(like_expression), | |
505 | User.username.ilike(like_expression), |
|
505 | User.username.ilike(like_expression), | |
506 | PullRequest.title.ilike(like_expression), |
|
506 | PullRequest.title.ilike(like_expression), | |
507 | PullRequest.description.ilike(like_expression), |
|
507 | PullRequest.description.ilike(like_expression), | |
508 | )) |
|
508 | )) | |
509 | if order_by: |
|
509 | if order_by: | |
510 | order_map = { |
|
510 | order_map = { | |
511 | 'name_raw': PullRequest.pull_request_id, |
|
511 | 'name_raw': PullRequest.pull_request_id, | |
512 | 'title': PullRequest.title, |
|
512 | 'title': PullRequest.title, | |
513 | 'updated_on_raw': PullRequest.updated_on, |
|
513 | 'updated_on_raw': PullRequest.updated_on, | |
514 | 'target_repo': PullRequest.target_repo_id |
|
514 | 'target_repo': PullRequest.target_repo_id | |
515 | } |
|
515 | } | |
516 | if order_dir == 'asc': |
|
516 | if order_dir == 'asc': | |
517 | q = q.order_by(order_map[order_by].asc()) |
|
517 | q = q.order_by(order_map[order_by].asc()) | |
518 | else: |
|
518 | else: | |
519 | q = q.order_by(order_map[order_by].desc()) |
|
519 | q = q.order_by(order_map[order_by].desc()) | |
520 |
|
520 | |||
521 | return q |
|
521 | return q | |
522 |
|
522 | |||
523 | def count_im_participating_in(self, user_id=None, statuses=None, query=''): |
|
523 | def count_im_participating_in(self, user_id=None, statuses=None, query=''): | |
524 | q = self._prepare_participating_query(user_id, statuses=statuses, query=query) |
|
524 | q = self._prepare_participating_query(user_id, statuses=statuses, query=query) | |
525 | return q.count() |
|
525 | return q.count() | |
526 |
|
526 | |||
527 | def get_im_participating_in( |
|
527 | def get_im_participating_in( | |
528 | self, user_id=None, statuses=None, query='', offset=0, |
|
528 | self, user_id=None, statuses=None, query='', offset=0, | |
529 | length=None, order_by=None, order_dir='desc'): |
|
529 | length=None, order_by=None, order_dir='desc'): | |
530 | """ |
|
530 | """ | |
531 | Get all Pull requests that i'm participating in, or i have opened |
|
531 | Get all Pull requests that i'm participating in, or i have opened | |
532 | """ |
|
532 | """ | |
533 |
|
533 | |||
534 | q = self._prepare_participating_query( |
|
534 | q = self._prepare_participating_query( | |
535 | user_id, statuses=statuses, query=query, order_by=order_by, |
|
535 | user_id, statuses=statuses, query=query, order_by=order_by, | |
536 | order_dir=order_dir) |
|
536 | order_dir=order_dir) | |
537 |
|
537 | |||
538 | if length: |
|
538 | if length: | |
539 | pull_requests = q.limit(length).offset(offset).all() |
|
539 | pull_requests = q.limit(length).offset(offset).all() | |
540 | else: |
|
540 | else: | |
541 | pull_requests = q.all() |
|
541 | pull_requests = q.all() | |
542 |
|
542 | |||
543 | return pull_requests |
|
543 | return pull_requests | |
544 |
|
544 | |||
545 | def get_versions(self, pull_request): |
|
545 | def get_versions(self, pull_request): | |
546 | """ |
|
546 | """ | |
547 | returns version of pull request sorted by ID descending |
|
547 | returns version of pull request sorted by ID descending | |
548 | """ |
|
548 | """ | |
549 | return PullRequestVersion.query()\ |
|
549 | return PullRequestVersion.query()\ | |
550 | .filter(PullRequestVersion.pull_request == pull_request)\ |
|
550 | .filter(PullRequestVersion.pull_request == pull_request)\ | |
551 | .order_by(PullRequestVersion.pull_request_version_id.asc())\ |
|
551 | .order_by(PullRequestVersion.pull_request_version_id.asc())\ | |
552 | .all() |
|
552 | .all() | |
553 |
|
553 | |||
554 | def get_pr_version(self, pull_request_id, version=None): |
|
554 | def get_pr_version(self, pull_request_id, version=None): | |
555 | at_version = None |
|
555 | at_version = None | |
556 |
|
556 | |||
557 | if version and version == 'latest': |
|
557 | if version and version == 'latest': | |
558 | pull_request_ver = PullRequest.get(pull_request_id) |
|
558 | pull_request_ver = PullRequest.get(pull_request_id) | |
559 | pull_request_obj = pull_request_ver |
|
559 | pull_request_obj = pull_request_ver | |
560 | _org_pull_request_obj = pull_request_obj |
|
560 | _org_pull_request_obj = pull_request_obj | |
561 | at_version = 'latest' |
|
561 | at_version = 'latest' | |
562 | elif version: |
|
562 | elif version: | |
563 | pull_request_ver = PullRequestVersion.get_or_404(version) |
|
563 | pull_request_ver = PullRequestVersion.get_or_404(version) | |
564 | pull_request_obj = pull_request_ver |
|
564 | pull_request_obj = pull_request_ver | |
565 | _org_pull_request_obj = pull_request_ver.pull_request |
|
565 | _org_pull_request_obj = pull_request_ver.pull_request | |
566 | at_version = pull_request_ver.pull_request_version_id |
|
566 | at_version = pull_request_ver.pull_request_version_id | |
567 | else: |
|
567 | else: | |
568 | _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404( |
|
568 | _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404( | |
569 | pull_request_id) |
|
569 | pull_request_id) | |
570 |
|
570 | |||
571 | pull_request_display_obj = PullRequest.get_pr_display_object( |
|
571 | pull_request_display_obj = PullRequest.get_pr_display_object( | |
572 | pull_request_obj, _org_pull_request_obj) |
|
572 | pull_request_obj, _org_pull_request_obj) | |
573 |
|
573 | |||
574 | return _org_pull_request_obj, pull_request_obj, \ |
|
574 | return _org_pull_request_obj, pull_request_obj, \ | |
575 | pull_request_display_obj, at_version |
|
575 | pull_request_display_obj, at_version | |
576 |
|
576 | |||
577 | def create(self, created_by, source_repo, source_ref, target_repo, |
|
577 | def create(self, created_by, source_repo, source_ref, target_repo, | |
578 | target_ref, revisions, reviewers, title, description=None, |
|
578 | target_ref, revisions, reviewers, title, description=None, | |
579 | common_ancestor_id=None, |
|
579 | common_ancestor_id=None, | |
580 | description_renderer=None, |
|
580 | description_renderer=None, | |
581 | reviewer_data=None, translator=None, auth_user=None): |
|
581 | reviewer_data=None, translator=None, auth_user=None): | |
582 | translator = translator or get_current_request().translate |
|
582 | translator = translator or get_current_request().translate | |
583 |
|
583 | |||
584 | created_by_user = self._get_user(created_by) |
|
584 | created_by_user = self._get_user(created_by) | |
585 | auth_user = auth_user or created_by_user.AuthUser() |
|
585 | auth_user = auth_user or created_by_user.AuthUser() | |
586 | source_repo = self._get_repo(source_repo) |
|
586 | source_repo = self._get_repo(source_repo) | |
587 | target_repo = self._get_repo(target_repo) |
|
587 | target_repo = self._get_repo(target_repo) | |
588 |
|
588 | |||
589 | pull_request = PullRequest() |
|
589 | pull_request = PullRequest() | |
590 | pull_request.source_repo = source_repo |
|
590 | pull_request.source_repo = source_repo | |
591 | pull_request.source_ref = source_ref |
|
591 | pull_request.source_ref = source_ref | |
592 | pull_request.target_repo = target_repo |
|
592 | pull_request.target_repo = target_repo | |
593 | pull_request.target_ref = target_ref |
|
593 | pull_request.target_ref = target_ref | |
594 | pull_request.revisions = revisions |
|
594 | pull_request.revisions = revisions | |
595 | pull_request.title = title |
|
595 | pull_request.title = title | |
596 | pull_request.description = description |
|
596 | pull_request.description = description | |
597 | pull_request.description_renderer = description_renderer |
|
597 | pull_request.description_renderer = description_renderer | |
598 | pull_request.author = created_by_user |
|
598 | pull_request.author = created_by_user | |
599 | pull_request.reviewer_data = reviewer_data |
|
599 | pull_request.reviewer_data = reviewer_data | |
600 | pull_request.pull_request_state = pull_request.STATE_CREATING |
|
600 | pull_request.pull_request_state = pull_request.STATE_CREATING | |
601 | pull_request.common_ancestor_id = common_ancestor_id |
|
601 | pull_request.common_ancestor_id = common_ancestor_id | |
602 |
|
602 | |||
603 | Session().add(pull_request) |
|
603 | Session().add(pull_request) | |
604 | Session().flush() |
|
604 | Session().flush() | |
605 |
|
605 | |||
606 | reviewer_ids = set() |
|
606 | reviewer_ids = set() | |
607 | # members / reviewers |
|
607 | # members / reviewers | |
608 | for reviewer_object in reviewers: |
|
608 | for reviewer_object in reviewers: | |
609 | user_id, reasons, mandatory, rules = reviewer_object |
|
609 | user_id, reasons, mandatory, rules = reviewer_object | |
610 | user = self._get_user(user_id) |
|
610 | user = self._get_user(user_id) | |
611 |
|
611 | |||
612 | # skip duplicates |
|
612 | # skip duplicates | |
613 | if user.user_id in reviewer_ids: |
|
613 | if user.user_id in reviewer_ids: | |
614 | continue |
|
614 | continue | |
615 |
|
615 | |||
616 | reviewer_ids.add(user.user_id) |
|
616 | reviewer_ids.add(user.user_id) | |
617 |
|
617 | |||
618 | reviewer = PullRequestReviewers() |
|
618 | reviewer = PullRequestReviewers() | |
619 | reviewer.user = user |
|
619 | reviewer.user = user | |
620 | reviewer.pull_request = pull_request |
|
620 | reviewer.pull_request = pull_request | |
621 | reviewer.reasons = reasons |
|
621 | reviewer.reasons = reasons | |
622 | reviewer.mandatory = mandatory |
|
622 | reviewer.mandatory = mandatory | |
623 |
|
623 | |||
624 | # NOTE(marcink): pick only first rule for now |
|
624 | # NOTE(marcink): pick only first rule for now | |
625 | rule_id = list(rules)[0] if rules else None |
|
625 | rule_id = list(rules)[0] if rules else None | |
626 | rule = RepoReviewRule.get(rule_id) if rule_id else None |
|
626 | rule = RepoReviewRule.get(rule_id) if rule_id else None | |
627 | if rule: |
|
627 | if rule: | |
628 | review_group = rule.user_group_vote_rule(user_id) |
|
628 | review_group = rule.user_group_vote_rule(user_id) | |
629 | # we check if this particular reviewer is member of a voting group |
|
629 | # we check if this particular reviewer is member of a voting group | |
630 | if review_group: |
|
630 | if review_group: | |
631 | # NOTE(marcink): |
|
631 | # NOTE(marcink): | |
632 | # can be that user is member of more but we pick the first same, |
|
632 | # can be that user is member of more but we pick the first same, | |
633 | # same as default reviewers algo |
|
633 | # same as default reviewers algo | |
634 | review_group = review_group[0] |
|
634 | review_group = review_group[0] | |
635 |
|
635 | |||
636 | rule_data = { |
|
636 | rule_data = { | |
637 | 'rule_name': |
|
637 | 'rule_name': | |
638 | rule.review_rule_name, |
|
638 | rule.review_rule_name, | |
639 | 'rule_user_group_entry_id': |
|
639 | 'rule_user_group_entry_id': | |
640 | review_group.repo_review_rule_users_group_id, |
|
640 | review_group.repo_review_rule_users_group_id, | |
641 | 'rule_user_group_name': |
|
641 | 'rule_user_group_name': | |
642 | review_group.users_group.users_group_name, |
|
642 | review_group.users_group.users_group_name, | |
643 | 'rule_user_group_members': |
|
643 | 'rule_user_group_members': | |
644 | [x.user.username for x in review_group.users_group.members], |
|
644 | [x.user.username for x in review_group.users_group.members], | |
645 | 'rule_user_group_members_id': |
|
645 | 'rule_user_group_members_id': | |
646 | [x.user.user_id for x in review_group.users_group.members], |
|
646 | [x.user.user_id for x in review_group.users_group.members], | |
647 | } |
|
647 | } | |
648 | # e.g {'vote_rule': -1, 'mandatory': True} |
|
648 | # e.g {'vote_rule': -1, 'mandatory': True} | |
649 | rule_data.update(review_group.rule_data()) |
|
649 | rule_data.update(review_group.rule_data()) | |
650 |
|
650 | |||
651 | reviewer.rule_data = rule_data |
|
651 | reviewer.rule_data = rule_data | |
652 |
|
652 | |||
653 | Session().add(reviewer) |
|
653 | Session().add(reviewer) | |
654 | Session().flush() |
|
654 | Session().flush() | |
655 |
|
655 | |||
656 | # Set approval status to "Under Review" for all commits which are |
|
656 | # Set approval status to "Under Review" for all commits which are | |
657 | # part of this pull request. |
|
657 | # part of this pull request. | |
658 | ChangesetStatusModel().set_status( |
|
658 | ChangesetStatusModel().set_status( | |
659 | repo=target_repo, |
|
659 | repo=target_repo, | |
660 | status=ChangesetStatus.STATUS_UNDER_REVIEW, |
|
660 | status=ChangesetStatus.STATUS_UNDER_REVIEW, | |
661 | user=created_by_user, |
|
661 | user=created_by_user, | |
662 | pull_request=pull_request |
|
662 | pull_request=pull_request | |
663 | ) |
|
663 | ) | |
664 | # we commit early at this point. This has to do with a fact |
|
664 | # we commit early at this point. This has to do with a fact | |
665 | # that before queries do some row-locking. And because of that |
|
665 | # that before queries do some row-locking. And because of that | |
666 | # we need to commit and finish transaction before below validate call |
|
666 | # we need to commit and finish transaction before below validate call | |
667 | # that for large repos could be long resulting in long row locks |
|
667 | # that for large repos could be long resulting in long row locks | |
668 | Session().commit() |
|
668 | Session().commit() | |
669 |
|
669 | |||
670 | # prepare workspace, and run initial merge simulation. Set state during that |
|
670 | # prepare workspace, and run initial merge simulation. Set state during that | |
671 | # operation |
|
671 | # operation | |
672 | pull_request = PullRequest.get(pull_request.pull_request_id) |
|
672 | pull_request = PullRequest.get(pull_request.pull_request_id) | |
673 |
|
673 | |||
674 | # set as merging, for merge simulation, and if finished to created so we mark |
|
674 | # set as merging, for merge simulation, and if finished to created so we mark | |
675 | # simulation is working fine |
|
675 | # simulation is working fine | |
676 | with pull_request.set_state(PullRequest.STATE_MERGING, |
|
676 | with pull_request.set_state(PullRequest.STATE_MERGING, | |
677 | final_state=PullRequest.STATE_CREATED) as state_obj: |
|
677 | final_state=PullRequest.STATE_CREATED) as state_obj: | |
678 | MergeCheck.validate( |
|
678 | MergeCheck.validate( | |
679 | pull_request, auth_user=auth_user, translator=translator) |
|
679 | pull_request, auth_user=auth_user, translator=translator) | |
680 |
|
680 | |||
681 | self.notify_reviewers(pull_request, reviewer_ids) |
|
681 | self.notify_reviewers(pull_request, reviewer_ids) | |
682 | self.trigger_pull_request_hook(pull_request, created_by_user, 'create') |
|
682 | self.trigger_pull_request_hook(pull_request, created_by_user, 'create') | |
683 |
|
683 | |||
684 | creation_data = pull_request.get_api_data(with_merge_state=False) |
|
684 | creation_data = pull_request.get_api_data(with_merge_state=False) | |
685 | self._log_audit_action( |
|
685 | self._log_audit_action( | |
686 | 'repo.pull_request.create', {'data': creation_data}, |
|
686 | 'repo.pull_request.create', {'data': creation_data}, | |
687 | auth_user, pull_request) |
|
687 | auth_user, pull_request) | |
688 |
|
688 | |||
689 | return pull_request |
|
689 | return pull_request | |
690 |
|
690 | |||
691 | def trigger_pull_request_hook(self, pull_request, user, action, data=None): |
|
691 | def trigger_pull_request_hook(self, pull_request, user, action, data=None): | |
692 | pull_request = self.__get_pull_request(pull_request) |
|
692 | pull_request = self.__get_pull_request(pull_request) | |
693 | target_scm = pull_request.target_repo.scm_instance() |
|
693 | target_scm = pull_request.target_repo.scm_instance() | |
694 | if action == 'create': |
|
694 | if action == 'create': | |
695 | trigger_hook = hooks_utils.trigger_create_pull_request_hook |
|
695 | trigger_hook = hooks_utils.trigger_create_pull_request_hook | |
696 | elif action == 'merge': |
|
696 | elif action == 'merge': | |
697 | trigger_hook = hooks_utils.trigger_merge_pull_request_hook |
|
697 | trigger_hook = hooks_utils.trigger_merge_pull_request_hook | |
698 | elif action == 'close': |
|
698 | elif action == 'close': | |
699 | trigger_hook = hooks_utils.trigger_close_pull_request_hook |
|
699 | trigger_hook = hooks_utils.trigger_close_pull_request_hook | |
700 | elif action == 'review_status_change': |
|
700 | elif action == 'review_status_change': | |
701 | trigger_hook = hooks_utils.trigger_review_pull_request_hook |
|
701 | trigger_hook = hooks_utils.trigger_review_pull_request_hook | |
702 | elif action == 'update': |
|
702 | elif action == 'update': | |
703 | trigger_hook = hooks_utils.trigger_update_pull_request_hook |
|
703 | trigger_hook = hooks_utils.trigger_update_pull_request_hook | |
704 | elif action == 'comment': |
|
704 | elif action == 'comment': | |
705 | trigger_hook = hooks_utils.trigger_comment_pull_request_hook |
|
705 | trigger_hook = hooks_utils.trigger_comment_pull_request_hook | |
|
706 | elif action == 'comment_edit': | |||
|
707 | trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook | |||
706 | else: |
|
708 | else: | |
707 | return |
|
709 | return | |
708 |
|
710 | |||
709 | log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s', |
|
711 | log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s', | |
710 | pull_request, action, trigger_hook) |
|
712 | pull_request, action, trigger_hook) | |
711 | trigger_hook( |
|
713 | trigger_hook( | |
712 | username=user.username, |
|
714 | username=user.username, | |
713 | repo_name=pull_request.target_repo.repo_name, |
|
715 | repo_name=pull_request.target_repo.repo_name, | |
714 | repo_type=target_scm.alias, |
|
716 | repo_type=target_scm.alias, | |
715 | pull_request=pull_request, |
|
717 | pull_request=pull_request, | |
716 | data=data) |
|
718 | data=data) | |
717 |
|
719 | |||
718 | def _get_commit_ids(self, pull_request): |
|
720 | def _get_commit_ids(self, pull_request): | |
719 | """ |
|
721 | """ | |
720 | Return the commit ids of the merged pull request. |
|
722 | Return the commit ids of the merged pull request. | |
721 |
|
723 | |||
722 | This method is not dealing correctly yet with the lack of autoupdates |
|
724 | This method is not dealing correctly yet with the lack of autoupdates | |
723 | nor with the implicit target updates. |
|
725 | nor with the implicit target updates. | |
724 | For example: if a commit in the source repo is already in the target it |
|
726 | For example: if a commit in the source repo is already in the target it | |
725 | will be reported anyways. |
|
727 | will be reported anyways. | |
726 | """ |
|
728 | """ | |
727 | merge_rev = pull_request.merge_rev |
|
729 | merge_rev = pull_request.merge_rev | |
728 | if merge_rev is None: |
|
730 | if merge_rev is None: | |
729 | raise ValueError('This pull request was not merged yet') |
|
731 | raise ValueError('This pull request was not merged yet') | |
730 |
|
732 | |||
731 | commit_ids = list(pull_request.revisions) |
|
733 | commit_ids = list(pull_request.revisions) | |
732 | if merge_rev not in commit_ids: |
|
734 | if merge_rev not in commit_ids: | |
733 | commit_ids.append(merge_rev) |
|
735 | commit_ids.append(merge_rev) | |
734 |
|
736 | |||
735 | return commit_ids |
|
737 | return commit_ids | |
736 |
|
738 | |||
737 | def merge_repo(self, pull_request, user, extras): |
|
739 | def merge_repo(self, pull_request, user, extras): | |
738 | log.debug("Merging pull request %s", pull_request.pull_request_id) |
|
740 | log.debug("Merging pull request %s", pull_request.pull_request_id) | |
739 | extras['user_agent'] = 'internal-merge' |
|
741 | extras['user_agent'] = 'internal-merge' | |
740 | merge_state = self._merge_pull_request(pull_request, user, extras) |
|
742 | merge_state = self._merge_pull_request(pull_request, user, extras) | |
741 | if merge_state.executed: |
|
743 | if merge_state.executed: | |
742 | log.debug("Merge was successful, updating the pull request comments.") |
|
744 | log.debug("Merge was successful, updating the pull request comments.") | |
743 | self._comment_and_close_pr(pull_request, user, merge_state) |
|
745 | self._comment_and_close_pr(pull_request, user, merge_state) | |
744 |
|
746 | |||
745 | self._log_audit_action( |
|
747 | self._log_audit_action( | |
746 | 'repo.pull_request.merge', |
|
748 | 'repo.pull_request.merge', | |
747 | {'merge_state': merge_state.__dict__}, |
|
749 | {'merge_state': merge_state.__dict__}, | |
748 | user, pull_request) |
|
750 | user, pull_request) | |
749 |
|
751 | |||
750 | else: |
|
752 | else: | |
751 | log.warn("Merge failed, not updating the pull request.") |
|
753 | log.warn("Merge failed, not updating the pull request.") | |
752 | return merge_state |
|
754 | return merge_state | |
753 |
|
755 | |||
754 | def _merge_pull_request(self, pull_request, user, extras, merge_msg=None): |
|
756 | def _merge_pull_request(self, pull_request, user, extras, merge_msg=None): | |
755 | target_vcs = pull_request.target_repo.scm_instance() |
|
757 | target_vcs = pull_request.target_repo.scm_instance() | |
756 | source_vcs = pull_request.source_repo.scm_instance() |
|
758 | source_vcs = pull_request.source_repo.scm_instance() | |
757 |
|
759 | |||
758 | message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format( |
|
760 | message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format( | |
759 | pr_id=pull_request.pull_request_id, |
|
761 | pr_id=pull_request.pull_request_id, | |
760 | pr_title=pull_request.title, |
|
762 | pr_title=pull_request.title, | |
761 | source_repo=source_vcs.name, |
|
763 | source_repo=source_vcs.name, | |
762 | source_ref_name=pull_request.source_ref_parts.name, |
|
764 | source_ref_name=pull_request.source_ref_parts.name, | |
763 | target_repo=target_vcs.name, |
|
765 | target_repo=target_vcs.name, | |
764 | target_ref_name=pull_request.target_ref_parts.name, |
|
766 | target_ref_name=pull_request.target_ref_parts.name, | |
765 | ) |
|
767 | ) | |
766 |
|
768 | |||
767 | workspace_id = self._workspace_id(pull_request) |
|
769 | workspace_id = self._workspace_id(pull_request) | |
768 | repo_id = pull_request.target_repo.repo_id |
|
770 | repo_id = pull_request.target_repo.repo_id | |
769 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
771 | use_rebase = self._use_rebase_for_merging(pull_request) | |
770 | close_branch = self._close_branch_before_merging(pull_request) |
|
772 | close_branch = self._close_branch_before_merging(pull_request) | |
771 | user_name = self._user_name_for_merging(pull_request, user) |
|
773 | user_name = self._user_name_for_merging(pull_request, user) | |
772 |
|
774 | |||
773 | target_ref = self._refresh_reference( |
|
775 | target_ref = self._refresh_reference( | |
774 | pull_request.target_ref_parts, target_vcs) |
|
776 | pull_request.target_ref_parts, target_vcs) | |
775 |
|
777 | |||
776 | callback_daemon, extras = prepare_callback_daemon( |
|
778 | callback_daemon, extras = prepare_callback_daemon( | |
777 | extras, protocol=vcs_settings.HOOKS_PROTOCOL, |
|
779 | extras, protocol=vcs_settings.HOOKS_PROTOCOL, | |
778 | host=vcs_settings.HOOKS_HOST, |
|
780 | host=vcs_settings.HOOKS_HOST, | |
779 | use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS) |
|
781 | use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS) | |
780 |
|
782 | |||
781 | with callback_daemon: |
|
783 | with callback_daemon: | |
782 | # TODO: johbo: Implement a clean way to run a config_override |
|
784 | # TODO: johbo: Implement a clean way to run a config_override | |
783 | # for a single call. |
|
785 | # for a single call. | |
784 | target_vcs.config.set( |
|
786 | target_vcs.config.set( | |
785 | 'rhodecode', 'RC_SCM_DATA', json.dumps(extras)) |
|
787 | 'rhodecode', 'RC_SCM_DATA', json.dumps(extras)) | |
786 |
|
788 | |||
787 | merge_state = target_vcs.merge( |
|
789 | merge_state = target_vcs.merge( | |
788 | repo_id, workspace_id, target_ref, source_vcs, |
|
790 | repo_id, workspace_id, target_ref, source_vcs, | |
789 | pull_request.source_ref_parts, |
|
791 | pull_request.source_ref_parts, | |
790 | user_name=user_name, user_email=user.email, |
|
792 | user_name=user_name, user_email=user.email, | |
791 | message=message, use_rebase=use_rebase, |
|
793 | message=message, use_rebase=use_rebase, | |
792 | close_branch=close_branch) |
|
794 | close_branch=close_branch) | |
793 | return merge_state |
|
795 | return merge_state | |
794 |
|
796 | |||
795 | def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None): |
|
797 | def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None): | |
796 | pull_request.merge_rev = merge_state.merge_ref.commit_id |
|
798 | pull_request.merge_rev = merge_state.merge_ref.commit_id | |
797 | pull_request.updated_on = datetime.datetime.now() |
|
799 | pull_request.updated_on = datetime.datetime.now() | |
798 | close_msg = close_msg or 'Pull request merged and closed' |
|
800 | close_msg = close_msg or 'Pull request merged and closed' | |
799 |
|
801 | |||
800 | CommentsModel().create( |
|
802 | CommentsModel().create( | |
801 | text=safe_unicode(close_msg), |
|
803 | text=safe_unicode(close_msg), | |
802 | repo=pull_request.target_repo.repo_id, |
|
804 | repo=pull_request.target_repo.repo_id, | |
803 | user=user.user_id, |
|
805 | user=user.user_id, | |
804 | pull_request=pull_request.pull_request_id, |
|
806 | pull_request=pull_request.pull_request_id, | |
805 | f_path=None, |
|
807 | f_path=None, | |
806 | line_no=None, |
|
808 | line_no=None, | |
807 | closing_pr=True |
|
809 | closing_pr=True | |
808 | ) |
|
810 | ) | |
809 |
|
811 | |||
810 | Session().add(pull_request) |
|
812 | Session().add(pull_request) | |
811 | Session().flush() |
|
813 | Session().flush() | |
812 | # TODO: paris: replace invalidation with less radical solution |
|
814 | # TODO: paris: replace invalidation with less radical solution | |
813 | ScmModel().mark_for_invalidation( |
|
815 | ScmModel().mark_for_invalidation( | |
814 | pull_request.target_repo.repo_name) |
|
816 | pull_request.target_repo.repo_name) | |
815 | self.trigger_pull_request_hook(pull_request, user, 'merge') |
|
817 | self.trigger_pull_request_hook(pull_request, user, 'merge') | |
816 |
|
818 | |||
817 | def has_valid_update_type(self, pull_request): |
|
819 | def has_valid_update_type(self, pull_request): | |
818 | source_ref_type = pull_request.source_ref_parts.type |
|
820 | source_ref_type = pull_request.source_ref_parts.type | |
819 | return source_ref_type in self.REF_TYPES |
|
821 | return source_ref_type in self.REF_TYPES | |
820 |
|
822 | |||
821 | def get_flow_commits(self, pull_request): |
|
823 | def get_flow_commits(self, pull_request): | |
822 |
|
824 | |||
823 | # source repo |
|
825 | # source repo | |
824 | source_ref_name = pull_request.source_ref_parts.name |
|
826 | source_ref_name = pull_request.source_ref_parts.name | |
825 | source_ref_type = pull_request.source_ref_parts.type |
|
827 | source_ref_type = pull_request.source_ref_parts.type | |
826 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
828 | source_ref_id = pull_request.source_ref_parts.commit_id | |
827 | source_repo = pull_request.source_repo.scm_instance() |
|
829 | source_repo = pull_request.source_repo.scm_instance() | |
828 |
|
830 | |||
829 | try: |
|
831 | try: | |
830 | if source_ref_type in self.REF_TYPES: |
|
832 | if source_ref_type in self.REF_TYPES: | |
831 | source_commit = source_repo.get_commit(source_ref_name) |
|
833 | source_commit = source_repo.get_commit(source_ref_name) | |
832 | else: |
|
834 | else: | |
833 | source_commit = source_repo.get_commit(source_ref_id) |
|
835 | source_commit = source_repo.get_commit(source_ref_id) | |
834 | except CommitDoesNotExistError: |
|
836 | except CommitDoesNotExistError: | |
835 | raise SourceRefMissing() |
|
837 | raise SourceRefMissing() | |
836 |
|
838 | |||
837 | # target repo |
|
839 | # target repo | |
838 | target_ref_name = pull_request.target_ref_parts.name |
|
840 | target_ref_name = pull_request.target_ref_parts.name | |
839 | target_ref_type = pull_request.target_ref_parts.type |
|
841 | target_ref_type = pull_request.target_ref_parts.type | |
840 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
842 | target_ref_id = pull_request.target_ref_parts.commit_id | |
841 | target_repo = pull_request.target_repo.scm_instance() |
|
843 | target_repo = pull_request.target_repo.scm_instance() | |
842 |
|
844 | |||
843 | try: |
|
845 | try: | |
844 | if target_ref_type in self.REF_TYPES: |
|
846 | if target_ref_type in self.REF_TYPES: | |
845 | target_commit = target_repo.get_commit(target_ref_name) |
|
847 | target_commit = target_repo.get_commit(target_ref_name) | |
846 | else: |
|
848 | else: | |
847 | target_commit = target_repo.get_commit(target_ref_id) |
|
849 | target_commit = target_repo.get_commit(target_ref_id) | |
848 | except CommitDoesNotExistError: |
|
850 | except CommitDoesNotExistError: | |
849 | raise TargetRefMissing() |
|
851 | raise TargetRefMissing() | |
850 |
|
852 | |||
851 | return source_commit, target_commit |
|
853 | return source_commit, target_commit | |
852 |
|
854 | |||
853 | def update_commits(self, pull_request, updating_user): |
|
855 | def update_commits(self, pull_request, updating_user): | |
854 | """ |
|
856 | """ | |
855 | Get the updated list of commits for the pull request |
|
857 | Get the updated list of commits for the pull request | |
856 | and return the new pull request version and the list |
|
858 | and return the new pull request version and the list | |
857 | of commits processed by this update action |
|
859 | of commits processed by this update action | |
858 |
|
860 | |||
859 | updating_user is the user_object who triggered the update |
|
861 | updating_user is the user_object who triggered the update | |
860 | """ |
|
862 | """ | |
861 | pull_request = self.__get_pull_request(pull_request) |
|
863 | pull_request = self.__get_pull_request(pull_request) | |
862 | source_ref_type = pull_request.source_ref_parts.type |
|
864 | source_ref_type = pull_request.source_ref_parts.type | |
863 | source_ref_name = pull_request.source_ref_parts.name |
|
865 | source_ref_name = pull_request.source_ref_parts.name | |
864 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
866 | source_ref_id = pull_request.source_ref_parts.commit_id | |
865 |
|
867 | |||
866 | target_ref_type = pull_request.target_ref_parts.type |
|
868 | target_ref_type = pull_request.target_ref_parts.type | |
867 | target_ref_name = pull_request.target_ref_parts.name |
|
869 | target_ref_name = pull_request.target_ref_parts.name | |
868 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
870 | target_ref_id = pull_request.target_ref_parts.commit_id | |
869 |
|
871 | |||
870 | if not self.has_valid_update_type(pull_request): |
|
872 | if not self.has_valid_update_type(pull_request): | |
871 | log.debug("Skipping update of pull request %s due to ref type: %s", |
|
873 | log.debug("Skipping update of pull request %s due to ref type: %s", | |
872 | pull_request, source_ref_type) |
|
874 | pull_request, source_ref_type) | |
873 | return UpdateResponse( |
|
875 | return UpdateResponse( | |
874 | executed=False, |
|
876 | executed=False, | |
875 | reason=UpdateFailureReason.WRONG_REF_TYPE, |
|
877 | reason=UpdateFailureReason.WRONG_REF_TYPE, | |
876 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, |
|
878 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, | |
877 | source_changed=False, target_changed=False) |
|
879 | source_changed=False, target_changed=False) | |
878 |
|
880 | |||
879 | try: |
|
881 | try: | |
880 | source_commit, target_commit = self.get_flow_commits(pull_request) |
|
882 | source_commit, target_commit = self.get_flow_commits(pull_request) | |
881 | except SourceRefMissing: |
|
883 | except SourceRefMissing: | |
882 | return UpdateResponse( |
|
884 | return UpdateResponse( | |
883 | executed=False, |
|
885 | executed=False, | |
884 | reason=UpdateFailureReason.MISSING_SOURCE_REF, |
|
886 | reason=UpdateFailureReason.MISSING_SOURCE_REF, | |
885 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, |
|
887 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, | |
886 | source_changed=False, target_changed=False) |
|
888 | source_changed=False, target_changed=False) | |
887 | except TargetRefMissing: |
|
889 | except TargetRefMissing: | |
888 | return UpdateResponse( |
|
890 | return UpdateResponse( | |
889 | executed=False, |
|
891 | executed=False, | |
890 | reason=UpdateFailureReason.MISSING_TARGET_REF, |
|
892 | reason=UpdateFailureReason.MISSING_TARGET_REF, | |
891 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, |
|
893 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, | |
892 | source_changed=False, target_changed=False) |
|
894 | source_changed=False, target_changed=False) | |
893 |
|
895 | |||
894 | source_changed = source_ref_id != source_commit.raw_id |
|
896 | source_changed = source_ref_id != source_commit.raw_id | |
895 | target_changed = target_ref_id != target_commit.raw_id |
|
897 | target_changed = target_ref_id != target_commit.raw_id | |
896 |
|
898 | |||
897 | if not (source_changed or target_changed): |
|
899 | if not (source_changed or target_changed): | |
898 | log.debug("Nothing changed in pull request %s", pull_request) |
|
900 | log.debug("Nothing changed in pull request %s", pull_request) | |
899 | return UpdateResponse( |
|
901 | return UpdateResponse( | |
900 | executed=False, |
|
902 | executed=False, | |
901 | reason=UpdateFailureReason.NO_CHANGE, |
|
903 | reason=UpdateFailureReason.NO_CHANGE, | |
902 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, |
|
904 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, | |
903 | source_changed=target_changed, target_changed=source_changed) |
|
905 | source_changed=target_changed, target_changed=source_changed) | |
904 |
|
906 | |||
905 | change_in_found = 'target repo' if target_changed else 'source repo' |
|
907 | change_in_found = 'target repo' if target_changed else 'source repo' | |
906 | log.debug('Updating pull request because of change in %s detected', |
|
908 | log.debug('Updating pull request because of change in %s detected', | |
907 | change_in_found) |
|
909 | change_in_found) | |
908 |
|
910 | |||
909 | # Finally there is a need for an update, in case of source change |
|
911 | # Finally there is a need for an update, in case of source change | |
910 | # we create a new version, else just an update |
|
912 | # we create a new version, else just an update | |
911 | if source_changed: |
|
913 | if source_changed: | |
912 | pull_request_version = self._create_version_from_snapshot(pull_request) |
|
914 | pull_request_version = self._create_version_from_snapshot(pull_request) | |
913 | self._link_comments_to_version(pull_request_version) |
|
915 | self._link_comments_to_version(pull_request_version) | |
914 | else: |
|
916 | else: | |
915 | try: |
|
917 | try: | |
916 | ver = pull_request.versions[-1] |
|
918 | ver = pull_request.versions[-1] | |
917 | except IndexError: |
|
919 | except IndexError: | |
918 | ver = None |
|
920 | ver = None | |
919 |
|
921 | |||
920 | pull_request.pull_request_version_id = \ |
|
922 | pull_request.pull_request_version_id = \ | |
921 | ver.pull_request_version_id if ver else None |
|
923 | ver.pull_request_version_id if ver else None | |
922 | pull_request_version = pull_request |
|
924 | pull_request_version = pull_request | |
923 |
|
925 | |||
924 | source_repo = pull_request.source_repo.scm_instance() |
|
926 | source_repo = pull_request.source_repo.scm_instance() | |
925 | target_repo = pull_request.target_repo.scm_instance() |
|
927 | target_repo = pull_request.target_repo.scm_instance() | |
926 |
|
928 | |||
927 | # re-compute commit ids |
|
929 | # re-compute commit ids | |
928 | old_commit_ids = pull_request.revisions |
|
930 | old_commit_ids = pull_request.revisions | |
929 | pre_load = ["author", "date", "message", "branch"] |
|
931 | pre_load = ["author", "date", "message", "branch"] | |
930 | commit_ranges = target_repo.compare( |
|
932 | commit_ranges = target_repo.compare( | |
931 | target_commit.raw_id, source_commit.raw_id, source_repo, merge=True, |
|
933 | target_commit.raw_id, source_commit.raw_id, source_repo, merge=True, | |
932 | pre_load=pre_load) |
|
934 | pre_load=pre_load) | |
933 |
|
935 | |||
934 | target_ref = target_commit.raw_id |
|
936 | target_ref = target_commit.raw_id | |
935 | source_ref = source_commit.raw_id |
|
937 | source_ref = source_commit.raw_id | |
936 | ancestor_commit_id = target_repo.get_common_ancestor( |
|
938 | ancestor_commit_id = target_repo.get_common_ancestor( | |
937 | target_ref, source_ref, source_repo) |
|
939 | target_ref, source_ref, source_repo) | |
938 |
|
940 | |||
939 | if not ancestor_commit_id: |
|
941 | if not ancestor_commit_id: | |
940 | raise ValueError( |
|
942 | raise ValueError( | |
941 | 'cannot calculate diff info without a common ancestor. ' |
|
943 | 'cannot calculate diff info without a common ancestor. ' | |
942 | 'Make sure both repositories are related, and have a common forking commit.') |
|
944 | 'Make sure both repositories are related, and have a common forking commit.') | |
943 |
|
945 | |||
944 | pull_request.common_ancestor_id = ancestor_commit_id |
|
946 | pull_request.common_ancestor_id = ancestor_commit_id | |
945 |
|
947 | |||
946 | pull_request.source_ref = '%s:%s:%s' % ( |
|
948 | pull_request.source_ref = '%s:%s:%s' % ( | |
947 | source_ref_type, source_ref_name, source_commit.raw_id) |
|
949 | source_ref_type, source_ref_name, source_commit.raw_id) | |
948 | pull_request.target_ref = '%s:%s:%s' % ( |
|
950 | pull_request.target_ref = '%s:%s:%s' % ( | |
949 | target_ref_type, target_ref_name, ancestor_commit_id) |
|
951 | target_ref_type, target_ref_name, ancestor_commit_id) | |
950 |
|
952 | |||
951 | pull_request.revisions = [ |
|
953 | pull_request.revisions = [ | |
952 | commit.raw_id for commit in reversed(commit_ranges)] |
|
954 | commit.raw_id for commit in reversed(commit_ranges)] | |
953 | pull_request.updated_on = datetime.datetime.now() |
|
955 | pull_request.updated_on = datetime.datetime.now() | |
954 | Session().add(pull_request) |
|
956 | Session().add(pull_request) | |
955 | new_commit_ids = pull_request.revisions |
|
957 | new_commit_ids = pull_request.revisions | |
956 |
|
958 | |||
957 | old_diff_data, new_diff_data = self._generate_update_diffs( |
|
959 | old_diff_data, new_diff_data = self._generate_update_diffs( | |
958 | pull_request, pull_request_version) |
|
960 | pull_request, pull_request_version) | |
959 |
|
961 | |||
960 | # calculate commit and file changes |
|
962 | # calculate commit and file changes | |
961 | commit_changes = self._calculate_commit_id_changes( |
|
963 | commit_changes = self._calculate_commit_id_changes( | |
962 | old_commit_ids, new_commit_ids) |
|
964 | old_commit_ids, new_commit_ids) | |
963 | file_changes = self._calculate_file_changes( |
|
965 | file_changes = self._calculate_file_changes( | |
964 | old_diff_data, new_diff_data) |
|
966 | old_diff_data, new_diff_data) | |
965 |
|
967 | |||
966 | # set comments as outdated if DIFFS changed |
|
968 | # set comments as outdated if DIFFS changed | |
967 | CommentsModel().outdate_comments( |
|
969 | CommentsModel().outdate_comments( | |
968 | pull_request, old_diff_data=old_diff_data, |
|
970 | pull_request, old_diff_data=old_diff_data, | |
969 | new_diff_data=new_diff_data) |
|
971 | new_diff_data=new_diff_data) | |
970 |
|
972 | |||
971 | valid_commit_changes = (commit_changes.added or commit_changes.removed) |
|
973 | valid_commit_changes = (commit_changes.added or commit_changes.removed) | |
972 | file_node_changes = ( |
|
974 | file_node_changes = ( | |
973 | file_changes.added or file_changes.modified or file_changes.removed) |
|
975 | file_changes.added or file_changes.modified or file_changes.removed) | |
974 | pr_has_changes = valid_commit_changes or file_node_changes |
|
976 | pr_has_changes = valid_commit_changes or file_node_changes | |
975 |
|
977 | |||
976 | # Add an automatic comment to the pull request, in case |
|
978 | # Add an automatic comment to the pull request, in case | |
977 | # anything has changed |
|
979 | # anything has changed | |
978 | if pr_has_changes: |
|
980 | if pr_has_changes: | |
979 | update_comment = CommentsModel().create( |
|
981 | update_comment = CommentsModel().create( | |
980 | text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes), |
|
982 | text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes), | |
981 | repo=pull_request.target_repo, |
|
983 | repo=pull_request.target_repo, | |
982 | user=pull_request.author, |
|
984 | user=pull_request.author, | |
983 | pull_request=pull_request, |
|
985 | pull_request=pull_request, | |
984 | send_email=False, renderer=DEFAULT_COMMENTS_RENDERER) |
|
986 | send_email=False, renderer=DEFAULT_COMMENTS_RENDERER) | |
985 |
|
987 | |||
986 | # Update status to "Under Review" for added commits |
|
988 | # Update status to "Under Review" for added commits | |
987 | for commit_id in commit_changes.added: |
|
989 | for commit_id in commit_changes.added: | |
988 | ChangesetStatusModel().set_status( |
|
990 | ChangesetStatusModel().set_status( | |
989 | repo=pull_request.source_repo, |
|
991 | repo=pull_request.source_repo, | |
990 | status=ChangesetStatus.STATUS_UNDER_REVIEW, |
|
992 | status=ChangesetStatus.STATUS_UNDER_REVIEW, | |
991 | comment=update_comment, |
|
993 | comment=update_comment, | |
992 | user=pull_request.author, |
|
994 | user=pull_request.author, | |
993 | pull_request=pull_request, |
|
995 | pull_request=pull_request, | |
994 | revision=commit_id) |
|
996 | revision=commit_id) | |
995 |
|
997 | |||
996 | # send update email to users |
|
998 | # send update email to users | |
997 | try: |
|
999 | try: | |
998 | self.notify_users(pull_request=pull_request, updating_user=updating_user, |
|
1000 | self.notify_users(pull_request=pull_request, updating_user=updating_user, | |
999 | ancestor_commit_id=ancestor_commit_id, |
|
1001 | ancestor_commit_id=ancestor_commit_id, | |
1000 | commit_changes=commit_changes, |
|
1002 | commit_changes=commit_changes, | |
1001 | file_changes=file_changes) |
|
1003 | file_changes=file_changes) | |
1002 | except Exception: |
|
1004 | except Exception: | |
1003 | log.exception('Failed to send email notification to users') |
|
1005 | log.exception('Failed to send email notification to users') | |
1004 |
|
1006 | |||
1005 | log.debug( |
|
1007 | log.debug( | |
1006 | 'Updated pull request %s, added_ids: %s, common_ids: %s, ' |
|
1008 | 'Updated pull request %s, added_ids: %s, common_ids: %s, ' | |
1007 | 'removed_ids: %s', pull_request.pull_request_id, |
|
1009 | 'removed_ids: %s', pull_request.pull_request_id, | |
1008 | commit_changes.added, commit_changes.common, commit_changes.removed) |
|
1010 | commit_changes.added, commit_changes.common, commit_changes.removed) | |
1009 | log.debug( |
|
1011 | log.debug( | |
1010 | 'Updated pull request with the following file changes: %s', |
|
1012 | 'Updated pull request with the following file changes: %s', | |
1011 | file_changes) |
|
1013 | file_changes) | |
1012 |
|
1014 | |||
1013 | log.info( |
|
1015 | log.info( | |
1014 | "Updated pull request %s from commit %s to commit %s, " |
|
1016 | "Updated pull request %s from commit %s to commit %s, " | |
1015 | "stored new version %s of this pull request.", |
|
1017 | "stored new version %s of this pull request.", | |
1016 | pull_request.pull_request_id, source_ref_id, |
|
1018 | pull_request.pull_request_id, source_ref_id, | |
1017 | pull_request.source_ref_parts.commit_id, |
|
1019 | pull_request.source_ref_parts.commit_id, | |
1018 | pull_request_version.pull_request_version_id) |
|
1020 | pull_request_version.pull_request_version_id) | |
1019 | Session().commit() |
|
1021 | Session().commit() | |
1020 | self.trigger_pull_request_hook(pull_request, pull_request.author, 'update') |
|
1022 | self.trigger_pull_request_hook(pull_request, pull_request.author, 'update') | |
1021 |
|
1023 | |||
1022 | return UpdateResponse( |
|
1024 | return UpdateResponse( | |
1023 | executed=True, reason=UpdateFailureReason.NONE, |
|
1025 | executed=True, reason=UpdateFailureReason.NONE, | |
1024 | old=pull_request, new=pull_request_version, |
|
1026 | old=pull_request, new=pull_request_version, | |
1025 | common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes, |
|
1027 | common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes, | |
1026 | source_changed=source_changed, target_changed=target_changed) |
|
1028 | source_changed=source_changed, target_changed=target_changed) | |
1027 |
|
1029 | |||
1028 | def _create_version_from_snapshot(self, pull_request): |
|
1030 | def _create_version_from_snapshot(self, pull_request): | |
1029 | version = PullRequestVersion() |
|
1031 | version = PullRequestVersion() | |
1030 | version.title = pull_request.title |
|
1032 | version.title = pull_request.title | |
1031 | version.description = pull_request.description |
|
1033 | version.description = pull_request.description | |
1032 | version.status = pull_request.status |
|
1034 | version.status = pull_request.status | |
1033 | version.pull_request_state = pull_request.pull_request_state |
|
1035 | version.pull_request_state = pull_request.pull_request_state | |
1034 | version.created_on = datetime.datetime.now() |
|
1036 | version.created_on = datetime.datetime.now() | |
1035 | version.updated_on = pull_request.updated_on |
|
1037 | version.updated_on = pull_request.updated_on | |
1036 | version.user_id = pull_request.user_id |
|
1038 | version.user_id = pull_request.user_id | |
1037 | version.source_repo = pull_request.source_repo |
|
1039 | version.source_repo = pull_request.source_repo | |
1038 | version.source_ref = pull_request.source_ref |
|
1040 | version.source_ref = pull_request.source_ref | |
1039 | version.target_repo = pull_request.target_repo |
|
1041 | version.target_repo = pull_request.target_repo | |
1040 | version.target_ref = pull_request.target_ref |
|
1042 | version.target_ref = pull_request.target_ref | |
1041 |
|
1043 | |||
1042 | version._last_merge_source_rev = pull_request._last_merge_source_rev |
|
1044 | version._last_merge_source_rev = pull_request._last_merge_source_rev | |
1043 | version._last_merge_target_rev = pull_request._last_merge_target_rev |
|
1045 | version._last_merge_target_rev = pull_request._last_merge_target_rev | |
1044 | version.last_merge_status = pull_request.last_merge_status |
|
1046 | version.last_merge_status = pull_request.last_merge_status | |
1045 | version.last_merge_metadata = pull_request.last_merge_metadata |
|
1047 | version.last_merge_metadata = pull_request.last_merge_metadata | |
1046 | version.shadow_merge_ref = pull_request.shadow_merge_ref |
|
1048 | version.shadow_merge_ref = pull_request.shadow_merge_ref | |
1047 | version.merge_rev = pull_request.merge_rev |
|
1049 | version.merge_rev = pull_request.merge_rev | |
1048 | version.reviewer_data = pull_request.reviewer_data |
|
1050 | version.reviewer_data = pull_request.reviewer_data | |
1049 |
|
1051 | |||
1050 | version.revisions = pull_request.revisions |
|
1052 | version.revisions = pull_request.revisions | |
1051 | version.common_ancestor_id = pull_request.common_ancestor_id |
|
1053 | version.common_ancestor_id = pull_request.common_ancestor_id | |
1052 | version.pull_request = pull_request |
|
1054 | version.pull_request = pull_request | |
1053 | Session().add(version) |
|
1055 | Session().add(version) | |
1054 | Session().flush() |
|
1056 | Session().flush() | |
1055 |
|
1057 | |||
1056 | return version |
|
1058 | return version | |
1057 |
|
1059 | |||
1058 | def _generate_update_diffs(self, pull_request, pull_request_version): |
|
1060 | def _generate_update_diffs(self, pull_request, pull_request_version): | |
1059 |
|
1061 | |||
1060 | diff_context = ( |
|
1062 | diff_context = ( | |
1061 | self.DIFF_CONTEXT + |
|
1063 | self.DIFF_CONTEXT + | |
1062 | CommentsModel.needed_extra_diff_context()) |
|
1064 | CommentsModel.needed_extra_diff_context()) | |
1063 | hide_whitespace_changes = False |
|
1065 | hide_whitespace_changes = False | |
1064 | source_repo = pull_request_version.source_repo |
|
1066 | source_repo = pull_request_version.source_repo | |
1065 | source_ref_id = pull_request_version.source_ref_parts.commit_id |
|
1067 | source_ref_id = pull_request_version.source_ref_parts.commit_id | |
1066 | target_ref_id = pull_request_version.target_ref_parts.commit_id |
|
1068 | target_ref_id = pull_request_version.target_ref_parts.commit_id | |
1067 | old_diff = self._get_diff_from_pr_or_version( |
|
1069 | old_diff = self._get_diff_from_pr_or_version( | |
1068 | source_repo, source_ref_id, target_ref_id, |
|
1070 | source_repo, source_ref_id, target_ref_id, | |
1069 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) |
|
1071 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) | |
1070 |
|
1072 | |||
1071 | source_repo = pull_request.source_repo |
|
1073 | source_repo = pull_request.source_repo | |
1072 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
1074 | source_ref_id = pull_request.source_ref_parts.commit_id | |
1073 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
1075 | target_ref_id = pull_request.target_ref_parts.commit_id | |
1074 |
|
1076 | |||
1075 | new_diff = self._get_diff_from_pr_or_version( |
|
1077 | new_diff = self._get_diff_from_pr_or_version( | |
1076 | source_repo, source_ref_id, target_ref_id, |
|
1078 | source_repo, source_ref_id, target_ref_id, | |
1077 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) |
|
1079 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) | |
1078 |
|
1080 | |||
1079 | old_diff_data = diffs.DiffProcessor(old_diff) |
|
1081 | old_diff_data = diffs.DiffProcessor(old_diff) | |
1080 | old_diff_data.prepare() |
|
1082 | old_diff_data.prepare() | |
1081 | new_diff_data = diffs.DiffProcessor(new_diff) |
|
1083 | new_diff_data = diffs.DiffProcessor(new_diff) | |
1082 | new_diff_data.prepare() |
|
1084 | new_diff_data.prepare() | |
1083 |
|
1085 | |||
1084 | return old_diff_data, new_diff_data |
|
1086 | return old_diff_data, new_diff_data | |
1085 |
|
1087 | |||
1086 | def _link_comments_to_version(self, pull_request_version): |
|
1088 | def _link_comments_to_version(self, pull_request_version): | |
1087 | """ |
|
1089 | """ | |
1088 | Link all unlinked comments of this pull request to the given version. |
|
1090 | Link all unlinked comments of this pull request to the given version. | |
1089 |
|
1091 | |||
1090 | :param pull_request_version: The `PullRequestVersion` to which |
|
1092 | :param pull_request_version: The `PullRequestVersion` to which | |
1091 | the comments shall be linked. |
|
1093 | the comments shall be linked. | |
1092 |
|
1094 | |||
1093 | """ |
|
1095 | """ | |
1094 | pull_request = pull_request_version.pull_request |
|
1096 | pull_request = pull_request_version.pull_request | |
1095 | comments = ChangesetComment.query()\ |
|
1097 | comments = ChangesetComment.query()\ | |
1096 | .filter( |
|
1098 | .filter( | |
1097 | # TODO: johbo: Should we query for the repo at all here? |
|
1099 | # TODO: johbo: Should we query for the repo at all here? | |
1098 | # Pending decision on how comments of PRs are to be related |
|
1100 | # Pending decision on how comments of PRs are to be related | |
1099 | # to either the source repo, the target repo or no repo at all. |
|
1101 | # to either the source repo, the target repo or no repo at all. | |
1100 | ChangesetComment.repo_id == pull_request.target_repo.repo_id, |
|
1102 | ChangesetComment.repo_id == pull_request.target_repo.repo_id, | |
1101 | ChangesetComment.pull_request == pull_request, |
|
1103 | ChangesetComment.pull_request == pull_request, | |
1102 | ChangesetComment.pull_request_version == None)\ |
|
1104 | ChangesetComment.pull_request_version == None)\ | |
1103 | .order_by(ChangesetComment.comment_id.asc()) |
|
1105 | .order_by(ChangesetComment.comment_id.asc()) | |
1104 |
|
1106 | |||
1105 | # TODO: johbo: Find out why this breaks if it is done in a bulk |
|
1107 | # TODO: johbo: Find out why this breaks if it is done in a bulk | |
1106 | # operation. |
|
1108 | # operation. | |
1107 | for comment in comments: |
|
1109 | for comment in comments: | |
1108 | comment.pull_request_version_id = ( |
|
1110 | comment.pull_request_version_id = ( | |
1109 | pull_request_version.pull_request_version_id) |
|
1111 | pull_request_version.pull_request_version_id) | |
1110 | Session().add(comment) |
|
1112 | Session().add(comment) | |
1111 |
|
1113 | |||
1112 | def _calculate_commit_id_changes(self, old_ids, new_ids): |
|
1114 | def _calculate_commit_id_changes(self, old_ids, new_ids): | |
1113 | added = [x for x in new_ids if x not in old_ids] |
|
1115 | added = [x for x in new_ids if x not in old_ids] | |
1114 | common = [x for x in new_ids if x in old_ids] |
|
1116 | common = [x for x in new_ids if x in old_ids] | |
1115 | removed = [x for x in old_ids if x not in new_ids] |
|
1117 | removed = [x for x in old_ids if x not in new_ids] | |
1116 | total = new_ids |
|
1118 | total = new_ids | |
1117 | return ChangeTuple(added, common, removed, total) |
|
1119 | return ChangeTuple(added, common, removed, total) | |
1118 |
|
1120 | |||
1119 | def _calculate_file_changes(self, old_diff_data, new_diff_data): |
|
1121 | def _calculate_file_changes(self, old_diff_data, new_diff_data): | |
1120 |
|
1122 | |||
1121 | old_files = OrderedDict() |
|
1123 | old_files = OrderedDict() | |
1122 | for diff_data in old_diff_data.parsed_diff: |
|
1124 | for diff_data in old_diff_data.parsed_diff: | |
1123 | old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff']) |
|
1125 | old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff']) | |
1124 |
|
1126 | |||
1125 | added_files = [] |
|
1127 | added_files = [] | |
1126 | modified_files = [] |
|
1128 | modified_files = [] | |
1127 | removed_files = [] |
|
1129 | removed_files = [] | |
1128 | for diff_data in new_diff_data.parsed_diff: |
|
1130 | for diff_data in new_diff_data.parsed_diff: | |
1129 | new_filename = diff_data['filename'] |
|
1131 | new_filename = diff_data['filename'] | |
1130 | new_hash = md5_safe(diff_data['raw_diff']) |
|
1132 | new_hash = md5_safe(diff_data['raw_diff']) | |
1131 |
|
1133 | |||
1132 | old_hash = old_files.get(new_filename) |
|
1134 | old_hash = old_files.get(new_filename) | |
1133 | if not old_hash: |
|
1135 | if not old_hash: | |
1134 | # file is not present in old diff, we have to figure out from parsed diff |
|
1136 | # file is not present in old diff, we have to figure out from parsed diff | |
1135 | # operation ADD/REMOVE |
|
1137 | # operation ADD/REMOVE | |
1136 | operations_dict = diff_data['stats']['ops'] |
|
1138 | operations_dict = diff_data['stats']['ops'] | |
1137 | if diffs.DEL_FILENODE in operations_dict: |
|
1139 | if diffs.DEL_FILENODE in operations_dict: | |
1138 | removed_files.append(new_filename) |
|
1140 | removed_files.append(new_filename) | |
1139 | else: |
|
1141 | else: | |
1140 | added_files.append(new_filename) |
|
1142 | added_files.append(new_filename) | |
1141 | else: |
|
1143 | else: | |
1142 | if new_hash != old_hash: |
|
1144 | if new_hash != old_hash: | |
1143 | modified_files.append(new_filename) |
|
1145 | modified_files.append(new_filename) | |
1144 | # now remove a file from old, since we have seen it already |
|
1146 | # now remove a file from old, since we have seen it already | |
1145 | del old_files[new_filename] |
|
1147 | del old_files[new_filename] | |
1146 |
|
1148 | |||
1147 | # removed files is when there are present in old, but not in NEW, |
|
1149 | # removed files is when there are present in old, but not in NEW, | |
1148 | # since we remove old files that are present in new diff, left-overs |
|
1150 | # since we remove old files that are present in new diff, left-overs | |
1149 | # if any should be the removed files |
|
1151 | # if any should be the removed files | |
1150 | removed_files.extend(old_files.keys()) |
|
1152 | removed_files.extend(old_files.keys()) | |
1151 |
|
1153 | |||
1152 | return FileChangeTuple(added_files, modified_files, removed_files) |
|
1154 | return FileChangeTuple(added_files, modified_files, removed_files) | |
1153 |
|
1155 | |||
1154 | def _render_update_message(self, ancestor_commit_id, changes, file_changes): |
|
1156 | def _render_update_message(self, ancestor_commit_id, changes, file_changes): | |
1155 | """ |
|
1157 | """ | |
1156 | render the message using DEFAULT_COMMENTS_RENDERER (RST renderer), |
|
1158 | render the message using DEFAULT_COMMENTS_RENDERER (RST renderer), | |
1157 | so it's always looking the same disregarding on which default |
|
1159 | so it's always looking the same disregarding on which default | |
1158 | renderer system is using. |
|
1160 | renderer system is using. | |
1159 |
|
1161 | |||
1160 | :param ancestor_commit_id: ancestor raw_id |
|
1162 | :param ancestor_commit_id: ancestor raw_id | |
1161 | :param changes: changes named tuple |
|
1163 | :param changes: changes named tuple | |
1162 | :param file_changes: file changes named tuple |
|
1164 | :param file_changes: file changes named tuple | |
1163 |
|
1165 | |||
1164 | """ |
|
1166 | """ | |
1165 | new_status = ChangesetStatus.get_status_lbl( |
|
1167 | new_status = ChangesetStatus.get_status_lbl( | |
1166 | ChangesetStatus.STATUS_UNDER_REVIEW) |
|
1168 | ChangesetStatus.STATUS_UNDER_REVIEW) | |
1167 |
|
1169 | |||
1168 | changed_files = ( |
|
1170 | changed_files = ( | |
1169 | file_changes.added + file_changes.modified + file_changes.removed) |
|
1171 | file_changes.added + file_changes.modified + file_changes.removed) | |
1170 |
|
1172 | |||
1171 | params = { |
|
1173 | params = { | |
1172 | 'under_review_label': new_status, |
|
1174 | 'under_review_label': new_status, | |
1173 | 'added_commits': changes.added, |
|
1175 | 'added_commits': changes.added, | |
1174 | 'removed_commits': changes.removed, |
|
1176 | 'removed_commits': changes.removed, | |
1175 | 'changed_files': changed_files, |
|
1177 | 'changed_files': changed_files, | |
1176 | 'added_files': file_changes.added, |
|
1178 | 'added_files': file_changes.added, | |
1177 | 'modified_files': file_changes.modified, |
|
1179 | 'modified_files': file_changes.modified, | |
1178 | 'removed_files': file_changes.removed, |
|
1180 | 'removed_files': file_changes.removed, | |
1179 | 'ancestor_commit_id': ancestor_commit_id |
|
1181 | 'ancestor_commit_id': ancestor_commit_id | |
1180 | } |
|
1182 | } | |
1181 | renderer = RstTemplateRenderer() |
|
1183 | renderer = RstTemplateRenderer() | |
1182 | return renderer.render('pull_request_update.mako', **params) |
|
1184 | return renderer.render('pull_request_update.mako', **params) | |
1183 |
|
1185 | |||
1184 | def edit(self, pull_request, title, description, description_renderer, user): |
|
1186 | def edit(self, pull_request, title, description, description_renderer, user): | |
1185 | pull_request = self.__get_pull_request(pull_request) |
|
1187 | pull_request = self.__get_pull_request(pull_request) | |
1186 | old_data = pull_request.get_api_data(with_merge_state=False) |
|
1188 | old_data = pull_request.get_api_data(with_merge_state=False) | |
1187 | if pull_request.is_closed(): |
|
1189 | if pull_request.is_closed(): | |
1188 | raise ValueError('This pull request is closed') |
|
1190 | raise ValueError('This pull request is closed') | |
1189 | if title: |
|
1191 | if title: | |
1190 | pull_request.title = title |
|
1192 | pull_request.title = title | |
1191 | pull_request.description = description |
|
1193 | pull_request.description = description | |
1192 | pull_request.updated_on = datetime.datetime.now() |
|
1194 | pull_request.updated_on = datetime.datetime.now() | |
1193 | pull_request.description_renderer = description_renderer |
|
1195 | pull_request.description_renderer = description_renderer | |
1194 | Session().add(pull_request) |
|
1196 | Session().add(pull_request) | |
1195 | self._log_audit_action( |
|
1197 | self._log_audit_action( | |
1196 | 'repo.pull_request.edit', {'old_data': old_data}, |
|
1198 | 'repo.pull_request.edit', {'old_data': old_data}, | |
1197 | user, pull_request) |
|
1199 | user, pull_request) | |
1198 |
|
1200 | |||
1199 | def update_reviewers(self, pull_request, reviewer_data, user): |
|
1201 | def update_reviewers(self, pull_request, reviewer_data, user): | |
1200 | """ |
|
1202 | """ | |
1201 | Update the reviewers in the pull request |
|
1203 | Update the reviewers in the pull request | |
1202 |
|
1204 | |||
1203 | :param pull_request: the pr to update |
|
1205 | :param pull_request: the pr to update | |
1204 | :param reviewer_data: list of tuples |
|
1206 | :param reviewer_data: list of tuples | |
1205 | [(user, ['reason1', 'reason2'], mandatory_flag, [rules])] |
|
1207 | [(user, ['reason1', 'reason2'], mandatory_flag, [rules])] | |
1206 | """ |
|
1208 | """ | |
1207 | pull_request = self.__get_pull_request(pull_request) |
|
1209 | pull_request = self.__get_pull_request(pull_request) | |
1208 | if pull_request.is_closed(): |
|
1210 | if pull_request.is_closed(): | |
1209 | raise ValueError('This pull request is closed') |
|
1211 | raise ValueError('This pull request is closed') | |
1210 |
|
1212 | |||
1211 | reviewers = {} |
|
1213 | reviewers = {} | |
1212 | for user_id, reasons, mandatory, rules in reviewer_data: |
|
1214 | for user_id, reasons, mandatory, rules in reviewer_data: | |
1213 | if isinstance(user_id, (int, compat.string_types)): |
|
1215 | if isinstance(user_id, (int, compat.string_types)): | |
1214 | user_id = self._get_user(user_id).user_id |
|
1216 | user_id = self._get_user(user_id).user_id | |
1215 | reviewers[user_id] = { |
|
1217 | reviewers[user_id] = { | |
1216 | 'reasons': reasons, 'mandatory': mandatory} |
|
1218 | 'reasons': reasons, 'mandatory': mandatory} | |
1217 |
|
1219 | |||
1218 | reviewers_ids = set(reviewers.keys()) |
|
1220 | reviewers_ids = set(reviewers.keys()) | |
1219 | current_reviewers = PullRequestReviewers.query()\ |
|
1221 | current_reviewers = PullRequestReviewers.query()\ | |
1220 | .filter(PullRequestReviewers.pull_request == |
|
1222 | .filter(PullRequestReviewers.pull_request == | |
1221 | pull_request).all() |
|
1223 | pull_request).all() | |
1222 | current_reviewers_ids = set([x.user.user_id for x in current_reviewers]) |
|
1224 | current_reviewers_ids = set([x.user.user_id for x in current_reviewers]) | |
1223 |
|
1225 | |||
1224 | ids_to_add = reviewers_ids.difference(current_reviewers_ids) |
|
1226 | ids_to_add = reviewers_ids.difference(current_reviewers_ids) | |
1225 | ids_to_remove = current_reviewers_ids.difference(reviewers_ids) |
|
1227 | ids_to_remove = current_reviewers_ids.difference(reviewers_ids) | |
1226 |
|
1228 | |||
1227 | log.debug("Adding %s reviewers", ids_to_add) |
|
1229 | log.debug("Adding %s reviewers", ids_to_add) | |
1228 | log.debug("Removing %s reviewers", ids_to_remove) |
|
1230 | log.debug("Removing %s reviewers", ids_to_remove) | |
1229 | changed = False |
|
1231 | changed = False | |
1230 | added_audit_reviewers = [] |
|
1232 | added_audit_reviewers = [] | |
1231 | removed_audit_reviewers = [] |
|
1233 | removed_audit_reviewers = [] | |
1232 |
|
1234 | |||
1233 | for uid in ids_to_add: |
|
1235 | for uid in ids_to_add: | |
1234 | changed = True |
|
1236 | changed = True | |
1235 | _usr = self._get_user(uid) |
|
1237 | _usr = self._get_user(uid) | |
1236 | reviewer = PullRequestReviewers() |
|
1238 | reviewer = PullRequestReviewers() | |
1237 | reviewer.user = _usr |
|
1239 | reviewer.user = _usr | |
1238 | reviewer.pull_request = pull_request |
|
1240 | reviewer.pull_request = pull_request | |
1239 | reviewer.reasons = reviewers[uid]['reasons'] |
|
1241 | reviewer.reasons = reviewers[uid]['reasons'] | |
1240 | # NOTE(marcink): mandatory shouldn't be changed now |
|
1242 | # NOTE(marcink): mandatory shouldn't be changed now | |
1241 | # reviewer.mandatory = reviewers[uid]['reasons'] |
|
1243 | # reviewer.mandatory = reviewers[uid]['reasons'] | |
1242 | Session().add(reviewer) |
|
1244 | Session().add(reviewer) | |
1243 | added_audit_reviewers.append(reviewer.get_dict()) |
|
1245 | added_audit_reviewers.append(reviewer.get_dict()) | |
1244 |
|
1246 | |||
1245 | for uid in ids_to_remove: |
|
1247 | for uid in ids_to_remove: | |
1246 | changed = True |
|
1248 | changed = True | |
1247 | # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case |
|
1249 | # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case | |
1248 | # that prevents and fixes cases that we added the same reviewer twice. |
|
1250 | # that prevents and fixes cases that we added the same reviewer twice. | |
1249 | # this CAN happen due to the lack of DB checks |
|
1251 | # this CAN happen due to the lack of DB checks | |
1250 | reviewers = PullRequestReviewers.query()\ |
|
1252 | reviewers = PullRequestReviewers.query()\ | |
1251 | .filter(PullRequestReviewers.user_id == uid, |
|
1253 | .filter(PullRequestReviewers.user_id == uid, | |
1252 | PullRequestReviewers.pull_request == pull_request)\ |
|
1254 | PullRequestReviewers.pull_request == pull_request)\ | |
1253 | .all() |
|
1255 | .all() | |
1254 |
|
1256 | |||
1255 | for obj in reviewers: |
|
1257 | for obj in reviewers: | |
1256 | added_audit_reviewers.append(obj.get_dict()) |
|
1258 | added_audit_reviewers.append(obj.get_dict()) | |
1257 | Session().delete(obj) |
|
1259 | Session().delete(obj) | |
1258 |
|
1260 | |||
1259 | if changed: |
|
1261 | if changed: | |
1260 | Session().expire_all() |
|
1262 | Session().expire_all() | |
1261 | pull_request.updated_on = datetime.datetime.now() |
|
1263 | pull_request.updated_on = datetime.datetime.now() | |
1262 | Session().add(pull_request) |
|
1264 | Session().add(pull_request) | |
1263 |
|
1265 | |||
1264 | # finally store audit logs |
|
1266 | # finally store audit logs | |
1265 | for user_data in added_audit_reviewers: |
|
1267 | for user_data in added_audit_reviewers: | |
1266 | self._log_audit_action( |
|
1268 | self._log_audit_action( | |
1267 | 'repo.pull_request.reviewer.add', {'data': user_data}, |
|
1269 | 'repo.pull_request.reviewer.add', {'data': user_data}, | |
1268 | user, pull_request) |
|
1270 | user, pull_request) | |
1269 | for user_data in removed_audit_reviewers: |
|
1271 | for user_data in removed_audit_reviewers: | |
1270 | self._log_audit_action( |
|
1272 | self._log_audit_action( | |
1271 | 'repo.pull_request.reviewer.delete', {'old_data': user_data}, |
|
1273 | 'repo.pull_request.reviewer.delete', {'old_data': user_data}, | |
1272 | user, pull_request) |
|
1274 | user, pull_request) | |
1273 |
|
1275 | |||
1274 | self.notify_reviewers(pull_request, ids_to_add) |
|
1276 | self.notify_reviewers(pull_request, ids_to_add) | |
1275 | return ids_to_add, ids_to_remove |
|
1277 | return ids_to_add, ids_to_remove | |
1276 |
|
1278 | |||
1277 | def get_url(self, pull_request, request=None, permalink=False): |
|
1279 | def get_url(self, pull_request, request=None, permalink=False): | |
1278 | if not request: |
|
1280 | if not request: | |
1279 | request = get_current_request() |
|
1281 | request = get_current_request() | |
1280 |
|
1282 | |||
1281 | if permalink: |
|
1283 | if permalink: | |
1282 | return request.route_url( |
|
1284 | return request.route_url( | |
1283 | 'pull_requests_global', |
|
1285 | 'pull_requests_global', | |
1284 | pull_request_id=pull_request.pull_request_id,) |
|
1286 | pull_request_id=pull_request.pull_request_id,) | |
1285 | else: |
|
1287 | else: | |
1286 | return request.route_url('pullrequest_show', |
|
1288 | return request.route_url('pullrequest_show', | |
1287 | repo_name=safe_str(pull_request.target_repo.repo_name), |
|
1289 | repo_name=safe_str(pull_request.target_repo.repo_name), | |
1288 | pull_request_id=pull_request.pull_request_id,) |
|
1290 | pull_request_id=pull_request.pull_request_id,) | |
1289 |
|
1291 | |||
1290 | def get_shadow_clone_url(self, pull_request, request=None): |
|
1292 | def get_shadow_clone_url(self, pull_request, request=None): | |
1291 | """ |
|
1293 | """ | |
1292 | Returns qualified url pointing to the shadow repository. If this pull |
|
1294 | Returns qualified url pointing to the shadow repository. If this pull | |
1293 | request is closed there is no shadow repository and ``None`` will be |
|
1295 | request is closed there is no shadow repository and ``None`` will be | |
1294 | returned. |
|
1296 | returned. | |
1295 | """ |
|
1297 | """ | |
1296 | if pull_request.is_closed(): |
|
1298 | if pull_request.is_closed(): | |
1297 | return None |
|
1299 | return None | |
1298 | else: |
|
1300 | else: | |
1299 | pr_url = urllib.unquote(self.get_url(pull_request, request=request)) |
|
1301 | pr_url = urllib.unquote(self.get_url(pull_request, request=request)) | |
1300 | return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url)) |
|
1302 | return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url)) | |
1301 |
|
1303 | |||
1302 | def notify_reviewers(self, pull_request, reviewers_ids): |
|
1304 | def notify_reviewers(self, pull_request, reviewers_ids): | |
1303 | # notification to reviewers |
|
1305 | # notification to reviewers | |
1304 | if not reviewers_ids: |
|
1306 | if not reviewers_ids: | |
1305 | return |
|
1307 | return | |
1306 |
|
1308 | |||
1307 | log.debug('Notify following reviewers about pull-request %s', reviewers_ids) |
|
1309 | log.debug('Notify following reviewers about pull-request %s', reviewers_ids) | |
1308 |
|
1310 | |||
1309 | pull_request_obj = pull_request |
|
1311 | pull_request_obj = pull_request | |
1310 | # get the current participants of this pull request |
|
1312 | # get the current participants of this pull request | |
1311 | recipients = reviewers_ids |
|
1313 | recipients = reviewers_ids | |
1312 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST |
|
1314 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST | |
1313 |
|
1315 | |||
1314 | pr_source_repo = pull_request_obj.source_repo |
|
1316 | pr_source_repo = pull_request_obj.source_repo | |
1315 | pr_target_repo = pull_request_obj.target_repo |
|
1317 | pr_target_repo = pull_request_obj.target_repo | |
1316 |
|
1318 | |||
1317 | pr_url = h.route_url('pullrequest_show', |
|
1319 | pr_url = h.route_url('pullrequest_show', | |
1318 | repo_name=pr_target_repo.repo_name, |
|
1320 | repo_name=pr_target_repo.repo_name, | |
1319 | pull_request_id=pull_request_obj.pull_request_id,) |
|
1321 | pull_request_id=pull_request_obj.pull_request_id,) | |
1320 |
|
1322 | |||
1321 | # set some variables for email notification |
|
1323 | # set some variables for email notification | |
1322 | pr_target_repo_url = h.route_url( |
|
1324 | pr_target_repo_url = h.route_url( | |
1323 | 'repo_summary', repo_name=pr_target_repo.repo_name) |
|
1325 | 'repo_summary', repo_name=pr_target_repo.repo_name) | |
1324 |
|
1326 | |||
1325 | pr_source_repo_url = h.route_url( |
|
1327 | pr_source_repo_url = h.route_url( | |
1326 | 'repo_summary', repo_name=pr_source_repo.repo_name) |
|
1328 | 'repo_summary', repo_name=pr_source_repo.repo_name) | |
1327 |
|
1329 | |||
1328 | # pull request specifics |
|
1330 | # pull request specifics | |
1329 | pull_request_commits = [ |
|
1331 | pull_request_commits = [ | |
1330 | (x.raw_id, x.message) |
|
1332 | (x.raw_id, x.message) | |
1331 | for x in map(pr_source_repo.get_commit, pull_request.revisions)] |
|
1333 | for x in map(pr_source_repo.get_commit, pull_request.revisions)] | |
1332 |
|
1334 | |||
1333 | kwargs = { |
|
1335 | kwargs = { | |
1334 | 'user': pull_request.author, |
|
1336 | 'user': pull_request.author, | |
1335 | 'pull_request': pull_request_obj, |
|
1337 | 'pull_request': pull_request_obj, | |
1336 | 'pull_request_commits': pull_request_commits, |
|
1338 | 'pull_request_commits': pull_request_commits, | |
1337 |
|
1339 | |||
1338 | 'pull_request_target_repo': pr_target_repo, |
|
1340 | 'pull_request_target_repo': pr_target_repo, | |
1339 | 'pull_request_target_repo_url': pr_target_repo_url, |
|
1341 | 'pull_request_target_repo_url': pr_target_repo_url, | |
1340 |
|
1342 | |||
1341 | 'pull_request_source_repo': pr_source_repo, |
|
1343 | 'pull_request_source_repo': pr_source_repo, | |
1342 | 'pull_request_source_repo_url': pr_source_repo_url, |
|
1344 | 'pull_request_source_repo_url': pr_source_repo_url, | |
1343 |
|
1345 | |||
1344 | 'pull_request_url': pr_url, |
|
1346 | 'pull_request_url': pr_url, | |
1345 | } |
|
1347 | } | |
1346 |
|
1348 | |||
1347 | # pre-generate the subject for notification itself |
|
1349 | # pre-generate the subject for notification itself | |
1348 | (subject, |
|
1350 | (subject, | |
1349 | _h, _e, # we don't care about those |
|
1351 | _h, _e, # we don't care about those | |
1350 | body_plaintext) = EmailNotificationModel().render_email( |
|
1352 | body_plaintext) = EmailNotificationModel().render_email( | |
1351 | notification_type, **kwargs) |
|
1353 | notification_type, **kwargs) | |
1352 |
|
1354 | |||
1353 | # create notification objects, and emails |
|
1355 | # create notification objects, and emails | |
1354 | NotificationModel().create( |
|
1356 | NotificationModel().create( | |
1355 | created_by=pull_request.author, |
|
1357 | created_by=pull_request.author, | |
1356 | notification_subject=subject, |
|
1358 | notification_subject=subject, | |
1357 | notification_body=body_plaintext, |
|
1359 | notification_body=body_plaintext, | |
1358 | notification_type=notification_type, |
|
1360 | notification_type=notification_type, | |
1359 | recipients=recipients, |
|
1361 | recipients=recipients, | |
1360 | email_kwargs=kwargs, |
|
1362 | email_kwargs=kwargs, | |
1361 | ) |
|
1363 | ) | |
1362 |
|
1364 | |||
1363 | def notify_users(self, pull_request, updating_user, ancestor_commit_id, |
|
1365 | def notify_users(self, pull_request, updating_user, ancestor_commit_id, | |
1364 | commit_changes, file_changes): |
|
1366 | commit_changes, file_changes): | |
1365 |
|
1367 | |||
1366 | updating_user_id = updating_user.user_id |
|
1368 | updating_user_id = updating_user.user_id | |
1367 | reviewers = set([x.user.user_id for x in pull_request.reviewers]) |
|
1369 | reviewers = set([x.user.user_id for x in pull_request.reviewers]) | |
1368 | # NOTE(marcink): send notification to all other users except to |
|
1370 | # NOTE(marcink): send notification to all other users except to | |
1369 | # person who updated the PR |
|
1371 | # person who updated the PR | |
1370 | recipients = reviewers.difference(set([updating_user_id])) |
|
1372 | recipients = reviewers.difference(set([updating_user_id])) | |
1371 |
|
1373 | |||
1372 | log.debug('Notify following recipients about pull-request update %s', recipients) |
|
1374 | log.debug('Notify following recipients about pull-request update %s', recipients) | |
1373 |
|
1375 | |||
1374 | pull_request_obj = pull_request |
|
1376 | pull_request_obj = pull_request | |
1375 |
|
1377 | |||
1376 | # send email about the update |
|
1378 | # send email about the update | |
1377 | changed_files = ( |
|
1379 | changed_files = ( | |
1378 | file_changes.added + file_changes.modified + file_changes.removed) |
|
1380 | file_changes.added + file_changes.modified + file_changes.removed) | |
1379 |
|
1381 | |||
1380 | pr_source_repo = pull_request_obj.source_repo |
|
1382 | pr_source_repo = pull_request_obj.source_repo | |
1381 | pr_target_repo = pull_request_obj.target_repo |
|
1383 | pr_target_repo = pull_request_obj.target_repo | |
1382 |
|
1384 | |||
1383 | pr_url = h.route_url('pullrequest_show', |
|
1385 | pr_url = h.route_url('pullrequest_show', | |
1384 | repo_name=pr_target_repo.repo_name, |
|
1386 | repo_name=pr_target_repo.repo_name, | |
1385 | pull_request_id=pull_request_obj.pull_request_id,) |
|
1387 | pull_request_id=pull_request_obj.pull_request_id,) | |
1386 |
|
1388 | |||
1387 | # set some variables for email notification |
|
1389 | # set some variables for email notification | |
1388 | pr_target_repo_url = h.route_url( |
|
1390 | pr_target_repo_url = h.route_url( | |
1389 | 'repo_summary', repo_name=pr_target_repo.repo_name) |
|
1391 | 'repo_summary', repo_name=pr_target_repo.repo_name) | |
1390 |
|
1392 | |||
1391 | pr_source_repo_url = h.route_url( |
|
1393 | pr_source_repo_url = h.route_url( | |
1392 | 'repo_summary', repo_name=pr_source_repo.repo_name) |
|
1394 | 'repo_summary', repo_name=pr_source_repo.repo_name) | |
1393 |
|
1395 | |||
1394 | email_kwargs = { |
|
1396 | email_kwargs = { | |
1395 | 'date': datetime.datetime.now(), |
|
1397 | 'date': datetime.datetime.now(), | |
1396 | 'updating_user': updating_user, |
|
1398 | 'updating_user': updating_user, | |
1397 |
|
1399 | |||
1398 | 'pull_request': pull_request_obj, |
|
1400 | 'pull_request': pull_request_obj, | |
1399 |
|
1401 | |||
1400 | 'pull_request_target_repo': pr_target_repo, |
|
1402 | 'pull_request_target_repo': pr_target_repo, | |
1401 | 'pull_request_target_repo_url': pr_target_repo_url, |
|
1403 | 'pull_request_target_repo_url': pr_target_repo_url, | |
1402 |
|
1404 | |||
1403 | 'pull_request_source_repo': pr_source_repo, |
|
1405 | 'pull_request_source_repo': pr_source_repo, | |
1404 | 'pull_request_source_repo_url': pr_source_repo_url, |
|
1406 | 'pull_request_source_repo_url': pr_source_repo_url, | |
1405 |
|
1407 | |||
1406 | 'pull_request_url': pr_url, |
|
1408 | 'pull_request_url': pr_url, | |
1407 |
|
1409 | |||
1408 | 'ancestor_commit_id': ancestor_commit_id, |
|
1410 | 'ancestor_commit_id': ancestor_commit_id, | |
1409 | 'added_commits': commit_changes.added, |
|
1411 | 'added_commits': commit_changes.added, | |
1410 | 'removed_commits': commit_changes.removed, |
|
1412 | 'removed_commits': commit_changes.removed, | |
1411 | 'changed_files': changed_files, |
|
1413 | 'changed_files': changed_files, | |
1412 | 'added_files': file_changes.added, |
|
1414 | 'added_files': file_changes.added, | |
1413 | 'modified_files': file_changes.modified, |
|
1415 | 'modified_files': file_changes.modified, | |
1414 | 'removed_files': file_changes.removed, |
|
1416 | 'removed_files': file_changes.removed, | |
1415 | } |
|
1417 | } | |
1416 |
|
1418 | |||
1417 | (subject, |
|
1419 | (subject, | |
1418 | _h, _e, # we don't care about those |
|
1420 | _h, _e, # we don't care about those | |
1419 | body_plaintext) = EmailNotificationModel().render_email( |
|
1421 | body_plaintext) = EmailNotificationModel().render_email( | |
1420 | EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs) |
|
1422 | EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs) | |
1421 |
|
1423 | |||
1422 | # create notification objects, and emails |
|
1424 | # create notification objects, and emails | |
1423 | NotificationModel().create( |
|
1425 | NotificationModel().create( | |
1424 | created_by=updating_user, |
|
1426 | created_by=updating_user, | |
1425 | notification_subject=subject, |
|
1427 | notification_subject=subject, | |
1426 | notification_body=body_plaintext, |
|
1428 | notification_body=body_plaintext, | |
1427 | notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, |
|
1429 | notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, | |
1428 | recipients=recipients, |
|
1430 | recipients=recipients, | |
1429 | email_kwargs=email_kwargs, |
|
1431 | email_kwargs=email_kwargs, | |
1430 | ) |
|
1432 | ) | |
1431 |
|
1433 | |||
1432 | def delete(self, pull_request, user=None): |
|
1434 | def delete(self, pull_request, user=None): | |
1433 | if not user: |
|
1435 | if not user: | |
1434 | user = getattr(get_current_rhodecode_user(), 'username', None) |
|
1436 | user = getattr(get_current_rhodecode_user(), 'username', None) | |
1435 |
|
1437 | |||
1436 | pull_request = self.__get_pull_request(pull_request) |
|
1438 | pull_request = self.__get_pull_request(pull_request) | |
1437 | old_data = pull_request.get_api_data(with_merge_state=False) |
|
1439 | old_data = pull_request.get_api_data(with_merge_state=False) | |
1438 | self._cleanup_merge_workspace(pull_request) |
|
1440 | self._cleanup_merge_workspace(pull_request) | |
1439 | self._log_audit_action( |
|
1441 | self._log_audit_action( | |
1440 | 'repo.pull_request.delete', {'old_data': old_data}, |
|
1442 | 'repo.pull_request.delete', {'old_data': old_data}, | |
1441 | user, pull_request) |
|
1443 | user, pull_request) | |
1442 | Session().delete(pull_request) |
|
1444 | Session().delete(pull_request) | |
1443 |
|
1445 | |||
1444 | def close_pull_request(self, pull_request, user): |
|
1446 | def close_pull_request(self, pull_request, user): | |
1445 | pull_request = self.__get_pull_request(pull_request) |
|
1447 | pull_request = self.__get_pull_request(pull_request) | |
1446 | self._cleanup_merge_workspace(pull_request) |
|
1448 | self._cleanup_merge_workspace(pull_request) | |
1447 | pull_request.status = PullRequest.STATUS_CLOSED |
|
1449 | pull_request.status = PullRequest.STATUS_CLOSED | |
1448 | pull_request.updated_on = datetime.datetime.now() |
|
1450 | pull_request.updated_on = datetime.datetime.now() | |
1449 | Session().add(pull_request) |
|
1451 | Session().add(pull_request) | |
1450 | self.trigger_pull_request_hook(pull_request, pull_request.author, 'close') |
|
1452 | self.trigger_pull_request_hook(pull_request, pull_request.author, 'close') | |
1451 |
|
1453 | |||
1452 | pr_data = pull_request.get_api_data(with_merge_state=False) |
|
1454 | pr_data = pull_request.get_api_data(with_merge_state=False) | |
1453 | self._log_audit_action( |
|
1455 | self._log_audit_action( | |
1454 | 'repo.pull_request.close', {'data': pr_data}, user, pull_request) |
|
1456 | 'repo.pull_request.close', {'data': pr_data}, user, pull_request) | |
1455 |
|
1457 | |||
1456 | def close_pull_request_with_comment( |
|
1458 | def close_pull_request_with_comment( | |
1457 | self, pull_request, user, repo, message=None, auth_user=None): |
|
1459 | self, pull_request, user, repo, message=None, auth_user=None): | |
1458 |
|
1460 | |||
1459 | pull_request_review_status = pull_request.calculated_review_status() |
|
1461 | pull_request_review_status = pull_request.calculated_review_status() | |
1460 |
|
1462 | |||
1461 | if pull_request_review_status == ChangesetStatus.STATUS_APPROVED: |
|
1463 | if pull_request_review_status == ChangesetStatus.STATUS_APPROVED: | |
1462 | # approved only if we have voting consent |
|
1464 | # approved only if we have voting consent | |
1463 | status = ChangesetStatus.STATUS_APPROVED |
|
1465 | status = ChangesetStatus.STATUS_APPROVED | |
1464 | else: |
|
1466 | else: | |
1465 | status = ChangesetStatus.STATUS_REJECTED |
|
1467 | status = ChangesetStatus.STATUS_REJECTED | |
1466 | status_lbl = ChangesetStatus.get_status_lbl(status) |
|
1468 | status_lbl = ChangesetStatus.get_status_lbl(status) | |
1467 |
|
1469 | |||
1468 | default_message = ( |
|
1470 | default_message = ( | |
1469 | 'Closing with status change {transition_icon} {status}.' |
|
1471 | 'Closing with status change {transition_icon} {status}.' | |
1470 | ).format(transition_icon='>', status=status_lbl) |
|
1472 | ).format(transition_icon='>', status=status_lbl) | |
1471 | text = message or default_message |
|
1473 | text = message or default_message | |
1472 |
|
1474 | |||
1473 | # create a comment, and link it to new status |
|
1475 | # create a comment, and link it to new status | |
1474 | comment = CommentsModel().create( |
|
1476 | comment = CommentsModel().create( | |
1475 | text=text, |
|
1477 | text=text, | |
1476 | repo=repo.repo_id, |
|
1478 | repo=repo.repo_id, | |
1477 | user=user.user_id, |
|
1479 | user=user.user_id, | |
1478 | pull_request=pull_request.pull_request_id, |
|
1480 | pull_request=pull_request.pull_request_id, | |
1479 | status_change=status_lbl, |
|
1481 | status_change=status_lbl, | |
1480 | status_change_type=status, |
|
1482 | status_change_type=status, | |
1481 | closing_pr=True, |
|
1483 | closing_pr=True, | |
1482 | auth_user=auth_user, |
|
1484 | auth_user=auth_user, | |
1483 | ) |
|
1485 | ) | |
1484 |
|
1486 | |||
1485 | # calculate old status before we change it |
|
1487 | # calculate old status before we change it | |
1486 | old_calculated_status = pull_request.calculated_review_status() |
|
1488 | old_calculated_status = pull_request.calculated_review_status() | |
1487 | ChangesetStatusModel().set_status( |
|
1489 | ChangesetStatusModel().set_status( | |
1488 | repo.repo_id, |
|
1490 | repo.repo_id, | |
1489 | status, |
|
1491 | status, | |
1490 | user.user_id, |
|
1492 | user.user_id, | |
1491 | comment=comment, |
|
1493 | comment=comment, | |
1492 | pull_request=pull_request.pull_request_id |
|
1494 | pull_request=pull_request.pull_request_id | |
1493 | ) |
|
1495 | ) | |
1494 |
|
1496 | |||
1495 | Session().flush() |
|
1497 | Session().flush() | |
1496 |
|
1498 | |||
1497 | self.trigger_pull_request_hook(pull_request, user, 'comment', |
|
1499 | self.trigger_pull_request_hook(pull_request, user, 'comment', | |
1498 | data={'comment': comment}) |
|
1500 | data={'comment': comment}) | |
1499 |
|
1501 | |||
1500 | # we now calculate the status of pull request again, and based on that |
|
1502 | # we now calculate the status of pull request again, and based on that | |
1501 | # calculation trigger status change. This might happen in cases |
|
1503 | # calculation trigger status change. This might happen in cases | |
1502 | # that non-reviewer admin closes a pr, which means his vote doesn't |
|
1504 | # that non-reviewer admin closes a pr, which means his vote doesn't | |
1503 | # change the status, while if he's a reviewer this might change it. |
|
1505 | # change the status, while if he's a reviewer this might change it. | |
1504 | calculated_status = pull_request.calculated_review_status() |
|
1506 | calculated_status = pull_request.calculated_review_status() | |
1505 | if old_calculated_status != calculated_status: |
|
1507 | if old_calculated_status != calculated_status: | |
1506 | self.trigger_pull_request_hook(pull_request, user, 'review_status_change', |
|
1508 | self.trigger_pull_request_hook(pull_request, user, 'review_status_change', | |
1507 | data={'status': calculated_status}) |
|
1509 | data={'status': calculated_status}) | |
1508 |
|
1510 | |||
1509 | # finally close the PR |
|
1511 | # finally close the PR | |
1510 | PullRequestModel().close_pull_request(pull_request.pull_request_id, user) |
|
1512 | PullRequestModel().close_pull_request(pull_request.pull_request_id, user) | |
1511 |
|
1513 | |||
1512 | return comment, status |
|
1514 | return comment, status | |
1513 |
|
1515 | |||
1514 | def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False): |
|
1516 | def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False): | |
1515 | _ = translator or get_current_request().translate |
|
1517 | _ = translator or get_current_request().translate | |
1516 |
|
1518 | |||
1517 | if not self._is_merge_enabled(pull_request): |
|
1519 | if not self._is_merge_enabled(pull_request): | |
1518 | return None, False, _('Server-side pull request merging is disabled.') |
|
1520 | return None, False, _('Server-side pull request merging is disabled.') | |
1519 |
|
1521 | |||
1520 | if pull_request.is_closed(): |
|
1522 | if pull_request.is_closed(): | |
1521 | return None, False, _('This pull request is closed.') |
|
1523 | return None, False, _('This pull request is closed.') | |
1522 |
|
1524 | |||
1523 | merge_possible, msg = self._check_repo_requirements( |
|
1525 | merge_possible, msg = self._check_repo_requirements( | |
1524 | target=pull_request.target_repo, source=pull_request.source_repo, |
|
1526 | target=pull_request.target_repo, source=pull_request.source_repo, | |
1525 | translator=_) |
|
1527 | translator=_) | |
1526 | if not merge_possible: |
|
1528 | if not merge_possible: | |
1527 | return None, merge_possible, msg |
|
1529 | return None, merge_possible, msg | |
1528 |
|
1530 | |||
1529 | try: |
|
1531 | try: | |
1530 | merge_response = self._try_merge( |
|
1532 | merge_response = self._try_merge( | |
1531 | pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh) |
|
1533 | pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh) | |
1532 | log.debug("Merge response: %s", merge_response) |
|
1534 | log.debug("Merge response: %s", merge_response) | |
1533 | return merge_response, merge_response.possible, merge_response.merge_status_message |
|
1535 | return merge_response, merge_response.possible, merge_response.merge_status_message | |
1534 | except NotImplementedError: |
|
1536 | except NotImplementedError: | |
1535 | return None, False, _('Pull request merging is not supported.') |
|
1537 | return None, False, _('Pull request merging is not supported.') | |
1536 |
|
1538 | |||
1537 | def _check_repo_requirements(self, target, source, translator): |
|
1539 | def _check_repo_requirements(self, target, source, translator): | |
1538 | """ |
|
1540 | """ | |
1539 | Check if `target` and `source` have compatible requirements. |
|
1541 | Check if `target` and `source` have compatible requirements. | |
1540 |
|
1542 | |||
1541 | Currently this is just checking for largefiles. |
|
1543 | Currently this is just checking for largefiles. | |
1542 | """ |
|
1544 | """ | |
1543 | _ = translator |
|
1545 | _ = translator | |
1544 | target_has_largefiles = self._has_largefiles(target) |
|
1546 | target_has_largefiles = self._has_largefiles(target) | |
1545 | source_has_largefiles = self._has_largefiles(source) |
|
1547 | source_has_largefiles = self._has_largefiles(source) | |
1546 | merge_possible = True |
|
1548 | merge_possible = True | |
1547 | message = u'' |
|
1549 | message = u'' | |
1548 |
|
1550 | |||
1549 | if target_has_largefiles != source_has_largefiles: |
|
1551 | if target_has_largefiles != source_has_largefiles: | |
1550 | merge_possible = False |
|
1552 | merge_possible = False | |
1551 | if source_has_largefiles: |
|
1553 | if source_has_largefiles: | |
1552 | message = _( |
|
1554 | message = _( | |
1553 | 'Target repository large files support is disabled.') |
|
1555 | 'Target repository large files support is disabled.') | |
1554 | else: |
|
1556 | else: | |
1555 | message = _( |
|
1557 | message = _( | |
1556 | 'Source repository large files support is disabled.') |
|
1558 | 'Source repository large files support is disabled.') | |
1557 |
|
1559 | |||
1558 | return merge_possible, message |
|
1560 | return merge_possible, message | |
1559 |
|
1561 | |||
1560 | def _has_largefiles(self, repo): |
|
1562 | def _has_largefiles(self, repo): | |
1561 | largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings( |
|
1563 | largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings( | |
1562 | 'extensions', 'largefiles') |
|
1564 | 'extensions', 'largefiles') | |
1563 | return largefiles_ui and largefiles_ui[0].active |
|
1565 | return largefiles_ui and largefiles_ui[0].active | |
1564 |
|
1566 | |||
1565 | def _try_merge(self, pull_request, force_shadow_repo_refresh=False): |
|
1567 | def _try_merge(self, pull_request, force_shadow_repo_refresh=False): | |
1566 | """ |
|
1568 | """ | |
1567 | Try to merge the pull request and return the merge status. |
|
1569 | Try to merge the pull request and return the merge status. | |
1568 | """ |
|
1570 | """ | |
1569 | log.debug( |
|
1571 | log.debug( | |
1570 | "Trying out if the pull request %s can be merged. Force_refresh=%s", |
|
1572 | "Trying out if the pull request %s can be merged. Force_refresh=%s", | |
1571 | pull_request.pull_request_id, force_shadow_repo_refresh) |
|
1573 | pull_request.pull_request_id, force_shadow_repo_refresh) | |
1572 | target_vcs = pull_request.target_repo.scm_instance() |
|
1574 | target_vcs = pull_request.target_repo.scm_instance() | |
1573 | # Refresh the target reference. |
|
1575 | # Refresh the target reference. | |
1574 | try: |
|
1576 | try: | |
1575 | target_ref = self._refresh_reference( |
|
1577 | target_ref = self._refresh_reference( | |
1576 | pull_request.target_ref_parts, target_vcs) |
|
1578 | pull_request.target_ref_parts, target_vcs) | |
1577 | except CommitDoesNotExistError: |
|
1579 | except CommitDoesNotExistError: | |
1578 | merge_state = MergeResponse( |
|
1580 | merge_state = MergeResponse( | |
1579 | False, False, None, MergeFailureReason.MISSING_TARGET_REF, |
|
1581 | False, False, None, MergeFailureReason.MISSING_TARGET_REF, | |
1580 | metadata={'target_ref': pull_request.target_ref_parts}) |
|
1582 | metadata={'target_ref': pull_request.target_ref_parts}) | |
1581 | return merge_state |
|
1583 | return merge_state | |
1582 |
|
1584 | |||
1583 | target_locked = pull_request.target_repo.locked |
|
1585 | target_locked = pull_request.target_repo.locked | |
1584 | if target_locked and target_locked[0]: |
|
1586 | if target_locked and target_locked[0]: | |
1585 | locked_by = 'user:{}'.format(target_locked[0]) |
|
1587 | locked_by = 'user:{}'.format(target_locked[0]) | |
1586 | log.debug("The target repository is locked by %s.", locked_by) |
|
1588 | log.debug("The target repository is locked by %s.", locked_by) | |
1587 | merge_state = MergeResponse( |
|
1589 | merge_state = MergeResponse( | |
1588 | False, False, None, MergeFailureReason.TARGET_IS_LOCKED, |
|
1590 | False, False, None, MergeFailureReason.TARGET_IS_LOCKED, | |
1589 | metadata={'locked_by': locked_by}) |
|
1591 | metadata={'locked_by': locked_by}) | |
1590 | elif force_shadow_repo_refresh or self._needs_merge_state_refresh( |
|
1592 | elif force_shadow_repo_refresh or self._needs_merge_state_refresh( | |
1591 | pull_request, target_ref): |
|
1593 | pull_request, target_ref): | |
1592 | log.debug("Refreshing the merge status of the repository.") |
|
1594 | log.debug("Refreshing the merge status of the repository.") | |
1593 | merge_state = self._refresh_merge_state( |
|
1595 | merge_state = self._refresh_merge_state( | |
1594 | pull_request, target_vcs, target_ref) |
|
1596 | pull_request, target_vcs, target_ref) | |
1595 | else: |
|
1597 | else: | |
1596 | possible = pull_request.last_merge_status == MergeFailureReason.NONE |
|
1598 | possible = pull_request.last_merge_status == MergeFailureReason.NONE | |
1597 | metadata = { |
|
1599 | metadata = { | |
1598 | 'unresolved_files': '', |
|
1600 | 'unresolved_files': '', | |
1599 | 'target_ref': pull_request.target_ref_parts, |
|
1601 | 'target_ref': pull_request.target_ref_parts, | |
1600 | 'source_ref': pull_request.source_ref_parts, |
|
1602 | 'source_ref': pull_request.source_ref_parts, | |
1601 | } |
|
1603 | } | |
1602 | if pull_request.last_merge_metadata: |
|
1604 | if pull_request.last_merge_metadata: | |
1603 | metadata.update(pull_request.last_merge_metadata) |
|
1605 | metadata.update(pull_request.last_merge_metadata) | |
1604 |
|
1606 | |||
1605 | if not possible and target_ref.type == 'branch': |
|
1607 | if not possible and target_ref.type == 'branch': | |
1606 | # NOTE(marcink): case for mercurial multiple heads on branch |
|
1608 | # NOTE(marcink): case for mercurial multiple heads on branch | |
1607 | heads = target_vcs._heads(target_ref.name) |
|
1609 | heads = target_vcs._heads(target_ref.name) | |
1608 | if len(heads) != 1: |
|
1610 | if len(heads) != 1: | |
1609 | heads = '\n,'.join(target_vcs._heads(target_ref.name)) |
|
1611 | heads = '\n,'.join(target_vcs._heads(target_ref.name)) | |
1610 | metadata.update({ |
|
1612 | metadata.update({ | |
1611 | 'heads': heads |
|
1613 | 'heads': heads | |
1612 | }) |
|
1614 | }) | |
1613 |
|
1615 | |||
1614 | merge_state = MergeResponse( |
|
1616 | merge_state = MergeResponse( | |
1615 | possible, False, None, pull_request.last_merge_status, metadata=metadata) |
|
1617 | possible, False, None, pull_request.last_merge_status, metadata=metadata) | |
1616 |
|
1618 | |||
1617 | return merge_state |
|
1619 | return merge_state | |
1618 |
|
1620 | |||
1619 | def _refresh_reference(self, reference, vcs_repository): |
|
1621 | def _refresh_reference(self, reference, vcs_repository): | |
1620 | if reference.type in self.UPDATABLE_REF_TYPES: |
|
1622 | if reference.type in self.UPDATABLE_REF_TYPES: | |
1621 | name_or_id = reference.name |
|
1623 | name_or_id = reference.name | |
1622 | else: |
|
1624 | else: | |
1623 | name_or_id = reference.commit_id |
|
1625 | name_or_id = reference.commit_id | |
1624 |
|
1626 | |||
1625 | refreshed_commit = vcs_repository.get_commit(name_or_id) |
|
1627 | refreshed_commit = vcs_repository.get_commit(name_or_id) | |
1626 | refreshed_reference = Reference( |
|
1628 | refreshed_reference = Reference( | |
1627 | reference.type, reference.name, refreshed_commit.raw_id) |
|
1629 | reference.type, reference.name, refreshed_commit.raw_id) | |
1628 | return refreshed_reference |
|
1630 | return refreshed_reference | |
1629 |
|
1631 | |||
1630 | def _needs_merge_state_refresh(self, pull_request, target_reference): |
|
1632 | def _needs_merge_state_refresh(self, pull_request, target_reference): | |
1631 | return not( |
|
1633 | return not( | |
1632 | pull_request.revisions and |
|
1634 | pull_request.revisions and | |
1633 | pull_request.revisions[0] == pull_request._last_merge_source_rev and |
|
1635 | pull_request.revisions[0] == pull_request._last_merge_source_rev and | |
1634 | target_reference.commit_id == pull_request._last_merge_target_rev) |
|
1636 | target_reference.commit_id == pull_request._last_merge_target_rev) | |
1635 |
|
1637 | |||
1636 | def _refresh_merge_state(self, pull_request, target_vcs, target_reference): |
|
1638 | def _refresh_merge_state(self, pull_request, target_vcs, target_reference): | |
1637 | workspace_id = self._workspace_id(pull_request) |
|
1639 | workspace_id = self._workspace_id(pull_request) | |
1638 | source_vcs = pull_request.source_repo.scm_instance() |
|
1640 | source_vcs = pull_request.source_repo.scm_instance() | |
1639 | repo_id = pull_request.target_repo.repo_id |
|
1641 | repo_id = pull_request.target_repo.repo_id | |
1640 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
1642 | use_rebase = self._use_rebase_for_merging(pull_request) | |
1641 | close_branch = self._close_branch_before_merging(pull_request) |
|
1643 | close_branch = self._close_branch_before_merging(pull_request) | |
1642 | merge_state = target_vcs.merge( |
|
1644 | merge_state = target_vcs.merge( | |
1643 | repo_id, workspace_id, |
|
1645 | repo_id, workspace_id, | |
1644 | target_reference, source_vcs, pull_request.source_ref_parts, |
|
1646 | target_reference, source_vcs, pull_request.source_ref_parts, | |
1645 | dry_run=True, use_rebase=use_rebase, |
|
1647 | dry_run=True, use_rebase=use_rebase, | |
1646 | close_branch=close_branch) |
|
1648 | close_branch=close_branch) | |
1647 |
|
1649 | |||
1648 | # Do not store the response if there was an unknown error. |
|
1650 | # Do not store the response if there was an unknown error. | |
1649 | if merge_state.failure_reason != MergeFailureReason.UNKNOWN: |
|
1651 | if merge_state.failure_reason != MergeFailureReason.UNKNOWN: | |
1650 | pull_request._last_merge_source_rev = \ |
|
1652 | pull_request._last_merge_source_rev = \ | |
1651 | pull_request.source_ref_parts.commit_id |
|
1653 | pull_request.source_ref_parts.commit_id | |
1652 | pull_request._last_merge_target_rev = target_reference.commit_id |
|
1654 | pull_request._last_merge_target_rev = target_reference.commit_id | |
1653 | pull_request.last_merge_status = merge_state.failure_reason |
|
1655 | pull_request.last_merge_status = merge_state.failure_reason | |
1654 | pull_request.last_merge_metadata = merge_state.metadata |
|
1656 | pull_request.last_merge_metadata = merge_state.metadata | |
1655 |
|
1657 | |||
1656 | pull_request.shadow_merge_ref = merge_state.merge_ref |
|
1658 | pull_request.shadow_merge_ref = merge_state.merge_ref | |
1657 | Session().add(pull_request) |
|
1659 | Session().add(pull_request) | |
1658 | Session().commit() |
|
1660 | Session().commit() | |
1659 |
|
1661 | |||
1660 | return merge_state |
|
1662 | return merge_state | |
1661 |
|
1663 | |||
1662 | def _workspace_id(self, pull_request): |
|
1664 | def _workspace_id(self, pull_request): | |
1663 | workspace_id = 'pr-%s' % pull_request.pull_request_id |
|
1665 | workspace_id = 'pr-%s' % pull_request.pull_request_id | |
1664 | return workspace_id |
|
1666 | return workspace_id | |
1665 |
|
1667 | |||
1666 | def generate_repo_data(self, repo, commit_id=None, branch=None, |
|
1668 | def generate_repo_data(self, repo, commit_id=None, branch=None, | |
1667 | bookmark=None, translator=None): |
|
1669 | bookmark=None, translator=None): | |
1668 | from rhodecode.model.repo import RepoModel |
|
1670 | from rhodecode.model.repo import RepoModel | |
1669 |
|
1671 | |||
1670 | all_refs, selected_ref = \ |
|
1672 | all_refs, selected_ref = \ | |
1671 | self._get_repo_pullrequest_sources( |
|
1673 | self._get_repo_pullrequest_sources( | |
1672 | repo.scm_instance(), commit_id=commit_id, |
|
1674 | repo.scm_instance(), commit_id=commit_id, | |
1673 | branch=branch, bookmark=bookmark, translator=translator) |
|
1675 | branch=branch, bookmark=bookmark, translator=translator) | |
1674 |
|
1676 | |||
1675 | refs_select2 = [] |
|
1677 | refs_select2 = [] | |
1676 | for element in all_refs: |
|
1678 | for element in all_refs: | |
1677 | children = [{'id': x[0], 'text': x[1]} for x in element[0]] |
|
1679 | children = [{'id': x[0], 'text': x[1]} for x in element[0]] | |
1678 | refs_select2.append({'text': element[1], 'children': children}) |
|
1680 | refs_select2.append({'text': element[1], 'children': children}) | |
1679 |
|
1681 | |||
1680 | return { |
|
1682 | return { | |
1681 | 'user': { |
|
1683 | 'user': { | |
1682 | 'user_id': repo.user.user_id, |
|
1684 | 'user_id': repo.user.user_id, | |
1683 | 'username': repo.user.username, |
|
1685 | 'username': repo.user.username, | |
1684 | 'firstname': repo.user.first_name, |
|
1686 | 'firstname': repo.user.first_name, | |
1685 | 'lastname': repo.user.last_name, |
|
1687 | 'lastname': repo.user.last_name, | |
1686 | 'gravatar_link': h.gravatar_url(repo.user.email, 14), |
|
1688 | 'gravatar_link': h.gravatar_url(repo.user.email, 14), | |
1687 | }, |
|
1689 | }, | |
1688 | 'name': repo.repo_name, |
|
1690 | 'name': repo.repo_name, | |
1689 | 'link': RepoModel().get_url(repo), |
|
1691 | 'link': RepoModel().get_url(repo), | |
1690 | 'description': h.chop_at_smart(repo.description_safe, '\n'), |
|
1692 | 'description': h.chop_at_smart(repo.description_safe, '\n'), | |
1691 | 'refs': { |
|
1693 | 'refs': { | |
1692 | 'all_refs': all_refs, |
|
1694 | 'all_refs': all_refs, | |
1693 | 'selected_ref': selected_ref, |
|
1695 | 'selected_ref': selected_ref, | |
1694 | 'select2_refs': refs_select2 |
|
1696 | 'select2_refs': refs_select2 | |
1695 | } |
|
1697 | } | |
1696 | } |
|
1698 | } | |
1697 |
|
1699 | |||
1698 | def generate_pullrequest_title(self, source, source_ref, target): |
|
1700 | def generate_pullrequest_title(self, source, source_ref, target): | |
1699 | return u'{source}#{at_ref} to {target}'.format( |
|
1701 | return u'{source}#{at_ref} to {target}'.format( | |
1700 | source=source, |
|
1702 | source=source, | |
1701 | at_ref=source_ref, |
|
1703 | at_ref=source_ref, | |
1702 | target=target, |
|
1704 | target=target, | |
1703 | ) |
|
1705 | ) | |
1704 |
|
1706 | |||
1705 | def _cleanup_merge_workspace(self, pull_request): |
|
1707 | def _cleanup_merge_workspace(self, pull_request): | |
1706 | # Merging related cleanup |
|
1708 | # Merging related cleanup | |
1707 | repo_id = pull_request.target_repo.repo_id |
|
1709 | repo_id = pull_request.target_repo.repo_id | |
1708 | target_scm = pull_request.target_repo.scm_instance() |
|
1710 | target_scm = pull_request.target_repo.scm_instance() | |
1709 | workspace_id = self._workspace_id(pull_request) |
|
1711 | workspace_id = self._workspace_id(pull_request) | |
1710 |
|
1712 | |||
1711 | try: |
|
1713 | try: | |
1712 | target_scm.cleanup_merge_workspace(repo_id, workspace_id) |
|
1714 | target_scm.cleanup_merge_workspace(repo_id, workspace_id) | |
1713 | except NotImplementedError: |
|
1715 | except NotImplementedError: | |
1714 | pass |
|
1716 | pass | |
1715 |
|
1717 | |||
1716 | def _get_repo_pullrequest_sources( |
|
1718 | def _get_repo_pullrequest_sources( | |
1717 | self, repo, commit_id=None, branch=None, bookmark=None, |
|
1719 | self, repo, commit_id=None, branch=None, bookmark=None, | |
1718 | translator=None): |
|
1720 | translator=None): | |
1719 | """ |
|
1721 | """ | |
1720 | Return a structure with repo's interesting commits, suitable for |
|
1722 | Return a structure with repo's interesting commits, suitable for | |
1721 | the selectors in pullrequest controller |
|
1723 | the selectors in pullrequest controller | |
1722 |
|
1724 | |||
1723 | :param commit_id: a commit that must be in the list somehow |
|
1725 | :param commit_id: a commit that must be in the list somehow | |
1724 | and selected by default |
|
1726 | and selected by default | |
1725 | :param branch: a branch that must be in the list and selected |
|
1727 | :param branch: a branch that must be in the list and selected | |
1726 | by default - even if closed |
|
1728 | by default - even if closed | |
1727 | :param bookmark: a bookmark that must be in the list and selected |
|
1729 | :param bookmark: a bookmark that must be in the list and selected | |
1728 | """ |
|
1730 | """ | |
1729 | _ = translator or get_current_request().translate |
|
1731 | _ = translator or get_current_request().translate | |
1730 |
|
1732 | |||
1731 | commit_id = safe_str(commit_id) if commit_id else None |
|
1733 | commit_id = safe_str(commit_id) if commit_id else None | |
1732 | branch = safe_unicode(branch) if branch else None |
|
1734 | branch = safe_unicode(branch) if branch else None | |
1733 | bookmark = safe_unicode(bookmark) if bookmark else None |
|
1735 | bookmark = safe_unicode(bookmark) if bookmark else None | |
1734 |
|
1736 | |||
1735 | selected = None |
|
1737 | selected = None | |
1736 |
|
1738 | |||
1737 | # order matters: first source that has commit_id in it will be selected |
|
1739 | # order matters: first source that has commit_id in it will be selected | |
1738 | sources = [] |
|
1740 | sources = [] | |
1739 | sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark)) |
|
1741 | sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark)) | |
1740 | sources.append(('branch', repo.branches.items(), _('Branches'), branch)) |
|
1742 | sources.append(('branch', repo.branches.items(), _('Branches'), branch)) | |
1741 |
|
1743 | |||
1742 | if commit_id: |
|
1744 | if commit_id: | |
1743 | ref_commit = (h.short_id(commit_id), commit_id) |
|
1745 | ref_commit = (h.short_id(commit_id), commit_id) | |
1744 | sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id)) |
|
1746 | sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id)) | |
1745 |
|
1747 | |||
1746 | sources.append( |
|
1748 | sources.append( | |
1747 | ('branch', repo.branches_closed.items(), _('Closed Branches'), branch), |
|
1749 | ('branch', repo.branches_closed.items(), _('Closed Branches'), branch), | |
1748 | ) |
|
1750 | ) | |
1749 |
|
1751 | |||
1750 | groups = [] |
|
1752 | groups = [] | |
1751 |
|
1753 | |||
1752 | for group_key, ref_list, group_name, match in sources: |
|
1754 | for group_key, ref_list, group_name, match in sources: | |
1753 | group_refs = [] |
|
1755 | group_refs = [] | |
1754 | for ref_name, ref_id in ref_list: |
|
1756 | for ref_name, ref_id in ref_list: | |
1755 | ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id) |
|
1757 | ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id) | |
1756 | group_refs.append((ref_key, ref_name)) |
|
1758 | group_refs.append((ref_key, ref_name)) | |
1757 |
|
1759 | |||
1758 | if not selected: |
|
1760 | if not selected: | |
1759 | if set([commit_id, match]) & set([ref_id, ref_name]): |
|
1761 | if set([commit_id, match]) & set([ref_id, ref_name]): | |
1760 | selected = ref_key |
|
1762 | selected = ref_key | |
1761 |
|
1763 | |||
1762 | if group_refs: |
|
1764 | if group_refs: | |
1763 | groups.append((group_refs, group_name)) |
|
1765 | groups.append((group_refs, group_name)) | |
1764 |
|
1766 | |||
1765 | if not selected: |
|
1767 | if not selected: | |
1766 | ref = commit_id or branch or bookmark |
|
1768 | ref = commit_id or branch or bookmark | |
1767 | if ref: |
|
1769 | if ref: | |
1768 | raise CommitDoesNotExistError( |
|
1770 | raise CommitDoesNotExistError( | |
1769 | u'No commit refs could be found matching: {}'.format(ref)) |
|
1771 | u'No commit refs could be found matching: {}'.format(ref)) | |
1770 | elif repo.DEFAULT_BRANCH_NAME in repo.branches: |
|
1772 | elif repo.DEFAULT_BRANCH_NAME in repo.branches: | |
1771 | selected = u'branch:{}:{}'.format( |
|
1773 | selected = u'branch:{}:{}'.format( | |
1772 | safe_unicode(repo.DEFAULT_BRANCH_NAME), |
|
1774 | safe_unicode(repo.DEFAULT_BRANCH_NAME), | |
1773 | safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME]) |
|
1775 | safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME]) | |
1774 | ) |
|
1776 | ) | |
1775 | elif repo.commit_ids: |
|
1777 | elif repo.commit_ids: | |
1776 | # make the user select in this case |
|
1778 | # make the user select in this case | |
1777 | selected = None |
|
1779 | selected = None | |
1778 | else: |
|
1780 | else: | |
1779 | raise EmptyRepositoryError() |
|
1781 | raise EmptyRepositoryError() | |
1780 | return groups, selected |
|
1782 | return groups, selected | |
1781 |
|
1783 | |||
1782 | def get_diff(self, source_repo, source_ref_id, target_ref_id, |
|
1784 | def get_diff(self, source_repo, source_ref_id, target_ref_id, | |
1783 | hide_whitespace_changes, diff_context): |
|
1785 | hide_whitespace_changes, diff_context): | |
1784 |
|
1786 | |||
1785 | return self._get_diff_from_pr_or_version( |
|
1787 | return self._get_diff_from_pr_or_version( | |
1786 | source_repo, source_ref_id, target_ref_id, |
|
1788 | source_repo, source_ref_id, target_ref_id, | |
1787 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) |
|
1789 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) | |
1788 |
|
1790 | |||
1789 | def _get_diff_from_pr_or_version( |
|
1791 | def _get_diff_from_pr_or_version( | |
1790 | self, source_repo, source_ref_id, target_ref_id, |
|
1792 | self, source_repo, source_ref_id, target_ref_id, | |
1791 | hide_whitespace_changes, diff_context): |
|
1793 | hide_whitespace_changes, diff_context): | |
1792 |
|
1794 | |||
1793 | target_commit = source_repo.get_commit( |
|
1795 | target_commit = source_repo.get_commit( | |
1794 | commit_id=safe_str(target_ref_id)) |
|
1796 | commit_id=safe_str(target_ref_id)) | |
1795 | source_commit = source_repo.get_commit( |
|
1797 | source_commit = source_repo.get_commit( | |
1796 | commit_id=safe_str(source_ref_id), maybe_unreachable=True) |
|
1798 | commit_id=safe_str(source_ref_id), maybe_unreachable=True) | |
1797 | if isinstance(source_repo, Repository): |
|
1799 | if isinstance(source_repo, Repository): | |
1798 | vcs_repo = source_repo.scm_instance() |
|
1800 | vcs_repo = source_repo.scm_instance() | |
1799 | else: |
|
1801 | else: | |
1800 | vcs_repo = source_repo |
|
1802 | vcs_repo = source_repo | |
1801 |
|
1803 | |||
1802 | # TODO: johbo: In the context of an update, we cannot reach |
|
1804 | # TODO: johbo: In the context of an update, we cannot reach | |
1803 | # the old commit anymore with our normal mechanisms. It needs |
|
1805 | # the old commit anymore with our normal mechanisms. It needs | |
1804 | # some sort of special support in the vcs layer to avoid this |
|
1806 | # some sort of special support in the vcs layer to avoid this | |
1805 | # workaround. |
|
1807 | # workaround. | |
1806 | if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and |
|
1808 | if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and | |
1807 | vcs_repo.alias == 'git'): |
|
1809 | vcs_repo.alias == 'git'): | |
1808 | source_commit.raw_id = safe_str(source_ref_id) |
|
1810 | source_commit.raw_id = safe_str(source_ref_id) | |
1809 |
|
1811 | |||
1810 | log.debug('calculating diff between ' |
|
1812 | log.debug('calculating diff between ' | |
1811 | 'source_ref:%s and target_ref:%s for repo `%s`', |
|
1813 | 'source_ref:%s and target_ref:%s for repo `%s`', | |
1812 | target_ref_id, source_ref_id, |
|
1814 | target_ref_id, source_ref_id, | |
1813 | safe_unicode(vcs_repo.path)) |
|
1815 | safe_unicode(vcs_repo.path)) | |
1814 |
|
1816 | |||
1815 | vcs_diff = vcs_repo.get_diff( |
|
1817 | vcs_diff = vcs_repo.get_diff( | |
1816 | commit1=target_commit, commit2=source_commit, |
|
1818 | commit1=target_commit, commit2=source_commit, | |
1817 | ignore_whitespace=hide_whitespace_changes, context=diff_context) |
|
1819 | ignore_whitespace=hide_whitespace_changes, context=diff_context) | |
1818 | return vcs_diff |
|
1820 | return vcs_diff | |
1819 |
|
1821 | |||
1820 | def _is_merge_enabled(self, pull_request): |
|
1822 | def _is_merge_enabled(self, pull_request): | |
1821 | return self._get_general_setting( |
|
1823 | return self._get_general_setting( | |
1822 | pull_request, 'rhodecode_pr_merge_enabled') |
|
1824 | pull_request, 'rhodecode_pr_merge_enabled') | |
1823 |
|
1825 | |||
1824 | def _use_rebase_for_merging(self, pull_request): |
|
1826 | def _use_rebase_for_merging(self, pull_request): | |
1825 | repo_type = pull_request.target_repo.repo_type |
|
1827 | repo_type = pull_request.target_repo.repo_type | |
1826 | if repo_type == 'hg': |
|
1828 | if repo_type == 'hg': | |
1827 | return self._get_general_setting( |
|
1829 | return self._get_general_setting( | |
1828 | pull_request, 'rhodecode_hg_use_rebase_for_merging') |
|
1830 | pull_request, 'rhodecode_hg_use_rebase_for_merging') | |
1829 | elif repo_type == 'git': |
|
1831 | elif repo_type == 'git': | |
1830 | return self._get_general_setting( |
|
1832 | return self._get_general_setting( | |
1831 | pull_request, 'rhodecode_git_use_rebase_for_merging') |
|
1833 | pull_request, 'rhodecode_git_use_rebase_for_merging') | |
1832 |
|
1834 | |||
1833 | return False |
|
1835 | return False | |
1834 |
|
1836 | |||
1835 | def _user_name_for_merging(self, pull_request, user): |
|
1837 | def _user_name_for_merging(self, pull_request, user): | |
1836 | env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '') |
|
1838 | env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '') | |
1837 | if env_user_name_attr and hasattr(user, env_user_name_attr): |
|
1839 | if env_user_name_attr and hasattr(user, env_user_name_attr): | |
1838 | user_name_attr = env_user_name_attr |
|
1840 | user_name_attr = env_user_name_attr | |
1839 | else: |
|
1841 | else: | |
1840 | user_name_attr = 'short_contact' |
|
1842 | user_name_attr = 'short_contact' | |
1841 |
|
1843 | |||
1842 | user_name = getattr(user, user_name_attr) |
|
1844 | user_name = getattr(user, user_name_attr) | |
1843 | return user_name |
|
1845 | return user_name | |
1844 |
|
1846 | |||
1845 | def _close_branch_before_merging(self, pull_request): |
|
1847 | def _close_branch_before_merging(self, pull_request): | |
1846 | repo_type = pull_request.target_repo.repo_type |
|
1848 | repo_type = pull_request.target_repo.repo_type | |
1847 | if repo_type == 'hg': |
|
1849 | if repo_type == 'hg': | |
1848 | return self._get_general_setting( |
|
1850 | return self._get_general_setting( | |
1849 | pull_request, 'rhodecode_hg_close_branch_before_merging') |
|
1851 | pull_request, 'rhodecode_hg_close_branch_before_merging') | |
1850 | elif repo_type == 'git': |
|
1852 | elif repo_type == 'git': | |
1851 | return self._get_general_setting( |
|
1853 | return self._get_general_setting( | |
1852 | pull_request, 'rhodecode_git_close_branch_before_merging') |
|
1854 | pull_request, 'rhodecode_git_close_branch_before_merging') | |
1853 |
|
1855 | |||
1854 | return False |
|
1856 | return False | |
1855 |
|
1857 | |||
1856 | def _get_general_setting(self, pull_request, settings_key, default=False): |
|
1858 | def _get_general_setting(self, pull_request, settings_key, default=False): | |
1857 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
1859 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) | |
1858 | settings = settings_model.get_general_settings() |
|
1860 | settings = settings_model.get_general_settings() | |
1859 | return settings.get(settings_key, default) |
|
1861 | return settings.get(settings_key, default) | |
1860 |
|
1862 | |||
1861 | def _log_audit_action(self, action, action_data, user, pull_request): |
|
1863 | def _log_audit_action(self, action, action_data, user, pull_request): | |
1862 | audit_logger.store( |
|
1864 | audit_logger.store( | |
1863 | action=action, |
|
1865 | action=action, | |
1864 | action_data=action_data, |
|
1866 | action_data=action_data, | |
1865 | user=user, |
|
1867 | user=user, | |
1866 | repo=pull_request.target_repo) |
|
1868 | repo=pull_request.target_repo) | |
1867 |
|
1869 | |||
1868 | def get_reviewer_functions(self): |
|
1870 | def get_reviewer_functions(self): | |
1869 | """ |
|
1871 | """ | |
1870 | Fetches functions for validation and fetching default reviewers. |
|
1872 | Fetches functions for validation and fetching default reviewers. | |
1871 | If available we use the EE package, else we fallback to CE |
|
1873 | If available we use the EE package, else we fallback to CE | |
1872 | package functions |
|
1874 | package functions | |
1873 | """ |
|
1875 | """ | |
1874 | try: |
|
1876 | try: | |
1875 | from rc_reviewers.utils import get_default_reviewers_data |
|
1877 | from rc_reviewers.utils import get_default_reviewers_data | |
1876 | from rc_reviewers.utils import validate_default_reviewers |
|
1878 | from rc_reviewers.utils import validate_default_reviewers | |
1877 | except ImportError: |
|
1879 | except ImportError: | |
1878 | from rhodecode.apps.repository.utils import get_default_reviewers_data |
|
1880 | from rhodecode.apps.repository.utils import get_default_reviewers_data | |
1879 | from rhodecode.apps.repository.utils import validate_default_reviewers |
|
1881 | from rhodecode.apps.repository.utils import validate_default_reviewers | |
1880 |
|
1882 | |||
1881 | return get_default_reviewers_data, validate_default_reviewers |
|
1883 | return get_default_reviewers_data, validate_default_reviewers | |
1882 |
|
1884 | |||
1883 |
|
1885 | |||
1884 | class MergeCheck(object): |
|
1886 | class MergeCheck(object): | |
1885 | """ |
|
1887 | """ | |
1886 | Perform Merge Checks and returns a check object which stores information |
|
1888 | Perform Merge Checks and returns a check object which stores information | |
1887 | about merge errors, and merge conditions |
|
1889 | about merge errors, and merge conditions | |
1888 | """ |
|
1890 | """ | |
1889 | TODO_CHECK = 'todo' |
|
1891 | TODO_CHECK = 'todo' | |
1890 | PERM_CHECK = 'perm' |
|
1892 | PERM_CHECK = 'perm' | |
1891 | REVIEW_CHECK = 'review' |
|
1893 | REVIEW_CHECK = 'review' | |
1892 | MERGE_CHECK = 'merge' |
|
1894 | MERGE_CHECK = 'merge' | |
1893 | WIP_CHECK = 'wip' |
|
1895 | WIP_CHECK = 'wip' | |
1894 |
|
1896 | |||
1895 | def __init__(self): |
|
1897 | def __init__(self): | |
1896 | self.review_status = None |
|
1898 | self.review_status = None | |
1897 | self.merge_possible = None |
|
1899 | self.merge_possible = None | |
1898 | self.merge_msg = '' |
|
1900 | self.merge_msg = '' | |
1899 | self.merge_response = None |
|
1901 | self.merge_response = None | |
1900 | self.failed = None |
|
1902 | self.failed = None | |
1901 | self.errors = [] |
|
1903 | self.errors = [] | |
1902 | self.error_details = OrderedDict() |
|
1904 | self.error_details = OrderedDict() | |
1903 | self.source_commit = AttributeDict() |
|
1905 | self.source_commit = AttributeDict() | |
1904 | self.target_commit = AttributeDict() |
|
1906 | self.target_commit = AttributeDict() | |
1905 |
|
1907 | |||
1906 | def __repr__(self): |
|
1908 | def __repr__(self): | |
1907 | return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format( |
|
1909 | return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format( | |
1908 | self.merge_possible, self.failed, self.errors) |
|
1910 | self.merge_possible, self.failed, self.errors) | |
1909 |
|
1911 | |||
1910 | def push_error(self, error_type, message, error_key, details): |
|
1912 | def push_error(self, error_type, message, error_key, details): | |
1911 | self.failed = True |
|
1913 | self.failed = True | |
1912 | self.errors.append([error_type, message]) |
|
1914 | self.errors.append([error_type, message]) | |
1913 | self.error_details[error_key] = dict( |
|
1915 | self.error_details[error_key] = dict( | |
1914 | details=details, |
|
1916 | details=details, | |
1915 | error_type=error_type, |
|
1917 | error_type=error_type, | |
1916 | message=message |
|
1918 | message=message | |
1917 | ) |
|
1919 | ) | |
1918 |
|
1920 | |||
1919 | @classmethod |
|
1921 | @classmethod | |
1920 | def validate(cls, pull_request, auth_user, translator, fail_early=False, |
|
1922 | def validate(cls, pull_request, auth_user, translator, fail_early=False, | |
1921 | force_shadow_repo_refresh=False): |
|
1923 | force_shadow_repo_refresh=False): | |
1922 | _ = translator |
|
1924 | _ = translator | |
1923 | merge_check = cls() |
|
1925 | merge_check = cls() | |
1924 |
|
1926 | |||
1925 | # title has WIP: |
|
1927 | # title has WIP: | |
1926 | if pull_request.work_in_progress: |
|
1928 | if pull_request.work_in_progress: | |
1927 | log.debug("MergeCheck: cannot merge, title has wip: marker.") |
|
1929 | log.debug("MergeCheck: cannot merge, title has wip: marker.") | |
1928 |
|
1930 | |||
1929 | msg = _('WIP marker in title prevents from accidental merge.') |
|
1931 | msg = _('WIP marker in title prevents from accidental merge.') | |
1930 | merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title) |
|
1932 | merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title) | |
1931 | if fail_early: |
|
1933 | if fail_early: | |
1932 | return merge_check |
|
1934 | return merge_check | |
1933 |
|
1935 | |||
1934 | # permissions to merge |
|
1936 | # permissions to merge | |
1935 | user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user) |
|
1937 | user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user) | |
1936 | if not user_allowed_to_merge: |
|
1938 | if not user_allowed_to_merge: | |
1937 | log.debug("MergeCheck: cannot merge, approval is pending.") |
|
1939 | log.debug("MergeCheck: cannot merge, approval is pending.") | |
1938 |
|
1940 | |||
1939 | msg = _('User `{}` not allowed to perform merge.').format(auth_user.username) |
|
1941 | msg = _('User `{}` not allowed to perform merge.').format(auth_user.username) | |
1940 | merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username) |
|
1942 | merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username) | |
1941 | if fail_early: |
|
1943 | if fail_early: | |
1942 | return merge_check |
|
1944 | return merge_check | |
1943 |
|
1945 | |||
1944 | # permission to merge into the target branch |
|
1946 | # permission to merge into the target branch | |
1945 | target_commit_id = pull_request.target_ref_parts.commit_id |
|
1947 | target_commit_id = pull_request.target_ref_parts.commit_id | |
1946 | if pull_request.target_ref_parts.type == 'branch': |
|
1948 | if pull_request.target_ref_parts.type == 'branch': | |
1947 | branch_name = pull_request.target_ref_parts.name |
|
1949 | branch_name = pull_request.target_ref_parts.name | |
1948 | else: |
|
1950 | else: | |
1949 | # for mercurial we can always figure out the branch from the commit |
|
1951 | # for mercurial we can always figure out the branch from the commit | |
1950 | # in case of bookmark |
|
1952 | # in case of bookmark | |
1951 | target_commit = pull_request.target_repo.get_commit(target_commit_id) |
|
1953 | target_commit = pull_request.target_repo.get_commit(target_commit_id) | |
1952 | branch_name = target_commit.branch |
|
1954 | branch_name = target_commit.branch | |
1953 |
|
1955 | |||
1954 | rule, branch_perm = auth_user.get_rule_and_branch_permission( |
|
1956 | rule, branch_perm = auth_user.get_rule_and_branch_permission( | |
1955 | pull_request.target_repo.repo_name, branch_name) |
|
1957 | pull_request.target_repo.repo_name, branch_name) | |
1956 | if branch_perm and branch_perm == 'branch.none': |
|
1958 | if branch_perm and branch_perm == 'branch.none': | |
1957 | msg = _('Target branch `{}` changes rejected by rule {}.').format( |
|
1959 | msg = _('Target branch `{}` changes rejected by rule {}.').format( | |
1958 | branch_name, rule) |
|
1960 | branch_name, rule) | |
1959 | merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username) |
|
1961 | merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username) | |
1960 | if fail_early: |
|
1962 | if fail_early: | |
1961 | return merge_check |
|
1963 | return merge_check | |
1962 |
|
1964 | |||
1963 | # review status, must be always present |
|
1965 | # review status, must be always present | |
1964 | review_status = pull_request.calculated_review_status() |
|
1966 | review_status = pull_request.calculated_review_status() | |
1965 | merge_check.review_status = review_status |
|
1967 | merge_check.review_status = review_status | |
1966 |
|
1968 | |||
1967 | status_approved = review_status == ChangesetStatus.STATUS_APPROVED |
|
1969 | status_approved = review_status == ChangesetStatus.STATUS_APPROVED | |
1968 | if not status_approved: |
|
1970 | if not status_approved: | |
1969 | log.debug("MergeCheck: cannot merge, approval is pending.") |
|
1971 | log.debug("MergeCheck: cannot merge, approval is pending.") | |
1970 |
|
1972 | |||
1971 | msg = _('Pull request reviewer approval is pending.') |
|
1973 | msg = _('Pull request reviewer approval is pending.') | |
1972 |
|
1974 | |||
1973 | merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status) |
|
1975 | merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status) | |
1974 |
|
1976 | |||
1975 | if fail_early: |
|
1977 | if fail_early: | |
1976 | return merge_check |
|
1978 | return merge_check | |
1977 |
|
1979 | |||
1978 | # left over TODOs |
|
1980 | # left over TODOs | |
1979 | todos = CommentsModel().get_pull_request_unresolved_todos(pull_request) |
|
1981 | todos = CommentsModel().get_pull_request_unresolved_todos(pull_request) | |
1980 | if todos: |
|
1982 | if todos: | |
1981 | log.debug("MergeCheck: cannot merge, {} " |
|
1983 | log.debug("MergeCheck: cannot merge, {} " | |
1982 | "unresolved TODOs left.".format(len(todos))) |
|
1984 | "unresolved TODOs left.".format(len(todos))) | |
1983 |
|
1985 | |||
1984 | if len(todos) == 1: |
|
1986 | if len(todos) == 1: | |
1985 | msg = _('Cannot merge, {} TODO still not resolved.').format( |
|
1987 | msg = _('Cannot merge, {} TODO still not resolved.').format( | |
1986 | len(todos)) |
|
1988 | len(todos)) | |
1987 | else: |
|
1989 | else: | |
1988 | msg = _('Cannot merge, {} TODOs still not resolved.').format( |
|
1990 | msg = _('Cannot merge, {} TODOs still not resolved.').format( | |
1989 | len(todos)) |
|
1991 | len(todos)) | |
1990 |
|
1992 | |||
1991 | merge_check.push_error('warning', msg, cls.TODO_CHECK, todos) |
|
1993 | merge_check.push_error('warning', msg, cls.TODO_CHECK, todos) | |
1992 |
|
1994 | |||
1993 | if fail_early: |
|
1995 | if fail_early: | |
1994 | return merge_check |
|
1996 | return merge_check | |
1995 |
|
1997 | |||
1996 | # merge possible, here is the filesystem simulation + shadow repo |
|
1998 | # merge possible, here is the filesystem simulation + shadow repo | |
1997 | merge_response, merge_status, msg = PullRequestModel().merge_status( |
|
1999 | merge_response, merge_status, msg = PullRequestModel().merge_status( | |
1998 | pull_request, translator=translator, |
|
2000 | pull_request, translator=translator, | |
1999 | force_shadow_repo_refresh=force_shadow_repo_refresh) |
|
2001 | force_shadow_repo_refresh=force_shadow_repo_refresh) | |
2000 |
|
2002 | |||
2001 | merge_check.merge_possible = merge_status |
|
2003 | merge_check.merge_possible = merge_status | |
2002 | merge_check.merge_msg = msg |
|
2004 | merge_check.merge_msg = msg | |
2003 | merge_check.merge_response = merge_response |
|
2005 | merge_check.merge_response = merge_response | |
2004 |
|
2006 | |||
2005 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
2007 | source_ref_id = pull_request.source_ref_parts.commit_id | |
2006 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
2008 | target_ref_id = pull_request.target_ref_parts.commit_id | |
2007 |
|
2009 | |||
2008 | try: |
|
2010 | try: | |
2009 | source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request) |
|
2011 | source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request) | |
2010 | merge_check.source_commit.changed = source_ref_id != source_commit.raw_id |
|
2012 | merge_check.source_commit.changed = source_ref_id != source_commit.raw_id | |
2011 | merge_check.source_commit.ref_spec = pull_request.source_ref_parts |
|
2013 | merge_check.source_commit.ref_spec = pull_request.source_ref_parts | |
2012 | merge_check.source_commit.current_raw_id = source_commit.raw_id |
|
2014 | merge_check.source_commit.current_raw_id = source_commit.raw_id | |
2013 | merge_check.source_commit.previous_raw_id = source_ref_id |
|
2015 | merge_check.source_commit.previous_raw_id = source_ref_id | |
2014 |
|
2016 | |||
2015 | merge_check.target_commit.changed = target_ref_id != target_commit.raw_id |
|
2017 | merge_check.target_commit.changed = target_ref_id != target_commit.raw_id | |
2016 | merge_check.target_commit.ref_spec = pull_request.target_ref_parts |
|
2018 | merge_check.target_commit.ref_spec = pull_request.target_ref_parts | |
2017 | merge_check.target_commit.current_raw_id = target_commit.raw_id |
|
2019 | merge_check.target_commit.current_raw_id = target_commit.raw_id | |
2018 | merge_check.target_commit.previous_raw_id = target_ref_id |
|
2020 | merge_check.target_commit.previous_raw_id = target_ref_id | |
2019 | except (SourceRefMissing, TargetRefMissing): |
|
2021 | except (SourceRefMissing, TargetRefMissing): | |
2020 | pass |
|
2022 | pass | |
2021 |
|
2023 | |||
2022 | if not merge_status: |
|
2024 | if not merge_status: | |
2023 | log.debug("MergeCheck: cannot merge, pull request merge not possible.") |
|
2025 | log.debug("MergeCheck: cannot merge, pull request merge not possible.") | |
2024 | merge_check.push_error('warning', msg, cls.MERGE_CHECK, None) |
|
2026 | merge_check.push_error('warning', msg, cls.MERGE_CHECK, None) | |
2025 |
|
2027 | |||
2026 | if fail_early: |
|
2028 | if fail_early: | |
2027 | return merge_check |
|
2029 | return merge_check | |
2028 |
|
2030 | |||
2029 | log.debug('MergeCheck: is failed: %s', merge_check.failed) |
|
2031 | log.debug('MergeCheck: is failed: %s', merge_check.failed) | |
2030 | return merge_check |
|
2032 | return merge_check | |
2031 |
|
2033 | |||
2032 | @classmethod |
|
2034 | @classmethod | |
2033 | def get_merge_conditions(cls, pull_request, translator): |
|
2035 | def get_merge_conditions(cls, pull_request, translator): | |
2034 | _ = translator |
|
2036 | _ = translator | |
2035 | merge_details = {} |
|
2037 | merge_details = {} | |
2036 |
|
2038 | |||
2037 | model = PullRequestModel() |
|
2039 | model = PullRequestModel() | |
2038 | use_rebase = model._use_rebase_for_merging(pull_request) |
|
2040 | use_rebase = model._use_rebase_for_merging(pull_request) | |
2039 |
|
2041 | |||
2040 | if use_rebase: |
|
2042 | if use_rebase: | |
2041 | merge_details['merge_strategy'] = dict( |
|
2043 | merge_details['merge_strategy'] = dict( | |
2042 | details={}, |
|
2044 | details={}, | |
2043 | message=_('Merge strategy: rebase') |
|
2045 | message=_('Merge strategy: rebase') | |
2044 | ) |
|
2046 | ) | |
2045 | else: |
|
2047 | else: | |
2046 | merge_details['merge_strategy'] = dict( |
|
2048 | merge_details['merge_strategy'] = dict( | |
2047 | details={}, |
|
2049 | details={}, | |
2048 | message=_('Merge strategy: explicit merge commit') |
|
2050 | message=_('Merge strategy: explicit merge commit') | |
2049 | ) |
|
2051 | ) | |
2050 |
|
2052 | |||
2051 | close_branch = model._close_branch_before_merging(pull_request) |
|
2053 | close_branch = model._close_branch_before_merging(pull_request) | |
2052 | if close_branch: |
|
2054 | if close_branch: | |
2053 | repo_type = pull_request.target_repo.repo_type |
|
2055 | repo_type = pull_request.target_repo.repo_type | |
2054 | close_msg = '' |
|
2056 | close_msg = '' | |
2055 | if repo_type == 'hg': |
|
2057 | if repo_type == 'hg': | |
2056 | close_msg = _('Source branch will be closed before the merge.') |
|
2058 | close_msg = _('Source branch will be closed before the merge.') | |
2057 | elif repo_type == 'git': |
|
2059 | elif repo_type == 'git': | |
2058 | close_msg = _('Source branch will be deleted after the merge.') |
|
2060 | close_msg = _('Source branch will be deleted after the merge.') | |
2059 |
|
2061 | |||
2060 | merge_details['close_branch'] = dict( |
|
2062 | merge_details['close_branch'] = dict( | |
2061 | details={}, |
|
2063 | details={}, | |
2062 | message=close_msg |
|
2064 | message=close_msg | |
2063 | ) |
|
2065 | ) | |
2064 |
|
2066 | |||
2065 | return merge_details |
|
2067 | return merge_details | |
2066 |
|
2068 | |||
2067 |
|
2069 | |||
2068 | ChangeTuple = collections.namedtuple( |
|
2070 | ChangeTuple = collections.namedtuple( | |
2069 | 'ChangeTuple', ['added', 'common', 'removed', 'total']) |
|
2071 | 'ChangeTuple', ['added', 'common', 'removed', 'total']) | |
2070 |
|
2072 | |||
2071 | FileChangeTuple = collections.namedtuple( |
|
2073 | FileChangeTuple = collections.namedtuple( | |
2072 | 'FileChangeTuple', ['added', 'modified', 'removed']) |
|
2074 | 'FileChangeTuple', ['added', 'modified', 'removed']) |
@@ -1,100 +1,116 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import pytest |
|
21 | import pytest | |
22 |
|
22 | |||
23 | from rhodecode.tests.events.conftest import EventCatcher |
|
23 | from rhodecode.tests.events.conftest import EventCatcher | |
24 |
|
24 | |||
25 | from rhodecode.model.comment import CommentsModel |
|
25 | from rhodecode.model.comment import CommentsModel | |
26 | from rhodecode.model.pull_request import PullRequestModel |
|
26 | from rhodecode.model.pull_request import PullRequestModel | |
27 | from rhodecode.events import ( |
|
27 | from rhodecode.events import ( | |
28 | PullRequestCreateEvent, |
|
28 | PullRequestCreateEvent, | |
29 | PullRequestUpdateEvent, |
|
29 | PullRequestUpdateEvent, | |
30 | PullRequestCommentEvent, |
|
30 | PullRequestCommentEvent, | |
|
31 | PullRequestCommentEditEvent, | |||
31 | PullRequestReviewEvent, |
|
32 | PullRequestReviewEvent, | |
32 | PullRequestMergeEvent, |
|
33 | PullRequestMergeEvent, | |
33 | PullRequestCloseEvent, |
|
34 | PullRequestCloseEvent, | |
34 | ) |
|
35 | ) | |
35 |
|
36 | |||
36 | # TODO: dan: make the serialization tests complete json comparisons |
|
37 | # TODO: dan: make the serialization tests complete json comparisons | |
37 | @pytest.mark.backends("git", "hg") |
|
38 | @pytest.mark.backends("git", "hg") | |
38 | @pytest.mark.parametrize('EventClass', [ |
|
39 | @pytest.mark.parametrize('EventClass', [ | |
39 | PullRequestCreateEvent, |
|
40 | PullRequestCreateEvent, | |
40 | PullRequestUpdateEvent, |
|
41 | PullRequestUpdateEvent, | |
41 | PullRequestReviewEvent, |
|
42 | PullRequestReviewEvent, | |
42 | PullRequestMergeEvent, |
|
43 | PullRequestMergeEvent, | |
43 | PullRequestCloseEvent |
|
44 | PullRequestCloseEvent | |
44 | ]) |
|
45 | ]) | |
45 | def test_pullrequest_events_serialized(EventClass, pr_util, config_stub): |
|
46 | def test_pullrequest_events_serialized(EventClass, pr_util, config_stub): | |
46 | pr = pr_util.create_pull_request() |
|
47 | pr = pr_util.create_pull_request() | |
47 | if EventClass == PullRequestReviewEvent: |
|
48 | if EventClass == PullRequestReviewEvent: | |
48 | event = EventClass(pr, 'approved') |
|
49 | event = EventClass(pr, 'approved') | |
49 | else: |
|
50 | else: | |
50 | event = EventClass(pr) |
|
51 | event = EventClass(pr) | |
51 | data = event.as_dict() |
|
52 | data = event.as_dict() | |
52 | assert data['name'] == EventClass.name |
|
53 | assert data['name'] == EventClass.name | |
53 | assert data['repo']['repo_name'] == pr.target_repo.repo_name |
|
54 | assert data['repo']['repo_name'] == pr.target_repo.repo_name | |
54 | assert data['pullrequest']['pull_request_id'] == pr.pull_request_id |
|
55 | assert data['pullrequest']['pull_request_id'] == pr.pull_request_id | |
55 | assert data['pullrequest']['url'] |
|
56 | assert data['pullrequest']['url'] | |
56 | assert data['pullrequest']['permalink_url'] |
|
57 | assert data['pullrequest']['permalink_url'] | |
57 |
|
58 | |||
58 |
|
59 | |||
59 | @pytest.mark.backends("git", "hg") |
|
60 | @pytest.mark.backends("git", "hg") | |
60 | def test_create_pull_request_events(pr_util, config_stub): |
|
61 | def test_create_pull_request_events(pr_util, config_stub): | |
61 | with EventCatcher() as event_catcher: |
|
62 | with EventCatcher() as event_catcher: | |
62 | pr_util.create_pull_request() |
|
63 | pr_util.create_pull_request() | |
63 |
|
64 | |||
64 | assert PullRequestCreateEvent in event_catcher.events_types |
|
65 | assert PullRequestCreateEvent in event_catcher.events_types | |
65 |
|
66 | |||
66 |
|
67 | |||
67 | @pytest.mark.backends("git", "hg") |
|
68 | @pytest.mark.backends("git", "hg") | |
68 | def test_pullrequest_comment_events_serialized(pr_util, config_stub): |
|
69 | def test_pullrequest_comment_events_serialized(pr_util, config_stub): | |
69 | pr = pr_util.create_pull_request() |
|
70 | pr = pr_util.create_pull_request() | |
70 | comment = CommentsModel().get_comments( |
|
71 | comment = CommentsModel().get_comments( | |
71 | pr.target_repo.repo_id, pull_request=pr)[0] |
|
72 | pr.target_repo.repo_id, pull_request=pr)[0] | |
72 | event = PullRequestCommentEvent(pr, comment) |
|
73 | event = PullRequestCommentEvent(pr, comment) | |
73 | data = event.as_dict() |
|
74 | data = event.as_dict() | |
74 | assert data['name'] == PullRequestCommentEvent.name |
|
75 | assert data['name'] == PullRequestCommentEvent.name | |
75 | assert data['repo']['repo_name'] == pr.target_repo.repo_name |
|
76 | assert data['repo']['repo_name'] == pr.target_repo.repo_name | |
76 | assert data['pullrequest']['pull_request_id'] == pr.pull_request_id |
|
77 | assert data['pullrequest']['pull_request_id'] == pr.pull_request_id | |
77 | assert data['pullrequest']['url'] |
|
78 | assert data['pullrequest']['url'] | |
78 | assert data['pullrequest']['permalink_url'] |
|
79 | assert data['pullrequest']['permalink_url'] | |
79 | assert data['comment']['text'] == comment.text |
|
80 | assert data['comment']['text'] == comment.text | |
80 |
|
81 | |||
81 |
|
82 | |||
82 | @pytest.mark.backends("git", "hg") |
|
83 | @pytest.mark.backends("git", "hg") | |
|
84 | def test_pullrequest_comment_edit_events_serialized(pr_util, config_stub): | |||
|
85 | pr = pr_util.create_pull_request() | |||
|
86 | comment = CommentsModel().get_comments( | |||
|
87 | pr.target_repo.repo_id, pull_request=pr)[0] | |||
|
88 | event = PullRequestCommentEditEvent(pr, comment) | |||
|
89 | data = event.as_dict() | |||
|
90 | assert data['name'] == PullRequestCommentEditEvent.name | |||
|
91 | assert data['repo']['repo_name'] == pr.target_repo.repo_name | |||
|
92 | assert data['pullrequest']['pull_request_id'] == pr.pull_request_id | |||
|
93 | assert data['pullrequest']['url'] | |||
|
94 | assert data['pullrequest']['permalink_url'] | |||
|
95 | assert data['comment']['text'] == comment.text | |||
|
96 | ||||
|
97 | ||||
|
98 | @pytest.mark.backends("git", "hg") | |||
83 | def test_close_pull_request_events(pr_util, user_admin, config_stub): |
|
99 | def test_close_pull_request_events(pr_util, user_admin, config_stub): | |
84 | pr = pr_util.create_pull_request() |
|
100 | pr = pr_util.create_pull_request() | |
85 |
|
101 | |||
86 | with EventCatcher() as event_catcher: |
|
102 | with EventCatcher() as event_catcher: | |
87 | PullRequestModel().close_pull_request(pr, user_admin) |
|
103 | PullRequestModel().close_pull_request(pr, user_admin) | |
88 |
|
104 | |||
89 | assert PullRequestCloseEvent in event_catcher.events_types |
|
105 | assert PullRequestCloseEvent in event_catcher.events_types | |
90 |
|
106 | |||
91 |
|
107 | |||
92 | @pytest.mark.backends("git", "hg") |
|
108 | @pytest.mark.backends("git", "hg") | |
93 | def test_close_pull_request_with_comment_events(pr_util, user_admin, config_stub): |
|
109 | def test_close_pull_request_with_comment_events(pr_util, user_admin, config_stub): | |
94 | pr = pr_util.create_pull_request() |
|
110 | pr = pr_util.create_pull_request() | |
95 |
|
111 | |||
96 | with EventCatcher() as event_catcher: |
|
112 | with EventCatcher() as event_catcher: | |
97 | PullRequestModel().close_pull_request_with_comment( |
|
113 | PullRequestModel().close_pull_request_with_comment( | |
98 | pr, user_admin, pr.target_repo) |
|
114 | pr, user_admin, pr.target_repo) | |
99 |
|
115 | |||
100 | assert PullRequestCloseEvent in event_catcher.events_types |
|
116 | assert PullRequestCloseEvent in event_catcher.events_types |
@@ -1,145 +1,170 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | import pytest |
|
21 | import pytest | |
22 |
|
22 | |||
23 | from rhodecode.lib.utils2 import StrictAttributeDict |
|
23 | from rhodecode.lib.utils2 import StrictAttributeDict | |
24 | from rhodecode.tests.events.conftest import EventCatcher |
|
24 | from rhodecode.tests.events.conftest import EventCatcher | |
25 |
|
25 | |||
26 | from rhodecode.lib import hooks_base, utils2 |
|
26 | from rhodecode.lib import hooks_base, utils2 | |
27 | from rhodecode.model.repo import RepoModel |
|
27 | from rhodecode.model.repo import RepoModel | |
28 | from rhodecode.events.repo import ( |
|
28 | from rhodecode.events.repo import ( | |
29 | RepoPrePullEvent, RepoPullEvent, |
|
29 | RepoPrePullEvent, RepoPullEvent, | |
30 | RepoPrePushEvent, RepoPushEvent, |
|
30 | RepoPrePushEvent, RepoPushEvent, | |
31 | RepoPreCreateEvent, RepoCreateEvent, |
|
31 | RepoPreCreateEvent, RepoCreateEvent, | |
32 |
RepoPreDeleteEvent, RepoDeleteEvent, |
|
32 | RepoPreDeleteEvent, RepoDeleteEvent, | |
|
33 | RepoCommitCommentEvent, RepoCommitCommentEditEvent | |||
33 | ) |
|
34 | ) | |
34 |
|
35 | |||
35 |
|
36 | |||
36 | @pytest.fixture() |
|
37 | @pytest.fixture() | |
37 | def scm_extras(user_regular, repo_stub): |
|
38 | def scm_extras(user_regular, repo_stub): | |
38 | extras = utils2.AttributeDict({ |
|
39 | extras = utils2.AttributeDict({ | |
39 | 'ip': '127.0.0.1', |
|
40 | 'ip': '127.0.0.1', | |
40 | 'username': user_regular.username, |
|
41 | 'username': user_regular.username, | |
41 | 'user_id': user_regular.user_id, |
|
42 | 'user_id': user_regular.user_id, | |
42 | 'action': '', |
|
43 | 'action': '', | |
43 | 'repository': repo_stub.repo_name, |
|
44 | 'repository': repo_stub.repo_name, | |
44 | 'scm': repo_stub.scm_instance().alias, |
|
45 | 'scm': repo_stub.scm_instance().alias, | |
45 | 'config': '', |
|
46 | 'config': '', | |
46 | 'repo_store': '', |
|
47 | 'repo_store': '', | |
47 | 'server_url': 'http://example.com', |
|
48 | 'server_url': 'http://example.com', | |
48 | 'make_lock': None, |
|
49 | 'make_lock': None, | |
49 | 'user_agent': 'some-client', |
|
50 | 'user_agent': 'some-client', | |
50 | 'locked_by': [None], |
|
51 | 'locked_by': [None], | |
51 | 'commit_ids': ['a' * 40] * 3, |
|
52 | 'commit_ids': ['a' * 40] * 3, | |
52 | 'hook_type': 'scm_extras_test', |
|
53 | 'hook_type': 'scm_extras_test', | |
53 | 'is_shadow_repo': False, |
|
54 | 'is_shadow_repo': False, | |
54 | }) |
|
55 | }) | |
55 | return extras |
|
56 | return extras | |
56 |
|
57 | |||
57 |
|
58 | |||
58 | # TODO: dan: make the serialization tests complete json comparisons |
|
59 | # TODO: dan: make the serialization tests complete json comparisons | |
59 | @pytest.mark.parametrize('EventClass', [ |
|
60 | @pytest.mark.parametrize('EventClass', [ | |
60 | RepoPreCreateEvent, RepoCreateEvent, |
|
61 | RepoPreCreateEvent, RepoCreateEvent, | |
61 | RepoPreDeleteEvent, RepoDeleteEvent, |
|
62 | RepoPreDeleteEvent, RepoDeleteEvent, | |
62 | ]) |
|
63 | ]) | |
63 | def test_repo_events_serialized(config_stub, repo_stub, EventClass): |
|
64 | def test_repo_events_serialized(config_stub, repo_stub, EventClass): | |
64 | event = EventClass(repo_stub) |
|
65 | event = EventClass(repo_stub) | |
65 | data = event.as_dict() |
|
66 | data = event.as_dict() | |
66 | assert data['name'] == EventClass.name |
|
67 | assert data['name'] == EventClass.name | |
67 | assert data['repo']['repo_name'] == repo_stub.repo_name |
|
68 | assert data['repo']['repo_name'] == repo_stub.repo_name | |
68 | assert data['repo']['url'] |
|
69 | assert data['repo']['url'] | |
69 | assert data['repo']['permalink_url'] |
|
70 | assert data['repo']['permalink_url'] | |
70 |
|
71 | |||
71 |
|
72 | |||
72 | @pytest.mark.parametrize('EventClass', [ |
|
73 | @pytest.mark.parametrize('EventClass', [ | |
73 | RepoPrePullEvent, RepoPullEvent, RepoPrePushEvent |
|
74 | RepoPrePullEvent, RepoPullEvent, RepoPrePushEvent | |
74 | ]) |
|
75 | ]) | |
75 | def test_vcs_repo_events_serialize(config_stub, repo_stub, scm_extras, EventClass): |
|
76 | def test_vcs_repo_events_serialize(config_stub, repo_stub, scm_extras, EventClass): | |
76 | event = EventClass(repo_name=repo_stub.repo_name, extras=scm_extras) |
|
77 | event = EventClass(repo_name=repo_stub.repo_name, extras=scm_extras) | |
77 | data = event.as_dict() |
|
78 | data = event.as_dict() | |
78 | assert data['name'] == EventClass.name |
|
79 | assert data['name'] == EventClass.name | |
79 | assert data['repo']['repo_name'] == repo_stub.repo_name |
|
80 | assert data['repo']['repo_name'] == repo_stub.repo_name | |
80 | assert data['repo']['url'] |
|
81 | assert data['repo']['url'] | |
81 | assert data['repo']['permalink_url'] |
|
82 | assert data['repo']['permalink_url'] | |
82 |
|
83 | |||
83 |
|
84 | |||
84 | @pytest.mark.parametrize('EventClass', [RepoPushEvent]) |
|
85 | @pytest.mark.parametrize('EventClass', [RepoPushEvent]) | |
85 | def test_vcs_repo_push_event_serialize(config_stub, repo_stub, scm_extras, EventClass): |
|
86 | def test_vcs_repo_push_event_serialize(config_stub, repo_stub, scm_extras, EventClass): | |
86 | event = EventClass(repo_name=repo_stub.repo_name, |
|
87 | event = EventClass(repo_name=repo_stub.repo_name, | |
87 | pushed_commit_ids=scm_extras['commit_ids'], |
|
88 | pushed_commit_ids=scm_extras['commit_ids'], | |
88 | extras=scm_extras) |
|
89 | extras=scm_extras) | |
89 | data = event.as_dict() |
|
90 | data = event.as_dict() | |
90 | assert data['name'] == EventClass.name |
|
91 | assert data['name'] == EventClass.name | |
91 | assert data['repo']['repo_name'] == repo_stub.repo_name |
|
92 | assert data['repo']['repo_name'] == repo_stub.repo_name | |
92 | assert data['repo']['url'] |
|
93 | assert data['repo']['url'] | |
93 | assert data['repo']['permalink_url'] |
|
94 | assert data['repo']['permalink_url'] | |
94 |
|
95 | |||
95 |
|
96 | |||
96 | def test_create_delete_repo_fires_events(backend): |
|
97 | def test_create_delete_repo_fires_events(backend): | |
97 | with EventCatcher() as event_catcher: |
|
98 | with EventCatcher() as event_catcher: | |
98 | repo = backend.create_repo() |
|
99 | repo = backend.create_repo() | |
99 | assert event_catcher.events_types == [RepoPreCreateEvent, RepoCreateEvent] |
|
100 | assert event_catcher.events_types == [RepoPreCreateEvent, RepoCreateEvent] | |
100 |
|
101 | |||
101 | with EventCatcher() as event_catcher: |
|
102 | with EventCatcher() as event_catcher: | |
102 | RepoModel().delete(repo) |
|
103 | RepoModel().delete(repo) | |
103 | assert event_catcher.events_types == [RepoPreDeleteEvent, RepoDeleteEvent] |
|
104 | assert event_catcher.events_types == [RepoPreDeleteEvent, RepoDeleteEvent] | |
104 |
|
105 | |||
105 |
|
106 | |||
106 | def test_pull_fires_events(scm_extras): |
|
107 | def test_pull_fires_events(scm_extras): | |
107 | with EventCatcher() as event_catcher: |
|
108 | with EventCatcher() as event_catcher: | |
108 | hooks_base.pre_push(scm_extras) |
|
109 | hooks_base.pre_push(scm_extras) | |
109 | assert event_catcher.events_types == [RepoPrePushEvent] |
|
110 | assert event_catcher.events_types == [RepoPrePushEvent] | |
110 |
|
111 | |||
111 | with EventCatcher() as event_catcher: |
|
112 | with EventCatcher() as event_catcher: | |
112 | hooks_base.post_push(scm_extras) |
|
113 | hooks_base.post_push(scm_extras) | |
113 | assert event_catcher.events_types == [RepoPushEvent] |
|
114 | assert event_catcher.events_types == [RepoPushEvent] | |
114 |
|
115 | |||
115 |
|
116 | |||
116 | def test_push_fires_events(scm_extras): |
|
117 | def test_push_fires_events(scm_extras): | |
117 | with EventCatcher() as event_catcher: |
|
118 | with EventCatcher() as event_catcher: | |
118 | hooks_base.pre_pull(scm_extras) |
|
119 | hooks_base.pre_pull(scm_extras) | |
119 | assert event_catcher.events_types == [RepoPrePullEvent] |
|
120 | assert event_catcher.events_types == [RepoPrePullEvent] | |
120 |
|
121 | |||
121 | with EventCatcher() as event_catcher: |
|
122 | with EventCatcher() as event_catcher: | |
122 | hooks_base.post_pull(scm_extras) |
|
123 | hooks_base.post_pull(scm_extras) | |
123 | assert event_catcher.events_types == [RepoPullEvent] |
|
124 | assert event_catcher.events_types == [RepoPullEvent] | |
124 |
|
125 | |||
125 |
|
126 | |||
126 | @pytest.mark.parametrize('EventClass', [RepoCommitCommentEvent]) |
|
127 | @pytest.mark.parametrize('EventClass', [RepoCommitCommentEvent]) | |
127 | def test_repo_commit_event(config_stub, repo_stub, EventClass): |
|
128 | def test_repo_commit_event(config_stub, repo_stub, EventClass): | |
128 |
|
129 | |||
129 | commit = StrictAttributeDict({ |
|
130 | commit = StrictAttributeDict({ | |
130 | 'raw_id': 'raw_id', |
|
131 | 'raw_id': 'raw_id', | |
131 | 'message': 'message', |
|
132 | 'message': 'message', | |
132 | 'branch': 'branch', |
|
133 | 'branch': 'branch', | |
133 | }) |
|
134 | }) | |
134 |
|
135 | |||
135 | comment = StrictAttributeDict({ |
|
136 | comment = StrictAttributeDict({ | |
136 | 'comment_id': 'comment_id', |
|
137 | 'comment_id': 'comment_id', | |
137 | 'text': 'text', |
|
138 | 'text': 'text', | |
138 | 'comment_type': 'comment_type', |
|
139 | 'comment_type': 'comment_type', | |
139 | 'f_path': 'f_path', |
|
140 | 'f_path': 'f_path', | |
140 | 'line_no': 'line_no', |
|
141 | 'line_no': 'line_no', | |
|
142 | 'last_version': 0, | |||
141 | }) |
|
143 | }) | |
142 | event = EventClass(repo=repo_stub, commit=commit, comment=comment) |
|
144 | event = EventClass(repo=repo_stub, commit=commit, comment=comment) | |
143 | data = event.as_dict() |
|
145 | data = event.as_dict() | |
144 | assert data['commit']['commit_id'] |
|
146 | assert data['commit']['commit_id'] | |
145 | assert data['comment']['comment_id'] |
|
147 | assert data['comment']['comment_id'] | |
|
148 | ||||
|
149 | ||||
|
150 | @pytest.mark.parametrize('EventClass', [RepoCommitCommentEditEvent]) | |||
|
151 | def test_repo_commit_edit_event(config_stub, repo_stub, EventClass): | |||
|
152 | ||||
|
153 | commit = StrictAttributeDict({ | |||
|
154 | 'raw_id': 'raw_id', | |||
|
155 | 'message': 'message', | |||
|
156 | 'branch': 'branch', | |||
|
157 | }) | |||
|
158 | ||||
|
159 | comment = StrictAttributeDict({ | |||
|
160 | 'comment_id': 'comment_id', | |||
|
161 | 'text': 'text', | |||
|
162 | 'comment_type': 'comment_type', | |||
|
163 | 'f_path': 'f_path', | |||
|
164 | 'line_no': 'line_no', | |||
|
165 | 'last_version': 0, | |||
|
166 | }) | |||
|
167 | event = EventClass(repo=repo_stub, commit=commit, comment=comment) | |||
|
168 | data = event.as_dict() | |||
|
169 | assert data['commit']['commit_id'] | |||
|
170 | assert data['comment']['comment_id'] |
General Comments 0
You need to be logged in to leave comments.
Login now