Show More
@@ -1,2491 +1,2506 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | import time |
|
23 | 23 | |
|
24 | 24 | import rhodecode |
|
25 | 25 | from rhodecode.api import ( |
|
26 | 26 | jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError) |
|
27 | 27 | from rhodecode.api.utils import ( |
|
28 | 28 | has_superadmin_permission, Optional, OAttr, get_repo_or_error, |
|
29 | 29 | get_user_group_or_error, get_user_or_error, validate_repo_permissions, |
|
30 | 30 | get_perm_or_error, parse_args, get_origin, build_commit_data, |
|
31 | 31 | validate_set_owner_permissions) |
|
32 | 32 | from rhodecode.lib import audit_logger, rc_cache |
|
33 | 33 | from rhodecode.lib import repo_maintenance |
|
34 | 34 | from rhodecode.lib.auth import ( |
|
35 | 35 | HasPermissionAnyApi, HasUserGroupPermissionAnyApi, |
|
36 | 36 | HasRepoPermissionAnyApi) |
|
37 | 37 | from rhodecode.lib.celerylib.utils import get_task_id |
|
38 | 38 | from rhodecode.lib.utils2 import ( |
|
39 | 39 | str2bool, time_to_datetime, safe_str, safe_int, safe_unicode) |
|
40 | 40 | from rhodecode.lib.ext_json import json |
|
41 | 41 | from rhodecode.lib.exceptions import ( |
|
42 | 42 | StatusChangeOnClosedPullRequestError, CommentVersionMismatch) |
|
43 | 43 | from rhodecode.lib.vcs import RepositoryError |
|
44 | 44 | from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError |
|
45 | 45 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
46 | 46 | from rhodecode.model.comment import CommentsModel |
|
47 | 47 | from rhodecode.model.db import ( |
|
48 | 48 | Session, ChangesetStatus, RepositoryField, Repository, RepoGroup, |
|
49 | 49 | ChangesetComment) |
|
50 | 50 | from rhodecode.model.permission import PermissionModel |
|
51 | from rhodecode.model.pull_request import PullRequestModel | |
|
51 | 52 | from rhodecode.model.repo import RepoModel |
|
52 | 53 | from rhodecode.model.scm import ScmModel, RepoList |
|
53 | 54 | from rhodecode.model.settings import SettingsModel, VcsSettingsModel |
|
54 | 55 | from rhodecode.model import validation_schema |
|
55 | 56 | from rhodecode.model.validation_schema.schemas import repo_schema |
|
56 | 57 | |
|
57 | 58 | log = logging.getLogger(__name__) |
|
58 | 59 | |
|
59 | 60 | |
|
@jsonrpc_method()
def get_repo(request, apiuser, repoid, cache=Optional(True)):
    """
    Gets an existing repository by its name or repository_id.

    The members section so the output returns users groups or users
    associated with that repository.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id.
    :type repoid: str or int
    :param cache: use the cached value for last changeset
    :type: cache: Optional(bool)

    Example output:

    .. code-block:: bash

      {
        "error": null,
        "id": <repo_id>,
        "result": {
            "clone_uri": null,
            "created_on": "timestamp",
            "description": "repo description",
            "enable_downloads": false,
            "enable_locking": false,
            "enable_statistics": false,
            "followers": [
              {
                "active": true,
                "admin": false,
                "api_key": "****************************************",
                "api_keys": [
                  "****************************************"
                ],
                "email": "user@example.com",
                "emails": [
                  "user@example.com"
                ],
                "extern_name": "rhodecode",
                "extern_type": "rhodecode",
                "firstname": "username",
                "ip_addresses": [],
                "language": null,
                "last_login": "2015-09-16T17:16:35.854",
                "lastname": "surname",
                "user_id": <user_id>,
                "username": "name"
              }
            ],
            "fork_of": "parent-repo",
            "landing_rev": [
              "rev",
              "tip"
            ],
            "last_changeset": {
              "author": "User <user@example.com>",
              "branch": "default",
              "date": "timestamp",
              "message": "last commit message",
              "parents": [
                {
                  "raw_id": "commit-id"
                }
              ],
              "raw_id": "commit-id",
              "revision": <revision number>,
              "short_id": "short id"
            },
            "lock_reason": null,
            "locked_by": null,
            "locked_date": null,
            "owner": "owner-name",
            "permissions": [
              {
                "name": "super-admin-name",
                "origin": "super-admin",
                "permission": "repository.admin",
                "type": "user"
              },
              {
                "name": "owner-name",
                "origin": "owner",
                "permission": "repository.admin",
                "type": "user"
              },
              {
                "name": "user-group-name",
                "origin": "permission",
                "permission": "repository.write",
                "type": "user_group"
              }
            ],
            "private": true,
            "repo_id": 676,
            "repo_name": "user-group/repo-name",
            "repo_type": "hg"
        }
      }
    """

    repo = get_repo_or_error(repoid)
    cache = Optional.extract(cache)

    include_secrets = False
    if has_superadmin_permission(apiuser):
        # super-admins also get secret fields (api keys etc.) in the output
        include_secrets = True
    else:
        # check if we have at least read permission for this repo !
        _perms = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    # collect direct user permissions on the repository
    permissions = []
    for _user in repo.permissions():
        user_data = {
            'name': _user.username,
            'permission': _user.permission,
            'origin': get_origin(_user),
            'type': "user",
        }
        permissions.append(user_data)

    # collect user-group permissions on the repository
    for _user_group in repo.permission_user_groups():
        user_group_data = {
            'name': _user_group.users_group_name,
            'permission': _user_group.permission,
            'origin': get_origin(_user_group),
            'type': "user_group",
        }
        permissions.append(user_group_data)

    following_users = [
        user.user.get_api_data(include_secrets=include_secrets)
        for user in repo.followers]

    if not cache:
        # force refresh of the cached last-changeset data before serializing
        repo.update_commit_cache()
    data = repo.get_api_data(include_secrets=include_secrets)
    data['permissions'] = permissions
    data['followers'] = following_users
    return data
|
207 | 208 | |
|
208 | 209 | |
|
@jsonrpc_method()
def get_repos(request, apiuser, root=Optional(None), traverse=Optional(True)):
    """
    Lists all existing repositories.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param root: specify root repository group to fetch repositories.
        filters the returned repositories to be members of given root group.
    :type root: Optional(None)
    :param traverse: traverse given root into subrepositories. With this flag
        set to False, it will only return top-level repositories from `root`.
        if root is empty it will return just top-level repositories.
    :type traverse: Optional(True)


    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: [
                   {
                    "repo_id" :          "<repo_id>",
                    "repo_name" :        "<reponame>"
                    "repo_type" :        "<repo_type>",
                    "clone_uri" :        "<clone_uri>",
                    "private": :         "<bool>",
                    "created_on" :       "<datetimecreated>",
                    "description" :      "<description>",
                    "landing_rev":       "<landing_rev>",
                    "owner":             "<repo_owner>",
                    "fork_of":           "<name_of_fork_parent>",
                    "enable_downloads":  "<bool>",
                    "enable_locking":    "<bool>",
                    "enable_statistics": "<bool>",
                   },
                   ...
                ]
        error:  null
    """

    include_secrets = has_superadmin_permission(apiuser)
    _perms = ('repository.read', 'repository.write', 'repository.admin',)
    extras = {'user': apiuser}

    root = Optional.extract(root)
    traverse = Optional.extract(traverse, binary=True)

    if root:
        # verify parent existence, if it's empty return an error
        parent = RepoGroup.get_by_group_name(root)
        if not parent:
            raise JSONRPCError(
                'Root repository group `{}` does not exist'.format(root))

        if traverse:
            repos = RepoModel().get_repos_for_root(root=root, traverse=traverse)
        else:
            repos = RepoModel().get_repos_for_root(root=parent)
    else:
        if traverse:
            repos = RepoModel().get_all()
        else:
            # return just top-level
            repos = RepoModel().get_repos_for_root(root=None)

    # RepoList filters out repositories the api user has no read access to
    repo_list = RepoList(repos, perm_set=_perms, extra_kwargs=extras)
    return [repo.get_api_data(include_secrets=include_secrets)
            for repo in repo_list]
|
282 | 283 | |
|
283 | 284 | |
|
@jsonrpc_method()
def get_repo_changeset(request, apiuser, repoid, revision,
                       details=Optional('basic')):
    """
    Returns information about a changeset.

    Additionally parameters define the amount of details returned by
    this function.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id
    :type repoid: str or int
    :param revision: revision for which listing should be done
    :type revision: str
    :param details: details can be 'basic|extended|full' full gives diff
        info details like the diff itself, and number of changed files etc.
    :type details: Optional(str)

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    changes_details = Optional.extract(details)
    _changes_details_types = ['basic', 'extended', 'full']
    if changes_details not in _changes_details_types:
        raise JSONRPCError(
            'ret_type must be one of %s' % (
                ','.join(_changes_details_types)))

    # attributes to eagerly load on the commit object
    pre_load = ['author', 'branch', 'date', 'message', 'parents',
                'status', '_commit', '_file_paths']

    try:
        cs = repo.get_commit(commit_id=revision, pre_load=pre_load)
    except TypeError as e:
        raise JSONRPCError(safe_str(e))
    _cs_json = cs.__json__()
    _cs_json['diff'] = build_commit_data(cs, changes_details)
    if changes_details == 'full':
        _cs_json['refs'] = cs._get_refs()
    return _cs_json
|
332 | 333 | |
|
333 | 334 | |
|
@jsonrpc_method()
def get_repo_changesets(request, apiuser, repoid, start_rev, limit,
                        details=Optional('basic')):
    """
    Returns a set of commits limited by the number starting
    from the `start_rev` option.

    Additional parameters define the amount of details returned by this
    function.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param start_rev: The starting revision from where to get changesets.
    :type start_rev: str
    :param limit: Limit the number of commits to this amount
    :type limit: str or int
    :param details: Set the level of detail returned. Valid option are:
        ``basic``, ``extended`` and ``full``.
    :type details: Optional(str)

    .. note::

       Setting the parameter `details` to the value ``full`` is extensive
       and returns details like the diff itself, and the number
       of changed files.

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    changes_details = Optional.extract(details)
    _changes_details_types = ['basic', 'extended', 'full']
    if changes_details not in _changes_details_types:
        raise JSONRPCError(
            'ret_type must be one of %s' % (
                ','.join(_changes_details_types)))

    limit = int(limit)
    # attributes to eagerly load on each commit object
    pre_load = ['author', 'branch', 'date', 'message', 'parents',
                'status', '_commit', '_file_paths']

    vcs_repo = repo.scm_instance()
    # SVN needs a special case to distinguish its index and commit id
    if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'):
        start_rev = vcs_repo.commit_ids[0]

    try:
        commits = vcs_repo.get_commits(
            start_id=start_rev, pre_load=pre_load, translate_tags=False)
    except TypeError as e:
        raise JSONRPCError(safe_str(e))
    except Exception:
        log.exception('Fetching of commits failed')
        raise JSONRPCError('Error occurred during commit fetching')

    ret = []
    for cnt, commit in enumerate(commits):
        # chained comparison: stop once `cnt >= limit`, unless limit == -1
        # which means "no limit"
        if cnt >= limit != -1:
            break
        _cs_json = commit.__json__()
        _cs_json['diff'] = build_commit_data(commit, changes_details)
        if changes_details == 'full':
            _cs_json['refs'] = {
                'branches': [commit.branch],
                'bookmarks': getattr(commit, 'bookmarks', []),
                'tags': commit.tags
            }
        ret.append(_cs_json)
    return ret
|
411 | 412 | |
|
412 | 413 | |
|
@jsonrpc_method()
def get_repo_nodes(request, apiuser, repoid, revision, root_path,
                   ret_type=Optional('all'), details=Optional('basic'),
                   max_file_bytes=Optional(None)):
    """
    Returns a list of nodes and children in a flat list for a given
    path at given revision.

    It's possible to specify ret_type to show only `files` or `dirs`.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param revision: The revision for which listing should be done.
    :type revision: str
    :param root_path: The path from which to start displaying.
    :type root_path: str
    :param ret_type: Set the return type. Valid options are
        ``all`` (default), ``files`` and ``dirs``.
    :type ret_type: Optional(str)
    :param details: Returns extended information about nodes, such as
        md5, binary, and or content.
        The valid options are ``basic`` and ``full``.
    :type details: Optional(str)
    :param max_file_bytes: Only return file content under this file size bytes
    :type details: Optional(int)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: [
                   {
                    "binary": false,
                    "content": "File line",
                    "extension": "md",
                    "lines": 2,
                    "md5": "059fa5d29b19c0657e384749480f6422",
                    "mimetype": "text/x-minidsrc",
                    "name": "file.md",
                    "size": 580,
                    "type": "file"
                   },
                   ...
                ]
        error:  null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    ret_type = Optional.extract(ret_type)
    details = Optional.extract(details)
    _extended_types = ['basic', 'full']
    if details not in _extended_types:
        raise JSONRPCError('ret_type must be one of %s' % (','.join(_extended_types)))
    # map the `details` level onto the two flags get_nodes() understands
    extended_info = False
    content = False
    if details == 'basic':
        extended_info = True

    if details == 'full':
        extended_info = content = True

    _map = {}
    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return []

        _d, _f = ScmModel().get_nodes(
            repo, revision, root_path, flat=False,
            extended_info=extended_info, content=content,
            max_file_bytes=max_file_bytes)
        _map = {
            'all': _d + _f,
            'files': _f,
            'dirs': _d,
        }
        return _map[ret_type]
    except KeyError:
        # raised by _map[ret_type] when ret_type is not a known key
        raise JSONRPCError(
            'ret_type must be one of %s' % (','.join(sorted(_map.keys()))))
    except Exception:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError(
            'failed to get repo: `%s` nodes' % repo.repo_name
        )
|
509 | 510 | |
|
510 | 511 | |
|
@jsonrpc_method()
def get_repo_file(request, apiuser, repoid, commit_id, file_path,
                  max_file_bytes=Optional(None), details=Optional('basic'),
                  cache=Optional(True)):
    """
    Returns a single file from repository at given revision.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param commit_id: The revision for which listing should be done.
    :type commit_id: str
    :param file_path: The path from which to start displaying.
    :type file_path: str
    :param details: Returns different set of information about nodes.
        The valid options are ``minimal`` ``basic`` and ``full``.
    :type details: Optional(str)
    :param max_file_bytes: Only return file content under this file size bytes
    :type max_file_bytes: Optional(int)
    :param cache: Use internal caches for fetching files. If disabled fetching
        files is slower but more memory efficient
    :type cache: Optional(bool)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "binary": false,
            "extension": "py",
            "lines": 35,
            "content": "....",
            "md5": "76318336366b0f17ee249e11b0c99c41",
            "mimetype": "text/x-python",
            "name": "python.py",
            "size": 817,
            "type": "file",
        }
        error:  null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    cache = Optional.extract(cache, binary=True)
    details = Optional.extract(details)
    _extended_types = ['minimal', 'minimal+search', 'basic', 'full']
    if details not in _extended_types:
        # BUGFIX: the closing paren of the % tuple was previously misplaced,
        # which made `details` a stray second positional argument to
        # JSONRPCError and left the format string with a missing argument
        # (raising TypeError instead of the intended error message).
        raise JSONRPCError(
            'ret_type must be one of %s, got %s' % (
                ','.join(_extended_types), details))
    # map the `details` level onto the two flags get_node() understands
    extended_info = False
    content = False

    if details == 'minimal':
        extended_info = False

    elif details == 'basic':
        extended_info = True

    elif details == 'full':
        extended_info = content = True

    file_path = safe_unicode(file_path)
    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return None

        node = ScmModel().get_node(
            repo, commit_id, file_path, extended_info=extended_info,
            content=content, max_file_bytes=max_file_bytes, cache=cache)
    except NodeDoesNotExistError:
        raise JSONRPCError(u'There is no file in repo: `{}` at path `{}` for commit: `{}`'.format(
            repo.repo_name, file_path, commit_id))
    except Exception:
        log.exception(u"Exception occurred while trying to get repo %s file",
                      repo.repo_name)
        raise JSONRPCError(u'failed to get repo: `{}` file at path {}'.format(
            repo.repo_name, file_path))

    return node
|
600 | 601 | |
|
601 | 602 | |
|
@jsonrpc_method()
def get_repo_fts_tree(request, apiuser, repoid, commit_id, root_path):
    """
    Returns a list of tree nodes for path at given revision. This api is built
    strictly for usage in full text search building, and shouldn't be consumed

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    repo_id = repo.repo_id
    cache_seconds = safe_int(rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
    cache_on = cache_seconds > 0

    cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
    region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)

    # NOTE(review): `cache_on` and `region` are computed above but
    # compute_fts_tree is not wrapped in a region cache decorator here, so no
    # caching actually happens — confirm whether a
    # `region.conditional_cache_on_arguments(...)` decorator was dropped.
    def compute_fts_tree(cache_ver, repo_id, commit_id, root_path):
        return ScmModel().get_fts_data(repo_id, commit_id, root_path)

    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return []
    except RepositoryError:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)

    try:
        # we need to resolve commit_id to a FULL sha for cache to work correctly.
        # sending 'master' is a pointer that needs to be translated to current commit.
        commit_id = _scm.get_commit(commit_id=commit_id).raw_id
        log.debug(
            'Computing FTS REPO TREE for repo_id %s commit_id `%s` '
            'with caching: %s[TTL: %ss]' % (
                repo_id, commit_id, cache_on, cache_seconds or 0))

        tree_files = compute_fts_tree(rc_cache.FILE_TREE_CACHE_VER, repo_id, commit_id, root_path)
        return tree_files

    except Exception:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)
|
652 | 653 | |
|
653 | 654 | |
|
@jsonrpc_method()
def get_repo_refs(request, apiuser, repoid):
    """
    Returns a dictionary of current references. It returns
    bookmarks, branches, closed_branches, and tags for given repository

    It's possible to specify ret_type to show only `files` or `dirs`.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        "result": {
            "bookmarks": {
              "dev": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
              "master": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
            },
            "branches": {
              "default": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
              "stable": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
            },
            "branches_closed": {},
            "tags": {
              "tip": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
              "v4.4.0": "1232313f9e6adac5ce5399c2a891dc1e72b79022",
              "v4.4.1": "cbb9f1d329ae5768379cdec55a62ebdd546c4e27",
              "v4.4.2": "24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17",
            }
        }
        error:  null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        vcs_instance = repo.scm_instance()
        refs = vcs_instance.refs()
        return refs
    except Exception:
        log.exception("Exception occurred while trying to get repo refs")
        raise JSONRPCError(
            'failed to get repo: `%s` references' % repo.repo_name
        )
|
710 | 711 | |
|
711 | 712 | |
|
@jsonrpc_method()
def create_repo(
        request, apiuser, repo_name, repo_type,
        owner=Optional(OAttr('apiuser')),
        description=Optional(''),
        private=Optional(False),
        clone_uri=Optional(None),
        push_uri=Optional(None),
        landing_rev=Optional(None),
        enable_statistics=Optional(False),
        enable_locking=Optional(False),
        enable_downloads=Optional(False),
        copy_permissions=Optional(False)):
    """
    Creates a repository.

    * If the repository name contains "/", repository will be created inside
      a repository group or nested repository groups

      For example "foo/bar/repo1" will create |repo| called "repo1" inside
      group "foo/bar". You have to have permissions to access and write to
      the last repository group ("bar" in this example)

    This command can only be run using an |authtoken| with at least
    permissions to create repositories, or write permissions to
    parent repository groups.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repo_name: Set the repository name.
    :type repo_name: str
    :param repo_type: Set the repository type; 'hg','git', or 'svn'.
    :type repo_type: str
    :param owner: user_id or username
    :type owner: Optional(str)
    :param description: Set the repository description.
    :type description: Optional(str)
    :param private: set repository as private
    :type private: bool
    :param clone_uri: set clone_uri
    :type clone_uri: str
    :param push_uri: set push_uri
    :type push_uri: str
    :param landing_rev: <rev_type>:<rev>, e.g branch:default, book:dev, rev:abcd
    :type landing_rev: str
    :param enable_locking:
    :type enable_locking: bool
    :param enable_downloads:
    :type enable_downloads: bool
    :param enable_statistics:
    :type enable_statistics: bool
    :param copy_permissions: Copy permission from group in which the
        repository is being created.
    :type copy_permissions: bool


    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: {
                "msg": "Created new repository `<reponame>`",
                "success": true,
                "task": "<celery task id or None if done sync>"
              }
      error:  null


    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error :  {
         'failed to create repository `<repo_name>`'
      }

    """
    # Resolve the effective owner first; this also enforces that only
    # admins may create repositories on behalf of other users.
    owner = validate_set_owner_permissions(apiuser, owner)

    description = Optional.extract(description)
    copy_permissions = Optional.extract(copy_permissions)
    clone_uri = Optional.extract(clone_uri)
    push_uri = Optional.extract(push_uri)

    # Every knob the caller left unset falls back to the instance-wide
    # default repository settings.
    defaults = SettingsModel().get_default_repo_settings(strip_prefix=True)
    if isinstance(private, Optional):
        private = defaults.get('repo_private') or Optional.extract(private)
    if isinstance(repo_type, Optional):
        repo_type = defaults.get('repo_type')
    if isinstance(enable_statistics, Optional):
        enable_statistics = defaults.get('repo_enable_statistics')
    if isinstance(enable_locking, Optional):
        enable_locking = defaults.get('repo_enable_locking')
    if isinstance(enable_downloads, Optional):
        enable_downloads = defaults.get('repo_enable_downloads')

    # The backend's default landing ref is always an acceptable choice,
    # even if it is not among the refs the instance currently advertises.
    landing_ref, _label = ScmModel.backend_landing_ref(repo_type)
    ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
    ref_choices = list(set(ref_choices + [landing_ref]))

    landing_commit_ref = Optional.extract(landing_rev) or landing_ref

    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        repo_type=repo_type,
        # user caller
        user=apiuser)

    raw_payload = dict(
        repo_name=repo_name,
        repo_type=repo_type,
        repo_owner=owner.username,
        repo_description=description,
        repo_landing_commit_ref=landing_commit_ref,
        repo_clone_uri=clone_uri,
        repo_push_uri=push_uri,
        repo_private=private,
        repo_copy_permissions=copy_permissions,
        repo_enable_statistics=enable_statistics,
        repo_enable_downloads=enable_downloads,
        repo_enable_locking=enable_locking)

    try:
        schema_data = schema.deserialize(raw_payload)
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    try:
        group_info = schema_data['repo_group']
        data = {
            'owner': owner,
            'repo_name': group_info['repo_name_without_group'],
            'repo_name_full': schema_data['repo_name'],
            'repo_group': group_info['repo_group_id'],
            'repo_type': schema_data['repo_type'],
            'repo_description': schema_data['repo_description'],
            'repo_private': schema_data['repo_private'],
            'clone_uri': schema_data['repo_clone_uri'],
            'push_uri': schema_data['repo_push_uri'],
            'repo_landing_rev': schema_data['repo_landing_commit_ref'],
            'enable_statistics': schema_data['repo_enable_statistics'],
            'enable_locking': schema_data['repo_enable_locking'],
            'enable_downloads': schema_data['repo_enable_downloads'],
            'repo_copy_permissions': schema_data['repo_copy_permissions'],
        }

        task = RepoModel().create(form_data=data, cur_user=owner.user_id)
        task_id = get_task_id(task)
        # no commit, it's done in RepoModel, or async via celery
        return {
            'msg': "Created new repository `%s`" % (schema_data['repo_name'],),
            'success': True,  # cannot return the repo data here since fork
                              # can be done async
            'task': task_id
        }
    except Exception:
        log.exception(
            u"Exception while trying to create the repository %s",
            schema_data['repo_name'])
        raise JSONRPCError(
            'failed to create repository `%s`' % (schema_data['repo_name'],))
|
875 | 876 | |
|
876 | 877 | |
|
@jsonrpc_method()
def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''),
                      description=Optional('')):
    """
    Adds an extra field to a repository.

    This command can only be run using an |authtoken| with at least
    write permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository id.
    :type repoid: str or int
    :param key: Create a unique field key for this repository.
    :type key: str
    :param label:
    :type label: Optional(str)
    :param description:
    :type description: Optional(str)
    """
    repo = get_repo_or_error(repoid)
    # non-superadmins need full admin rights on this specific repository
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    # the key doubles as the display label when none was supplied
    label = Optional.extract(label) or key
    description = Optional.extract(description)

    # field keys are unique per repository; refuse duplicates up-front
    if RepositoryField.get_by_key_name(key, repo):
        raise JSONRPCError('Field with key '
                           '`%s` exists for repo `%s`' % (key, repoid))

    try:
        RepoModel().add_repo_field(
            repo, key, field_label=label, field_desc=description)
        Session().commit()
    except Exception:
        log.exception("Exception occurred while trying to add field to repo")
        raise JSONRPCError(
            'failed to create new field for repository `%s`' % (repoid,))
    return {
        'msg': "Added new repository field `%s`" % (key,),
        'success': True,
    }
|
922 | 923 | |
|
923 | 924 | |
|
@jsonrpc_method()
def remove_field_from_repo(request, apiuser, repoid, key):
    """
    Removes an extra field from a repository.

    This command can only be run using an |authtoken| with at least
    write permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param key: Set the unique field key for this repository.
    :type key: str
    """

    repo = get_repo_or_error(repoid)
    # non-superadmins need full admin rights on this specific repository
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    # deleting an unknown key is an error, not a silent no-op
    if not RepositoryField.get_by_key_name(key, repo):
        raise JSONRPCError('Field with key `%s` does not '
                           'exists for repo `%s`' % (key, repoid))

    try:
        RepoModel().delete_repo_field(repo, field_key=key)
        Session().commit()
    except Exception:
        log.exception(
            "Exception occurred while trying to delete field from repo")
        raise JSONRPCError(
            'failed to delete field for repository `%s`' % (repoid,))
    return {
        'msg': "Deleted repository field `%s`" % (key,),
        'success': True,
    }
|
962 | 963 | |
|
963 | 964 | |
|
@jsonrpc_method()
def update_repo(
        request, apiuser, repoid, repo_name=Optional(None),
        owner=Optional(OAttr('apiuser')), description=Optional(''),
        private=Optional(False),
        clone_uri=Optional(None), push_uri=Optional(None),
        landing_rev=Optional(None), fork_of=Optional(None),
        enable_statistics=Optional(False),
        enable_locking=Optional(False),
        enable_downloads=Optional(False), fields=Optional('')):
    """
    Updates a repository with the given information.

    This command can only be run using an |authtoken| with at least
    admin permissions to the |repo|.

    * If the repository name contains "/", repository will be updated
      accordingly with a repository group or nested repository groups

      For example repoid=repo-test name="foo/bar/repo-test" will update |repo|
      called "repo-test" and place it inside group "foo/bar".
      You have to have permissions to access and write to the last repository
      group ("bar" in this example)

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: repository name or repository ID.
    :type repoid: str or int
    :param repo_name: Update the |repo| name, including the
        repository group it's in.
    :type repo_name: str
    :param owner: Set the |repo| owner.
    :type owner: str
    :param fork_of: Set the |repo| as fork of another |repo|.
    :type fork_of: str
    :param description: Update the |repo| description.
    :type description: str
    :param private: Set the |repo| as private. (True | False)
    :type private: bool
    :param clone_uri: Update the |repo| clone URI.
    :type clone_uri: str
    :param landing_rev: Set the |repo| landing revision. e.g branch:default, book:dev, rev:abcd
    :type landing_rev: str
    :param enable_statistics: Enable statistics on the |repo|, (True | False).
    :type enable_statistics: bool
    :param enable_locking: Enable |repo| locking.
    :type enable_locking: bool
    :param enable_downloads: Enable downloads from the |repo|, (True | False).
    :type enable_downloads: bool
    :param fields: Add extra fields to the |repo|. Use the following
        example format: ``field_key=field_val,field_key2=fieldval2``.
        Escape ', ' with \,
    :type fields: str
    """

    repo = get_repo_or_error(repoid)

    # superadmins additionally get secrets (e.g. clone credentials) back
    include_secrets = False
    if has_superadmin_permission(apiuser):
        include_secrets = True
    else:
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    def _given(value, fallback):
        # Keep the caller-supplied value; anything still wrapped in
        # Optional was not provided, so the repo's current setting wins.
        return fallback if isinstance(value, Optional) else value

    updates = dict(
        repo_name=_given(repo_name, repo.repo_name),
        fork_id=_given(fork_of, repo.fork.repo_name if repo.fork else None),
        user=_given(owner, repo.user.username),
        repo_description=_given(description, repo.description),
        repo_private=_given(private, repo.private),
        clone_uri=_given(clone_uri, repo.clone_uri),
        push_uri=_given(push_uri, repo.push_uri),
        repo_landing_rev=_given(landing_rev, repo._landing_revision),
        repo_enable_statistics=_given(
            enable_statistics, repo.enable_statistics),
        repo_enable_locking=_given(enable_locking, repo.enable_locking),
        repo_enable_downloads=_given(enable_downloads, repo.enable_downloads))

    landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
    ref_choices, _labels = ScmModel().get_repo_landing_revs(
        request.translate, repo=repo)
    ref_choices = list(set(ref_choices + [landing_ref]))

    old_values = repo.get_api_data()
    repo_type = repo.repo_type
    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        repo_type=repo_type,
        # user caller
        user=apiuser,
        old_values=old_values)
    try:
        schema_data = schema.deserialize(dict(
            # we save old value, users cannot change type
            repo_type=repo_type,

            repo_name=updates['repo_name'],
            repo_owner=updates['user'],
            repo_description=updates['repo_description'],
            repo_clone_uri=updates['clone_uri'],
            repo_push_uri=updates['push_uri'],
            repo_fork_of=updates['fork_id'],
            repo_private=updates['repo_private'],
            repo_landing_commit_ref=updates['repo_landing_rev'],
            repo_enable_statistics=updates['repo_enable_statistics'],
            repo_enable_downloads=updates['repo_enable_downloads'],
            repo_enable_locking=updates['repo_enable_locking']))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    # save validated data back into the updates dict
    validated_updates = dict(
        repo_name=schema_data['repo_group']['repo_name_without_group'],
        repo_group=schema_data['repo_group']['repo_group_id'],

        user=schema_data['repo_owner'],
        repo_description=schema_data['repo_description'],
        repo_private=schema_data['repo_private'],
        clone_uri=schema_data['repo_clone_uri'],
        push_uri=schema_data['repo_push_uri'],
        repo_landing_rev=schema_data['repo_landing_commit_ref'],
        repo_enable_statistics=schema_data['repo_enable_statistics'],
        repo_enable_locking=schema_data['repo_enable_locking'],
        repo_enable_downloads=schema_data['repo_enable_downloads'],
    )

    if schema_data['repo_fork_of']:
        fork_repo = get_repo_or_error(schema_data['repo_fork_of'])
        validated_updates['fork_id'] = fork_repo.repo_id

    # extra fields
    fields = parse_args(Optional.extract(fields), key_prefix='ex_')
    if fields:
        validated_updates.update(fields)

    try:
        RepoModel().update(repo, **validated_updates)
        audit_logger.store_api(
            'repo.edit', action_data={'old_data': old_values},
            user=apiuser, repo=repo)
        Session().commit()
        return {
            'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
            'repository': repo.get_api_data(include_secrets=include_secrets)
        }
    except Exception:
        log.exception(
            u"Exception while trying to update the repository %s",
            repoid)
        raise JSONRPCError('failed to update repo `%s`' % repoid)
|
1134 | 1135 | |
|
1135 | 1136 | |
|
@jsonrpc_method()
def fork_repo(request, apiuser, repoid, fork_name,
              owner=Optional(OAttr('apiuser')),
              description=Optional(''),
              private=Optional(False),
              clone_uri=Optional(None),
              landing_rev=Optional(None),
              copy_permissions=Optional(False)):
    """
    Creates a fork of the specified |repo|.

    * If the fork_name contains "/", fork will be created inside
      a repository group or nested repository groups

      For example "foo/bar/fork-repo" will create fork called "fork-repo"
      inside group "foo/bar". You have to have permissions to access and
      write to the last repository group ("bar" in this example)

    This command can only be run using an |authtoken| with minimum
    read permissions of the forked repo, create fork permissions for an user.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set repository name or repository ID.
    :type repoid: str or int
    :param fork_name: Set the fork name, including it's repository group membership.
    :type fork_name: str
    :param owner: Set the fork owner.
    :type owner: str
    :param description: Set the fork description.
    :type description: str
    :param copy_permissions: Copy permissions from parent |repo|. The
        default is False.
    :type copy_permissions: bool
    :param private: Make the fork private. The default is False.
    :type private: bool
    :param landing_rev: Set the landing revision. E.g branch:default, book:dev, rev:abcd

    Example input:

    .. code-block:: bash

      id : <id_for_response>
      api_key : "<api_key>"
      args: {
        "repoid" :          "<reponame or repo_id>",
        "fork_name":        "<forkname>",
        "owner":            "<username or user_id = Optional(=apiuser)>",
        "description":      "<description>",
        "copy_permissions": "<bool>",
        "private":          "<bool>",
        "landing_rev":      "<landing_rev>"
      }

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: {
        "msg": "Created fork of `<reponame>` as `<forkname>`",
        "success": true,
        "task": "<celery task id or None if done sync>"
      }
      error: null

    """

    repo = get_repo_or_error(repoid)
    repo_name = repo.repo_name

    if not has_superadmin_permission(apiuser):
        # regular users need at least read access on the repository
        # being forked...
        validate_repo_permissions(
            apiuser, repoid, repo,
            ('repository.admin', 'repository.write', 'repository.read'))

        # ...plus the global permission to create forks at all
        if not HasPermissionAnyApi('hg.fork.repository')(user=apiuser):
            raise JSONRPCForbidden()

    # check if user can set owner parameter
    owner = validate_set_owner_permissions(apiuser, owner)

    description = Optional.extract(description)
    copy_permissions = Optional.extract(copy_permissions)
    clone_uri = Optional.extract(clone_uri)
    private = Optional.extract(private)

    # the backend default landing ref is always an acceptable choice
    landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
    ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
    ref_choices = list(set(ref_choices + [landing_ref]))
    landing_commit_ref = Optional.extract(landing_rev) or landing_ref

    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        repo_type=repo.repo_type,
        # user caller
        user=apiuser)

    raw_payload = dict(
        repo_name=fork_name,
        repo_type=repo.repo_type,
        repo_owner=owner.username,
        repo_description=description,
        repo_landing_commit_ref=landing_commit_ref,
        repo_clone_uri=clone_uri,
        repo_private=private,
        repo_copy_permissions=copy_permissions)

    try:
        schema_data = schema.deserialize(raw_payload)
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    try:
        group_info = schema_data['repo_group']
        data = {
            'fork_parent_id': repo.repo_id,

            'repo_name': group_info['repo_name_without_group'],
            'repo_name_full': schema_data['repo_name'],
            'repo_group': group_info['repo_group_id'],
            'repo_type': schema_data['repo_type'],
            'description': schema_data['repo_description'],
            'private': schema_data['repo_private'],
            'copy_permissions': schema_data['repo_copy_permissions'],
            'landing_rev': schema_data['repo_landing_commit_ref'],
        }

        task = RepoModel().create_fork(data, cur_user=owner.user_id)
        # no commit, it's done in RepoModel, or async via celery
        task_id = get_task_id(task)

        return {
            'msg': 'Created fork of `%s` as `%s`' % (
                repo.repo_name, schema_data['repo_name']),
            'success': True,  # cannot return the repo data here since fork
                              # can be done async
            'task': task_id
        }
    except Exception:
        log.exception(
            u"Exception while trying to create fork %s",
            schema_data['repo_name'])
        raise JSONRPCError(
            'failed to fork repository `%s` as `%s`' % (
                repo_name, schema_data['repo_name']))
|
1284 | 1285 | |
|
1285 | 1286 | |
|
1286 | 1287 | @jsonrpc_method() |
|
1287 | 1288 | def delete_repo(request, apiuser, repoid, forks=Optional('')): |
|
1288 | 1289 | """ |
|
1289 | 1290 | Deletes a repository. |
|
1290 | 1291 | |
|
1291 | 1292 | * When the `forks` parameter is set it's possible to detach or delete |
|
1292 | 1293 | forks of deleted repository. |
|
1293 | 1294 | |
|
1294 | 1295 | This command can only be run using an |authtoken| with admin |
|
1295 | 1296 | permissions on the |repo|. |
|
1296 | 1297 | |
|
1297 | 1298 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1298 | 1299 | :type apiuser: AuthUser |
|
1299 | 1300 | :param repoid: Set the repository name or repository ID. |
|
1300 | 1301 | :type repoid: str or int |
|
1301 | 1302 | :param forks: Set to `detach` or `delete` forks from the |repo|. |
|
1302 | 1303 | :type forks: Optional(str) |
|
1303 | 1304 | |
|
1304 | 1305 | Example error output: |
|
1305 | 1306 | |
|
1306 | 1307 | .. code-block:: bash |
|
1307 | 1308 | |
|
1308 | 1309 | id : <id_given_in_input> |
|
1309 | 1310 | result: { |
|
1310 | 1311 | "msg": "Deleted repository `<reponame>`", |
|
1311 | 1312 | "success": true |
|
1312 | 1313 | } |
|
1313 | 1314 | error: null |
|
1314 | 1315 | """ |
|
1315 | 1316 | |
|
1316 | 1317 | repo = get_repo_or_error(repoid) |
|
1317 | 1318 | repo_name = repo.repo_name |
|
1318 | 1319 | if not has_superadmin_permission(apiuser): |
|
1319 | 1320 | _perms = ('repository.admin',) |
|
1320 | 1321 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1321 | 1322 | |
|
1322 | 1323 | try: |
|
1323 | 1324 | handle_forks = Optional.extract(forks) |
|
1324 | 1325 | _forks_msg = '' |
|
1325 | 1326 | _forks = [f for f in repo.forks] |
|
1326 | 1327 | if handle_forks == 'detach': |
|
1327 | 1328 | _forks_msg = ' ' + 'Detached %s forks' % len(_forks) |
|
1328 | 1329 | elif handle_forks == 'delete': |
|
1329 | 1330 | _forks_msg = ' ' + 'Deleted %s forks' % len(_forks) |
|
1330 | 1331 | elif _forks: |
|
1331 | 1332 | raise JSONRPCError( |
|
1332 | 1333 | 'Cannot delete `%s` it still contains attached forks' % |
|
1333 | 1334 | (repo.repo_name,) |
|
1334 | 1335 | ) |
|
1335 | 1336 | old_data = repo.get_api_data() |
|
1336 | 1337 | RepoModel().delete(repo, forks=forks) |
|
1337 | 1338 | |
|
1338 | 1339 | repo = audit_logger.RepoWrap(repo_id=None, |
|
1339 | 1340 | repo_name=repo.repo_name) |
|
1340 | 1341 | |
|
1341 | 1342 | audit_logger.store_api( |
|
1342 | 1343 | 'repo.delete', action_data={'old_data': old_data}, |
|
1343 | 1344 | user=apiuser, repo=repo) |
|
1344 | 1345 | |
|
1345 | 1346 | ScmModel().mark_for_invalidation(repo_name, delete=True) |
|
1346 | 1347 | Session().commit() |
|
1347 | 1348 | return { |
|
1348 | 1349 | 'msg': 'Deleted repository `%s`%s' % (repo_name, _forks_msg), |
|
1349 | 1350 | 'success': True |
|
1350 | 1351 | } |
|
1351 | 1352 | except Exception: |
|
1352 | 1353 | log.exception("Exception occurred while trying to delete repo") |
|
1353 | 1354 | raise JSONRPCError( |
|
1354 | 1355 | 'failed to delete repository `%s`' % (repo_name,) |
|
1355 | 1356 | ) |
|
1356 | 1357 | |
|
1357 | 1358 | |
|
1358 | 1359 | #TODO: marcink, change name ? |
|
@jsonrpc_method()
def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)):
    """
    Invalidates the cache for the specified repository.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param delete_keys: This deletes the invalidated keys instead of
        just flagging them.
    :type delete_keys: Optional(``True`` | ``False``)

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : {
        'msg': Cache for repository `<repository name>` was invalidated,
        'repository': <repository name>
      }
      error : null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error : {
         'Error occurred during cache invalidation action'
      }

    """

    repo = get_repo_or_error(repoid)
    # write access is enough to trigger a cache invalidation
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(
            apiuser, repoid, repo, ('repository.admin', 'repository.write',))

    delete = Optional.extract(delete_keys)
    try:
        ScmModel().mark_for_invalidation(repo.repo_name, delete=delete)
    except Exception:
        log.exception(
            "Exception occurred while trying to invalidate repo cache")
        raise JSONRPCError(
            'Error occurred during cache invalidation action'
        )
    return {
        'msg': 'Cache for repository `%s` was invalidated' % (repoid,),
        'repository': repo.repo_name
    }
|
1418 | 1419 | |
|
1419 | 1420 | |
|
#TODO: marcink, change name ?
@jsonrpc_method()
def lock(request, apiuser, repoid, locked=Optional(None),
         userid=Optional(OAttr('apiuser'))):
    """
    Sets the lock state of the specified |repo| by the given user.
    From more information, see :ref:`repo-locking`.

    * If the ``userid`` option is not set, the repository is locked to the
      user who called the method.
    * If the ``locked`` parameter is not set, the current lock state of the
      repository is displayed.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param locked: Sets the lock state.
    :type locked: Optional(``True`` | ``False``)
    :param userid: Set the repository lock to this user.
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
          'repo': '<reponame>',
          'locked': <bool: lock state>,
          'locked_since': <int: lock timestamp>,
          'locked_by': <username of person who made the lock>,
          'lock_reason': <str: reason for locking>,
          'lock_state_changed': <bool: True if lock state has been changed in this request>,
          'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.'
          or
          'msg': 'Repo `<repository name>` not locked.'
          or
          'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`'
        }
        error :  null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error :  {
            'Error occurred locking repository `<reponame>`'
        }
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # check if we have at least write permission for this repo !
        _perms = ('repository.admin', 'repository.write',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    # make sure normal user does not pass someone else userid,
    # he is not allowed to do that
    if not isinstance(userid, Optional) and userid != apiuser.user_id:
        raise JSONRPCError('userid is not the same as your user')

    if isinstance(userid, Optional):
        userid = apiuser.user_id

    user = get_user_or_error(userid)

    if isinstance(locked, Optional):
        # no explicit lock state requested: report the current state only
        lockobj = repo.locked

        if lockobj[0] is None:
            _d = {
                'repo': repo.repo_name,
                'locked': False,
                'locked_since': None,
                'locked_by': None,
                'lock_reason': None,
                'lock_state_changed': False,
                'msg': 'Repo `%s` not locked.' % repo.repo_name
            }
            return _d
        else:
            _user_id, _time, _reason = lockobj
            # BUG FIX: resolve the user stored in the lock object
            # (_user_id) instead of the caller-supplied/defaulted userid,
            # so `locked_by` reports who actually holds the lock.
            lock_user = get_user_or_error(_user_id)
            _d = {
                'repo': repo.repo_name,
                'locked': True,
                'locked_since': _time,
                'locked_by': lock_user.username,
                'lock_reason': _reason,
                'lock_state_changed': False,
                'msg': ('Repo `%s` locked by `%s` on `%s`.'
                        % (repo.repo_name, lock_user.username,
                           json.dumps(time_to_datetime(_time))))
            }
            return _d

    # force locked state through a flag
    else:
        locked = str2bool(locked)
        lock_reason = Repository.LOCK_API
        try:
            if locked:
                lock_time = time.time()
                Repository.lock(repo, user.user_id, lock_time, lock_reason)
            else:
                lock_time = None
                Repository.unlock(repo)
            _d = {
                'repo': repo.repo_name,
                'locked': locked,
                'locked_since': lock_time,
                'locked_by': user.username,
                'lock_reason': lock_reason,
                'lock_state_changed': True,
                'msg': ('User `%s` set lock state for repo `%s` to `%s`'
                        % (user.username, repo.repo_name, locked))
            }
            return _d
        except Exception:
            log.exception(
                "Exception occurred while trying to lock repository")
            raise JSONRPCError(
                'Error occurred locking repository `%s`' % repo.repo_name
            )
|
1552 | 1553 | |
|
1553 | 1554 | |
|
@jsonrpc_method()
def comment_commit(
        request, apiuser, repoid, commit_id, message, status=Optional(None),
        comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
        resolves_comment_id=Optional(None), extra_recipients=Optional([]),
        userid=Optional(OAttr('apiuser')), send_email=Optional(True)):
    """
    Set a commit comment, and optionally change the status of the commit.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param commit_id: Specify the commit_id for which to set a comment.
    :type commit_id: str
    :param message: The comment text.
    :type message: str
    :param status: (**Optional**) status of commit, one of: 'not_reviewed',
        'approved', 'rejected', 'under_review'
    :type status: str
    :param comment_type: Comment type, one of: 'note', 'todo'
    :type comment_type: Optional(str), default: 'note'
    :param resolves_comment_id: id of comment which this one will resolve
    :type resolves_comment_id: Optional(int)
    :param extra_recipients: list of user ids or usernames to add
        notifications for this comment. Acts like a CC for notification
    :type extra_recipients: Optional(list)
    :param userid: Set the user name of the comment creator.
    :type userid: Optional(str or int)
    :param send_email: Define if this comment should also send email notification
    :type send_email: Optional(bool)

    Example error output:

    .. code-block:: bash

        {
            "id" : <id_given_in_input>,
            "result" : {
                "msg": "Commented on commit `<commit_id>` for repository `<repoid>`",
                "status_change": null or <status>,
                "success": true
            },
            "error" :  null
        }

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.read', 'repository.write', 'repository.admin')
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        # resolve the (possibly short) commit_id to the full raw id
        commit = repo.scm_instance().get_commit(commit_id=commit_id)
        commit_id = commit.raw_id
    except Exception as e:
        log.exception('Failed to fetch commit')
        raise JSONRPCError(safe_str(e))

    if isinstance(userid, Optional):
        userid = apiuser.user_id

    user = get_user_or_error(userid)
    status = Optional.extract(status)
    comment_type = Optional.extract(comment_type)
    resolves_comment_id = Optional.extract(resolves_comment_id)
    extra_recipients = Optional.extract(extra_recipients)
    send_email = Optional.extract(send_email, binary=True)

    allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES]
    if status and status not in allowed_statuses:
        # BUG FIX: message previously read "must be on of" (typo)
        raise JSONRPCError('Bad status, must be one of '
                           '%s got %s' % (allowed_statuses, status,))

    if resolves_comment_id:
        # only an existing TODO comment can be marked as resolved
        comment = ChangesetComment.get(resolves_comment_id)
        if not comment:
            raise JSONRPCError(
                'Invalid resolves_comment_id `%s` for this commit.'
                % resolves_comment_id)
        if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
            raise JSONRPCError(
                'Comment `%s` is wrong type for setting status to resolved.'
                % resolves_comment_id)

    try:
        rc_config = SettingsModel().get_all_settings()
        renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
        status_change_label = ChangesetStatus.get_status_lbl(status)
        comment = CommentsModel().create(
            message, repo, user, commit_id=commit_id,
            status_change=status_change_label,
            status_change_type=status,
            renderer=renderer,
            comment_type=comment_type,
            resolves_comment_id=resolves_comment_id,
            auth_user=apiuser,
            extra_recipients=extra_recipients,
            send_email=send_email
        )
        if status:
            # also do a status change
            try:
                ChangesetStatusModel().set_status(
                    repo, status, user, comment, revision=commit_id,
                    dont_allow_on_closed_pull_request=True
                )
            except StatusChangeOnClosedPullRequestError:
                log.exception(
                    "Exception occurred while trying to change repo commit status")
                msg = ('Changing status on a commit associated with '
                       'a closed pull request is not allowed')
                raise JSONRPCError(msg)

        CommentsModel().trigger_commit_comment_hook(
            repo, apiuser, 'create',
            data={'comment': comment, 'commit': commit})

        Session().commit()
        return {
            'msg': (
                'Commented on commit `%s` for repository `%s`' % (
                    comment.revision, repo.repo_name)),
            'status_change': status,
            'success': True,
        }
    except JSONRPCError:
        # catch any inside errors, and re-raise them to prevent from
        # below global catch to silence them
        raise
    except Exception:
        log.exception("Exception occurred while trying to comment on commit")
        raise JSONRPCError(
            'failed to set comment on repository `%s`' % (repo.repo_name,)
        )
|
1689 | 1690 | |
|
1690 | 1691 | |
|
@jsonrpc_method()
def get_repo_comments(request, apiuser, repoid,
                      commit_id=Optional(None), comment_type=Optional(None),
                      userid=Optional(None)):
    """
    Get all comments for a repository

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param commit_id: Optionally filter the comments by the commit_id
    :type commit_id: Optional(str), default: None
    :param comment_type: Optionally filter the comments by the comment_type
        one of: 'note', 'todo'
    :type comment_type: Optional(str), default: None
    :param userid: Optionally filter the comments by the author of comment
    :type userid: Optional(str or int), Default: None

    Example output:

    .. code-block:: bash

        {
            "id" : <id_given_in_input>,
            "result" : [
                {
                  "comment_author": <USER_DETAILS>,
                  "comment_created_on": "2017-02-01T14:38:16.309",
                  "comment_f_path": "file.txt",
                  "comment_id": 282,
                  "comment_lineno": "n1",
                  "comment_resolved_by": null,
                  "comment_status": [],
                  "comment_text": "This file needs a header",
                  "comment_type": "todo",
                  "comment_last_version: 0
                }
            ],
            "error" :  null
        }

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # at least read access is required on the repository
        _perms = ('repository.read', 'repository.write', 'repository.admin')
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    commit_id = Optional.extract(commit_id)

    # resolve the optional author filter; None means "any author"
    userid = Optional.extract(userid)
    user = get_user_or_error(userid) if userid else None

    comment_type = Optional.extract(comment_type)
    if comment_type and comment_type not in ChangesetComment.COMMENT_TYPES:
        raise JSONRPCError(
            'comment_type must be one of `{}` got {}'.format(
                ChangesetComment.COMMENT_TYPES, comment_type)
        )

    return CommentsModel().get_repository_comments(
        repo=repo, comment_type=comment_type, user=user, commit_id=commit_id)
|
1757 | 1758 | |
|
1758 | 1759 | |
|
@jsonrpc_method()
def get_comment(request, apiuser, comment_id):
    """
    Get single comment from repository or pull_request

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param comment_id: comment id found in the URL of comment
    :type comment_id: str or int

    Example output:

    .. code-block:: bash

        {
            "id" : <id_given_in_input>,
            "result" : {
              "comment_author": <USER_DETAILS>,
              "comment_created_on": "2017-02-01T14:38:16.309",
              "comment_f_path": "file.txt",
              "comment_id": 282,
              "comment_lineno": "n1",
              "comment_resolved_by": null,
              "comment_status": [],
              "comment_text": "This file needs a header",
              "comment_type": "todo",
              "comment_last_version: 0
            },
            "error" :  null
        }

    """

    comment = ChangesetComment.get(comment_id)
    if comment is None:
        raise JSONRPCError('comment `%s` does not exist' % (comment_id,))

    # deliberately answer "does not exist" rather than "forbidden" so the
    # API does not leak which comment ids are valid to unauthorized users
    _perms = ('repository.read', 'repository.write', 'repository.admin')
    allowed = HasRepoPermissionAnyApi(*_perms)(
        user=apiuser, repo_name=comment.repo.repo_name)
    if not allowed:
        raise JSONRPCError('comment `%s` does not exist' % (comment_id,))

    return comment
|
1804 | 1805 | |
|
1805 | 1806 | |
|
@jsonrpc_method()
def edit_comment(request, apiuser, message, comment_id, version,
                 userid=Optional(OAttr('apiuser'))):
    """
    Edit comment on the pull request or commit,
    specified by the `comment_id` and version. Initially version should be 0

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param comment_id: Specify the comment_id for editing
    :type comment_id: int
    :param version: version of the comment that will be created, starts from 0
    :type version: int
    :param message: The text content of the comment.
    :type message: str
    :param userid: Comment on the pull request as this user
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            "comment": "<comment data>",
            "version": "<Integer>",
        },
        error :  null
    """

    auth_user = apiuser
    comment = ChangesetComment.get(comment_id)
    if not comment:
        raise JSONRPCError('comment `%s` does not exist' % (comment_id,))

    is_super_admin = has_superadmin_permission(apiuser)
    is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
        (user=apiuser, repo_name=comment.repo.repo_name)

    # only an admin (super or repo) may impersonate another user via userid
    if not isinstance(userid, Optional):
        if is_super_admin or is_repo_admin:
            apiuser = get_user_or_error(userid)
            auth_user = apiuser.AuthUser()
        else:
            raise JSONRPCError('userid is not the same as your user')

    # editing is allowed for the comment author, or for admins as long as
    # the comment has not been marked immutable
    comment_author = comment.author.user_id == auth_user.user_id
    if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author):
        raise JSONRPCError("you don't have access to edit this comment")

    try:
        # CommentsModel().edit raises CommentVersionMismatch when `version`
        # does not match the comment's current version (optimistic locking)
        comment_history = CommentsModel().edit(
            comment_id=comment_id,
            text=message,
            auth_user=auth_user,
            version=version,
        )
        Session().commit()
    except CommentVersionMismatch:
        raise JSONRPCError(
            'comment ({}) version ({}) mismatch'.format(comment_id, version)
        )
    # no history entry and no message means nothing was changed; an empty
    # message is not a valid edit
    if not comment_history and not message:
        raise JSONRPCError(
            "comment ({}) can't be changed with empty string".format(comment_id)
        )

    # fire the appropriate 'edit' hook after the edit has been committed:
    # pull-request comments trigger the PR hook, commit comments the
    # commit-comment hook
    if comment.pull_request:
        pull_request = comment.pull_request
        PullRequestModel().trigger_pull_request_hook(
            pull_request, apiuser, 'comment_edit',
            data={'comment': comment})
    else:
        db_repo = comment.repo
        commit_id = comment.revision
        commit = db_repo.get_commit(commit_id)
        CommentsModel().trigger_commit_comment_hook(
            db_repo, apiuser, 'edit',
            data={'comment': comment, 'commit': commit})

    data = {
        'comment': comment,
        # version of the newly created history entry, if an edit happened
        'version': comment_history.version if comment_history else None,
    }
    return data
|
1877 | 1892 | |
|
1878 | 1893 | |
|
1879 | 1894 | # TODO(marcink): write this with all required logic for deleting a comments in PR or commits |
|
1880 | 1895 | # @jsonrpc_method() |
|
1881 | 1896 | # def delete_comment(request, apiuser, comment_id): |
|
1882 | 1897 | # auth_user = apiuser |
|
1883 | 1898 | # |
|
1884 | 1899 | # comment = ChangesetComment.get(comment_id) |
|
1885 | 1900 | # if not comment: |
|
1886 | 1901 | # raise JSONRPCError('comment `%s` does not exist' % (comment_id,)) |
|
1887 | 1902 | # |
|
1888 | 1903 | # is_super_admin = has_superadmin_permission(apiuser) |
|
1889 | 1904 | # is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\ |
|
1890 | 1905 | # (user=apiuser, repo_name=comment.repo.repo_name) |
|
1891 | 1906 | # |
|
1892 | 1907 | # comment_author = comment.author.user_id == auth_user.user_id |
|
1893 | 1908 | # if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author): |
|
1894 | 1909 | # raise JSONRPCError("you don't have access to edit this comment") |
|
1895 | 1910 | |
|
@jsonrpc_method()
def grant_user_permission(request, apiuser, repoid, userid, perm):
    """
    Grant permissions for the specified user on the given repository,
    or update existing permissions if found.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param userid: Set the user name.
    :type userid: str
    :param perm: Set the user permissions, using the following format
        ``(repository.(none|read|write|admin))``
    :type perm: str

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
                  "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`",
                  "success": true
                }
        error:  null
    """

    # resolve all identifiers up front; each helper raises on bad input
    repo = get_repo_or_error(repoid)
    user = get_user_or_error(userid)
    perm = get_perm_or_error(perm)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    perm_additions = [[user.user_id, perm.permission_name, "user"]]
    try:
        changes = RepoModel().update_permissions(
            repo=repo, perm_additions=perm_additions, cur_user=apiuser)

        # record the exact permission delta in the audit log
        action_data = {
            'added': changes['added'],
            'updated': changes['updated'],
            'deleted': changes['deleted'],
        }
        audit_logger.store_api(
            'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
        Session().commit()
        PermissionModel().flush_user_permission_caches(changes)

        msg = 'Granted perm: `%s` for user: `%s` in repo: `%s`' % (
            perm.permission_name, user.username, repo.repo_name
        )
        return {'msg': msg, 'success': True}
    except Exception:
        log.exception("Exception occurred while trying edit permissions for repo")
        raise JSONRPCError(
            'failed to edit permission for user: `%s` in repo: `%s`' % (
                userid, repoid
            )
        )
|
1962 | 1977 | |
|
1963 | 1978 | |
|
@jsonrpc_method()
def revoke_user_permission(request, apiuser, repoid, userid):
    """
    Revoke permission for a user on the specified repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param userid: Set the user name of revoked user.
    :type userid: str or int

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
                  "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
                  "success": true
                }
        error:  null
    """

    repo = get_repo_or_error(repoid)
    user = get_user_or_error(userid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    # permission_name is None for deletions; "user" marks the principal type
    perm_deletions = [[user.user_id, None, "user"]]
    try:
        # BUG FIX: pass the API caller (apiuser) as cur_user, not the user
        # being revoked, so the change is attributed correctly — consistent
        # with grant_user_permission above.
        changes = RepoModel().update_permissions(
            repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)

        # record the exact permission delta in the audit log
        action_data = {
            'added': changes['added'],
            'updated': changes['updated'],
            'deleted': changes['deleted'],
        }
        audit_logger.store_api(
            'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
        Session().commit()
        PermissionModel().flush_user_permission_caches(changes)

        return {
            'msg': 'Revoked perm for user: `%s` in repo: `%s`' % (
                user.username, repo.repo_name
            ),
            'success': True
        }
    except Exception:
        log.exception("Exception occurred while trying revoke permissions to repo")
        raise JSONRPCError(
            'failed to edit permission for user: `%s` in repo: `%s`' % (
                userid, repoid
            )
        )
|
2025 | 2040 | |
|
2026 | 2041 | |
|
@jsonrpc_method()
def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm):
    """
    Grant permission for a user group on the specified repository,
    or update existing permissions.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param usergroupid: Specify the ID of the user group.
    :type usergroupid: str or int
    :param perm: Set the user group permissions using the following
        format: (repository.(none|read|write|admin))
    :type perm: str

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
          "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
          "success": true

        }
        error :  null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
          "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
        }

    """

    repo = get_repo_or_error(repoid)
    perm = get_perm_or_error(perm)
    is_super_admin = has_superadmin_permission(apiuser)
    if not is_super_admin:
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    user_group = get_user_group_or_error(usergroupid)
    if not is_super_admin:
        # check if we have at least read permission for this user group !
        # answer "does not exist" to avoid leaking valid group ids
        _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
        if not HasUserGroupPermissionAnyApi(*_perms)(
                user=apiuser, user_group_name=user_group.users_group_name):
            raise JSONRPCError(
                'user group `%s` does not exist' % (usergroupid,))

    perm_additions = [[user_group.users_group_id, perm.permission_name, "user_group"]]
    try:
        changes = RepoModel().update_permissions(
            repo=repo, perm_additions=perm_additions, cur_user=apiuser)
        # record the exact permission delta in the audit log
        action_data = {
            'added': changes['added'],
            'updated': changes['updated'],
            'deleted': changes['deleted'],
        }
        audit_logger.store_api(
            'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
        Session().commit()
        PermissionModel().flush_user_permission_caches(changes)

        msg = 'Granted perm: `%s` for user group: `%s` in repo: `%s`' % (
            perm.permission_name, user_group.users_group_name,
            repo.repo_name
        )
        return {'msg': msg, 'success': True}
    except Exception:
        log.exception(
            "Exception occurred while trying change permission on repo")
        raise JSONRPCError(
            'failed to edit permission for user group: `%s` in '
            'repo: `%s`' % (
                usergroupid, repo.repo_name
            )
        )
|
2116 | 2131 | |
|
2117 | 2132 | |
|
2118 | 2133 | @jsonrpc_method() |
|
2119 | 2134 | def revoke_user_group_permission(request, apiuser, repoid, usergroupid): |
|
2120 | 2135 | """ |
|
2121 | 2136 | Revoke the permissions of a user group on a given repository. |
|
2122 | 2137 | |
|
2123 | 2138 | This command can only be run using an |authtoken| with admin |
|
2124 | 2139 | permissions on the |repo|. |
|
2125 | 2140 | |
|
2126 | 2141 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2127 | 2142 | :type apiuser: AuthUser |
|
2128 | 2143 | :param repoid: Set the repository name or repository ID. |
|
2129 | 2144 | :type repoid: str or int |
|
2130 | 2145 | :param usergroupid: Specify the user group ID. |
|
2131 | 2146 | :type usergroupid: str or int |
|
2132 | 2147 | |
|
2133 | 2148 | Example output: |
|
2134 | 2149 | |
|
2135 | 2150 | .. code-block:: bash |
|
2136 | 2151 | |
|
2137 | 2152 | id : <id_given_in_input> |
|
2138 | 2153 | result: { |
|
2139 | 2154 | "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`", |
|
2140 | 2155 | "success": true |
|
2141 | 2156 | } |
|
2142 | 2157 | error: null |
|
2143 | 2158 | """ |
|
2144 | 2159 | |
|
2145 | 2160 | repo = get_repo_or_error(repoid) |
|
2146 | 2161 | if not has_superadmin_permission(apiuser): |
|
2147 | 2162 | _perms = ('repository.admin',) |
|
2148 | 2163 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2149 | 2164 | |
|
2150 | 2165 | user_group = get_user_group_or_error(usergroupid) |
|
2151 | 2166 | if not has_superadmin_permission(apiuser): |
|
2152 | 2167 | # check if we have at least read permission for this user group ! |
|
2153 | 2168 | _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',) |
|
2154 | 2169 | if not HasUserGroupPermissionAnyApi(*_perms)( |
|
2155 | 2170 | user=apiuser, user_group_name=user_group.users_group_name): |
|
2156 | 2171 | raise JSONRPCError( |
|
2157 | 2172 | 'user group `%s` does not exist' % (usergroupid,)) |
|
2158 | 2173 | |
|
2159 | 2174 | perm_deletions = [[user_group.users_group_id, None, "user_group"]] |
|
2160 | 2175 | try: |
|
2161 | 2176 | changes = RepoModel().update_permissions( |
|
2162 | 2177 | repo=repo, perm_deletions=perm_deletions, cur_user=apiuser) |
|
2163 | 2178 | action_data = { |
|
2164 | 2179 | 'added': changes['added'], |
|
2165 | 2180 | 'updated': changes['updated'], |
|
2166 | 2181 | 'deleted': changes['deleted'], |
|
2167 | 2182 | } |
|
2168 | 2183 | audit_logger.store_api( |
|
2169 | 2184 | 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo) |
|
2170 | 2185 | Session().commit() |
|
2171 | 2186 | PermissionModel().flush_user_permission_caches(changes) |
|
2172 | 2187 | |
|
2173 | 2188 | return { |
|
2174 | 2189 | 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % ( |
|
2175 | 2190 | user_group.users_group_name, repo.repo_name |
|
2176 | 2191 | ), |
|
2177 | 2192 | 'success': True |
|
2178 | 2193 | } |
|
2179 | 2194 | except Exception: |
|
2180 | 2195 | log.exception("Exception occurred while trying revoke " |
|
2181 | 2196 | "user group permission on repo") |
|
2182 | 2197 | raise JSONRPCError( |
|
2183 | 2198 | 'failed to edit permission for user group: `%s` in ' |
|
2184 | 2199 | 'repo: `%s`' % ( |
|
2185 | 2200 | user_group.users_group_name, repo.repo_name |
|
2186 | 2201 | ) |
|
2187 | 2202 | ) |
|
2188 | 2203 | |
|
2189 | 2204 | |
|
2190 | 2205 | @jsonrpc_method() |
|
2191 | 2206 | def pull(request, apiuser, repoid, remote_uri=Optional(None)): |
|
2192 | 2207 | """ |
|
2193 | 2208 | Triggers a pull on the given repository from a remote location. You |
|
2194 | 2209 | can use this to keep remote repositories up-to-date. |
|
2195 | 2210 | |
|
2196 | 2211 | This command can only be run using an |authtoken| with admin |
|
2197 | 2212 | rights to the specified repository. For more information, |
|
2198 | 2213 | see :ref:`config-token-ref`. |
|
2199 | 2214 | |
|
2200 | 2215 | This command takes the following options: |
|
2201 | 2216 | |
|
2202 | 2217 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2203 | 2218 | :type apiuser: AuthUser |
|
2204 | 2219 | :param repoid: The repository name or repository ID. |
|
2205 | 2220 | :type repoid: str or int |
|
2206 | 2221 | :param remote_uri: Optional remote URI to pass in for pull |
|
2207 | 2222 | :type remote_uri: str |
|
2208 | 2223 | |
|
2209 | 2224 | Example output: |
|
2210 | 2225 | |
|
2211 | 2226 | .. code-block:: bash |
|
2212 | 2227 | |
|
2213 | 2228 | id : <id_given_in_input> |
|
2214 | 2229 | result : { |
|
2215 | 2230 | "msg": "Pulled from url `<remote_url>` on repo `<repository name>`" |
|
2216 | 2231 | "repository": "<repository name>" |
|
2217 | 2232 | } |
|
2218 | 2233 | error : null |
|
2219 | 2234 | |
|
2220 | 2235 | Example error output: |
|
2221 | 2236 | |
|
2222 | 2237 | .. code-block:: bash |
|
2223 | 2238 | |
|
2224 | 2239 | id : <id_given_in_input> |
|
2225 | 2240 | result : null |
|
2226 | 2241 | error : { |
|
2227 | 2242 | "Unable to push changes from `<remote_url>`" |
|
2228 | 2243 | } |
|
2229 | 2244 | |
|
2230 | 2245 | """ |
|
2231 | 2246 | |
|
2232 | 2247 | repo = get_repo_or_error(repoid) |
|
2233 | 2248 | remote_uri = Optional.extract(remote_uri) |
|
2234 | 2249 | remote_uri_display = remote_uri or repo.clone_uri_hidden |
|
2235 | 2250 | if not has_superadmin_permission(apiuser): |
|
2236 | 2251 | _perms = ('repository.admin',) |
|
2237 | 2252 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2238 | 2253 | |
|
2239 | 2254 | try: |
|
2240 | 2255 | ScmModel().pull_changes( |
|
2241 | 2256 | repo.repo_name, apiuser.username, remote_uri=remote_uri) |
|
2242 | 2257 | return { |
|
2243 | 2258 | 'msg': 'Pulled from url `%s` on repo `%s`' % ( |
|
2244 | 2259 | remote_uri_display, repo.repo_name), |
|
2245 | 2260 | 'repository': repo.repo_name |
|
2246 | 2261 | } |
|
2247 | 2262 | except Exception: |
|
2248 | 2263 | log.exception("Exception occurred while trying to " |
|
2249 | 2264 | "pull changes from remote location") |
|
2250 | 2265 | raise JSONRPCError( |
|
2251 | 2266 | 'Unable to pull changes from `%s`' % remote_uri_display |
|
2252 | 2267 | ) |
|
2253 | 2268 | |
|
2254 | 2269 | |
|
2255 | 2270 | @jsonrpc_method() |
|
2256 | 2271 | def strip(request, apiuser, repoid, revision, branch): |
|
2257 | 2272 | """ |
|
2258 | 2273 | Strips the given revision from the specified repository. |
|
2259 | 2274 | |
|
2260 | 2275 | * This will remove the revision and all of its decendants. |
|
2261 | 2276 | |
|
2262 | 2277 | This command can only be run using an |authtoken| with admin rights to |
|
2263 | 2278 | the specified repository. |
|
2264 | 2279 | |
|
2265 | 2280 | This command takes the following options: |
|
2266 | 2281 | |
|
2267 | 2282 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2268 | 2283 | :type apiuser: AuthUser |
|
2269 | 2284 | :param repoid: The repository name or repository ID. |
|
2270 | 2285 | :type repoid: str or int |
|
2271 | 2286 | :param revision: The revision you wish to strip. |
|
2272 | 2287 | :type revision: str |
|
2273 | 2288 | :param branch: The branch from which to strip the revision. |
|
2274 | 2289 | :type branch: str |
|
2275 | 2290 | |
|
2276 | 2291 | Example output: |
|
2277 | 2292 | |
|
2278 | 2293 | .. code-block:: bash |
|
2279 | 2294 | |
|
2280 | 2295 | id : <id_given_in_input> |
|
2281 | 2296 | result : { |
|
2282 | 2297 | "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'" |
|
2283 | 2298 | "repository": "<repository name>" |
|
2284 | 2299 | } |
|
2285 | 2300 | error : null |
|
2286 | 2301 | |
|
2287 | 2302 | Example error output: |
|
2288 | 2303 | |
|
2289 | 2304 | .. code-block:: bash |
|
2290 | 2305 | |
|
2291 | 2306 | id : <id_given_in_input> |
|
2292 | 2307 | result : null |
|
2293 | 2308 | error : { |
|
2294 | 2309 | "Unable to strip commit <commit_hash> from repo `<repository name>`" |
|
2295 | 2310 | } |
|
2296 | 2311 | |
|
2297 | 2312 | """ |
|
2298 | 2313 | |
|
2299 | 2314 | repo = get_repo_or_error(repoid) |
|
2300 | 2315 | if not has_superadmin_permission(apiuser): |
|
2301 | 2316 | _perms = ('repository.admin',) |
|
2302 | 2317 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2303 | 2318 | |
|
2304 | 2319 | try: |
|
2305 | 2320 | ScmModel().strip(repo, revision, branch) |
|
2306 | 2321 | audit_logger.store_api( |
|
2307 | 2322 | 'repo.commit.strip', action_data={'commit_id': revision}, |
|
2308 | 2323 | repo=repo, |
|
2309 | 2324 | user=apiuser, commit=True) |
|
2310 | 2325 | |
|
2311 | 2326 | return { |
|
2312 | 2327 | 'msg': 'Stripped commit %s from repo `%s`' % ( |
|
2313 | 2328 | revision, repo.repo_name), |
|
2314 | 2329 | 'repository': repo.repo_name |
|
2315 | 2330 | } |
|
2316 | 2331 | except Exception: |
|
2317 | 2332 | log.exception("Exception while trying to strip") |
|
2318 | 2333 | raise JSONRPCError( |
|
2319 | 2334 | 'Unable to strip commit %s from repo `%s`' % ( |
|
2320 | 2335 | revision, repo.repo_name) |
|
2321 | 2336 | ) |
|
2322 | 2337 | |
|
2323 | 2338 | |
|
2324 | 2339 | @jsonrpc_method() |
|
2325 | 2340 | def get_repo_settings(request, apiuser, repoid, key=Optional(None)): |
|
2326 | 2341 | """ |
|
2327 | 2342 | Returns all settings for a repository. If key is given it only returns the |
|
2328 | 2343 | setting identified by the key or null. |
|
2329 | 2344 | |
|
2330 | 2345 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2331 | 2346 | :type apiuser: AuthUser |
|
2332 | 2347 | :param repoid: The repository name or repository id. |
|
2333 | 2348 | :type repoid: str or int |
|
2334 | 2349 | :param key: Key of the setting to return. |
|
2335 | 2350 | :type: key: Optional(str) |
|
2336 | 2351 | |
|
2337 | 2352 | Example output: |
|
2338 | 2353 | |
|
2339 | 2354 | .. code-block:: bash |
|
2340 | 2355 | |
|
2341 | 2356 | { |
|
2342 | 2357 | "error": null, |
|
2343 | 2358 | "id": 237, |
|
2344 | 2359 | "result": { |
|
2345 | 2360 | "extensions_largefiles": true, |
|
2346 | 2361 | "extensions_evolve": true, |
|
2347 | 2362 | "hooks_changegroup_push_logger": true, |
|
2348 | 2363 | "hooks_changegroup_repo_size": false, |
|
2349 | 2364 | "hooks_outgoing_pull_logger": true, |
|
2350 | 2365 | "phases_publish": "True", |
|
2351 | 2366 | "rhodecode_hg_use_rebase_for_merging": true, |
|
2352 | 2367 | "rhodecode_pr_merge_enabled": true, |
|
2353 | 2368 | "rhodecode_use_outdated_comments": true |
|
2354 | 2369 | } |
|
2355 | 2370 | } |
|
2356 | 2371 | """ |
|
2357 | 2372 | |
|
2358 | 2373 | # Restrict access to this api method to admins only. |
|
2359 | 2374 | if not has_superadmin_permission(apiuser): |
|
2360 | 2375 | raise JSONRPCForbidden() |
|
2361 | 2376 | |
|
2362 | 2377 | try: |
|
2363 | 2378 | repo = get_repo_or_error(repoid) |
|
2364 | 2379 | settings_model = VcsSettingsModel(repo=repo) |
|
2365 | 2380 | settings = settings_model.get_global_settings() |
|
2366 | 2381 | settings.update(settings_model.get_repo_settings()) |
|
2367 | 2382 | |
|
2368 | 2383 | # If only a single setting is requested fetch it from all settings. |
|
2369 | 2384 | key = Optional.extract(key) |
|
2370 | 2385 | if key is not None: |
|
2371 | 2386 | settings = settings.get(key, None) |
|
2372 | 2387 | except Exception: |
|
2373 | 2388 | msg = 'Failed to fetch settings for repository `{}`'.format(repoid) |
|
2374 | 2389 | log.exception(msg) |
|
2375 | 2390 | raise JSONRPCError(msg) |
|
2376 | 2391 | |
|
2377 | 2392 | return settings |
|
2378 | 2393 | |
|
2379 | 2394 | |
|
2380 | 2395 | @jsonrpc_method() |
|
2381 | 2396 | def set_repo_settings(request, apiuser, repoid, settings): |
|
2382 | 2397 | """ |
|
2383 | 2398 | Update repository settings. Returns true on success. |
|
2384 | 2399 | |
|
2385 | 2400 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2386 | 2401 | :type apiuser: AuthUser |
|
2387 | 2402 | :param repoid: The repository name or repository id. |
|
2388 | 2403 | :type repoid: str or int |
|
2389 | 2404 | :param settings: The new settings for the repository. |
|
2390 | 2405 | :type: settings: dict |
|
2391 | 2406 | |
|
2392 | 2407 | Example output: |
|
2393 | 2408 | |
|
2394 | 2409 | .. code-block:: bash |
|
2395 | 2410 | |
|
2396 | 2411 | { |
|
2397 | 2412 | "error": null, |
|
2398 | 2413 | "id": 237, |
|
2399 | 2414 | "result": true |
|
2400 | 2415 | } |
|
2401 | 2416 | """ |
|
2402 | 2417 | # Restrict access to this api method to admins only. |
|
2403 | 2418 | if not has_superadmin_permission(apiuser): |
|
2404 | 2419 | raise JSONRPCForbidden() |
|
2405 | 2420 | |
|
2406 | 2421 | if type(settings) is not dict: |
|
2407 | 2422 | raise JSONRPCError('Settings have to be a JSON Object.') |
|
2408 | 2423 | |
|
2409 | 2424 | try: |
|
2410 | 2425 | settings_model = VcsSettingsModel(repo=repoid) |
|
2411 | 2426 | |
|
2412 | 2427 | # Merge global, repo and incoming settings. |
|
2413 | 2428 | new_settings = settings_model.get_global_settings() |
|
2414 | 2429 | new_settings.update(settings_model.get_repo_settings()) |
|
2415 | 2430 | new_settings.update(settings) |
|
2416 | 2431 | |
|
2417 | 2432 | # Update the settings. |
|
2418 | 2433 | inherit_global_settings = new_settings.get( |
|
2419 | 2434 | 'inherit_global_settings', False) |
|
2420 | 2435 | settings_model.create_or_update_repo_settings( |
|
2421 | 2436 | new_settings, inherit_global_settings=inherit_global_settings) |
|
2422 | 2437 | Session().commit() |
|
2423 | 2438 | except Exception: |
|
2424 | 2439 | msg = 'Failed to update settings for repository `{}`'.format(repoid) |
|
2425 | 2440 | log.exception(msg) |
|
2426 | 2441 | raise JSONRPCError(msg) |
|
2427 | 2442 | |
|
2428 | 2443 | # Indicate success. |
|
2429 | 2444 | return True |
|
2430 | 2445 | |
|
2431 | 2446 | |
|
2432 | 2447 | @jsonrpc_method() |
|
2433 | 2448 | def maintenance(request, apiuser, repoid): |
|
2434 | 2449 | """ |
|
2435 | 2450 | Triggers a maintenance on the given repository. |
|
2436 | 2451 | |
|
2437 | 2452 | This command can only be run using an |authtoken| with admin |
|
2438 | 2453 | rights to the specified repository. For more information, |
|
2439 | 2454 | see :ref:`config-token-ref`. |
|
2440 | 2455 | |
|
2441 | 2456 | This command takes the following options: |
|
2442 | 2457 | |
|
2443 | 2458 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2444 | 2459 | :type apiuser: AuthUser |
|
2445 | 2460 | :param repoid: The repository name or repository ID. |
|
2446 | 2461 | :type repoid: str or int |
|
2447 | 2462 | |
|
2448 | 2463 | Example output: |
|
2449 | 2464 | |
|
2450 | 2465 | .. code-block:: bash |
|
2451 | 2466 | |
|
2452 | 2467 | id : <id_given_in_input> |
|
2453 | 2468 | result : { |
|
2454 | 2469 | "msg": "executed maintenance command", |
|
2455 | 2470 | "executed_actions": [ |
|
2456 | 2471 | <action_message>, <action_message2>... |
|
2457 | 2472 | ], |
|
2458 | 2473 | "repository": "<repository name>" |
|
2459 | 2474 | } |
|
2460 | 2475 | error : null |
|
2461 | 2476 | |
|
2462 | 2477 | Example error output: |
|
2463 | 2478 | |
|
2464 | 2479 | .. code-block:: bash |
|
2465 | 2480 | |
|
2466 | 2481 | id : <id_given_in_input> |
|
2467 | 2482 | result : null |
|
2468 | 2483 | error : { |
|
2469 | 2484 | "Unable to execute maintenance on `<reponame>`" |
|
2470 | 2485 | } |
|
2471 | 2486 | |
|
2472 | 2487 | """ |
|
2473 | 2488 | |
|
2474 | 2489 | repo = get_repo_or_error(repoid) |
|
2475 | 2490 | if not has_superadmin_permission(apiuser): |
|
2476 | 2491 | _perms = ('repository.admin',) |
|
2477 | 2492 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2478 | 2493 | |
|
2479 | 2494 | try: |
|
2480 | 2495 | maintenance = repo_maintenance.RepoMaintenance() |
|
2481 | 2496 | executed_actions = maintenance.execute(repo) |
|
2482 | 2497 | |
|
2483 | 2498 | return { |
|
2484 | 2499 | 'msg': 'executed maintenance command', |
|
2485 | 2500 | 'executed_actions': executed_actions, |
|
2486 | 2501 | 'repository': repo.repo_name |
|
2487 | 2502 | } |
|
2488 | 2503 | except Exception: |
|
2489 | 2504 | log.exception("Exception occurred while trying to run maintenance") |
|
2490 | 2505 | raise JSONRPCError( |
|
2491 | 2506 | 'Unable to execute maintenance on `%s`' % repo.repo_name) |
@@ -1,717 +1,723 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | import logging |
|
23 | 23 | |
|
24 | 24 | from pyramid.httpexceptions import ( |
|
25 | 25 | HTTPNotFound, HTTPBadRequest, HTTPFound, HTTPForbidden, HTTPConflict) |
|
26 | 26 | from pyramid.view import view_config |
|
27 | 27 | from pyramid.renderers import render |
|
28 | 28 | from pyramid.response import Response |
|
29 | 29 | |
|
30 | 30 | from rhodecode.apps._base import RepoAppView |
|
31 | 31 | from rhodecode.apps.file_store import utils as store_utils |
|
32 | 32 | from rhodecode.apps.file_store.exceptions import FileNotAllowedException, FileOverSizeException |
|
33 | 33 | |
|
34 | 34 | from rhodecode.lib import diffs, codeblocks |
|
35 | 35 | from rhodecode.lib.auth import ( |
|
36 | 36 | LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired) |
|
37 | 37 | |
|
38 | 38 | from rhodecode.lib.compat import OrderedDict |
|
39 | 39 | from rhodecode.lib.diffs import ( |
|
40 | 40 | cache_diff, load_cached_diff, diff_cache_exist, get_diff_context, |
|
41 | 41 | get_diff_whitespace_flag) |
|
42 | 42 | from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError, CommentVersionMismatch |
|
43 | 43 | import rhodecode.lib.helpers as h |
|
44 | 44 | from rhodecode.lib.utils2 import safe_unicode, str2bool |
|
45 | 45 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
46 | 46 | from rhodecode.lib.vcs.exceptions import ( |
|
47 | 47 | RepositoryError, CommitDoesNotExistError) |
|
48 | 48 | from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore, \ |
|
49 | 49 | ChangesetCommentHistory |
|
50 | 50 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
51 | 51 | from rhodecode.model.comment import CommentsModel |
|
52 | 52 | from rhodecode.model.meta import Session |
|
53 | 53 | from rhodecode.model.settings import VcsSettingsModel |
|
54 | 54 | |
|
55 | 55 | log = logging.getLogger(__name__) |
|
56 | 56 | |
|
57 | 57 | |
|
58 | 58 | def _update_with_GET(params, request): |
|
59 | 59 | for k in ['diff1', 'diff2', 'diff']: |
|
60 | 60 | params[k] += request.GET.getall(k) |
|
61 | 61 | |
|
62 | 62 | |
|
63 | 63 | class RepoCommitsView(RepoAppView): |
|
64 | 64 | def load_default_context(self): |
|
65 | 65 | c = self._get_local_tmpl_context(include_app_defaults=True) |
|
66 | 66 | c.rhodecode_repo = self.rhodecode_vcs_repo |
|
67 | 67 | |
|
68 | 68 | return c |
|
69 | 69 | |
|
70 | 70 | def _is_diff_cache_enabled(self, target_repo): |
|
71 | 71 | caching_enabled = self._get_general_setting( |
|
72 | 72 | target_repo, 'rhodecode_diff_cache') |
|
73 | 73 | log.debug('Diff caching enabled: %s', caching_enabled) |
|
74 | 74 | return caching_enabled |
|
75 | 75 | |
|
76 | 76 | def _commit(self, commit_id_range, method): |
|
77 | 77 | _ = self.request.translate |
|
78 | 78 | c = self.load_default_context() |
|
79 | 79 | c.fulldiff = self.request.GET.get('fulldiff') |
|
80 | 80 | |
|
81 | 81 | # fetch global flags of ignore ws or context lines |
|
82 | 82 | diff_context = get_diff_context(self.request) |
|
83 | 83 | hide_whitespace_changes = get_diff_whitespace_flag(self.request) |
|
84 | 84 | |
|
85 | 85 | # diff_limit will cut off the whole diff if the limit is applied |
|
86 | 86 | # otherwise it will just hide the big files from the front-end |
|
87 | 87 | diff_limit = c.visual.cut_off_limit_diff |
|
88 | 88 | file_limit = c.visual.cut_off_limit_file |
|
89 | 89 | |
|
90 | 90 | # get ranges of commit ids if preset |
|
91 | 91 | commit_range = commit_id_range.split('...')[:2] |
|
92 | 92 | |
|
93 | 93 | try: |
|
94 | 94 | pre_load = ['affected_files', 'author', 'branch', 'date', |
|
95 | 95 | 'message', 'parents'] |
|
96 | 96 | if self.rhodecode_vcs_repo.alias == 'hg': |
|
97 | 97 | pre_load += ['hidden', 'obsolete', 'phase'] |
|
98 | 98 | |
|
99 | 99 | if len(commit_range) == 2: |
|
100 | 100 | commits = self.rhodecode_vcs_repo.get_commits( |
|
101 | 101 | start_id=commit_range[0], end_id=commit_range[1], |
|
102 | 102 | pre_load=pre_load, translate_tags=False) |
|
103 | 103 | commits = list(commits) |
|
104 | 104 | else: |
|
105 | 105 | commits = [self.rhodecode_vcs_repo.get_commit( |
|
106 | 106 | commit_id=commit_id_range, pre_load=pre_load)] |
|
107 | 107 | |
|
108 | 108 | c.commit_ranges = commits |
|
109 | 109 | if not c.commit_ranges: |
|
110 | 110 | raise RepositoryError('The commit range returned an empty result') |
|
111 | 111 | except CommitDoesNotExistError as e: |
|
112 | 112 | msg = _('No such commit exists. Org exception: `{}`').format(e) |
|
113 | 113 | h.flash(msg, category='error') |
|
114 | 114 | raise HTTPNotFound() |
|
115 | 115 | except Exception: |
|
116 | 116 | log.exception("General failure") |
|
117 | 117 | raise HTTPNotFound() |
|
118 | 118 | |
|
119 | 119 | c.changes = OrderedDict() |
|
120 | 120 | c.lines_added = 0 |
|
121 | 121 | c.lines_deleted = 0 |
|
122 | 122 | |
|
123 | 123 | # auto collapse if we have more than limit |
|
124 | 124 | collapse_limit = diffs.DiffProcessor._collapse_commits_over |
|
125 | 125 | c.collapse_all_commits = len(c.commit_ranges) > collapse_limit |
|
126 | 126 | |
|
127 | 127 | c.commit_statuses = ChangesetStatus.STATUSES |
|
128 | 128 | c.inline_comments = [] |
|
129 | 129 | c.files = [] |
|
130 | 130 | |
|
131 | 131 | c.statuses = [] |
|
132 | 132 | c.comments = [] |
|
133 | 133 | c.unresolved_comments = [] |
|
134 | 134 | c.resolved_comments = [] |
|
135 | 135 | if len(c.commit_ranges) == 1: |
|
136 | 136 | commit = c.commit_ranges[0] |
|
137 | 137 | c.comments = CommentsModel().get_comments( |
|
138 | 138 | self.db_repo.repo_id, |
|
139 | 139 | revision=commit.raw_id) |
|
140 | 140 | c.statuses.append(ChangesetStatusModel().get_status( |
|
141 | 141 | self.db_repo.repo_id, commit.raw_id)) |
|
142 | 142 | # comments from PR |
|
143 | 143 | statuses = ChangesetStatusModel().get_statuses( |
|
144 | 144 | self.db_repo.repo_id, commit.raw_id, |
|
145 | 145 | with_revisions=True) |
|
146 | 146 | prs = set(st.pull_request for st in statuses |
|
147 | 147 | if st.pull_request is not None) |
|
148 | 148 | # from associated statuses, check the pull requests, and |
|
149 | 149 | # show comments from them |
|
150 | 150 | for pr in prs: |
|
151 | 151 | c.comments.extend(pr.comments) |
|
152 | 152 | |
|
153 | 153 | c.unresolved_comments = CommentsModel()\ |
|
154 | 154 | .get_commit_unresolved_todos(commit.raw_id) |
|
155 | 155 | c.resolved_comments = CommentsModel()\ |
|
156 | 156 | .get_commit_resolved_todos(commit.raw_id) |
|
157 | 157 | |
|
158 | 158 | diff = None |
|
159 | 159 | # Iterate over ranges (default commit view is always one commit) |
|
160 | 160 | for commit in c.commit_ranges: |
|
161 | 161 | c.changes[commit.raw_id] = [] |
|
162 | 162 | |
|
163 | 163 | commit2 = commit |
|
164 | 164 | commit1 = commit.first_parent |
|
165 | 165 | |
|
166 | 166 | if method == 'show': |
|
167 | 167 | inline_comments = CommentsModel().get_inline_comments( |
|
168 | 168 | self.db_repo.repo_id, revision=commit.raw_id) |
|
169 | 169 | c.inline_cnt = CommentsModel().get_inline_comments_count( |
|
170 | 170 | inline_comments) |
|
171 | 171 | c.inline_comments = inline_comments |
|
172 | 172 | |
|
173 | 173 | cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path( |
|
174 | 174 | self.db_repo) |
|
175 | 175 | cache_file_path = diff_cache_exist( |
|
176 | 176 | cache_path, 'diff', commit.raw_id, |
|
177 | 177 | hide_whitespace_changes, diff_context, c.fulldiff) |
|
178 | 178 | |
|
179 | 179 | caching_enabled = self._is_diff_cache_enabled(self.db_repo) |
|
180 | 180 | force_recache = str2bool(self.request.GET.get('force_recache')) |
|
181 | 181 | |
|
182 | 182 | cached_diff = None |
|
183 | 183 | if caching_enabled: |
|
184 | 184 | cached_diff = load_cached_diff(cache_file_path) |
|
185 | 185 | |
|
186 | 186 | has_proper_diff_cache = cached_diff and cached_diff.get('diff') |
|
187 | 187 | if not force_recache and has_proper_diff_cache: |
|
188 | 188 | diffset = cached_diff['diff'] |
|
189 | 189 | else: |
|
190 | 190 | vcs_diff = self.rhodecode_vcs_repo.get_diff( |
|
191 | 191 | commit1, commit2, |
|
192 | 192 | ignore_whitespace=hide_whitespace_changes, |
|
193 | 193 | context=diff_context) |
|
194 | 194 | |
|
195 | 195 | diff_processor = diffs.DiffProcessor( |
|
196 | 196 | vcs_diff, format='newdiff', diff_limit=diff_limit, |
|
197 | 197 | file_limit=file_limit, show_full_diff=c.fulldiff) |
|
198 | 198 | |
|
199 | 199 | _parsed = diff_processor.prepare() |
|
200 | 200 | |
|
201 | 201 | diffset = codeblocks.DiffSet( |
|
202 | 202 | repo_name=self.db_repo_name, |
|
203 | 203 | source_node_getter=codeblocks.diffset_node_getter(commit1), |
|
204 | 204 | target_node_getter=codeblocks.diffset_node_getter(commit2)) |
|
205 | 205 | |
|
206 | 206 | diffset = self.path_filter.render_patchset_filtered( |
|
207 | 207 | diffset, _parsed, commit1.raw_id, commit2.raw_id) |
|
208 | 208 | |
|
209 | 209 | # save cached diff |
|
210 | 210 | if caching_enabled: |
|
211 | 211 | cache_diff(cache_file_path, diffset, None) |
|
212 | 212 | |
|
213 | 213 | c.limited_diff = diffset.limited_diff |
|
214 | 214 | c.changes[commit.raw_id] = diffset |
|
215 | 215 | else: |
|
216 | 216 | # TODO(marcink): no cache usage here... |
|
217 | 217 | _diff = self.rhodecode_vcs_repo.get_diff( |
|
218 | 218 | commit1, commit2, |
|
219 | 219 | ignore_whitespace=hide_whitespace_changes, context=diff_context) |
|
220 | 220 | diff_processor = diffs.DiffProcessor( |
|
221 | 221 | _diff, format='newdiff', diff_limit=diff_limit, |
|
222 | 222 | file_limit=file_limit, show_full_diff=c.fulldiff) |
|
223 | 223 | # downloads/raw we only need RAW diff nothing else |
|
224 | 224 | diff = self.path_filter.get_raw_patch(diff_processor) |
|
225 | 225 | c.changes[commit.raw_id] = [None, None, None, None, diff, None, None] |
|
226 | 226 | |
|
227 | 227 | # sort comments by how they were generated |
|
228 | 228 | c.comments = sorted(c.comments, key=lambda x: x.comment_id) |
|
229 | 229 | |
|
230 | 230 | if len(c.commit_ranges) == 1: |
|
231 | 231 | c.commit = c.commit_ranges[0] |
|
232 | 232 | c.parent_tmpl = ''.join( |
|
233 | 233 | '# Parent %s\n' % x.raw_id for x in c.commit.parents) |
|
234 | 234 | |
|
235 | 235 | if method == 'download': |
|
236 | 236 | response = Response(diff) |
|
237 | 237 | response.content_type = 'text/plain' |
|
238 | 238 | response.content_disposition = ( |
|
239 | 239 | 'attachment; filename=%s.diff' % commit_id_range[:12]) |
|
240 | 240 | return response |
|
241 | 241 | elif method == 'patch': |
|
242 | 242 | c.diff = safe_unicode(diff) |
|
243 | 243 | patch = render( |
|
244 | 244 | 'rhodecode:templates/changeset/patch_changeset.mako', |
|
245 | 245 | self._get_template_context(c), self.request) |
|
246 | 246 | response = Response(patch) |
|
247 | 247 | response.content_type = 'text/plain' |
|
248 | 248 | return response |
|
249 | 249 | elif method == 'raw': |
|
250 | 250 | response = Response(diff) |
|
251 | 251 | response.content_type = 'text/plain' |
|
252 | 252 | return response |
|
253 | 253 | elif method == 'show': |
|
254 | 254 | if len(c.commit_ranges) == 1: |
|
255 | 255 | html = render( |
|
256 | 256 | 'rhodecode:templates/changeset/changeset.mako', |
|
257 | 257 | self._get_template_context(c), self.request) |
|
258 | 258 | return Response(html) |
|
259 | 259 | else: |
|
260 | 260 | c.ancestor = None |
|
261 | 261 | c.target_repo = self.db_repo |
|
262 | 262 | html = render( |
|
263 | 263 | 'rhodecode:templates/changeset/changeset_range.mako', |
|
264 | 264 | self._get_template_context(c), self.request) |
|
265 | 265 | return Response(html) |
|
266 | 266 | |
|
267 | 267 | raise HTTPBadRequest() |
|
268 | 268 | |
|
269 | 269 | @LoginRequired() |
|
270 | 270 | @HasRepoPermissionAnyDecorator( |
|
271 | 271 | 'repository.read', 'repository.write', 'repository.admin') |
|
272 | 272 | @view_config( |
|
273 | 273 | route_name='repo_commit', request_method='GET', |
|
274 | 274 | renderer=None) |
|
275 | 275 | def repo_commit_show(self): |
|
276 | 276 | commit_id = self.request.matchdict['commit_id'] |
|
277 | 277 | return self._commit(commit_id, method='show') |
|
278 | 278 | |
|
279 | 279 | @LoginRequired() |
|
280 | 280 | @HasRepoPermissionAnyDecorator( |
|
281 | 281 | 'repository.read', 'repository.write', 'repository.admin') |
|
282 | 282 | @view_config( |
|
283 | 283 | route_name='repo_commit_raw', request_method='GET', |
|
284 | 284 | renderer=None) |
|
285 | 285 | @view_config( |
|
286 | 286 | route_name='repo_commit_raw_deprecated', request_method='GET', |
|
287 | 287 | renderer=None) |
|
288 | 288 | def repo_commit_raw(self): |
|
289 | 289 | commit_id = self.request.matchdict['commit_id'] |
|
290 | 290 | return self._commit(commit_id, method='raw') |
|
291 | 291 | |
|
292 | 292 | @LoginRequired() |
|
293 | 293 | @HasRepoPermissionAnyDecorator( |
|
294 | 294 | 'repository.read', 'repository.write', 'repository.admin') |
|
295 | 295 | @view_config( |
|
296 | 296 | route_name='repo_commit_patch', request_method='GET', |
|
297 | 297 | renderer=None) |
|
298 | 298 | def repo_commit_patch(self): |
|
299 | 299 | commit_id = self.request.matchdict['commit_id'] |
|
300 | 300 | return self._commit(commit_id, method='patch') |
|
301 | 301 | |
|
302 | 302 | @LoginRequired() |
|
303 | 303 | @HasRepoPermissionAnyDecorator( |
|
304 | 304 | 'repository.read', 'repository.write', 'repository.admin') |
|
305 | 305 | @view_config( |
|
306 | 306 | route_name='repo_commit_download', request_method='GET', |
|
307 | 307 | renderer=None) |
|
308 | 308 | def repo_commit_download(self): |
|
309 | 309 | commit_id = self.request.matchdict['commit_id'] |
|
310 | 310 | return self._commit(commit_id, method='download') |
|
311 | 311 | |
|
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    @view_config(
        route_name='repo_commit_comment_create', request_method='POST',
        renderer='json_ext')
    def repo_commit_comment_create(self):
        """
        Create a comment (optionally with a status change) on one or more
        commits.

        The comment targets either the single commit from the URL, or — when
        the ``commit_ids`` POST field is present — every commit listed there.
        Returns a JSON dict with the rendered comment HTML for in-page
        injection.
        """
        _ = self.request.translate
        commit_id = self.request.matchdict['commit_id']

        c = self.load_default_context()
        status = self.request.POST.get('changeset_status', None)
        text = self.request.POST.get('text')
        comment_type = self.request.POST.get('comment_type')
        resolves_comment_id = self.request.POST.get('resolves_comment_id', None)

        if status:
            # a status change with no text gets an auto-generated body
            text = text or (_('Status change %(transition_icon)s %(status)s')
                            % {'transition_icon': '>',
                               'status': ChangesetStatus.get_status_lbl(status)})

        # de-duplicated list of extra commit ids from the POST body;
        # empty/None/empty-commit sentinels are skipped
        multi_commit_ids = []
        for _commit_id in self.request.POST.get('commit_ids', '').split(','):
            if _commit_id not in ['', None, EmptyCommit.raw_id]:
                if _commit_id not in multi_commit_ids:
                    multi_commit_ids.append(_commit_id)

        # explicit multi-commit selection wins over the URL commit
        commit_ids = multi_commit_ids or [commit_id]

        comment = None
        for current_id in filter(None, commit_ids):
            comment = CommentsModel().create(
                text=text,
                repo=self.db_repo.repo_id,
                user=self._rhodecode_db_user.user_id,
                commit_id=current_id,
                f_path=self.request.POST.get('f_path'),
                line_no=self.request.POST.get('line'),
                status_change=(ChangesetStatus.get_status_lbl(status)
                               if status else None),
                status_change_type=status,
                comment_type=comment_type,
                resolves_comment_id=resolves_comment_id,
                auth_user=self._rhodecode_user
            )

            # get status if set !
            if status:
                # if latest status was from pull request and it's closed
                # disallow changing status !
                # dont_allow_on_closed_pull_request = True !

                try:
                    ChangesetStatusModel().set_status(
                        self.db_repo.repo_id,
                        status,
                        self._rhodecode_db_user.user_id,
                        comment,
                        revision=current_id,
                        dont_allow_on_closed_pull_request=True
                    )
                except StatusChangeOnClosedPullRequestError:
                    msg = _('Changing the status of a commit associated with '
                            'a closed pull request is not allowed')
                    log.exception(msg)
                    h.flash(msg, category='warning')
                    # abort the whole request with a redirect back to the commit
                    raise HTTPFound(h.route_path(
                        'repo_commit', repo_name=self.db_repo_name,
                        commit_id=current_id))

            # fire the 'create' comment hook per commit
            commit = self.db_repo.get_commit(current_id)
            CommentsModel().trigger_commit_comment_hook(
                self.db_repo, self._rhodecode_user, 'create',
                data={'comment': comment, 'commit': commit})

        # finalize, commit and redirect
        Session().commit()

        data = {
            'target_id': h.safeid(h.safe_unicode(
                self.request.POST.get('f_path'))),
        }
        if comment:
            # render the last created comment for in-page display;
            # with multiple commits only the final one is returned
            c.co = comment
            rendered_comment = render(
                'rhodecode:templates/changeset/changeset_comment_block.mako',
                self._get_template_context(c), self.request)

            data.update(comment.get_dict())
            data.update({'rendered_text': rendered_comment})

        return data
|
406 | 406 | |
|
407 | 407 | @LoginRequired() |
|
408 | 408 | @NotAnonymous() |
|
409 | 409 | @HasRepoPermissionAnyDecorator( |
|
410 | 410 | 'repository.read', 'repository.write', 'repository.admin') |
|
411 | 411 | @CSRFRequired() |
|
412 | 412 | @view_config( |
|
413 | 413 | route_name='repo_commit_comment_preview', request_method='POST', |
|
414 | 414 | renderer='string', xhr=True) |
|
415 | 415 | def repo_commit_comment_preview(self): |
|
416 | 416 | # Technically a CSRF token is not needed as no state changes with this |
|
417 | 417 | # call. However, as this is a POST is better to have it, so automated |
|
418 | 418 | # tools don't flag it as potential CSRF. |
|
419 | 419 | # Post is required because the payload could be bigger than the maximum |
|
420 | 420 | # allowed by GET. |
|
421 | 421 | |
|
422 | 422 | text = self.request.POST.get('text') |
|
423 | 423 | renderer = self.request.POST.get('renderer') or 'rst' |
|
424 | 424 | if text: |
|
425 | 425 | return h.render(text, renderer=renderer, mentions=True, |
|
426 | 426 | repo_name=self.db_repo_name) |
|
427 | 427 | return '' |
|
428 | 428 | |
|
429 | 429 | @LoginRequired() |
|
430 | 430 | @NotAnonymous() |
|
431 | 431 | @HasRepoPermissionAnyDecorator( |
|
432 | 432 | 'repository.read', 'repository.write', 'repository.admin') |
|
433 | 433 | @CSRFRequired() |
|
434 | 434 | @view_config( |
|
435 | 435 | route_name='repo_commit_comment_history_view', request_method='POST', |
|
436 | 436 | renderer='string', xhr=True) |
|
437 | 437 | def repo_commit_comment_history_view(self): |
|
438 | 438 | c = self.load_default_context() |
|
439 | 439 | |
|
440 | 440 | comment_history_id = self.request.matchdict['comment_history_id'] |
|
441 | 441 | comment_history = ChangesetCommentHistory.get_or_404(comment_history_id) |
|
442 | 442 | is_repo_comment = comment_history.comment.repo.repo_id == self.db_repo.repo_id |
|
443 | 443 | |
|
444 | 444 | if is_repo_comment: |
|
445 | 445 | c.comment_history = comment_history |
|
446 | 446 | |
|
447 | 447 | rendered_comment = render( |
|
448 | 448 | 'rhodecode:templates/changeset/comment_history.mako', |
|
449 | 449 | self._get_template_context(c) |
|
450 | 450 | , self.request) |
|
451 | 451 | return rendered_comment |
|
452 | 452 | else: |
|
453 | 453 | log.warning('No permissions for user %s to show comment_history_id: %s', |
|
454 | 454 | self._rhodecode_db_user, comment_history_id) |
|
455 | 455 | raise HTTPNotFound() |
|
456 | 456 | |
|
457 | 457 | @LoginRequired() |
|
458 | 458 | @NotAnonymous() |
|
459 | 459 | @HasRepoPermissionAnyDecorator( |
|
460 | 460 | 'repository.read', 'repository.write', 'repository.admin') |
|
461 | 461 | @CSRFRequired() |
|
462 | 462 | @view_config( |
|
463 | 463 | route_name='repo_commit_comment_attachment_upload', request_method='POST', |
|
464 | 464 | renderer='json_ext', xhr=True) |
|
465 | 465 | def repo_commit_comment_attachment_upload(self): |
|
466 | 466 | c = self.load_default_context() |
|
467 | 467 | upload_key = 'attachment' |
|
468 | 468 | |
|
469 | 469 | file_obj = self.request.POST.get(upload_key) |
|
470 | 470 | |
|
471 | 471 | if file_obj is None: |
|
472 | 472 | self.request.response.status = 400 |
|
473 | 473 | return {'store_fid': None, |
|
474 | 474 | 'access_path': None, |
|
475 | 475 | 'error': '{} data field is missing'.format(upload_key)} |
|
476 | 476 | |
|
477 | 477 | if not hasattr(file_obj, 'filename'): |
|
478 | 478 | self.request.response.status = 400 |
|
479 | 479 | return {'store_fid': None, |
|
480 | 480 | 'access_path': None, |
|
481 | 481 | 'error': 'filename cannot be read from the data field'} |
|
482 | 482 | |
|
483 | 483 | filename = file_obj.filename |
|
484 | 484 | file_display_name = filename |
|
485 | 485 | |
|
486 | 486 | metadata = { |
|
487 | 487 | 'user_uploaded': {'username': self._rhodecode_user.username, |
|
488 | 488 | 'user_id': self._rhodecode_user.user_id, |
|
489 | 489 | 'ip': self._rhodecode_user.ip_addr}} |
|
490 | 490 | |
|
491 | 491 | # TODO(marcink): allow .ini configuration for allowed_extensions, and file-size |
|
492 | 492 | allowed_extensions = [ |
|
493 | 493 | 'gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf', |
|
494 | 494 | '.pptx', '.txt', '.xlsx', '.zip'] |
|
495 | 495 | max_file_size = 10 * 1024 * 1024 # 10MB, also validated via dropzone.js |
|
496 | 496 | |
|
497 | 497 | try: |
|
498 | 498 | storage = store_utils.get_file_storage(self.request.registry.settings) |
|
499 | 499 | store_uid, metadata = storage.save_file( |
|
500 | 500 | file_obj.file, filename, extra_metadata=metadata, |
|
501 | 501 | extensions=allowed_extensions, max_filesize=max_file_size) |
|
502 | 502 | except FileNotAllowedException: |
|
503 | 503 | self.request.response.status = 400 |
|
504 | 504 | permitted_extensions = ', '.join(allowed_extensions) |
|
505 | 505 | error_msg = 'File `{}` is not allowed. ' \ |
|
506 | 506 | 'Only following extensions are permitted: {}'.format( |
|
507 | 507 | filename, permitted_extensions) |
|
508 | 508 | return {'store_fid': None, |
|
509 | 509 | 'access_path': None, |
|
510 | 510 | 'error': error_msg} |
|
511 | 511 | except FileOverSizeException: |
|
512 | 512 | self.request.response.status = 400 |
|
513 | 513 | limit_mb = h.format_byte_size_binary(max_file_size) |
|
514 | 514 | return {'store_fid': None, |
|
515 | 515 | 'access_path': None, |
|
516 | 516 | 'error': 'File {} is exceeding allowed limit of {}.'.format( |
|
517 | 517 | filename, limit_mb)} |
|
518 | 518 | |
|
519 | 519 | try: |
|
520 | 520 | entry = FileStore.create( |
|
521 | 521 | file_uid=store_uid, filename=metadata["filename"], |
|
522 | 522 | file_hash=metadata["sha256"], file_size=metadata["size"], |
|
523 | 523 | file_display_name=file_display_name, |
|
524 | 524 | file_description=u'comment attachment `{}`'.format(safe_unicode(filename)), |
|
525 | 525 | hidden=True, check_acl=True, user_id=self._rhodecode_user.user_id, |
|
526 | 526 | scope_repo_id=self.db_repo.repo_id |
|
527 | 527 | ) |
|
528 | 528 | Session().add(entry) |
|
529 | 529 | Session().commit() |
|
530 | 530 | log.debug('Stored upload in DB as %s', entry) |
|
531 | 531 | except Exception: |
|
532 | 532 | log.exception('Failed to store file %s', filename) |
|
533 | 533 | self.request.response.status = 400 |
|
534 | 534 | return {'store_fid': None, |
|
535 | 535 | 'access_path': None, |
|
536 | 536 | 'error': 'File {} failed to store in DB.'.format(filename)} |
|
537 | 537 | |
|
538 | 538 | Session().commit() |
|
539 | 539 | |
|
540 | 540 | return { |
|
541 | 541 | 'store_fid': store_uid, |
|
542 | 542 | 'access_path': h.route_path( |
|
543 | 543 | 'download_file', fid=store_uid), |
|
544 | 544 | 'fqn_access_path': h.route_url( |
|
545 | 545 | 'download_file', fid=store_uid), |
|
546 | 546 | 'repo_access_path': h.route_path( |
|
547 | 547 | 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid), |
|
548 | 548 | 'repo_fqn_access_path': h.route_url( |
|
549 | 549 | 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid), |
|
550 | 550 | } |
|
551 | 551 | |
|
552 | 552 | @LoginRequired() |
|
553 | 553 | @NotAnonymous() |
|
554 | 554 | @HasRepoPermissionAnyDecorator( |
|
555 | 555 | 'repository.read', 'repository.write', 'repository.admin') |
|
556 | 556 | @CSRFRequired() |
|
557 | 557 | @view_config( |
|
558 | 558 | route_name='repo_commit_comment_delete', request_method='POST', |
|
559 | 559 | renderer='json_ext') |
|
560 | 560 | def repo_commit_comment_delete(self): |
|
561 | 561 | commit_id = self.request.matchdict['commit_id'] |
|
562 | 562 | comment_id = self.request.matchdict['comment_id'] |
|
563 | 563 | |
|
564 | 564 | comment = ChangesetComment.get_or_404(comment_id) |
|
565 | 565 | if not comment: |
|
566 | 566 | log.debug('Comment with id:%s not found, skipping', comment_id) |
|
567 | 567 | # comment already deleted in another call probably |
|
568 | 568 | return True |
|
569 | 569 | |
|
570 | 570 | if comment.immutable: |
|
571 | 571 | # don't allow deleting comments that are immutable |
|
572 | 572 | raise HTTPForbidden() |
|
573 | 573 | |
|
574 | 574 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) |
|
575 | 575 | super_admin = h.HasPermissionAny('hg.admin')() |
|
576 | 576 | comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id) |
|
577 | 577 | is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id |
|
578 | 578 | comment_repo_admin = is_repo_admin and is_repo_comment |
|
579 | 579 | |
|
580 | 580 | if super_admin or comment_owner or comment_repo_admin: |
|
581 | 581 | CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user) |
|
582 | 582 | Session().commit() |
|
583 | 583 | return True |
|
584 | 584 | else: |
|
585 | 585 | log.warning('No permissions for user %s to delete comment_id: %s', |
|
586 | 586 | self._rhodecode_db_user, comment_id) |
|
587 | 587 | raise HTTPNotFound() |
|
588 | 588 | |
|
589 | 589 | @LoginRequired() |
|
590 | 590 | @NotAnonymous() |
|
591 | 591 | @HasRepoPermissionAnyDecorator( |
|
592 | 592 | 'repository.read', 'repository.write', 'repository.admin') |
|
593 | 593 | @CSRFRequired() |
|
594 | 594 | @view_config( |
|
595 | 595 | route_name='repo_commit_comment_edit', request_method='POST', |
|
596 | 596 | renderer='json_ext') |
|
597 | 597 | def repo_commit_comment_edit(self): |
|
598 | 598 | self.load_default_context() |
|
599 | 599 | |
|
600 | 600 | comment_id = self.request.matchdict['comment_id'] |
|
601 | 601 | comment = ChangesetComment.get_or_404(comment_id) |
|
602 | 602 | |
|
603 | 603 | if comment.immutable: |
|
604 | 604 | # don't allow deleting comments that are immutable |
|
605 | 605 | raise HTTPForbidden() |
|
606 | 606 | |
|
607 | 607 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) |
|
608 | 608 | super_admin = h.HasPermissionAny('hg.admin')() |
|
609 | 609 | comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id) |
|
610 | 610 | is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id |
|
611 | 611 | comment_repo_admin = is_repo_admin and is_repo_comment |
|
612 | 612 | |
|
613 | 613 | if super_admin or comment_owner or comment_repo_admin: |
|
614 | 614 | text = self.request.POST.get('text') |
|
615 | 615 | version = self.request.POST.get('version') |
|
616 | 616 | if text == comment.text: |
|
617 | 617 | log.warning( |
|
618 | 618 | 'Comment(repo): ' |
|
619 | 619 | 'Trying to create new version ' |
|
620 | 620 | 'with the same comment body {}'.format( |
|
621 | 621 | comment_id, |
|
622 | 622 | ) |
|
623 | 623 | ) |
|
624 | 624 | raise HTTPNotFound() |
|
625 | 625 | |
|
626 | 626 | if version.isdigit(): |
|
627 | 627 | version = int(version) |
|
628 | 628 | else: |
|
629 | 629 | log.warning( |
|
630 | 630 | 'Comment(repo): Wrong version type {} {} ' |
|
631 | 631 | 'for comment {}'.format( |
|
632 | 632 | version, |
|
633 | 633 | type(version), |
|
634 | 634 | comment_id, |
|
635 | 635 | ) |
|
636 | 636 | ) |
|
637 | 637 | raise HTTPNotFound() |
|
638 | 638 | |
|
639 | 639 | try: |
|
640 | 640 | comment_history = CommentsModel().edit( |
|
641 | 641 | comment_id=comment_id, |
|
642 | 642 | text=text, |
|
643 | 643 | auth_user=self._rhodecode_user, |
|
644 | 644 | version=version, |
|
645 | 645 | ) |
|
646 | 646 | except CommentVersionMismatch: |
|
647 | 647 | raise HTTPConflict() |
|
648 | 648 | |
|
649 | 649 | if not comment_history: |
|
650 | 650 | raise HTTPNotFound() |
|
651 | 651 | |
|
652 | commit_id = self.request.matchdict['commit_id'] | |
|
653 | commit = self.db_repo.get_commit(commit_id) | |
|
654 | CommentsModel().trigger_commit_comment_hook( | |
|
655 | self.db_repo, self._rhodecode_user, 'edit', | |
|
656 | data={'comment': comment, 'commit': commit}) | |
|
657 | ||
|
652 | 658 | Session().commit() |
|
653 | 659 | return { |
|
654 | 660 | 'comment_history_id': comment_history.comment_history_id, |
|
655 | 661 | 'comment_id': comment.comment_id, |
|
656 | 662 | 'comment_version': comment_history.version, |
|
657 | 663 | 'comment_author_username': comment_history.author.username, |
|
658 | 664 | 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16), |
|
659 | 665 | 'comment_created_on': h.age_component(comment_history.created_on, |
|
660 | 666 | time_is_local=True), |
|
661 | 667 | } |
|
662 | 668 | else: |
|
663 | 669 | log.warning('No permissions for user %s to edit comment_id: %s', |
|
664 | 670 | self._rhodecode_db_user, comment_id) |
|
665 | 671 | raise HTTPNotFound() |
|
666 | 672 | |
|
667 | 673 | @LoginRequired() |
|
668 | 674 | @HasRepoPermissionAnyDecorator( |
|
669 | 675 | 'repository.read', 'repository.write', 'repository.admin') |
|
670 | 676 | @view_config( |
|
671 | 677 | route_name='repo_commit_data', request_method='GET', |
|
672 | 678 | renderer='json_ext', xhr=True) |
|
673 | 679 | def repo_commit_data(self): |
|
674 | 680 | commit_id = self.request.matchdict['commit_id'] |
|
675 | 681 | self.load_default_context() |
|
676 | 682 | |
|
677 | 683 | try: |
|
678 | 684 | return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) |
|
679 | 685 | except CommitDoesNotExistError as e: |
|
680 | 686 | return EmptyCommit(message=str(e)) |
|
681 | 687 | |
|
682 | 688 | @LoginRequired() |
|
683 | 689 | @HasRepoPermissionAnyDecorator( |
|
684 | 690 | 'repository.read', 'repository.write', 'repository.admin') |
|
685 | 691 | @view_config( |
|
686 | 692 | route_name='repo_commit_children', request_method='GET', |
|
687 | 693 | renderer='json_ext', xhr=True) |
|
688 | 694 | def repo_commit_children(self): |
|
689 | 695 | commit_id = self.request.matchdict['commit_id'] |
|
690 | 696 | self.load_default_context() |
|
691 | 697 | |
|
692 | 698 | try: |
|
693 | 699 | commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) |
|
694 | 700 | children = commit.children |
|
695 | 701 | except CommitDoesNotExistError: |
|
696 | 702 | children = [] |
|
697 | 703 | |
|
698 | 704 | result = {"results": children} |
|
699 | 705 | return result |
|
700 | 706 | |
|
701 | 707 | @LoginRequired() |
|
702 | 708 | @HasRepoPermissionAnyDecorator( |
|
703 | 709 | 'repository.read', 'repository.write', 'repository.admin') |
|
704 | 710 | @view_config( |
|
705 | 711 | route_name='repo_commit_parents', request_method='GET', |
|
706 | 712 | renderer='json_ext') |
|
707 | 713 | def repo_commit_parents(self): |
|
708 | 714 | commit_id = self.request.matchdict['commit_id'] |
|
709 | 715 | self.load_default_context() |
|
710 | 716 | |
|
711 | 717 | try: |
|
712 | 718 | commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id) |
|
713 | 719 | parents = commit.parents |
|
714 | 720 | except CommitDoesNotExistError: |
|
715 | 721 | parents = [] |
|
716 | 722 | result = {"results": parents} |
|
717 | 723 | return result |
@@ -1,1626 +1,1631 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | import collections |
|
23 | 23 | |
|
24 | 24 | import formencode |
|
25 | 25 | import formencode.htmlfill |
|
26 | 26 | import peppercorn |
|
27 | 27 | from pyramid.httpexceptions import ( |
|
28 | 28 | HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest, HTTPConflict) |
|
29 | 29 | from pyramid.view import view_config |
|
30 | 30 | from pyramid.renderers import render |
|
31 | 31 | |
|
32 | 32 | from rhodecode.apps._base import RepoAppView, DataGridAppView |
|
33 | 33 | |
|
34 | 34 | from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream |
|
35 | 35 | from rhodecode.lib.base import vcs_operation_context |
|
36 | 36 | from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist |
|
37 | 37 | from rhodecode.lib.exceptions import CommentVersionMismatch |
|
38 | 38 | from rhodecode.lib.ext_json import json |
|
39 | 39 | from rhodecode.lib.auth import ( |
|
40 | 40 | LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator, |
|
41 | 41 | NotAnonymous, CSRFRequired) |
|
42 | 42 | from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode |
|
43 | 43 | from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason |
|
44 | 44 | from rhodecode.lib.vcs.exceptions import ( |
|
45 | 45 | CommitDoesNotExistError, RepositoryRequirementError, EmptyRepositoryError) |
|
46 | 46 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
47 | 47 | from rhodecode.model.comment import CommentsModel |
|
48 | 48 | from rhodecode.model.db import ( |
|
49 | 49 | func, or_, PullRequest, ChangesetComment, ChangesetStatus, Repository) |
|
50 | 50 | from rhodecode.model.forms import PullRequestForm |
|
51 | 51 | from rhodecode.model.meta import Session |
|
52 | 52 | from rhodecode.model.pull_request import PullRequestModel, MergeCheck |
|
53 | 53 | from rhodecode.model.scm import ScmModel |
|
54 | 54 | |
|
55 | 55 | log = logging.getLogger(__name__) |
|
56 | 56 | |
|
57 | 57 | |
|
58 | 58 | class RepoPullRequestsView(RepoAppView, DataGridAppView): |
|
59 | 59 | |
|
    def load_default_context(self):
        """Build the common template context shared by pull-request views."""
        c = self._get_local_tmpl_context(include_app_defaults=True)
        # status constants used by the review-status widgets in templates
        c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
        c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
        # backward compat., we use for OLD PRs a plain renderer
        c.renderer = 'plain'
        return c
67 | 67 | |
|
68 | 68 | def _get_pull_requests_list( |
|
69 | 69 | self, repo_name, source, filter_type, opened_by, statuses): |
|
70 | 70 | |
|
71 | 71 | draw, start, limit = self._extract_chunk(self.request) |
|
72 | 72 | search_q, order_by, order_dir = self._extract_ordering(self.request) |
|
73 | 73 | _render = self.request.get_partial_renderer( |
|
74 | 74 | 'rhodecode:templates/data_table/_dt_elements.mako') |
|
75 | 75 | |
|
76 | 76 | # pagination |
|
77 | 77 | |
|
78 | 78 | if filter_type == 'awaiting_review': |
|
79 | 79 | pull_requests = PullRequestModel().get_awaiting_review( |
|
80 | 80 | repo_name, search_q=search_q, source=source, opened_by=opened_by, |
|
81 | 81 | statuses=statuses, offset=start, length=limit, |
|
82 | 82 | order_by=order_by, order_dir=order_dir) |
|
83 | 83 | pull_requests_total_count = PullRequestModel().count_awaiting_review( |
|
84 | 84 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
85 | 85 | opened_by=opened_by) |
|
86 | 86 | elif filter_type == 'awaiting_my_review': |
|
87 | 87 | pull_requests = PullRequestModel().get_awaiting_my_review( |
|
88 | 88 | repo_name, search_q=search_q, source=source, opened_by=opened_by, |
|
89 | 89 | user_id=self._rhodecode_user.user_id, statuses=statuses, |
|
90 | 90 | offset=start, length=limit, order_by=order_by, |
|
91 | 91 | order_dir=order_dir) |
|
92 | 92 | pull_requests_total_count = PullRequestModel().count_awaiting_my_review( |
|
93 | 93 | repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id, |
|
94 | 94 | statuses=statuses, opened_by=opened_by) |
|
95 | 95 | else: |
|
96 | 96 | pull_requests = PullRequestModel().get_all( |
|
97 | 97 | repo_name, search_q=search_q, source=source, opened_by=opened_by, |
|
98 | 98 | statuses=statuses, offset=start, length=limit, |
|
99 | 99 | order_by=order_by, order_dir=order_dir) |
|
100 | 100 | pull_requests_total_count = PullRequestModel().count_all( |
|
101 | 101 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
102 | 102 | opened_by=opened_by) |
|
103 | 103 | |
|
104 | 104 | data = [] |
|
105 | 105 | comments_model = CommentsModel() |
|
106 | 106 | for pr in pull_requests: |
|
107 | 107 | comments = comments_model.get_all_comments( |
|
108 | 108 | self.db_repo.repo_id, pull_request=pr) |
|
109 | 109 | |
|
110 | 110 | data.append({ |
|
111 | 111 | 'name': _render('pullrequest_name', |
|
112 | 112 | pr.pull_request_id, pr.pull_request_state, |
|
113 | 113 | pr.work_in_progress, pr.target_repo.repo_name), |
|
114 | 114 | 'name_raw': pr.pull_request_id, |
|
115 | 115 | 'status': _render('pullrequest_status', |
|
116 | 116 | pr.calculated_review_status()), |
|
117 | 117 | 'title': _render('pullrequest_title', pr.title, pr.description), |
|
118 | 118 | 'description': h.escape(pr.description), |
|
119 | 119 | 'updated_on': _render('pullrequest_updated_on', |
|
120 | 120 | h.datetime_to_time(pr.updated_on)), |
|
121 | 121 | 'updated_on_raw': h.datetime_to_time(pr.updated_on), |
|
122 | 122 | 'created_on': _render('pullrequest_updated_on', |
|
123 | 123 | h.datetime_to_time(pr.created_on)), |
|
124 | 124 | 'created_on_raw': h.datetime_to_time(pr.created_on), |
|
125 | 125 | 'state': pr.pull_request_state, |
|
126 | 126 | 'author': _render('pullrequest_author', |
|
127 | 127 | pr.author.full_contact, ), |
|
128 | 128 | 'author_raw': pr.author.full_name, |
|
129 | 129 | 'comments': _render('pullrequest_comments', len(comments)), |
|
130 | 130 | 'comments_raw': len(comments), |
|
131 | 131 | 'closed': pr.is_closed(), |
|
132 | 132 | }) |
|
133 | 133 | |
|
134 | 134 | data = ({ |
|
135 | 135 | 'draw': draw, |
|
136 | 136 | 'data': data, |
|
137 | 137 | 'recordsTotal': pull_requests_total_count, |
|
138 | 138 | 'recordsFiltered': pull_requests_total_count, |
|
139 | 139 | }) |
|
140 | 140 | return data |
|
141 | 141 | |
|
142 | 142 | @LoginRequired() |
|
143 | 143 | @HasRepoPermissionAnyDecorator( |
|
144 | 144 | 'repository.read', 'repository.write', 'repository.admin') |
|
145 | 145 | @view_config( |
|
146 | 146 | route_name='pullrequest_show_all', request_method='GET', |
|
147 | 147 | renderer='rhodecode:templates/pullrequests/pullrequests.mako') |
|
148 | 148 | def pull_request_list(self): |
|
149 | 149 | c = self.load_default_context() |
|
150 | 150 | |
|
151 | 151 | req_get = self.request.GET |
|
152 | 152 | c.source = str2bool(req_get.get('source')) |
|
153 | 153 | c.closed = str2bool(req_get.get('closed')) |
|
154 | 154 | c.my = str2bool(req_get.get('my')) |
|
155 | 155 | c.awaiting_review = str2bool(req_get.get('awaiting_review')) |
|
156 | 156 | c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review')) |
|
157 | 157 | |
|
158 | 158 | c.active = 'open' |
|
159 | 159 | if c.my: |
|
160 | 160 | c.active = 'my' |
|
161 | 161 | if c.closed: |
|
162 | 162 | c.active = 'closed' |
|
163 | 163 | if c.awaiting_review and not c.source: |
|
164 | 164 | c.active = 'awaiting' |
|
165 | 165 | if c.source and not c.awaiting_review: |
|
166 | 166 | c.active = 'source' |
|
167 | 167 | if c.awaiting_my_review: |
|
168 | 168 | c.active = 'awaiting_my' |
|
169 | 169 | |
|
170 | 170 | return self._get_template_context(c) |
|
171 | 171 | |
|
172 | 172 | @LoginRequired() |
|
173 | 173 | @HasRepoPermissionAnyDecorator( |
|
174 | 174 | 'repository.read', 'repository.write', 'repository.admin') |
|
175 | 175 | @view_config( |
|
176 | 176 | route_name='pullrequest_show_all_data', request_method='GET', |
|
177 | 177 | renderer='json_ext', xhr=True) |
|
178 | 178 | def pull_request_list_data(self): |
|
179 | 179 | self.load_default_context() |
|
180 | 180 | |
|
181 | 181 | # additional filters |
|
182 | 182 | req_get = self.request.GET |
|
183 | 183 | source = str2bool(req_get.get('source')) |
|
184 | 184 | closed = str2bool(req_get.get('closed')) |
|
185 | 185 | my = str2bool(req_get.get('my')) |
|
186 | 186 | awaiting_review = str2bool(req_get.get('awaiting_review')) |
|
187 | 187 | awaiting_my_review = str2bool(req_get.get('awaiting_my_review')) |
|
188 | 188 | |
|
189 | 189 | filter_type = 'awaiting_review' if awaiting_review \ |
|
190 | 190 | else 'awaiting_my_review' if awaiting_my_review \ |
|
191 | 191 | else None |
|
192 | 192 | |
|
193 | 193 | opened_by = None |
|
194 | 194 | if my: |
|
195 | 195 | opened_by = [self._rhodecode_user.user_id] |
|
196 | 196 | |
|
197 | 197 | statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN] |
|
198 | 198 | if closed: |
|
199 | 199 | statuses = [PullRequest.STATUS_CLOSED] |
|
200 | 200 | |
|
201 | 201 | data = self._get_pull_requests_list( |
|
202 | 202 | repo_name=self.db_repo_name, source=source, |
|
203 | 203 | filter_type=filter_type, opened_by=opened_by, statuses=statuses) |
|
204 | 204 | |
|
205 | 205 | return data |
|
206 | 206 | |
|
207 | 207 | def _is_diff_cache_enabled(self, target_repo): |
|
208 | 208 | caching_enabled = self._get_general_setting( |
|
209 | 209 | target_repo, 'rhodecode_diff_cache') |
|
210 | 210 | log.debug('Diff caching enabled: %s', caching_enabled) |
|
211 | 211 | return caching_enabled |
|
212 | 212 | |
|
213 | 213 | def _get_diffset(self, source_repo_name, source_repo, |
|
214 | 214 | ancestor_commit, |
|
215 | 215 | source_ref_id, target_ref_id, |
|
216 | 216 | target_commit, source_commit, diff_limit, file_limit, |
|
217 | 217 | fulldiff, hide_whitespace_changes, diff_context, use_ancestor=True): |
|
218 | 218 | |
|
219 | 219 | if use_ancestor: |
|
220 | 220 | # we might want to not use it for versions |
|
221 | 221 | target_ref_id = ancestor_commit.raw_id |
|
222 | 222 | |
|
223 | 223 | vcs_diff = PullRequestModel().get_diff( |
|
224 | 224 | source_repo, source_ref_id, target_ref_id, |
|
225 | 225 | hide_whitespace_changes, diff_context) |
|
226 | 226 | |
|
227 | 227 | diff_processor = diffs.DiffProcessor( |
|
228 | 228 | vcs_diff, format='newdiff', diff_limit=diff_limit, |
|
229 | 229 | file_limit=file_limit, show_full_diff=fulldiff) |
|
230 | 230 | |
|
231 | 231 | _parsed = diff_processor.prepare() |
|
232 | 232 | |
|
233 | 233 | diffset = codeblocks.DiffSet( |
|
234 | 234 | repo_name=self.db_repo_name, |
|
235 | 235 | source_repo_name=source_repo_name, |
|
236 | 236 | source_node_getter=codeblocks.diffset_node_getter(target_commit), |
|
237 | 237 | target_node_getter=codeblocks.diffset_node_getter(source_commit), |
|
238 | 238 | ) |
|
239 | 239 | diffset = self.path_filter.render_patchset_filtered( |
|
240 | 240 | diffset, _parsed, target_commit.raw_id, source_commit.raw_id) |
|
241 | 241 | |
|
242 | 242 | return diffset |
|
243 | 243 | |
|
244 | 244 | def _get_range_diffset(self, source_scm, source_repo, |
|
245 | 245 | commit1, commit2, diff_limit, file_limit, |
|
246 | 246 | fulldiff, hide_whitespace_changes, diff_context): |
|
247 | 247 | vcs_diff = source_scm.get_diff( |
|
248 | 248 | commit1, commit2, |
|
249 | 249 | ignore_whitespace=hide_whitespace_changes, |
|
250 | 250 | context=diff_context) |
|
251 | 251 | |
|
252 | 252 | diff_processor = diffs.DiffProcessor( |
|
253 | 253 | vcs_diff, format='newdiff', diff_limit=diff_limit, |
|
254 | 254 | file_limit=file_limit, show_full_diff=fulldiff) |
|
255 | 255 | |
|
256 | 256 | _parsed = diff_processor.prepare() |
|
257 | 257 | |
|
258 | 258 | diffset = codeblocks.DiffSet( |
|
259 | 259 | repo_name=source_repo.repo_name, |
|
260 | 260 | source_node_getter=codeblocks.diffset_node_getter(commit1), |
|
261 | 261 | target_node_getter=codeblocks.diffset_node_getter(commit2)) |
|
262 | 262 | |
|
263 | 263 | diffset = self.path_filter.render_patchset_filtered( |
|
264 | 264 | diffset, _parsed, commit1.raw_id, commit2.raw_id) |
|
265 | 265 | |
|
266 | 266 | return diffset |
|
267 | 267 | |
|
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='pullrequest_show', request_method='GET',
        renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
    def pull_request_show(self):
        """
        Render a single pull request, optionally at a given version or in
        compare mode between two versions.

        GET parameters read: ``version``, ``from_version``, ``merge_checks``,
        ``fulldiff``, ``force_refresh``, ``force_state``, ``range-diff``.
        Populates the template context ``c`` with permissions, merge-check
        results, comments, commit ranges and (cached) diffsets.
        """
        _ = self.request.translate
        c = self.load_default_context()

        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        pull_request_id = pull_request.pull_request_id

        c.state_progressing = pull_request.is_state_changing()

        # super-admins may force the PR state back to 'created' via
        # ?force_state=created; anything else maps to None (no-op)
        _new_state = {
            'created': PullRequest.STATE_CREATED,
        }.get(self.request.GET.get('force_state'))

        if c.is_super_admin and _new_state:
            with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
                h.flash(
                    _('Pull Request state was force changed to `{}`').format(_new_state),
                    category='success')
                Session().commit()

            # redirect so the forced state is reflected on a clean GET
            raise HTTPFound(h.route_path(
                'pullrequest_show', repo_name=self.db_repo_name,
                pull_request_id=pull_request_id))

        version = self.request.GET.get('version')
        from_version = self.request.GET.get('from_version') or version
        merge_checks = self.request.GET.get('merge_checks')
        c.fulldiff = str2bool(self.request.GET.get('fulldiff'))

        # fetch global flags of ignore ws or context lines
        diff_context = diffs.get_diff_context(self.request)
        hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)

        force_refresh = str2bool(self.request.GET.get('force_refresh'))

        (pull_request_latest,
         pull_request_at_ver,
         pull_request_display_obj,
         at_version) = PullRequestModel().get_pr_version(
            pull_request_id, version=version)
        pr_closed = pull_request_latest.is_closed()

        if pr_closed and (version or from_version):
            # not allow to browse versions for closed PRs; redirect to latest
            raise HTTPFound(h.route_path(
                'pullrequest_show', repo_name=self.db_repo_name,
                pull_request_id=pull_request_id))

        versions = pull_request_display_obj.versions()
        # used to store per-commit range diffs
        c.changes = collections.OrderedDict()
        c.range_diff_on = self.request.GET.get('range-diff') == "1"

        c.at_version = at_version
        c.at_version_num = (at_version
                            if at_version and at_version != 'latest'
                            else None)
        c.at_version_pos = ChangesetComment.get_index_from_version(
            c.at_version_num, versions)

        # resolve the "from" side the same way for compare mode
        (prev_pull_request_latest,
         prev_pull_request_at_ver,
         prev_pull_request_display_obj,
         prev_at_version) = PullRequestModel().get_pr_version(
            pull_request_id, version=from_version)

        c.from_version = prev_at_version
        c.from_version_num = (prev_at_version
                              if prev_at_version and prev_at_version != 'latest'
                              else None)
        c.from_version_pos = ChangesetComment.get_index_from_version(
            c.from_version_num, versions)

        # define if we're in COMPARE mode or VIEW at version mode
        compare = at_version != prev_at_version

        # pull_requests repo_name we opened it against
        # ie. target_repo must match
        if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
            raise HTTPNotFound()

        c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
            pull_request_at_ver)

        c.pull_request = pull_request_display_obj
        c.renderer = pull_request_at_ver.description_renderer or c.renderer
        c.pull_request_latest = pull_request_latest

        # all mutating actions are disabled when viewing an older version
        # or when comparing two versions
        if compare or (at_version and not at_version == 'latest'):
            c.allowed_to_change_status = False
            c.allowed_to_update = False
            c.allowed_to_merge = False
            c.allowed_to_delete = False
            c.allowed_to_comment = False
            c.allowed_to_close = False
        else:
            can_change_status = PullRequestModel().check_user_change_status(
                pull_request_at_ver, self._rhodecode_user)
            c.allowed_to_change_status = can_change_status and not pr_closed

            c.allowed_to_update = PullRequestModel().check_user_update(
                pull_request_latest, self._rhodecode_user) and not pr_closed
            c.allowed_to_merge = PullRequestModel().check_user_merge(
                pull_request_latest, self._rhodecode_user) and not pr_closed
            c.allowed_to_delete = PullRequestModel().check_user_delete(
                pull_request_latest, self._rhodecode_user) and not pr_closed
            c.allowed_to_comment = not pr_closed
            c.allowed_to_close = c.allowed_to_merge and not pr_closed

        c.forbid_adding_reviewers = False
        c.forbid_author_to_review = False
        c.forbid_commit_author_to_review = False

        # reviewer-rule flags are best-effort; reviewer_data comes from
        # storage and may not contain the expected structure
        if pull_request_latest.reviewer_data and \
                'rules' in pull_request_latest.reviewer_data:
            rules = pull_request_latest.reviewer_data['rules'] or {}
            try:
                c.forbid_adding_reviewers = rules.get(
                    'forbid_adding_reviewers')
                c.forbid_author_to_review = rules.get(
                    'forbid_author_to_review')
                c.forbid_commit_author_to_review = rules.get(
                    'forbid_commit_author_to_review')
            except Exception:
                pass

        # check merge capabilities
        _merge_check = MergeCheck.validate(
            pull_request_latest, auth_user=self._rhodecode_user,
            translator=self.request.translate,
            force_shadow_repo_refresh=force_refresh)

        c.pr_merge_errors = _merge_check.error_details
        c.pr_merge_possible = not _merge_check.failed
        c.pr_merge_message = _merge_check.merge_msg
        c.pr_merge_source_commit = _merge_check.source_commit
        c.pr_merge_target_commit = _merge_check.target_commit

        c.pr_merge_info = MergeCheck.get_merge_conditions(
            pull_request_latest, translator=self.request.translate)

        c.pull_request_review_status = _merge_check.review_status
        if merge_checks:
            # AJAX-style partial render: only the merge-checks fragment
            self.request.override_renderer = \
                'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
            return self._get_template_context(c)

        comments_model = CommentsModel()

        # reviewers and statuses
        c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
        allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]

        # GENERAL COMMENTS with versions #
        q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
        q = q.order_by(ChangesetComment.comment_id.asc())
        general_comments = q

        # pick comments we want to render at current version
        c.comment_versions = comments_model.aggregate_comments(
            general_comments, versions, c.at_version_num)
        c.comments = c.comment_versions[c.at_version_num]['until']

        # INLINE COMMENTS with versions #
        q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
        q = q.order_by(ChangesetComment.comment_id.asc())
        inline_comments = q

        c.inline_versions = comments_model.aggregate_comments(
            inline_comments, versions, c.at_version_num, inline=True)

        # TODOs
        c.unresolved_comments = CommentsModel() \
            .get_pull_request_unresolved_todos(pull_request)
        c.resolved_comments = CommentsModel() \
            .get_pull_request_resolved_todos(pull_request)

        # inject latest version
        latest_ver = PullRequest.get_pr_display_object(
            pull_request_latest, pull_request_latest)

        c.versions = versions + [latest_ver]

        # if we use version, then do not show later comments
        # than current version
        display_inline_comments = collections.defaultdict(
            lambda: collections.defaultdict(list))
        for co in inline_comments:
            if c.at_version_num:
                # pick comments that are at least UPTO given version, so we
                # don't render comments for higher version
                should_render = co.pull_request_version_id and \
                    co.pull_request_version_id <= c.at_version_num
            else:
                # showing all, for 'latest'
                should_render = True

            if should_render:
                display_inline_comments[co.f_path][co.line_no].append(co)

        # load diff data into template context, if we use compare mode then
        # diff is calculated based on changes between versions of PR

        source_repo = pull_request_at_ver.source_repo
        source_ref_id = pull_request_at_ver.source_ref_parts.commit_id

        target_repo = pull_request_at_ver.target_repo
        target_ref_id = pull_request_at_ver.target_ref_parts.commit_id

        if compare:
            # in compare switch the diff base to latest commit from prev version
            target_ref_id = prev_pull_request_display_obj.revisions[0]

        # despite opening commits for bookmarks/branches/tags, we always
        # convert this to rev to prevent changes after bookmark or branch change
        c.source_ref_type = 'rev'
        c.source_ref = source_ref_id

        c.target_ref_type = 'rev'
        c.target_ref = target_ref_id

        c.source_repo = source_repo
        c.target_repo = target_repo

        c.commit_ranges = []
        source_commit = EmptyCommit()
        target_commit = EmptyCommit()
        c.missing_requirements = False

        source_scm = source_repo.scm_instance()
        target_scm = target_repo.scm_instance()

        shadow_scm = None
        try:
            shadow_scm = pull_request_latest.get_shadow_repo()
        except Exception:
            log.debug('Failed to get shadow repo', exc_info=True)
        # try first the existing source_repo, and then shadow
        # repo if we can obtain one
        commits_source_repo = source_scm
        if shadow_scm:
            commits_source_repo = shadow_scm

        c.commits_source_repo = commits_source_repo
        c.ancestor = None  # set it to None, to hide it from PR view

        # empty version means latest, so we keep this to prevent
        # double caching
        version_normalized = version or 'latest'
        from_version_normalized = from_version or 'latest'

        cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
        cache_file_path = diff_cache_exist(
            cache_path, 'pull_request', pull_request_id, version_normalized,
            from_version_normalized, source_ref_id, target_ref_id,
            hide_whitespace_changes, diff_context, c.fulldiff)

        caching_enabled = self._is_diff_cache_enabled(c.target_repo)
        force_recache = self.get_recache_flag()

        cached_diff = None
        if caching_enabled:
            cached_diff = load_cached_diff(cache_file_path)

        # a usable commit cache is a 5-tuple with at least the ancestor
        # (index 0) and source commit (index 3) present
        has_proper_commit_cache = (
            cached_diff and cached_diff.get('commits')
            and len(cached_diff.get('commits', [])) == 5
            and cached_diff.get('commits')[0]
            and cached_diff.get('commits')[3])

        if not force_recache and not c.range_diff_on and has_proper_commit_cache:
            # chained assignment: keep the whole tuple for re-caching AND
            # unpack its elements for direct use below
            diff_commit_cache = \
                (ancestor_commit, commit_cache, missing_requirements,
                 source_commit, target_commit) = cached_diff['commits']
        else:
            # NOTE(marcink): we reach potentially unreachable errors when a PR has
            # merge errors resulting in potentially hidden commits in the shadow repo.
            maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \
                and _merge_check.merge_response
            maybe_unreachable = maybe_unreachable \
                and _merge_check.merge_response.metadata.get('unresolved_files')
            log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation")
            diff_commit_cache = \
                (ancestor_commit, commit_cache, missing_requirements,
                 source_commit, target_commit) = self.get_commits(
                    commits_source_repo,
                    pull_request_at_ver,
                    source_commit,
                    source_ref_id,
                    source_scm,
                    target_commit,
                    target_ref_id,
                    target_scm,
                    maybe_unreachable=maybe_unreachable)

        # register our commit range
        for comm in commit_cache.values():
            c.commit_ranges.append(comm)

        c.missing_requirements = missing_requirements
        c.ancestor_commit = ancestor_commit
        c.statuses = source_repo.statuses(
            [x.raw_id for x in c.commit_ranges])

        # auto collapse if we have more than limit
        collapse_limit = diffs.DiffProcessor._collapse_commits_over
        c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
        c.compare_mode = compare

        # diff_limit is the old behavior, will cut off the whole diff
        # if the limit is applied  otherwise will just hide the
        # big files from the front-end
        diff_limit = c.visual.cut_off_limit_diff
        file_limit = c.visual.cut_off_limit_file

        c.missing_commits = False
        if (c.missing_requirements
                or isinstance(source_commit, EmptyCommit)
                or source_commit == target_commit):

            c.missing_commits = True
        else:
            c.inline_comments = display_inline_comments

            use_ancestor = True
            if from_version_normalized != version_normalized:
                use_ancestor = False

            has_proper_diff_cache = cached_diff and cached_diff.get('commits')
            if not force_recache and has_proper_diff_cache:
                c.diffset = cached_diff['diff']
            else:
                c.diffset = self._get_diffset(
                    c.source_repo.repo_name, commits_source_repo,
                    c.ancestor_commit,
                    source_ref_id, target_ref_id,
                    target_commit, source_commit,
                    diff_limit, file_limit, c.fulldiff,
                    hide_whitespace_changes, diff_context,
                    use_ancestor=use_ancestor
                )

                # save cached diff
                if caching_enabled:
                    cache_diff(cache_file_path, c.diffset, diff_commit_cache)

            c.limited_diff = c.diffset.limited_diff

            # calculate removed files that are bound to comments
            comment_deleted_files = [
                fname for fname in display_inline_comments
                if fname not in c.diffset.file_stats]

            c.deleted_files_comments = collections.defaultdict(dict)
            for fname, per_line_comments in display_inline_comments.items():
                if fname in comment_deleted_files:
                    c.deleted_files_comments[fname]['stats'] = 0
                    c.deleted_files_comments[fname]['comments'] = list()
                    for lno, comments in per_line_comments.items():
                        c.deleted_files_comments[fname]['comments'].extend(comments)

            # maybe calculate the range diff
            if c.range_diff_on:
                # TODO(marcink): set whitespace/context
                context_lcl = 3
                ign_whitespace_lcl = False

                for commit in c.commit_ranges:
                    commit2 = commit
                    commit1 = commit.first_parent

                    range_diff_cache_file_path = diff_cache_exist(
                        cache_path, 'diff', commit.raw_id,
                        ign_whitespace_lcl, context_lcl, c.fulldiff)

                    cached_diff = None
                    if caching_enabled:
                        cached_diff = load_cached_diff(range_diff_cache_file_path)

                    has_proper_diff_cache = cached_diff and cached_diff.get('diff')
                    if not force_recache and has_proper_diff_cache:
                        diffset = cached_diff['diff']
                    else:
                        diffset = self._get_range_diffset(
                            commits_source_repo, source_repo,
                            commit1, commit2, diff_limit, file_limit,
                            c.fulldiff, ign_whitespace_lcl, context_lcl
                        )

                        # save cached diff
                        if caching_enabled:
                            cache_diff(range_diff_cache_file_path, diffset, None)

                    c.changes[commit.raw_id] = diffset

        # this is a hack to properly display links, when creating PR, the
        # compare view and others uses different notation, and
        # compare_commits.mako renders links based on the target_repo.
        # We need to swap that here to generate it properly on the html side
        c.target_repo = c.source_repo

        c.commit_statuses = ChangesetStatus.STATUSES

        c.show_version_changes = not pr_closed
        if c.show_version_changes:
            cur_obj = pull_request_at_ver
            prev_obj = prev_pull_request_at_ver

            old_commit_ids = prev_obj.revisions
            new_commit_ids = cur_obj.revisions
            commit_changes = PullRequestModel()._calculate_commit_id_changes(
                old_commit_ids, new_commit_ids)
            c.commit_changes_summary = commit_changes

            # calculate the diff for commits between versions
            c.commit_changes = []

            # tag each commit id with its change type: 'a'dded, 'r'emoved,
            # 'c'ommon  (izip_longest against [] pairs the fillvalue with
            # every element)
            def mark(cs, fw):
                return list(h.itertools.izip_longest([], cs, fillvalue=fw))

            for c_type, raw_id in mark(commit_changes.added, 'a') \
                    + mark(commit_changes.removed, 'r') \
                    + mark(commit_changes.common, 'c'):

                if raw_id in commit_cache:
                    commit = commit_cache[raw_id]
                else:
                    try:
                        commit = commits_source_repo.get_commit(raw_id)
                    except CommitDoesNotExistError:
                        # in case we fail extracting still use "dummy" commit
                        # for display in commit diff
                        commit = h.AttributeDict(
                            {'raw_id': raw_id,
                             'message': 'EMPTY or MISSING COMMIT'})
                c.commit_changes.append([c_type, commit])

        # current user review statuses for each version
        c.review_versions = {}
        if self._rhodecode_user.user_id in allowed_reviewers:
            for co in general_comments:
                if co.author.user_id == self._rhodecode_user.user_id:
                    status = co.status_change
                    if status:
                        _ver_pr = status[0].comment.pull_request_version_id
                        c.review_versions[_ver_pr] = status[0]

        return self._get_template_context(c)
|
723 | 723 | |
|
724 | 724 | def get_commits( |
|
725 | 725 | self, commits_source_repo, pull_request_at_ver, source_commit, |
|
726 | 726 | source_ref_id, source_scm, target_commit, target_ref_id, target_scm, |
|
727 | 727 | maybe_unreachable=False): |
|
728 | 728 | |
|
729 | 729 | commit_cache = collections.OrderedDict() |
|
730 | 730 | missing_requirements = False |
|
731 | 731 | |
|
732 | 732 | try: |
|
733 | 733 | pre_load = ["author", "date", "message", "branch", "parents"] |
|
734 | 734 | |
|
735 | 735 | pull_request_commits = pull_request_at_ver.revisions |
|
736 | 736 | log.debug('Loading %s commits from %s', |
|
737 | 737 | len(pull_request_commits), commits_source_repo) |
|
738 | 738 | |
|
739 | 739 | for rev in pull_request_commits: |
|
740 | 740 | comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load, |
|
741 | 741 | maybe_unreachable=maybe_unreachable) |
|
742 | 742 | commit_cache[comm.raw_id] = comm |
|
743 | 743 | |
|
744 | 744 | # Order here matters, we first need to get target, and then |
|
745 | 745 | # the source |
|
746 | 746 | target_commit = commits_source_repo.get_commit( |
|
747 | 747 | commit_id=safe_str(target_ref_id)) |
|
748 | 748 | |
|
749 | 749 | source_commit = commits_source_repo.get_commit( |
|
750 | 750 | commit_id=safe_str(source_ref_id), maybe_unreachable=True) |
|
751 | 751 | except CommitDoesNotExistError: |
|
752 | 752 | log.warning('Failed to get commit from `{}` repo'.format( |
|
753 | 753 | commits_source_repo), exc_info=True) |
|
754 | 754 | except RepositoryRequirementError: |
|
755 | 755 | log.warning('Failed to get all required data from repo', exc_info=True) |
|
756 | 756 | missing_requirements = True |
|
757 | 757 | |
|
758 | 758 | pr_ancestor_id = pull_request_at_ver.common_ancestor_id |
|
759 | 759 | |
|
760 | 760 | try: |
|
761 | 761 | ancestor_commit = source_scm.get_commit(pr_ancestor_id) |
|
762 | 762 | except Exception: |
|
763 | 763 | ancestor_commit = None |
|
764 | 764 | |
|
765 | 765 | return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit |
|
766 | 766 | |
|
767 | 767 | def assure_not_empty_repo(self): |
|
768 | 768 | _ = self.request.translate |
|
769 | 769 | |
|
770 | 770 | try: |
|
771 | 771 | self.db_repo.scm_instance().get_commit() |
|
772 | 772 | except EmptyRepositoryError: |
|
773 | 773 | h.flash(h.literal(_('There are no commits yet')), |
|
774 | 774 | category='warning') |
|
775 | 775 | raise HTTPFound( |
|
776 | 776 | h.route_path('repo_summary', repo_name=self.db_repo.repo_name)) |
|
777 | 777 | |
|
778 | 778 | @LoginRequired() |
|
779 | 779 | @NotAnonymous() |
|
780 | 780 | @HasRepoPermissionAnyDecorator( |
|
781 | 781 | 'repository.read', 'repository.write', 'repository.admin') |
|
782 | 782 | @view_config( |
|
783 | 783 | route_name='pullrequest_new', request_method='GET', |
|
784 | 784 | renderer='rhodecode:templates/pullrequests/pullrequest.mako') |
|
785 | 785 | def pull_request_new(self): |
|
786 | 786 | _ = self.request.translate |
|
787 | 787 | c = self.load_default_context() |
|
788 | 788 | |
|
789 | 789 | self.assure_not_empty_repo() |
|
790 | 790 | source_repo = self.db_repo |
|
791 | 791 | |
|
792 | 792 | commit_id = self.request.GET.get('commit') |
|
793 | 793 | branch_ref = self.request.GET.get('branch') |
|
794 | 794 | bookmark_ref = self.request.GET.get('bookmark') |
|
795 | 795 | |
|
796 | 796 | try: |
|
797 | 797 | source_repo_data = PullRequestModel().generate_repo_data( |
|
798 | 798 | source_repo, commit_id=commit_id, |
|
799 | 799 | branch=branch_ref, bookmark=bookmark_ref, |
|
800 | 800 | translator=self.request.translate) |
|
801 | 801 | except CommitDoesNotExistError as e: |
|
802 | 802 | log.exception(e) |
|
803 | 803 | h.flash(_('Commit does not exist'), 'error') |
|
804 | 804 | raise HTTPFound( |
|
805 | 805 | h.route_path('pullrequest_new', repo_name=source_repo.repo_name)) |
|
806 | 806 | |
|
807 | 807 | default_target_repo = source_repo |
|
808 | 808 | |
|
809 | 809 | if source_repo.parent and c.has_origin_repo_read_perm: |
|
810 | 810 | parent_vcs_obj = source_repo.parent.scm_instance() |
|
811 | 811 | if parent_vcs_obj and not parent_vcs_obj.is_empty(): |
|
812 | 812 | # change default if we have a parent repo |
|
813 | 813 | default_target_repo = source_repo.parent |
|
814 | 814 | |
|
815 | 815 | target_repo_data = PullRequestModel().generate_repo_data( |
|
816 | 816 | default_target_repo, translator=self.request.translate) |
|
817 | 817 | |
|
818 | 818 | selected_source_ref = source_repo_data['refs']['selected_ref'] |
|
819 | 819 | title_source_ref = '' |
|
820 | 820 | if selected_source_ref: |
|
821 | 821 | title_source_ref = selected_source_ref.split(':', 2)[1] |
|
822 | 822 | c.default_title = PullRequestModel().generate_pullrequest_title( |
|
823 | 823 | source=source_repo.repo_name, |
|
824 | 824 | source_ref=title_source_ref, |
|
825 | 825 | target=default_target_repo.repo_name |
|
826 | 826 | ) |
|
827 | 827 | |
|
828 | 828 | c.default_repo_data = { |
|
829 | 829 | 'source_repo_name': source_repo.repo_name, |
|
830 | 830 | 'source_refs_json': json.dumps(source_repo_data), |
|
831 | 831 | 'target_repo_name': default_target_repo.repo_name, |
|
832 | 832 | 'target_refs_json': json.dumps(target_repo_data), |
|
833 | 833 | } |
|
834 | 834 | c.default_source_ref = selected_source_ref |
|
835 | 835 | |
|
836 | 836 | return self._get_template_context(c) |
|
837 | 837 | |
|
838 | 838 | @LoginRequired() |
|
839 | 839 | @NotAnonymous() |
|
840 | 840 | @HasRepoPermissionAnyDecorator( |
|
841 | 841 | 'repository.read', 'repository.write', 'repository.admin') |
|
842 | 842 | @view_config( |
|
843 | 843 | route_name='pullrequest_repo_refs', request_method='GET', |
|
844 | 844 | renderer='json_ext', xhr=True) |
|
845 | 845 | def pull_request_repo_refs(self): |
|
846 | 846 | self.load_default_context() |
|
847 | 847 | target_repo_name = self.request.matchdict['target_repo_name'] |
|
848 | 848 | repo = Repository.get_by_repo_name(target_repo_name) |
|
849 | 849 | if not repo: |
|
850 | 850 | raise HTTPNotFound() |
|
851 | 851 | |
|
852 | 852 | target_perm = HasRepoPermissionAny( |
|
853 | 853 | 'repository.read', 'repository.write', 'repository.admin')( |
|
854 | 854 | target_repo_name) |
|
855 | 855 | if not target_perm: |
|
856 | 856 | raise HTTPNotFound() |
|
857 | 857 | |
|
858 | 858 | return PullRequestModel().generate_repo_data( |
|
859 | 859 | repo, translator=self.request.translate) |
|
860 | 860 | |
|
861 | 861 | @LoginRequired() |
|
862 | 862 | @NotAnonymous() |
|
863 | 863 | @HasRepoPermissionAnyDecorator( |
|
864 | 864 | 'repository.read', 'repository.write', 'repository.admin') |
|
865 | 865 | @view_config( |
|
866 | 866 | route_name='pullrequest_repo_targets', request_method='GET', |
|
867 | 867 | renderer='json_ext', xhr=True) |
|
868 | 868 | def pullrequest_repo_targets(self): |
|
869 | 869 | _ = self.request.translate |
|
870 | 870 | filter_query = self.request.GET.get('query') |
|
871 | 871 | |
|
872 | 872 | # get the parents |
|
873 | 873 | parent_target_repos = [] |
|
874 | 874 | if self.db_repo.parent: |
|
875 | 875 | parents_query = Repository.query() \ |
|
876 | 876 | .order_by(func.length(Repository.repo_name)) \ |
|
877 | 877 | .filter(Repository.fork_id == self.db_repo.parent.repo_id) |
|
878 | 878 | |
|
879 | 879 | if filter_query: |
|
880 | 880 | ilike_expression = u'%{}%'.format(safe_unicode(filter_query)) |
|
881 | 881 | parents_query = parents_query.filter( |
|
882 | 882 | Repository.repo_name.ilike(ilike_expression)) |
|
883 | 883 | parents = parents_query.limit(20).all() |
|
884 | 884 | |
|
885 | 885 | for parent in parents: |
|
886 | 886 | parent_vcs_obj = parent.scm_instance() |
|
887 | 887 | if parent_vcs_obj and not parent_vcs_obj.is_empty(): |
|
888 | 888 | parent_target_repos.append(parent) |
|
889 | 889 | |
|
890 | 890 | # get other forks, and repo itself |
|
891 | 891 | query = Repository.query() \ |
|
892 | 892 | .order_by(func.length(Repository.repo_name)) \ |
|
893 | 893 | .filter( |
|
894 | 894 | or_(Repository.repo_id == self.db_repo.repo_id, # repo itself |
|
895 | 895 | Repository.fork_id == self.db_repo.repo_id) # forks of this repo |
|
896 | 896 | ) \ |
|
897 | 897 | .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos])) |
|
898 | 898 | |
|
899 | 899 | if filter_query: |
|
900 | 900 | ilike_expression = u'%{}%'.format(safe_unicode(filter_query)) |
|
901 | 901 | query = query.filter(Repository.repo_name.ilike(ilike_expression)) |
|
902 | 902 | |
|
903 | 903 | limit = max(20 - len(parent_target_repos), 5) # not less then 5 |
|
904 | 904 | target_repos = query.limit(limit).all() |
|
905 | 905 | |
|
906 | 906 | all_target_repos = target_repos + parent_target_repos |
|
907 | 907 | |
|
908 | 908 | repos = [] |
|
909 | 909 | # This checks permissions to the repositories |
|
910 | 910 | for obj in ScmModel().get_repos(all_target_repos): |
|
911 | 911 | repos.append({ |
|
912 | 912 | 'id': obj['name'], |
|
913 | 913 | 'text': obj['name'], |
|
914 | 914 | 'type': 'repo', |
|
915 | 915 | 'repo_id': obj['dbrepo']['repo_id'], |
|
916 | 916 | 'repo_type': obj['dbrepo']['repo_type'], |
|
917 | 917 | 'private': obj['dbrepo']['private'], |
|
918 | 918 | |
|
919 | 919 | }) |
|
920 | 920 | |
|
921 | 921 | data = { |
|
922 | 922 | 'more': False, |
|
923 | 923 | 'results': [{ |
|
924 | 924 | 'text': _('Repositories'), |
|
925 | 925 | 'children': repos |
|
926 | 926 | }] if repos else [] |
|
927 | 927 | } |
|
928 | 928 | return data |
|
929 | 929 | |
|
930 | 930 | @LoginRequired() |
|
931 | 931 | @NotAnonymous() |
|
932 | 932 | @HasRepoPermissionAnyDecorator( |
|
933 | 933 | 'repository.read', 'repository.write', 'repository.admin') |
|
934 | 934 | @CSRFRequired() |
|
935 | 935 | @view_config( |
|
936 | 936 | route_name='pullrequest_create', request_method='POST', |
|
937 | 937 | renderer=None) |
|
938 | 938 | def pull_request_create(self): |
|
939 | 939 | _ = self.request.translate |
|
940 | 940 | self.assure_not_empty_repo() |
|
941 | 941 | self.load_default_context() |
|
942 | 942 | |
|
943 | 943 | controls = peppercorn.parse(self.request.POST.items()) |
|
944 | 944 | |
|
945 | 945 | try: |
|
946 | 946 | form = PullRequestForm( |
|
947 | 947 | self.request.translate, self.db_repo.repo_id)() |
|
948 | 948 | _form = form.to_python(controls) |
|
949 | 949 | except formencode.Invalid as errors: |
|
950 | 950 | if errors.error_dict.get('revisions'): |
|
951 | 951 | msg = 'Revisions: %s' % errors.error_dict['revisions'] |
|
952 | 952 | elif errors.error_dict.get('pullrequest_title'): |
|
953 | 953 | msg = errors.error_dict.get('pullrequest_title') |
|
954 | 954 | else: |
|
955 | 955 | msg = _('Error creating pull request: {}').format(errors) |
|
956 | 956 | log.exception(msg) |
|
957 | 957 | h.flash(msg, 'error') |
|
958 | 958 | |
|
959 | 959 | # would rather just go back to form ... |
|
960 | 960 | raise HTTPFound( |
|
961 | 961 | h.route_path('pullrequest_new', repo_name=self.db_repo_name)) |
|
962 | 962 | |
|
963 | 963 | source_repo = _form['source_repo'] |
|
964 | 964 | source_ref = _form['source_ref'] |
|
965 | 965 | target_repo = _form['target_repo'] |
|
966 | 966 | target_ref = _form['target_ref'] |
|
967 | 967 | commit_ids = _form['revisions'][::-1] |
|
968 | 968 | common_ancestor_id = _form['common_ancestor'] |
|
969 | 969 | |
|
970 | 970 | # find the ancestor for this pr |
|
971 | 971 | source_db_repo = Repository.get_by_repo_name(_form['source_repo']) |
|
972 | 972 | target_db_repo = Repository.get_by_repo_name(_form['target_repo']) |
|
973 | 973 | |
|
974 | 974 | if not (source_db_repo or target_db_repo): |
|
975 | 975 | h.flash(_('source_repo or target repo not found'), category='error') |
|
976 | 976 | raise HTTPFound( |
|
977 | 977 | h.route_path('pullrequest_new', repo_name=self.db_repo_name)) |
|
978 | 978 | |
|
979 | 979 | # re-check permissions again here |
|
980 | 980 | # source_repo we must have read permissions |
|
981 | 981 | |
|
982 | 982 | source_perm = HasRepoPermissionAny( |
|
983 | 983 | 'repository.read', 'repository.write', 'repository.admin')( |
|
984 | 984 | source_db_repo.repo_name) |
|
985 | 985 | if not source_perm: |
|
986 | 986 | msg = _('Not Enough permissions to source repo `{}`.'.format( |
|
987 | 987 | source_db_repo.repo_name)) |
|
988 | 988 | h.flash(msg, category='error') |
|
989 | 989 | # copy the args back to redirect |
|
990 | 990 | org_query = self.request.GET.mixed() |
|
991 | 991 | raise HTTPFound( |
|
992 | 992 | h.route_path('pullrequest_new', repo_name=self.db_repo_name, |
|
993 | 993 | _query=org_query)) |
|
994 | 994 | |
|
995 | 995 | # target repo we must have read permissions, and also later on |
|
996 | 996 | # we want to check branch permissions here |
|
997 | 997 | target_perm = HasRepoPermissionAny( |
|
998 | 998 | 'repository.read', 'repository.write', 'repository.admin')( |
|
999 | 999 | target_db_repo.repo_name) |
|
1000 | 1000 | if not target_perm: |
|
1001 | 1001 | msg = _('Not Enough permissions to target repo `{}`.'.format( |
|
1002 | 1002 | target_db_repo.repo_name)) |
|
1003 | 1003 | h.flash(msg, category='error') |
|
1004 | 1004 | # copy the args back to redirect |
|
1005 | 1005 | org_query = self.request.GET.mixed() |
|
1006 | 1006 | raise HTTPFound( |
|
1007 | 1007 | h.route_path('pullrequest_new', repo_name=self.db_repo_name, |
|
1008 | 1008 | _query=org_query)) |
|
1009 | 1009 | |
|
1010 | 1010 | source_scm = source_db_repo.scm_instance() |
|
1011 | 1011 | target_scm = target_db_repo.scm_instance() |
|
1012 | 1012 | |
|
1013 | 1013 | source_commit = source_scm.get_commit(source_ref.split(':')[-1]) |
|
1014 | 1014 | target_commit = target_scm.get_commit(target_ref.split(':')[-1]) |
|
1015 | 1015 | |
|
1016 | 1016 | ancestor = source_scm.get_common_ancestor( |
|
1017 | 1017 | source_commit.raw_id, target_commit.raw_id, target_scm) |
|
1018 | 1018 | |
|
1019 | 1019 | # recalculate target ref based on ancestor |
|
1020 | 1020 | target_ref_type, target_ref_name, __ = _form['target_ref'].split(':') |
|
1021 | 1021 | target_ref = ':'.join((target_ref_type, target_ref_name, ancestor)) |
|
1022 | 1022 | |
|
1023 | 1023 | get_default_reviewers_data, validate_default_reviewers = \ |
|
1024 | 1024 | PullRequestModel().get_reviewer_functions() |
|
1025 | 1025 | |
|
1026 | 1026 | # recalculate reviewers logic, to make sure we can validate this |
|
1027 | 1027 | reviewer_rules = get_default_reviewers_data( |
|
1028 | 1028 | self._rhodecode_db_user, source_db_repo, |
|
1029 | 1029 | source_commit, target_db_repo, target_commit) |
|
1030 | 1030 | |
|
1031 | 1031 | given_reviewers = _form['review_members'] |
|
1032 | 1032 | reviewers = validate_default_reviewers( |
|
1033 | 1033 | given_reviewers, reviewer_rules) |
|
1034 | 1034 | |
|
1035 | 1035 | pullrequest_title = _form['pullrequest_title'] |
|
1036 | 1036 | title_source_ref = source_ref.split(':', 2)[1] |
|
1037 | 1037 | if not pullrequest_title: |
|
1038 | 1038 | pullrequest_title = PullRequestModel().generate_pullrequest_title( |
|
1039 | 1039 | source=source_repo, |
|
1040 | 1040 | source_ref=title_source_ref, |
|
1041 | 1041 | target=target_repo |
|
1042 | 1042 | ) |
|
1043 | 1043 | |
|
1044 | 1044 | description = _form['pullrequest_desc'] |
|
1045 | 1045 | description_renderer = _form['description_renderer'] |
|
1046 | 1046 | |
|
1047 | 1047 | try: |
|
1048 | 1048 | pull_request = PullRequestModel().create( |
|
1049 | 1049 | created_by=self._rhodecode_user.user_id, |
|
1050 | 1050 | source_repo=source_repo, |
|
1051 | 1051 | source_ref=source_ref, |
|
1052 | 1052 | target_repo=target_repo, |
|
1053 | 1053 | target_ref=target_ref, |
|
1054 | 1054 | revisions=commit_ids, |
|
1055 | 1055 | common_ancestor_id=common_ancestor_id, |
|
1056 | 1056 | reviewers=reviewers, |
|
1057 | 1057 | title=pullrequest_title, |
|
1058 | 1058 | description=description, |
|
1059 | 1059 | description_renderer=description_renderer, |
|
1060 | 1060 | reviewer_data=reviewer_rules, |
|
1061 | 1061 | auth_user=self._rhodecode_user |
|
1062 | 1062 | ) |
|
1063 | 1063 | Session().commit() |
|
1064 | 1064 | |
|
1065 | 1065 | h.flash(_('Successfully opened new pull request'), |
|
1066 | 1066 | category='success') |
|
1067 | 1067 | except Exception: |
|
1068 | 1068 | msg = _('Error occurred during creation of this pull request.') |
|
1069 | 1069 | log.exception(msg) |
|
1070 | 1070 | h.flash(msg, category='error') |
|
1071 | 1071 | |
|
1072 | 1072 | # copy the args back to redirect |
|
1073 | 1073 | org_query = self.request.GET.mixed() |
|
1074 | 1074 | raise HTTPFound( |
|
1075 | 1075 | h.route_path('pullrequest_new', repo_name=self.db_repo_name, |
|
1076 | 1076 | _query=org_query)) |
|
1077 | 1077 | |
|
1078 | 1078 | raise HTTPFound( |
|
1079 | 1079 | h.route_path('pullrequest_show', repo_name=target_repo, |
|
1080 | 1080 | pull_request_id=pull_request.pull_request_id)) |
|
1081 | 1081 | |
|
1082 | 1082 | @LoginRequired() |
|
1083 | 1083 | @NotAnonymous() |
|
1084 | 1084 | @HasRepoPermissionAnyDecorator( |
|
1085 | 1085 | 'repository.read', 'repository.write', 'repository.admin') |
|
1086 | 1086 | @CSRFRequired() |
|
1087 | 1087 | @view_config( |
|
1088 | 1088 | route_name='pullrequest_update', request_method='POST', |
|
1089 | 1089 | renderer='json_ext') |
|
1090 | 1090 | def pull_request_update(self): |
|
1091 | 1091 | pull_request = PullRequest.get_or_404( |
|
1092 | 1092 | self.request.matchdict['pull_request_id']) |
|
1093 | 1093 | _ = self.request.translate |
|
1094 | 1094 | |
|
1095 | 1095 | self.load_default_context() |
|
1096 | 1096 | redirect_url = None |
|
1097 | 1097 | |
|
1098 | 1098 | if pull_request.is_closed(): |
|
1099 | 1099 | log.debug('update: forbidden because pull request is closed') |
|
1100 | 1100 | msg = _(u'Cannot update closed pull requests.') |
|
1101 | 1101 | h.flash(msg, category='error') |
|
1102 | 1102 | return {'response': True, |
|
1103 | 1103 | 'redirect_url': redirect_url} |
|
1104 | 1104 | |
|
1105 | 1105 | is_state_changing = pull_request.is_state_changing() |
|
1106 | 1106 | |
|
1107 | 1107 | # only owner or admin can update it |
|
1108 | 1108 | allowed_to_update = PullRequestModel().check_user_update( |
|
1109 | 1109 | pull_request, self._rhodecode_user) |
|
1110 | 1110 | if allowed_to_update: |
|
1111 | 1111 | controls = peppercorn.parse(self.request.POST.items()) |
|
1112 | 1112 | force_refresh = str2bool(self.request.POST.get('force_refresh')) |
|
1113 | 1113 | |
|
1114 | 1114 | if 'review_members' in controls: |
|
1115 | 1115 | self._update_reviewers( |
|
1116 | 1116 | pull_request, controls['review_members'], |
|
1117 | 1117 | pull_request.reviewer_data) |
|
1118 | 1118 | elif str2bool(self.request.POST.get('update_commits', 'false')): |
|
1119 | 1119 | if is_state_changing: |
|
1120 | 1120 | log.debug('commits update: forbidden because pull request is in state %s', |
|
1121 | 1121 | pull_request.pull_request_state) |
|
1122 | 1122 | msg = _(u'Cannot update pull requests commits in state other than `{}`. ' |
|
1123 | 1123 | u'Current state is: `{}`').format( |
|
1124 | 1124 | PullRequest.STATE_CREATED, pull_request.pull_request_state) |
|
1125 | 1125 | h.flash(msg, category='error') |
|
1126 | 1126 | return {'response': True, |
|
1127 | 1127 | 'redirect_url': redirect_url} |
|
1128 | 1128 | |
|
1129 | 1129 | self._update_commits(pull_request) |
|
1130 | 1130 | if force_refresh: |
|
1131 | 1131 | redirect_url = h.route_path( |
|
1132 | 1132 | 'pullrequest_show', repo_name=self.db_repo_name, |
|
1133 | 1133 | pull_request_id=pull_request.pull_request_id, |
|
1134 | 1134 | _query={"force_refresh": 1}) |
|
1135 | 1135 | elif str2bool(self.request.POST.get('edit_pull_request', 'false')): |
|
1136 | 1136 | self._edit_pull_request(pull_request) |
|
1137 | 1137 | else: |
|
1138 | 1138 | raise HTTPBadRequest() |
|
1139 | 1139 | |
|
1140 | 1140 | return {'response': True, |
|
1141 | 1141 | 'redirect_url': redirect_url} |
|
1142 | 1142 | raise HTTPForbidden() |
|
1143 | 1143 | |
|
1144 | 1144 | def _edit_pull_request(self, pull_request): |
|
1145 | 1145 | _ = self.request.translate |
|
1146 | 1146 | |
|
1147 | 1147 | try: |
|
1148 | 1148 | PullRequestModel().edit( |
|
1149 | 1149 | pull_request, |
|
1150 | 1150 | self.request.POST.get('title'), |
|
1151 | 1151 | self.request.POST.get('description'), |
|
1152 | 1152 | self.request.POST.get('description_renderer'), |
|
1153 | 1153 | self._rhodecode_user) |
|
1154 | 1154 | except ValueError: |
|
1155 | 1155 | msg = _(u'Cannot update closed pull requests.') |
|
1156 | 1156 | h.flash(msg, category='error') |
|
1157 | 1157 | return |
|
1158 | 1158 | else: |
|
1159 | 1159 | Session().commit() |
|
1160 | 1160 | |
|
1161 | 1161 | msg = _(u'Pull request title & description updated.') |
|
1162 | 1162 | h.flash(msg, category='success') |
|
1163 | 1163 | return |
|
1164 | 1164 | |
|
1165 | 1165 | def _update_commits(self, pull_request): |
|
1166 | 1166 | _ = self.request.translate |
|
1167 | 1167 | |
|
1168 | 1168 | with pull_request.set_state(PullRequest.STATE_UPDATING): |
|
1169 | 1169 | resp = PullRequestModel().update_commits( |
|
1170 | 1170 | pull_request, self._rhodecode_db_user) |
|
1171 | 1171 | |
|
1172 | 1172 | if resp.executed: |
|
1173 | 1173 | |
|
1174 | 1174 | if resp.target_changed and resp.source_changed: |
|
1175 | 1175 | changed = 'target and source repositories' |
|
1176 | 1176 | elif resp.target_changed and not resp.source_changed: |
|
1177 | 1177 | changed = 'target repository' |
|
1178 | 1178 | elif not resp.target_changed and resp.source_changed: |
|
1179 | 1179 | changed = 'source repository' |
|
1180 | 1180 | else: |
|
1181 | 1181 | changed = 'nothing' |
|
1182 | 1182 | |
|
1183 | 1183 | msg = _(u'Pull request updated to "{source_commit_id}" with ' |
|
1184 | 1184 | u'{count_added} added, {count_removed} removed commits. ' |
|
1185 | 1185 | u'Source of changes: {change_source}') |
|
1186 | 1186 | msg = msg.format( |
|
1187 | 1187 | source_commit_id=pull_request.source_ref_parts.commit_id, |
|
1188 | 1188 | count_added=len(resp.changes.added), |
|
1189 | 1189 | count_removed=len(resp.changes.removed), |
|
1190 | 1190 | change_source=changed) |
|
1191 | 1191 | h.flash(msg, category='success') |
|
1192 | 1192 | |
|
1193 | 1193 | channel = '/repo${}$/pr/{}'.format( |
|
1194 | 1194 | pull_request.target_repo.repo_name, pull_request.pull_request_id) |
|
1195 | 1195 | message = msg + ( |
|
1196 | 1196 | ' - <a onclick="window.location.reload()">' |
|
1197 | 1197 | '<strong>{}</strong></a>'.format(_('Reload page'))) |
|
1198 | 1198 | channelstream.post_message( |
|
1199 | 1199 | channel, message, self._rhodecode_user.username, |
|
1200 | 1200 | registry=self.request.registry) |
|
1201 | 1201 | else: |
|
1202 | 1202 | msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason] |
|
1203 | 1203 | warning_reasons = [ |
|
1204 | 1204 | UpdateFailureReason.NO_CHANGE, |
|
1205 | 1205 | UpdateFailureReason.WRONG_REF_TYPE, |
|
1206 | 1206 | ] |
|
1207 | 1207 | category = 'warning' if resp.reason in warning_reasons else 'error' |
|
1208 | 1208 | h.flash(msg, category=category) |
|
1209 | 1209 | |
|
1210 | 1210 | @LoginRequired() |
|
1211 | 1211 | @NotAnonymous() |
|
1212 | 1212 | @HasRepoPermissionAnyDecorator( |
|
1213 | 1213 | 'repository.read', 'repository.write', 'repository.admin') |
|
1214 | 1214 | @CSRFRequired() |
|
1215 | 1215 | @view_config( |
|
1216 | 1216 | route_name='pullrequest_merge', request_method='POST', |
|
1217 | 1217 | renderer='json_ext') |
|
1218 | 1218 | def pull_request_merge(self): |
|
1219 | 1219 | """ |
|
1220 | 1220 | Merge will perform a server-side merge of the specified |
|
1221 | 1221 | pull request, if the pull request is approved and mergeable. |
|
1222 | 1222 | After successful merging, the pull request is automatically |
|
1223 | 1223 | closed, with a relevant comment. |
|
1224 | 1224 | """ |
|
1225 | 1225 | pull_request = PullRequest.get_or_404( |
|
1226 | 1226 | self.request.matchdict['pull_request_id']) |
|
1227 | 1227 | _ = self.request.translate |
|
1228 | 1228 | |
|
1229 | 1229 | if pull_request.is_state_changing(): |
|
1230 | 1230 | log.debug('show: forbidden because pull request is in state %s', |
|
1231 | 1231 | pull_request.pull_request_state) |
|
1232 | 1232 | msg = _(u'Cannot merge pull requests in state other than `{}`. ' |
|
1233 | 1233 | u'Current state is: `{}`').format(PullRequest.STATE_CREATED, |
|
1234 | 1234 | pull_request.pull_request_state) |
|
1235 | 1235 | h.flash(msg, category='error') |
|
1236 | 1236 | raise HTTPFound( |
|
1237 | 1237 | h.route_path('pullrequest_show', |
|
1238 | 1238 | repo_name=pull_request.target_repo.repo_name, |
|
1239 | 1239 | pull_request_id=pull_request.pull_request_id)) |
|
1240 | 1240 | |
|
1241 | 1241 | self.load_default_context() |
|
1242 | 1242 | |
|
1243 | 1243 | with pull_request.set_state(PullRequest.STATE_UPDATING): |
|
1244 | 1244 | check = MergeCheck.validate( |
|
1245 | 1245 | pull_request, auth_user=self._rhodecode_user, |
|
1246 | 1246 | translator=self.request.translate) |
|
1247 | 1247 | merge_possible = not check.failed |
|
1248 | 1248 | |
|
1249 | 1249 | for err_type, error_msg in check.errors: |
|
1250 | 1250 | h.flash(error_msg, category=err_type) |
|
1251 | 1251 | |
|
1252 | 1252 | if merge_possible: |
|
1253 | 1253 | log.debug("Pre-conditions checked, trying to merge.") |
|
1254 | 1254 | extras = vcs_operation_context( |
|
1255 | 1255 | self.request.environ, repo_name=pull_request.target_repo.repo_name, |
|
1256 | 1256 | username=self._rhodecode_db_user.username, action='push', |
|
1257 | 1257 | scm=pull_request.target_repo.repo_type) |
|
1258 | 1258 | with pull_request.set_state(PullRequest.STATE_UPDATING): |
|
1259 | 1259 | self._merge_pull_request( |
|
1260 | 1260 | pull_request, self._rhodecode_db_user, extras) |
|
1261 | 1261 | else: |
|
1262 | 1262 | log.debug("Pre-conditions failed, NOT merging.") |
|
1263 | 1263 | |
|
1264 | 1264 | raise HTTPFound( |
|
1265 | 1265 | h.route_path('pullrequest_show', |
|
1266 | 1266 | repo_name=pull_request.target_repo.repo_name, |
|
1267 | 1267 | pull_request_id=pull_request.pull_request_id)) |
|
1268 | 1268 | |
|
1269 | 1269 | def _merge_pull_request(self, pull_request, user, extras): |
|
1270 | 1270 | _ = self.request.translate |
|
1271 | 1271 | merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras) |
|
1272 | 1272 | |
|
1273 | 1273 | if merge_resp.executed: |
|
1274 | 1274 | log.debug("The merge was successful, closing the pull request.") |
|
1275 | 1275 | PullRequestModel().close_pull_request( |
|
1276 | 1276 | pull_request.pull_request_id, user) |
|
1277 | 1277 | Session().commit() |
|
1278 | 1278 | msg = _('Pull request was successfully merged and closed.') |
|
1279 | 1279 | h.flash(msg, category='success') |
|
1280 | 1280 | else: |
|
1281 | 1281 | log.debug( |
|
1282 | 1282 | "The merge was not successful. Merge response: %s", merge_resp) |
|
1283 | 1283 | msg = merge_resp.merge_status_message |
|
1284 | 1284 | h.flash(msg, category='error') |
|
1285 | 1285 | |
|
1286 | 1286 | def _update_reviewers(self, pull_request, review_members, reviewer_rules): |
|
1287 | 1287 | _ = self.request.translate |
|
1288 | 1288 | |
|
1289 | 1289 | get_default_reviewers_data, validate_default_reviewers = \ |
|
1290 | 1290 | PullRequestModel().get_reviewer_functions() |
|
1291 | 1291 | |
|
1292 | 1292 | try: |
|
1293 | 1293 | reviewers = validate_default_reviewers(review_members, reviewer_rules) |
|
1294 | 1294 | except ValueError as e: |
|
1295 | 1295 | log.error('Reviewers Validation: {}'.format(e)) |
|
1296 | 1296 | h.flash(e, category='error') |
|
1297 | 1297 | return |
|
1298 | 1298 | |
|
1299 | 1299 | old_calculated_status = pull_request.calculated_review_status() |
|
1300 | 1300 | PullRequestModel().update_reviewers( |
|
1301 | 1301 | pull_request, reviewers, self._rhodecode_user) |
|
1302 | 1302 | h.flash(_('Pull request reviewers updated.'), category='success') |
|
1303 | 1303 | Session().commit() |
|
1304 | 1304 | |
|
1305 | 1305 | # trigger status changed if change in reviewers changes the status |
|
1306 | 1306 | calculated_status = pull_request.calculated_review_status() |
|
1307 | 1307 | if old_calculated_status != calculated_status: |
|
1308 | 1308 | PullRequestModel().trigger_pull_request_hook( |
|
1309 | 1309 | pull_request, self._rhodecode_user, 'review_status_change', |
|
1310 | 1310 | data={'status': calculated_status}) |
|
1311 | 1311 | |
|
1312 | 1312 | @LoginRequired() |
|
1313 | 1313 | @NotAnonymous() |
|
1314 | 1314 | @HasRepoPermissionAnyDecorator( |
|
1315 | 1315 | 'repository.read', 'repository.write', 'repository.admin') |
|
1316 | 1316 | @CSRFRequired() |
|
1317 | 1317 | @view_config( |
|
1318 | 1318 | route_name='pullrequest_delete', request_method='POST', |
|
1319 | 1319 | renderer='json_ext') |
|
1320 | 1320 | def pull_request_delete(self): |
|
1321 | 1321 | _ = self.request.translate |
|
1322 | 1322 | |
|
1323 | 1323 | pull_request = PullRequest.get_or_404( |
|
1324 | 1324 | self.request.matchdict['pull_request_id']) |
|
1325 | 1325 | self.load_default_context() |
|
1326 | 1326 | |
|
1327 | 1327 | pr_closed = pull_request.is_closed() |
|
1328 | 1328 | allowed_to_delete = PullRequestModel().check_user_delete( |
|
1329 | 1329 | pull_request, self._rhodecode_user) and not pr_closed |
|
1330 | 1330 | |
|
1331 | 1331 | # only owner can delete it ! |
|
1332 | 1332 | if allowed_to_delete: |
|
1333 | 1333 | PullRequestModel().delete(pull_request, self._rhodecode_user) |
|
1334 | 1334 | Session().commit() |
|
1335 | 1335 | h.flash(_('Successfully deleted pull request'), |
|
1336 | 1336 | category='success') |
|
1337 | 1337 | raise HTTPFound(h.route_path('pullrequest_show_all', |
|
1338 | 1338 | repo_name=self.db_repo_name)) |
|
1339 | 1339 | |
|
1340 | 1340 | log.warning('user %s tried to delete pull request without access', |
|
1341 | 1341 | self._rhodecode_user) |
|
1342 | 1342 | raise HTTPNotFound() |
|
1343 | 1343 | |
|
1344 | 1344 | @LoginRequired() |
|
1345 | 1345 | @NotAnonymous() |
|
1346 | 1346 | @HasRepoPermissionAnyDecorator( |
|
1347 | 1347 | 'repository.read', 'repository.write', 'repository.admin') |
|
1348 | 1348 | @CSRFRequired() |
|
1349 | 1349 | @view_config( |
|
1350 | 1350 | route_name='pullrequest_comment_create', request_method='POST', |
|
1351 | 1351 | renderer='json_ext') |
|
1352 | 1352 | def pull_request_comment_create(self): |
|
1353 | 1353 | _ = self.request.translate |
|
1354 | 1354 | |
|
1355 | 1355 | pull_request = PullRequest.get_or_404( |
|
1356 | 1356 | self.request.matchdict['pull_request_id']) |
|
1357 | 1357 | pull_request_id = pull_request.pull_request_id |
|
1358 | 1358 | |
|
1359 | 1359 | if pull_request.is_closed(): |
|
1360 | 1360 | log.debug('comment: forbidden because pull request is closed') |
|
1361 | 1361 | raise HTTPForbidden() |
|
1362 | 1362 | |
|
1363 | 1363 | allowed_to_comment = PullRequestModel().check_user_comment( |
|
1364 | 1364 | pull_request, self._rhodecode_user) |
|
1365 | 1365 | if not allowed_to_comment: |
|
1366 | 1366 | log.debug( |
|
1367 | 1367 | 'comment: forbidden because pull request is from forbidden repo') |
|
1368 | 1368 | raise HTTPForbidden() |
|
1369 | 1369 | |
|
1370 | 1370 | c = self.load_default_context() |
|
1371 | 1371 | |
|
1372 | 1372 | status = self.request.POST.get('changeset_status', None) |
|
1373 | 1373 | text = self.request.POST.get('text') |
|
1374 | 1374 | comment_type = self.request.POST.get('comment_type') |
|
1375 | 1375 | resolves_comment_id = self.request.POST.get('resolves_comment_id', None) |
|
1376 | 1376 | close_pull_request = self.request.POST.get('close_pull_request') |
|
1377 | 1377 | |
|
1378 | 1378 | # the logic here should work like following, if we submit close |
|
1379 | 1379 | # pr comment, use `close_pull_request_with_comment` function |
|
1380 | 1380 | # else handle regular comment logic |
|
1381 | 1381 | |
|
1382 | 1382 | if close_pull_request: |
|
1383 | 1383 | # only owner or admin or person with write permissions |
|
1384 | 1384 | allowed_to_close = PullRequestModel().check_user_update( |
|
1385 | 1385 | pull_request, self._rhodecode_user) |
|
1386 | 1386 | if not allowed_to_close: |
|
1387 | 1387 | log.debug('comment: forbidden because not allowed to close ' |
|
1388 | 1388 | 'pull request %s', pull_request_id) |
|
1389 | 1389 | raise HTTPForbidden() |
|
1390 | 1390 | |
|
1391 | 1391 | # This also triggers `review_status_change` |
|
1392 | 1392 | comment, status = PullRequestModel().close_pull_request_with_comment( |
|
1393 | 1393 | pull_request, self._rhodecode_user, self.db_repo, message=text, |
|
1394 | 1394 | auth_user=self._rhodecode_user) |
|
1395 | 1395 | Session().flush() |
|
1396 | 1396 | |
|
1397 | 1397 | PullRequestModel().trigger_pull_request_hook( |
|
1398 | 1398 | pull_request, self._rhodecode_user, 'comment', |
|
1399 | 1399 | data={'comment': comment}) |
|
1400 | 1400 | |
|
1401 | 1401 | else: |
|
1402 | 1402 | # regular comment case, could be inline, or one with status. |
|
1403 | 1403 | # for that one we check also permissions |
|
1404 | 1404 | |
|
1405 | 1405 | allowed_to_change_status = PullRequestModel().check_user_change_status( |
|
1406 | 1406 | pull_request, self._rhodecode_user) |
|
1407 | 1407 | |
|
1408 | 1408 | if status and allowed_to_change_status: |
|
1409 | 1409 | message = (_('Status change %(transition_icon)s %(status)s') |
|
1410 | 1410 | % {'transition_icon': '>', |
|
1411 | 1411 | 'status': ChangesetStatus.get_status_lbl(status)}) |
|
1412 | 1412 | text = text or message |
|
1413 | 1413 | |
|
1414 | 1414 | comment = CommentsModel().create( |
|
1415 | 1415 | text=text, |
|
1416 | 1416 | repo=self.db_repo.repo_id, |
|
1417 | 1417 | user=self._rhodecode_user.user_id, |
|
1418 | 1418 | pull_request=pull_request, |
|
1419 | 1419 | f_path=self.request.POST.get('f_path'), |
|
1420 | 1420 | line_no=self.request.POST.get('line'), |
|
1421 | 1421 | status_change=(ChangesetStatus.get_status_lbl(status) |
|
1422 | 1422 | if status and allowed_to_change_status else None), |
|
1423 | 1423 | status_change_type=(status |
|
1424 | 1424 | if status and allowed_to_change_status else None), |
|
1425 | 1425 | comment_type=comment_type, |
|
1426 | 1426 | resolves_comment_id=resolves_comment_id, |
|
1427 | 1427 | auth_user=self._rhodecode_user |
|
1428 | 1428 | ) |
|
1429 | 1429 | |
|
1430 | 1430 | if allowed_to_change_status: |
|
1431 | 1431 | # calculate old status before we change it |
|
1432 | 1432 | old_calculated_status = pull_request.calculated_review_status() |
|
1433 | 1433 | |
|
1434 | 1434 | # get status if set ! |
|
1435 | 1435 | if status: |
|
1436 | 1436 | ChangesetStatusModel().set_status( |
|
1437 | 1437 | self.db_repo.repo_id, |
|
1438 | 1438 | status, |
|
1439 | 1439 | self._rhodecode_user.user_id, |
|
1440 | 1440 | comment, |
|
1441 | 1441 | pull_request=pull_request |
|
1442 | 1442 | ) |
|
1443 | 1443 | |
|
1444 | 1444 | Session().flush() |
|
1445 | 1445 | # this is somehow required to get access to some relationship |
|
1446 | 1446 | # loaded on comment |
|
1447 | 1447 | Session().refresh(comment) |
|
1448 | 1448 | |
|
1449 | 1449 | PullRequestModel().trigger_pull_request_hook( |
|
1450 | 1450 | pull_request, self._rhodecode_user, 'comment', |
|
1451 | 1451 | data={'comment': comment}) |
|
1452 | 1452 | |
|
1453 | 1453 | # we now calculate the status of pull request, and based on that |
|
1454 | 1454 | # calculation we set the commits status |
|
1455 | 1455 | calculated_status = pull_request.calculated_review_status() |
|
1456 | 1456 | if old_calculated_status != calculated_status: |
|
1457 | 1457 | PullRequestModel().trigger_pull_request_hook( |
|
1458 | 1458 | pull_request, self._rhodecode_user, 'review_status_change', |
|
1459 | 1459 | data={'status': calculated_status}) |
|
1460 | 1460 | |
|
1461 | 1461 | Session().commit() |
|
1462 | 1462 | |
|
1463 | 1463 | data = { |
|
1464 | 1464 | 'target_id': h.safeid(h.safe_unicode( |
|
1465 | 1465 | self.request.POST.get('f_path'))), |
|
1466 | 1466 | } |
|
1467 | 1467 | if comment: |
|
1468 | 1468 | c.co = comment |
|
1469 | 1469 | rendered_comment = render( |
|
1470 | 1470 | 'rhodecode:templates/changeset/changeset_comment_block.mako', |
|
1471 | 1471 | self._get_template_context(c), self.request) |
|
1472 | 1472 | |
|
1473 | 1473 | data.update(comment.get_dict()) |
|
1474 | 1474 | data.update({'rendered_text': rendered_comment}) |
|
1475 | 1475 | |
|
1476 | 1476 | return data |
|
1477 | 1477 | |
|
1478 | 1478 | @LoginRequired() |
|
1479 | 1479 | @NotAnonymous() |
|
1480 | 1480 | @HasRepoPermissionAnyDecorator( |
|
1481 | 1481 | 'repository.read', 'repository.write', 'repository.admin') |
|
1482 | 1482 | @CSRFRequired() |
|
1483 | 1483 | @view_config( |
|
1484 | 1484 | route_name='pullrequest_comment_delete', request_method='POST', |
|
1485 | 1485 | renderer='json_ext') |
|
1486 | 1486 | def pull_request_comment_delete(self): |
|
1487 | 1487 | pull_request = PullRequest.get_or_404( |
|
1488 | 1488 | self.request.matchdict['pull_request_id']) |
|
1489 | 1489 | |
|
1490 | 1490 | comment = ChangesetComment.get_or_404( |
|
1491 | 1491 | self.request.matchdict['comment_id']) |
|
1492 | 1492 | comment_id = comment.comment_id |
|
1493 | 1493 | |
|
1494 | 1494 | if comment.immutable: |
|
1495 | 1495 | # don't allow deleting comments that are immutable |
|
1496 | 1496 | raise HTTPForbidden() |
|
1497 | 1497 | |
|
1498 | 1498 | if pull_request.is_closed(): |
|
1499 | 1499 | log.debug('comment: forbidden because pull request is closed') |
|
1500 | 1500 | raise HTTPForbidden() |
|
1501 | 1501 | |
|
1502 | 1502 | if not comment: |
|
1503 | 1503 | log.debug('Comment with id:%s not found, skipping', comment_id) |
|
1504 | 1504 | # comment already deleted in another call probably |
|
1505 | 1505 | return True |
|
1506 | 1506 | |
|
1507 | 1507 | if comment.pull_request.is_closed(): |
|
1508 | 1508 | # don't allow deleting comments on closed pull request |
|
1509 | 1509 | raise HTTPForbidden() |
|
1510 | 1510 | |
|
1511 | 1511 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) |
|
1512 | 1512 | super_admin = h.HasPermissionAny('hg.admin')() |
|
1513 | 1513 | comment_owner = comment.author.user_id == self._rhodecode_user.user_id |
|
1514 | 1514 | is_repo_comment = comment.repo.repo_name == self.db_repo_name |
|
1515 | 1515 | comment_repo_admin = is_repo_admin and is_repo_comment |
|
1516 | 1516 | |
|
1517 | 1517 | if super_admin or comment_owner or comment_repo_admin: |
|
1518 | 1518 | old_calculated_status = comment.pull_request.calculated_review_status() |
|
1519 | 1519 | CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user) |
|
1520 | 1520 | Session().commit() |
|
1521 | 1521 | calculated_status = comment.pull_request.calculated_review_status() |
|
1522 | 1522 | if old_calculated_status != calculated_status: |
|
1523 | 1523 | PullRequestModel().trigger_pull_request_hook( |
|
1524 | 1524 | comment.pull_request, self._rhodecode_user, 'review_status_change', |
|
1525 | 1525 | data={'status': calculated_status}) |
|
1526 | 1526 | return True |
|
1527 | 1527 | else: |
|
1528 | 1528 | log.warning('No permissions for user %s to delete comment_id: %s', |
|
1529 | 1529 | self._rhodecode_db_user, comment_id) |
|
1530 | 1530 | raise HTTPNotFound() |
|
1531 | 1531 | |
|
1532 | 1532 | @LoginRequired() |
|
1533 | 1533 | @NotAnonymous() |
|
1534 | 1534 | @HasRepoPermissionAnyDecorator( |
|
1535 | 1535 | 'repository.read', 'repository.write', 'repository.admin') |
|
1536 | 1536 | @CSRFRequired() |
|
1537 | 1537 | @view_config( |
|
1538 | 1538 | route_name='pullrequest_comment_edit', request_method='POST', |
|
1539 | 1539 | renderer='json_ext') |
|
1540 | 1540 | def pull_request_comment_edit(self): |
|
1541 | 1541 | self.load_default_context() |
|
1542 | 1542 | |
|
1543 | 1543 | pull_request = PullRequest.get_or_404( |
|
1544 | 1544 | self.request.matchdict['pull_request_id'] |
|
1545 | 1545 | ) |
|
1546 | 1546 | comment = ChangesetComment.get_or_404( |
|
1547 | 1547 | self.request.matchdict['comment_id'] |
|
1548 | 1548 | ) |
|
1549 | 1549 | comment_id = comment.comment_id |
|
1550 | 1550 | |
|
1551 | 1551 | if comment.immutable: |
|
1552 | 1552 | # don't allow deleting comments that are immutable |
|
1553 | 1553 | raise HTTPForbidden() |
|
1554 | 1554 | |
|
1555 | 1555 | if pull_request.is_closed(): |
|
1556 | 1556 | log.debug('comment: forbidden because pull request is closed') |
|
1557 | 1557 | raise HTTPForbidden() |
|
1558 | 1558 | |
|
1559 | 1559 | if not comment: |
|
1560 | 1560 | log.debug('Comment with id:%s not found, skipping', comment_id) |
|
1561 | 1561 | # comment already deleted in another call probably |
|
1562 | 1562 | return True |
|
1563 | 1563 | |
|
1564 | 1564 | if comment.pull_request.is_closed(): |
|
1565 | 1565 | # don't allow deleting comments on closed pull request |
|
1566 | 1566 | raise HTTPForbidden() |
|
1567 | 1567 | |
|
1568 | 1568 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) |
|
1569 | 1569 | super_admin = h.HasPermissionAny('hg.admin')() |
|
1570 | 1570 | comment_owner = comment.author.user_id == self._rhodecode_user.user_id |
|
1571 | 1571 | is_repo_comment = comment.repo.repo_name == self.db_repo_name |
|
1572 | 1572 | comment_repo_admin = is_repo_admin and is_repo_comment |
|
1573 | 1573 | |
|
1574 | 1574 | if super_admin or comment_owner or comment_repo_admin: |
|
1575 | 1575 | text = self.request.POST.get('text') |
|
1576 | 1576 | version = self.request.POST.get('version') |
|
1577 | 1577 | if text == comment.text: |
|
1578 | 1578 | log.warning( |
|
1579 | 1579 | 'Comment(PR): ' |
|
1580 | 1580 | 'Trying to create new version ' |
|
1581 | 1581 | 'with the same comment body {}'.format( |
|
1582 | 1582 | comment_id, |
|
1583 | 1583 | ) |
|
1584 | 1584 | ) |
|
1585 | 1585 | raise HTTPNotFound() |
|
1586 | 1586 | |
|
1587 | 1587 | if version.isdigit(): |
|
1588 | 1588 | version = int(version) |
|
1589 | 1589 | else: |
|
1590 | 1590 | log.warning( |
|
1591 | 1591 | 'Comment(PR): Wrong version type {} {} ' |
|
1592 | 1592 | 'for comment {}'.format( |
|
1593 | 1593 | version, |
|
1594 | 1594 | type(version), |
|
1595 | 1595 | comment_id, |
|
1596 | 1596 | ) |
|
1597 | 1597 | ) |
|
1598 | 1598 | raise HTTPNotFound() |
|
1599 | 1599 | |
|
1600 | 1600 | try: |
|
1601 | 1601 | comment_history = CommentsModel().edit( |
|
1602 | 1602 | comment_id=comment_id, |
|
1603 | 1603 | text=text, |
|
1604 | 1604 | auth_user=self._rhodecode_user, |
|
1605 | 1605 | version=version, |
|
1606 | 1606 | ) |
|
1607 | 1607 | except CommentVersionMismatch: |
|
1608 | 1608 | raise HTTPConflict() |
|
1609 | 1609 | |
|
1610 | 1610 | if not comment_history: |
|
1611 | 1611 | raise HTTPNotFound() |
|
1612 | 1612 | |
|
1613 | 1613 | Session().commit() |
|
1614 | ||
|
1615 | PullRequestModel().trigger_pull_request_hook( | |
|
1616 | pull_request, self._rhodecode_user, 'comment_edit', | |
|
1617 | data={'comment': comment}) | |
|
1618 | ||
|
1614 | 1619 | return { |
|
1615 | 1620 | 'comment_history_id': comment_history.comment_history_id, |
|
1616 | 1621 | 'comment_id': comment.comment_id, |
|
1617 | 1622 | 'comment_version': comment_history.version, |
|
1618 | 1623 | 'comment_author_username': comment_history.author.username, |
|
1619 | 1624 | 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16), |
|
1620 | 1625 | 'comment_created_on': h.age_component(comment_history.created_on, |
|
1621 | 1626 | time_is_local=True), |
|
1622 | 1627 | } |
|
1623 | 1628 | else: |
|
1624 | 1629 | log.warning('No permissions for user %s to edit comment_id: %s', |
|
1625 | 1630 | self._rhodecode_db_user, comment_id) |
|
1626 | 1631 | raise HTTPNotFound() |
@@ -1,79 +1,80 b'' | |||
|
1 | 1 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | import logging |
|
20 | 20 | from pyramid.threadlocal import get_current_registry |
|
21 | 21 | from rhodecode.events.base import RhodeCodeIntegrationEvent |
|
22 | 22 | |
|
23 | 23 | |
|
24 | 24 | log = logging.getLogger(__name__) |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | def trigger(event, registry=None): |
|
28 | 28 | """ |
|
29 | 29 | Helper method to send an event. This wraps the pyramid logic to send an |
|
30 | 30 | event. |
|
31 | 31 | """ |
|
32 | 32 | # For the first step we are using pyramids thread locals here. If the |
|
33 | 33 | # event mechanism works out as a good solution we should think about |
|
34 | 34 | # passing the registry as an argument to get rid of it. |
|
35 | 35 | event_name = event.__class__ |
|
36 | 36 | log.debug('event %s sent for execution', event_name) |
|
37 | 37 | registry = registry or get_current_registry() |
|
38 | 38 | registry.notify(event) |
|
39 | 39 | log.debug('event %s triggered using registry %s', event_name, registry) |
|
40 | 40 | |
|
41 | 41 | # Send the events to integrations directly |
|
42 | 42 | from rhodecode.integrations import integrations_event_handler |
|
43 | 43 | if isinstance(event, RhodeCodeIntegrationEvent): |
|
44 | 44 | integrations_event_handler(event) |
|
45 | 45 | |
|
46 | 46 | |
|
47 | 47 | from rhodecode.events.user import ( # pragma: no cover |
|
48 | 48 | UserPreCreate, |
|
49 | 49 | UserPostCreate, |
|
50 | 50 | UserPreUpdate, |
|
51 | 51 | UserRegistered, |
|
52 | 52 | UserPermissionsChange, |
|
53 | 53 | ) |
|
54 | 54 | |
|
55 | 55 | from rhodecode.events.repo import ( # pragma: no cover |
|
56 | RepoEvent, RepoCommitCommentEvent, | |
|
56 | RepoEvent, | |
|
57 | RepoCommitCommentEvent, RepoCommitCommentEditEvent, | |
|
57 | 58 | RepoPreCreateEvent, RepoCreateEvent, |
|
58 | 59 | RepoPreDeleteEvent, RepoDeleteEvent, |
|
59 | 60 | RepoPrePushEvent, RepoPushEvent, |
|
60 | 61 | RepoPrePullEvent, RepoPullEvent, |
|
61 | 62 | ) |
|
62 | 63 | |
|
63 | 64 | from rhodecode.events.repo_group import ( # pragma: no cover |
|
64 | 65 | RepoGroupEvent, |
|
65 | 66 | RepoGroupCreateEvent, |
|
66 | 67 | RepoGroupUpdateEvent, |
|
67 | 68 | RepoGroupDeleteEvent, |
|
68 | 69 | ) |
|
69 | 70 | |
|
70 | 71 | from rhodecode.events.pullrequest import ( # pragma: no cover |
|
71 | 72 | PullRequestEvent, |
|
72 | 73 | PullRequestCreateEvent, |
|
73 | 74 | PullRequestUpdateEvent, |
|
74 | 75 | PullRequestCommentEvent, |
|
76 | PullRequestCommentEditEvent, | |
|
75 | 77 | PullRequestReviewEvent, |
|
76 | 78 | PullRequestMergeEvent, |
|
77 | 79 | PullRequestCloseEvent, |
|
78 | PullRequestCommentEvent, | |
|
79 | 80 | ) |
@@ -1,164 +1,203 b'' | |||
|
1 | 1 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | import logging |
|
20 | 20 | |
|
21 | 21 | from rhodecode.translation import lazy_ugettext |
|
22 | from rhodecode.events.repo import ( | |
|
23 | RepoEvent, _commits_as_dict, _issues_as_dict) | |
|
22 | from rhodecode.events.repo import (RepoEvent, _commits_as_dict, _issues_as_dict) | |
|
24 | 23 | |
|
25 | 24 | log = logging.getLogger(__name__) |
|
26 | 25 | |
|
27 | 26 | |
|
28 | 27 | class PullRequestEvent(RepoEvent): |
|
29 | 28 | """ |
|
30 | 29 | Base class for pull request events. |
|
31 | 30 | |
|
32 | 31 | :param pullrequest: a :class:`PullRequest` instance |
|
33 | 32 | """ |
|
34 | 33 | |
|
35 | 34 | def __init__(self, pullrequest): |
|
36 | 35 | super(PullRequestEvent, self).__init__(pullrequest.target_repo) |
|
37 | 36 | self.pullrequest = pullrequest |
|
38 | 37 | |
|
39 | 38 | def as_dict(self): |
|
40 | 39 | from rhodecode.lib.utils2 import md5_safe |
|
41 | 40 | from rhodecode.model.pull_request import PullRequestModel |
|
42 | 41 | data = super(PullRequestEvent, self).as_dict() |
|
43 | 42 | |
|
44 | 43 | commits = _commits_as_dict( |
|
45 | 44 | self, |
|
46 | 45 | commit_ids=self.pullrequest.revisions, |
|
47 | 46 | repos=[self.pullrequest.source_repo] |
|
48 | 47 | ) |
|
49 | 48 | issues = _issues_as_dict(commits) |
|
50 | 49 | # calculate hashes of all commits for unique identifier of commits |
|
51 | 50 | # inside that pull request |
|
52 | 51 | commits_hash = md5_safe(':'.join(x.get('raw_id', '') for x in commits)) |
|
53 | 52 | |
|
54 | 53 | data.update({ |
|
55 | 54 | 'pullrequest': { |
|
56 | 55 | 'title': self.pullrequest.title, |
|
57 | 56 | 'issues': issues, |
|
58 | 57 | 'pull_request_id': self.pullrequest.pull_request_id, |
|
59 | 58 | 'url': PullRequestModel().get_url( |
|
60 | 59 | self.pullrequest, request=self.request), |
|
61 | 60 | 'permalink_url': PullRequestModel().get_url( |
|
62 | 61 | self.pullrequest, request=self.request, permalink=True), |
|
63 | 62 | 'shadow_url': PullRequestModel().get_shadow_clone_url( |
|
64 | 63 | self.pullrequest, request=self.request), |
|
65 | 64 | 'status': self.pullrequest.calculated_review_status(), |
|
66 | 65 | 'commits_uid': commits_hash, |
|
67 | 66 | 'commits': commits, |
|
68 | 67 | } |
|
69 | 68 | }) |
|
70 | 69 | return data |
|
71 | 70 | |
|
72 | 71 | |
|
73 | 72 | class PullRequestCreateEvent(PullRequestEvent): |
|
74 | 73 | """ |
|
75 | 74 | An instance of this class is emitted as an :term:`event` after a pull |
|
76 | 75 | request is created. |
|
77 | 76 | """ |
|
78 | 77 | name = 'pullrequest-create' |
|
79 | 78 | display_name = lazy_ugettext('pullrequest created') |
|
80 | 79 | description = lazy_ugettext('Event triggered after pull request was created') |
|
81 | 80 | |
|
82 | 81 | |
|
83 | 82 | class PullRequestCloseEvent(PullRequestEvent): |
|
84 | 83 | """ |
|
85 | 84 | An instance of this class is emitted as an :term:`event` after a pull |
|
86 | 85 | request is closed. |
|
87 | 86 | """ |
|
88 | 87 | name = 'pullrequest-close' |
|
89 | 88 | display_name = lazy_ugettext('pullrequest closed') |
|
90 | 89 | description = lazy_ugettext('Event triggered after pull request was closed') |
|
91 | 90 | |
|
92 | 91 | |
|
93 | 92 | class PullRequestUpdateEvent(PullRequestEvent): |
|
94 | 93 | """ |
|
95 | 94 | An instance of this class is emitted as an :term:`event` after a pull |
|
96 | 95 | request's commits have been updated. |
|
97 | 96 | """ |
|
98 | 97 | name = 'pullrequest-update' |
|
99 | 98 | display_name = lazy_ugettext('pullrequest commits updated') |
|
100 | 99 | description = lazy_ugettext('Event triggered after pull requests was updated') |
|
101 | 100 | |
|
102 | 101 | |
|
103 | 102 | class PullRequestReviewEvent(PullRequestEvent): |
|
104 | 103 | """ |
|
105 | 104 | An instance of this class is emitted as an :term:`event` after a pull |
|
106 | 105 | request review has changed. A status defines new status of review. |
|
107 | 106 | """ |
|
108 | 107 | name = 'pullrequest-review' |
|
109 | 108 | display_name = lazy_ugettext('pullrequest review changed') |
|
110 | 109 | description = lazy_ugettext('Event triggered after a review status of a ' |
|
111 | 110 | 'pull requests has changed to other.') |
|
112 | 111 | |
|
113 | 112 | def __init__(self, pullrequest, status): |
|
114 | 113 | super(PullRequestReviewEvent, self).__init__(pullrequest) |
|
115 | 114 | self.status = status |
|
116 | 115 | |
|
117 | 116 | |
|
118 | 117 | class PullRequestMergeEvent(PullRequestEvent): |
|
119 | 118 | """ |
|
120 | 119 | An instance of this class is emitted as an :term:`event` after a pull |
|
121 | 120 | request is merged. |
|
122 | 121 | """ |
|
123 | 122 | name = 'pullrequest-merge' |
|
124 | 123 | display_name = lazy_ugettext('pullrequest merged') |
|
125 | 124 | description = lazy_ugettext('Event triggered after a successful merge operation ' |
|
126 | 125 | 'was executed on a pull request') |
|
127 | 126 | |
|
128 | 127 | |
|
129 | 128 | class PullRequestCommentEvent(PullRequestEvent): |
|
130 | 129 | """ |
|
131 | 130 | An instance of this class is emitted as an :term:`event` after a pull |
|
132 | 131 | request comment is created. |
|
133 | 132 | """ |
|
134 | 133 | name = 'pullrequest-comment' |
|
135 | 134 | display_name = lazy_ugettext('pullrequest commented') |
|
136 | 135 | description = lazy_ugettext('Event triggered after a comment was made on a code ' |
|
137 | 136 | 'in the pull request') |
|
138 | 137 | |
|
139 | 138 | def __init__(self, pullrequest, comment): |
|
140 | 139 | super(PullRequestCommentEvent, self).__init__(pullrequest) |
|
141 | 140 | self.comment = comment |
|
142 | 141 | |
|
143 | 142 | def as_dict(self): |
|
144 | 143 | from rhodecode.model.comment import CommentsModel |
|
145 | 144 | data = super(PullRequestCommentEvent, self).as_dict() |
|
146 | 145 | |
|
147 | 146 | status = None |
|
148 | 147 | if self.comment.status_change: |
|
149 | 148 | status = self.comment.status_change[0].status |
|
150 | 149 | |
|
151 | 150 | data.update({ |
|
152 | 151 | 'comment': { |
|
153 | 152 | 'status': status, |
|
154 | 153 | 'text': self.comment.text, |
|
155 | 154 | 'type': self.comment.comment_type, |
|
156 | 155 | 'file': self.comment.f_path, |
|
157 | 156 | 'line': self.comment.line_no, |
|
157 | 'version': self.comment.last_version, | |
|
158 | 158 | 'url': CommentsModel().get_url( |
|
159 | 159 | self.comment, request=self.request), |
|
160 | 160 | 'permalink_url': CommentsModel().get_url( |
|
161 | 161 | self.comment, request=self.request, permalink=True), |
|
162 | 162 | } |
|
163 | 163 | }) |
|
164 | 164 | return data |
|
165 | ||
|
166 | ||
|
167 | class PullRequestCommentEditEvent(PullRequestEvent): | |
|
168 | """ | |
|
169 | An instance of this class is emitted as an :term:`event` after a pull | |
|
170 | request comment is edited. | |
|
171 | """ | |
|
172 | name = 'pullrequest-comment-edit' | |
|
173 | display_name = lazy_ugettext('pullrequest comment edited') | |
|
174 | description = lazy_ugettext('Event triggered after a comment was edited on a code ' | |
|
175 | 'in the pull request') | |
|
176 | ||
|
177 | def __init__(self, pullrequest, comment): | |
|
178 | super(PullRequestCommentEditEvent, self).__init__(pullrequest) | |
|
179 | self.comment = comment | |
|
180 | ||
|
181 | def as_dict(self): | |
|
182 | from rhodecode.model.comment import CommentsModel | |
|
183 | data = super(PullRequestCommentEditEvent, self).as_dict() | |
|
184 | ||
|
185 | status = None | |
|
186 | if self.comment.status_change: | |
|
187 | status = self.comment.status_change[0].status | |
|
188 | ||
|
189 | data.update({ | |
|
190 | 'comment': { | |
|
191 | 'status': status, | |
|
192 | 'text': self.comment.text, | |
|
193 | 'type': self.comment.comment_type, | |
|
194 | 'file': self.comment.f_path, | |
|
195 | 'line': self.comment.line_no, | |
|
196 | 'version': self.comment.last_version, | |
|
197 | 'url': CommentsModel().get_url( | |
|
198 | self.comment, request=self.request), | |
|
199 | 'permalink_url': CommentsModel().get_url( | |
|
200 | self.comment, request=self.request, permalink=True), | |
|
201 | } | |
|
202 | }) | |
|
203 | return data |
@@ -1,400 +1,436 b'' | |||
|
1 | 1 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | import collections |
|
20 | 20 | import logging |
|
21 | 21 | import datetime |
|
22 | 22 | |
|
23 | 23 | from rhodecode.translation import lazy_ugettext |
|
24 | 24 | from rhodecode.model.db import User, Repository, Session |
|
25 | 25 | from rhodecode.events.base import RhodeCodeIntegrationEvent |
|
26 | 26 | from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError |
|
27 | 27 | |
|
28 | 28 | log = logging.getLogger(__name__) |
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | def _commits_as_dict(event, commit_ids, repos): |
|
32 | 32 | """ |
|
33 | 33 | Helper function to serialize commit_ids |
|
34 | 34 | |
|
35 | 35 | :param event: class calling this method |
|
36 | 36 | :param commit_ids: commits to get |
|
37 | 37 | :param repos: list of repos to check |
|
38 | 38 | """ |
|
39 | 39 | from rhodecode.lib.utils2 import extract_mentioned_users |
|
40 | 40 | from rhodecode.lib.helpers import ( |
|
41 | 41 | urlify_commit_message, process_patterns, chop_at_smart) |
|
42 | 42 | from rhodecode.model.repo import RepoModel |
|
43 | 43 | |
|
44 | 44 | if not repos: |
|
45 | 45 | raise Exception('no repo defined') |
|
46 | 46 | |
|
47 | 47 | if not isinstance(repos, (tuple, list)): |
|
48 | 48 | repos = [repos] |
|
49 | 49 | |
|
50 | 50 | if not commit_ids: |
|
51 | 51 | return [] |
|
52 | 52 | |
|
53 | 53 | needed_commits = list(commit_ids) |
|
54 | 54 | |
|
55 | 55 | commits = [] |
|
56 | 56 | reviewers = [] |
|
57 | 57 | for repo in repos: |
|
58 | 58 | if not needed_commits: |
|
59 | 59 | return commits # return early if we have the commits we need |
|
60 | 60 | |
|
61 | 61 | vcs_repo = repo.scm_instance(cache=False) |
|
62 | 62 | |
|
63 | 63 | try: |
|
64 | 64 | # use copy of needed_commits since we modify it while iterating |
|
65 | 65 | for commit_id in list(needed_commits): |
|
66 | 66 | if commit_id.startswith('tag=>'): |
|
67 | 67 | raw_id = commit_id[5:] |
|
68 | 68 | cs_data = { |
|
69 | 69 | 'raw_id': commit_id, 'short_id': commit_id, |
|
70 | 70 | 'branch': None, |
|
71 | 71 | 'git_ref_change': 'tag_add', |
|
72 | 72 | 'message': 'Added new tag {}'.format(raw_id), |
|
73 | 73 | 'author': event.actor.full_contact, |
|
74 | 74 | 'date': datetime.datetime.now(), |
|
75 | 75 | 'refs': { |
|
76 | 76 | 'branches': [], |
|
77 | 77 | 'bookmarks': [], |
|
78 | 78 | 'tags': [] |
|
79 | 79 | } |
|
80 | 80 | } |
|
81 | 81 | commits.append(cs_data) |
|
82 | 82 | |
|
83 | 83 | elif commit_id.startswith('delete_branch=>'): |
|
84 | 84 | raw_id = commit_id[15:] |
|
85 | 85 | cs_data = { |
|
86 | 86 | 'raw_id': commit_id, 'short_id': commit_id, |
|
87 | 87 | 'branch': None, |
|
88 | 88 | 'git_ref_change': 'branch_delete', |
|
89 | 89 | 'message': 'Deleted branch {}'.format(raw_id), |
|
90 | 90 | 'author': event.actor.full_contact, |
|
91 | 91 | 'date': datetime.datetime.now(), |
|
92 | 92 | 'refs': { |
|
93 | 93 | 'branches': [], |
|
94 | 94 | 'bookmarks': [], |
|
95 | 95 | 'tags': [] |
|
96 | 96 | } |
|
97 | 97 | } |
|
98 | 98 | commits.append(cs_data) |
|
99 | 99 | |
|
100 | 100 | else: |
|
101 | 101 | try: |
|
102 | 102 | cs = vcs_repo.get_commit(commit_id) |
|
103 | 103 | except CommitDoesNotExistError: |
|
104 | 104 | continue # maybe its in next repo |
|
105 | 105 | |
|
106 | 106 | cs_data = cs.__json__() |
|
107 | 107 | cs_data['refs'] = cs._get_refs() |
|
108 | 108 | |
|
109 | 109 | cs_data['mentions'] = extract_mentioned_users(cs_data['message']) |
|
110 | 110 | cs_data['reviewers'] = reviewers |
|
111 | 111 | cs_data['url'] = RepoModel().get_commit_url( |
|
112 | 112 | repo, cs_data['raw_id'], request=event.request) |
|
113 | 113 | cs_data['permalink_url'] = RepoModel().get_commit_url( |
|
114 | 114 | repo, cs_data['raw_id'], request=event.request, |
|
115 | 115 | permalink=True) |
|
116 | 116 | urlified_message, issues_data = process_patterns( |
|
117 | 117 | cs_data['message'], repo.repo_name) |
|
118 | 118 | cs_data['issues'] = issues_data |
|
119 | 119 | cs_data['message_html'] = urlify_commit_message( |
|
120 | 120 | cs_data['message'], repo.repo_name) |
|
121 | 121 | cs_data['message_html_title'] = chop_at_smart( |
|
122 | 122 | cs_data['message'], '\n', suffix_if_chopped='...') |
|
123 | 123 | commits.append(cs_data) |
|
124 | 124 | |
|
125 | 125 | needed_commits.remove(commit_id) |
|
126 | 126 | |
|
127 | 127 | except Exception: |
|
128 | 128 | log.exception('Failed to extract commits data') |
|
129 | 129 | # we don't send any commits when crash happens, only full list |
|
130 | 130 | # matters we short circuit then. |
|
131 | 131 | return [] |
|
132 | 132 | |
|
133 | 133 | missing_commits = set(commit_ids) - set(c['raw_id'] for c in commits) |
|
134 | 134 | if missing_commits: |
|
135 | 135 | log.error('Inconsistent repository state. ' |
|
136 | 136 | 'Missing commits: %s', ', '.join(missing_commits)) |
|
137 | 137 | |
|
138 | 138 | return commits |
|
139 | 139 | |
|
140 | 140 | |
|
141 | 141 | def _issues_as_dict(commits): |
|
142 | 142 | """ Helper function to serialize issues from commits """ |
|
143 | 143 | issues = {} |
|
144 | 144 | for commit in commits: |
|
145 | 145 | for issue in commit['issues']: |
|
146 | 146 | issues[issue['id']] = issue |
|
147 | 147 | return issues |
|
148 | 148 | |
|
149 | 149 | |
|
150 | 150 | class RepoEvent(RhodeCodeIntegrationEvent): |
|
151 | 151 | """ |
|
152 | 152 | Base class for events acting on a repository. |
|
153 | 153 | |
|
154 | 154 | :param repo: a :class:`Repository` instance |
|
155 | 155 | """ |
|
156 | 156 | |
|
157 | 157 | def __init__(self, repo): |
|
158 | 158 | super(RepoEvent, self).__init__() |
|
159 | 159 | self.repo = repo |
|
160 | 160 | |
|
161 | 161 | def as_dict(self): |
|
162 | 162 | from rhodecode.model.repo import RepoModel |
|
163 | 163 | data = super(RepoEvent, self).as_dict() |
|
164 | 164 | |
|
165 | 165 | extra_fields = collections.OrderedDict() |
|
166 | 166 | for field in self.repo.extra_fields: |
|
167 | 167 | extra_fields[field.field_key] = field.field_value |
|
168 | 168 | |
|
169 | 169 | data.update({ |
|
170 | 170 | 'repo': { |
|
171 | 171 | 'repo_id': self.repo.repo_id, |
|
172 | 172 | 'repo_name': self.repo.repo_name, |
|
173 | 173 | 'repo_type': self.repo.repo_type, |
|
174 | 174 | 'url': RepoModel().get_url( |
|
175 | 175 | self.repo, request=self.request), |
|
176 | 176 | 'permalink_url': RepoModel().get_url( |
|
177 | 177 | self.repo, request=self.request, permalink=True), |
|
178 | 178 | 'extra_fields': extra_fields |
|
179 | 179 | } |
|
180 | 180 | }) |
|
181 | 181 | return data |
|
182 | 182 | |
|
183 | 183 | |
|
184 | 184 | class RepoCommitCommentEvent(RepoEvent): |
|
185 | 185 | """ |
|
186 | 186 | An instance of this class is emitted as an :term:`event` after a comment is made |
|
187 | 187 | on repository commit. |
|
188 | 188 | """ |
|
189 | 189 | |
|
190 | 190 | name = 'repo-commit-comment' |
|
191 | 191 | display_name = lazy_ugettext('repository commit comment') |
|
192 | 192 | description = lazy_ugettext('Event triggered after a comment was made ' |
|
193 | 193 | 'on commit inside a repository') |
|
194 | 194 | |
|
195 | 195 | def __init__(self, repo, commit, comment): |
|
196 | 196 | super(RepoCommitCommentEvent, self).__init__(repo) |
|
197 | 197 | self.commit = commit |
|
198 | 198 | self.comment = comment |
|
199 | 199 | |
|
200 | 200 | def as_dict(self): |
|
201 | 201 | data = super(RepoCommitCommentEvent, self).as_dict() |
|
202 | 202 | data['commit'] = { |
|
203 | 203 | 'commit_id': self.commit.raw_id, |
|
204 | 204 | 'commit_message': self.commit.message, |
|
205 | 205 | 'commit_branch': self.commit.branch, |
|
206 | 206 | } |
|
207 | 207 | |
|
208 | 208 | data['comment'] = { |
|
209 | 209 | 'comment_id': self.comment.comment_id, |
|
210 | 210 | 'comment_text': self.comment.text, |
|
211 | 211 | 'comment_type': self.comment.comment_type, |
|
212 | 212 | 'comment_f_path': self.comment.f_path, |
|
213 | 213 | 'comment_line_no': self.comment.line_no, |
|
214 | 'comment_version': self.comment.last_version, | |
|
215 | } | |
|
216 | return data | |
|
217 | ||
|
218 | ||
|
219 | class RepoCommitCommentEditEvent(RepoEvent): | |
|
220 | """ | |
|
221 | An instance of this class is emitted as an :term:`event` after a comment is edited | |
|
222 | on repository commit. | |
|
223 | """ | |
|
224 | ||
|
225 | name = 'repo-commit-edit-comment' | |
|
226 | display_name = lazy_ugettext('repository commit edit comment') | |
|
227 | description = lazy_ugettext('Event triggered after a comment was edited ' | |
|
228 | 'on commit inside a repository') | |
|
229 | ||
|
230 | def __init__(self, repo, commit, comment): | |
|
231 | super(RepoCommitCommentEditEvent, self).__init__(repo) | |
|
232 | self.commit = commit | |
|
233 | self.comment = comment | |
|
234 | ||
|
235 | def as_dict(self): | |
|
236 | data = super(RepoCommitCommentEditEvent, self).as_dict() | |
|
237 | data['commit'] = { | |
|
238 | 'commit_id': self.commit.raw_id, | |
|
239 | 'commit_message': self.commit.message, | |
|
240 | 'commit_branch': self.commit.branch, | |
|
241 | } | |
|
242 | ||
|
243 | data['comment'] = { | |
|
244 | 'comment_id': self.comment.comment_id, | |
|
245 | 'comment_text': self.comment.text, | |
|
246 | 'comment_type': self.comment.comment_type, | |
|
247 | 'comment_f_path': self.comment.f_path, | |
|
248 | 'comment_line_no': self.comment.line_no, | |
|
249 | 'comment_version': self.comment.last_version, | |
|
214 | 250 | } |
|
215 | 251 | return data |
|
216 | 252 | |
|
217 | 253 | |
|
218 | 254 | class RepoPreCreateEvent(RepoEvent): |
|
219 | 255 | """ |
|
220 | 256 | An instance of this class is emitted as an :term:`event` before a repo is |
|
221 | 257 | created. |
|
222 | 258 | """ |
|
223 | 259 | name = 'repo-pre-create' |
|
224 | 260 | display_name = lazy_ugettext('repository pre create') |
|
225 | 261 | description = lazy_ugettext('Event triggered before repository is created') |
|
226 | 262 | |
|
227 | 263 | |
|
228 | 264 | class RepoCreateEvent(RepoEvent): |
|
229 | 265 | """ |
|
230 | 266 | An instance of this class is emitted as an :term:`event` whenever a repo is |
|
231 | 267 | created. |
|
232 | 268 | """ |
|
233 | 269 | name = 'repo-create' |
|
234 | 270 | display_name = lazy_ugettext('repository created') |
|
235 | 271 | description = lazy_ugettext('Event triggered after repository was created') |
|
236 | 272 | |
|
237 | 273 | |
|
238 | 274 | class RepoPreDeleteEvent(RepoEvent): |
|
239 | 275 | """ |
|
240 | 276 | An instance of this class is emitted as an :term:`event` whenever a repo is |
|
241 | 277 | created. |
|
242 | 278 | """ |
|
243 | 279 | name = 'repo-pre-delete' |
|
244 | 280 | display_name = lazy_ugettext('repository pre delete') |
|
245 | 281 | description = lazy_ugettext('Event triggered before a repository is deleted') |
|
246 | 282 | |
|
247 | 283 | |
|
248 | 284 | class RepoDeleteEvent(RepoEvent): |
|
249 | 285 | """ |
|
250 | 286 | An instance of this class is emitted as an :term:`event` whenever a repo is |
|
251 | 287 | created. |
|
252 | 288 | """ |
|
253 | 289 | name = 'repo-delete' |
|
254 | 290 | display_name = lazy_ugettext('repository deleted') |
|
255 | 291 | description = lazy_ugettext('Event triggered after repository was deleted') |
|
256 | 292 | |
|
257 | 293 | |
|
258 | 294 | class RepoVCSEvent(RepoEvent): |
|
259 | 295 | """ |
|
260 | 296 | Base class for events triggered by the VCS |
|
261 | 297 | """ |
|
262 | 298 | def __init__(self, repo_name, extras): |
|
263 | 299 | self.repo = Repository.get_by_repo_name(repo_name) |
|
264 | 300 | if not self.repo: |
|
265 | 301 | raise Exception('repo by this name %s does not exist' % repo_name) |
|
266 | 302 | self.extras = extras |
|
267 | 303 | super(RepoVCSEvent, self).__init__(self.repo) |
|
268 | 304 | |
|
269 | 305 | @property |
|
270 | 306 | def actor(self): |
|
271 | 307 | if self.extras.get('username'): |
|
272 | 308 | return User.get_by_username(self.extras['username']) |
|
273 | 309 | |
|
274 | 310 | @property |
|
275 | 311 | def actor_ip(self): |
|
276 | 312 | if self.extras.get('ip'): |
|
277 | 313 | return self.extras['ip'] |
|
278 | 314 | |
|
279 | 315 | @property |
|
280 | 316 | def server_url(self): |
|
281 | 317 | if self.extras.get('server_url'): |
|
282 | 318 | return self.extras['server_url'] |
|
283 | 319 | |
|
284 | 320 | @property |
|
285 | 321 | def request(self): |
|
286 | 322 | return self.extras.get('request') or self.get_request() |
|
287 | 323 | |
|
288 | 324 | |
|
289 | 325 | class RepoPrePullEvent(RepoVCSEvent): |
|
290 | 326 | """ |
|
291 | 327 | An instance of this class is emitted as an :term:`event` before commits |
|
292 | 328 | are pulled from a repo. |
|
293 | 329 | """ |
|
294 | 330 | name = 'repo-pre-pull' |
|
295 | 331 | display_name = lazy_ugettext('repository pre pull') |
|
296 | 332 | description = lazy_ugettext('Event triggered before repository code is pulled') |
|
297 | 333 | |
|
298 | 334 | |
|
299 | 335 | class RepoPullEvent(RepoVCSEvent): |
|
300 | 336 | """ |
|
301 | 337 | An instance of this class is emitted as an :term:`event` after commits |
|
302 | 338 | are pulled from a repo. |
|
303 | 339 | """ |
|
304 | 340 | name = 'repo-pull' |
|
305 | 341 | display_name = lazy_ugettext('repository pull') |
|
306 | 342 | description = lazy_ugettext('Event triggered after repository code was pulled') |
|
307 | 343 | |
|
308 | 344 | |
|
309 | 345 | class RepoPrePushEvent(RepoVCSEvent): |
|
310 | 346 | """ |
|
311 | 347 | An instance of this class is emitted as an :term:`event` before commits |
|
312 | 348 | are pushed to a repo. |
|
313 | 349 | """ |
|
314 | 350 | name = 'repo-pre-push' |
|
315 | 351 | display_name = lazy_ugettext('repository pre push') |
|
316 | 352 | description = lazy_ugettext('Event triggered before the code is ' |
|
317 | 353 | 'pushed to a repository') |
|
318 | 354 | |
|
319 | 355 | |
|
320 | 356 | class RepoPushEvent(RepoVCSEvent): |
|
321 | 357 | """ |
|
322 | 358 | An instance of this class is emitted as an :term:`event` after commits |
|
323 | 359 | are pushed to a repo. |
|
324 | 360 | |
|
325 | 361 | :param extras: (optional) dict of data from proxied VCS actions |
|
326 | 362 | """ |
|
327 | 363 | name = 'repo-push' |
|
328 | 364 | display_name = lazy_ugettext('repository push') |
|
329 | 365 | description = lazy_ugettext('Event triggered after the code was ' |
|
330 | 366 | 'pushed to a repository') |
|
331 | 367 | |
|
332 | 368 | def __init__(self, repo_name, pushed_commit_ids, extras): |
|
333 | 369 | super(RepoPushEvent, self).__init__(repo_name, extras) |
|
334 | 370 | self.pushed_commit_ids = pushed_commit_ids |
|
335 | 371 | self.new_refs = extras.new_refs |
|
336 | 372 | |
|
337 | 373 | def as_dict(self): |
|
338 | 374 | data = super(RepoPushEvent, self).as_dict() |
|
339 | 375 | |
|
340 | 376 | def branch_url(branch_name): |
|
341 | 377 | return '{}/changelog?branch={}'.format( |
|
342 | 378 | data['repo']['url'], branch_name) |
|
343 | 379 | |
|
344 | 380 | def tag_url(tag_name): |
|
345 | 381 | return '{}/files/{}/'.format( |
|
346 | 382 | data['repo']['url'], tag_name) |
|
347 | 383 | |
|
348 | 384 | commits = _commits_as_dict( |
|
349 | 385 | self, commit_ids=self.pushed_commit_ids, repos=[self.repo]) |
|
350 | 386 | |
|
351 | 387 | last_branch = None |
|
352 | 388 | for commit in reversed(commits): |
|
353 | 389 | commit['branch'] = commit['branch'] or last_branch |
|
354 | 390 | last_branch = commit['branch'] |
|
355 | 391 | issues = _issues_as_dict(commits) |
|
356 | 392 | |
|
357 | 393 | branches = set() |
|
358 | 394 | tags = set() |
|
359 | 395 | for commit in commits: |
|
360 | 396 | if commit['refs']['tags']: |
|
361 | 397 | for tag in commit['refs']['tags']: |
|
362 | 398 | tags.add(tag) |
|
363 | 399 | if commit['branch']: |
|
364 | 400 | branches.add(commit['branch']) |
|
365 | 401 | |
|
366 | 402 | # maybe we have branches in new_refs ? |
|
367 | 403 | try: |
|
368 | 404 | branches = branches.union(set(self.new_refs['branches'])) |
|
369 | 405 | except Exception: |
|
370 | 406 | pass |
|
371 | 407 | |
|
372 | 408 | branches = [ |
|
373 | 409 | { |
|
374 | 410 | 'name': branch, |
|
375 | 411 | 'url': branch_url(branch) |
|
376 | 412 | } |
|
377 | 413 | for branch in branches |
|
378 | 414 | ] |
|
379 | 415 | |
|
380 | 416 | # maybe we have branches in new_refs ? |
|
381 | 417 | try: |
|
382 | 418 | tags = tags.union(set(self.new_refs['tags'])) |
|
383 | 419 | except Exception: |
|
384 | 420 | pass |
|
385 | 421 | |
|
386 | 422 | tags = [ |
|
387 | 423 | { |
|
388 | 424 | 'name': tag, |
|
389 | 425 | 'url': tag_url(tag) |
|
390 | 426 | } |
|
391 | 427 | for tag in tags |
|
392 | 428 | ] |
|
393 | 429 | |
|
394 | 430 | data['push'] = { |
|
395 | 431 | 'commits': commits, |
|
396 | 432 | 'issues': issues, |
|
397 | 433 | 'branches': branches, |
|
398 | 434 | 'tags': tags, |
|
399 | 435 | } |
|
400 | 436 | return data |
@@ -1,428 +1,450 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import colander |
|
22 | 22 | import string |
|
23 | 23 | import collections |
|
24 | 24 | import logging |
|
25 | 25 | import requests |
|
26 | 26 | import urllib |
|
27 | 27 | from requests.adapters import HTTPAdapter |
|
28 | 28 | from requests.packages.urllib3.util.retry import Retry |
|
29 | 29 | |
|
30 | 30 | from mako import exceptions |
|
31 | 31 | |
|
32 | 32 | from rhodecode.lib.utils2 import safe_str |
|
33 | 33 | from rhodecode.translation import _ |
|
34 | 34 | |
|
35 | 35 | |
|
36 | 36 | log = logging.getLogger(__name__) |
|
37 | 37 | |
|
38 | 38 | |
|
class UrlTmpl(string.Template):
    """``string.Template`` subclass whose substituted values are url-encoded.

    Used to expand ``${var}`` placeholders in webhook/CI url templates so
    that arbitrary branch names, commit messages etc. stay url-safe.
    """

    def safe_substitute(self, **kws):
        # url encode the kw for usage in url
        # NOTE: `urllib.quote` is Python 2; safe_str guards against unicode values
        kws = {k: urllib.quote(safe_str(v)) for k, v in kws.items()}
        return super(UrlTmpl, self).safe_substitute(**kws)
|
45 | 45 | |
|
46 | 46 | |
|
class IntegrationTypeBase(object):
    """ Base class for IntegrationType plugins """
    # dummy types (e.g. EE-only placeholders) are shown in the UI but cannot run
    is_dummy = False
    description = ''

    @classmethod
    def icon(cls):
        # default "cog" icon (Font Awesome glyph), inlined as SVG so that no
        # static-asset lookup is needed when rendering the integrations list
        return '''
        <?xml version="1.0" encoding="UTF-8" standalone="no"?>
        <svg
           xmlns:dc="http://purl.org/dc/elements/1.1/"
           xmlns:cc="http://creativecommons.org/ns#"
           xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
           xmlns:svg="http://www.w3.org/2000/svg"
           xmlns="http://www.w3.org/2000/svg"
           xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
           xmlns:inkscape="http://setwww.inkscape.org/namespaces/inkscape"
           viewBox="0 -256 1792 1792"
           id="svg3025"
           version="1.1"
           inkscape:version="0.48.3.1 r9886"
           width="100%"
           height="100%"
           sodipodi:docname="cog_font_awesome.svg">
          <metadata
             id="metadata3035">
            <rdf:RDF>
              <cc:Work
                 rdf:about="">
                <dc:format>image/svg+xml</dc:format>
                <dc:type
                   rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
              </cc:Work>
            </rdf:RDF>
          </metadata>
          <defs
             id="defs3033" />
          <sodipodi:namedview
             pagecolor="#ffffff"
             bordercolor="#666666"
             borderopacity="1"
             objecttolerance="10"
             gridtolerance="10"
             guidetolerance="10"
             inkscape:pageopacity="0"
             inkscape:pageshadow="2"
             inkscape:window-width="640"
             inkscape:window-height="480"
             id="namedview3031"
             showgrid="false"
             inkscape:zoom="0.13169643"
             inkscape:cx="896"
             inkscape:cy="896"
             inkscape:window-x="0"
             inkscape:window-y="25"
             inkscape:window-maximized="0"
             inkscape:current-layer="svg3025" />
          <g
             transform="matrix(1,0,0,-1,121.49153,1285.4237)"
             id="g3027">
            <path
               d="m 1024,640 q 0,106 -75,181 -75,75 -181,75 -106,0 -181,-75 -75,-75 -75,-181 0,-106 75,-181 75,-75 181,-75 106,0 181,75 75,75 75,181 z m 512,109 V 527 q 0,-12 -8,-23 -8,-11 -20,-13 l -185,-28 q -19,-54 -39,-91 35,-50 107,-138 10,-12 10,-25 0,-13 -9,-23 -27,-37 -99,-108 -72,-71 -94,-71 -12,0 -26,9 l -138,108 q -44,-23 -91,-38 -16,-136 -29,-186 -7,-28 -36,-28 H 657 q -14,0 -24.5,8.5 Q 622,-111 621,-98 L 593,86 q -49,16 -90,37 L 362,16 Q 352,7 337,7 323,7 312,18 186,132 147,186 q -7,10 -7,23 0,12 8,23 15,21 51,66.5 36,45.5 54,70.5 -27,50 -41,99 L 29,495 Q 16,497 8,507.5 0,518 0,531 v 222 q 0,12 8,23 8,11 19,13 l 186,28 q 14,46 39,92 -40,57 -107,138 -10,12 -10,24 0,10 9,23 26,36 98.5,107.5 72.5,71.5 94.5,71.5 13,0 26,-10 l 138,-107 q 44,23 91,38 16,136 29,186 7,28 36,28 h 222 q 14,0 24.5,-8.5 Q 914,1391 915,1378 l 28,-184 q 49,-16 90,-37 l 142,107 q 9,9 24,9 13,0 25,-10 129,-119 165,-170 7,-8 7,-22 0,-12 -8,-23 -15,-21 -51,-66.5 -36,-45.5 -54,-70.5 26,-50 41,-98 l 183,-28 q 13,-2 21,-12.5 8,-10.5 8,-23.5 z"
               id="path3029"
               inkscape:connector-curvature="0"
               style="fill:currentColor" />
          </g>
        </svg>
        '''

    def __init__(self, settings):
        """
        :param settings: dict of settings to be used for the integration
        """
        self.settings = settings

    def settings_schema(self):
        """
        A colander schema of settings for the integration type

        Subclasses override this to declare their configurable fields;
        the base returns an empty schema (no settings).
        """
        return colander.Schema()

    def event_enabled(self, event):
        """
        Checks if submitted event is enabled based on the plugin settings

        :param event: event instance exposing a ``name`` attribute
        :return: bool
        """
        # NOTE(review): raises KeyError if 'events' was never configured —
        # presumably the settings schema guarantees the key; confirm.
        allowed_events = self.settings['events']
        if event.name not in allowed_events:
            log.debug('event ignored: %r event %s not in allowed set of events %s',
                      event, event.name, allowed_events)
            return False
        return True
|
140 | 140 | |
|
141 | 141 | |
|
class EEIntegration(IntegrationTypeBase):
    """Non-functional placeholder advertising an EE-only integration type."""
    description = 'Integration available in RhodeCode EE edition.'
    is_dummy = True

    def __init__(self, name, key, settings=None):
        """
        :param name: display name shown in the integrations UI
        :param key: unique identifier of the integration type
        :param settings: optional settings dict, normally None for dummies
        """
        super(EEIntegration, self).__init__(settings)
        self.display_name = name
        self.key = key
|
150 | 150 | |
|
151 | 151 | |
|
# Helpers #
# NOTE: updating this list requires updating the `common_vars` dict in
# `WebhookDataHandler.get_base_parsed_template` as well.
WEBHOOK_URL_VARS = [
    # GENERAL
    ('General', [
        ('event_name', 'Unique name of the event type, e.g pullrequest-update'),
        ('repo_name', 'Full name of the repository'),
        ('repo_type', 'VCS type of repository'),
        ('repo_id', 'Unique id of repository'),
        ('repo_url', 'Repository url'),
        ]
     ),
    # extra repo fields
    ('Repository', [
        ('extra:<extra_key_name>', 'Extra repo variables, read from its settings.'),
        ]
     ),
    # special attrs below that we handle, using multi-call
    ('Commit push - Multicalls', [
        ('branch', 'Name of each branch submitted, if any.'),
        ('branch_head', 'Head ID of pushed branch (full sha of last commit), if any.'),
        ('commit_id', 'ID (full sha) of each commit submitted, if any.'),
        ]
     ),
    # pr events vars
    ('Pull request', [
        ('pull_request_id', 'Unique ID of the pull request.'),
        ('pull_request_title', 'Title of the pull request.'),
        ('pull_request_url', 'Pull request url.'),
        ('pull_request_shadow_url', 'Pull request shadow repo clone url.'),
        ('pull_request_commits_uid', 'Calculated UID of all commits inside the PR. '
                                     'Changes after PR update'),
        ]
     ),
    # commit comment event vars
    ('Commit comment', [
        ('commit_comment_id', 'Unique ID of the comment made on a commit.'),
        ('commit_comment_text', 'Text of commit comment.'),
        ('commit_comment_type', 'Type of comment, e.g note/todo.'),

        ('commit_comment_f_path', 'Optionally path of file for inline comments.'),
        ('commit_comment_line_no', 'Line number of the file: eg o10, or n200'),

        ('commit_comment_commit_id', 'Commit id that comment was left at.'),
        ('commit_comment_commit_branch', 'Commit branch that comment was left at'),
        ('commit_comment_commit_message', 'Commit message that comment was left at'),
        ]
     ),
    # user who triggers the call
    ('Caller', [
        ('username', 'User who triggered the call.'),
        ('user_id', 'User id who triggered the call.'),
        ]
     ),
]

# common vars for url template used for CI plugins. Shared with webhook
CI_URL_VARS = WEBHOOK_URL_VARS
|
210 | 210 | |
|
211 | 211 | |
|
class CommitParsingDataHandler(object):
    """Mixin that groups pushed commits by the branch they belong to."""

    def aggregate_branch_data(self, branches, commits):
        """
        Group *commits* per branch name, preserving encounter order.

        :param branches: list of branch dicts, each carrying at least ``name``
        :param commits: list of commit dicts carrying ``branch`` and ``raw_id``
        :return: OrderedDict mapping branch name to a dict with keys
            ``branch`` (branch dict, or the raw name when falsy),
            ``branch_head`` (raw_id of the last commit seen on that branch)
            and ``commits`` (list of commit dicts)
        """
        by_name = collections.OrderedDict(
            (entry['name'], entry) for entry in branches)

        aggregated = collections.OrderedDict()
        for commit in commits:
            # GIT may create tags or delete branches without an associated
            # commit; such synthetic entries carry nothing to aggregate
            if commit.get('git_ref_change'):
                continue

            name = commit['branch']
            if name not in aggregated:
                aggregated[name] = {
                    'branch': by_name[name] if name else name,
                    'branch_head': '',
                    'commits': [],
                }

            bucket = aggregated[name]
            bucket['commits'].append(commit)
            # the latest commit seen becomes the head of the branch
            bucket['branch_head'] = commit['raw_id']
        return aggregated
|
239 | 239 | |
|
240 | 240 | |
|
class WebhookDataHandler(CommitParsingDataHandler):
    """
    Expands a webhook url template for a given event into a list of
    ``(url, headers, data)`` calls to be executed.

    Push events may fan out into multiple calls: one per branch when
    ``${branch}``/``${branch_head}`` is used, and one per commit when
    ``${commit_id}`` is used.
    """
    name = 'webhook'

    def __init__(self, template_url, headers):
        """
        :param template_url: url template with optional ``${var}`` placeholders
        :param headers: dict of extra HTTP headers sent with every call
        """
        self.template_url = template_url
        self.headers = headers

    def get_base_parsed_template(self, data):
        """
        initially parses the passed in template with some common variables
        available on ALL calls
        """
        # note: make sure to update the `WEBHOOK_URL_VARS` if this changes
        common_vars = {
            'repo_name': data['repo']['repo_name'],
            'repo_type': data['repo']['repo_type'],
            'repo_id': data['repo']['repo_id'],
            'repo_url': data['repo']['url'],
            'username': data['actor']['username'],
            'user_id': data['actor']['user_id'],
            'event_name': data['name']
        }

        extra_vars = {}
        for extra_key, extra_val in data['repo']['extra_fields'].items():
            extra_vars['extra__{}'.format(extra_key)] = extra_val
        common_vars.update(extra_vars)

        # `${extra:key}` is not a valid template identifier; remap to `extra__key`
        template_url = self.template_url.replace('${extra:', '${extra__')
        return self._substitute_vars(template_url, common_vars.items())

    def _substitute_vars(self, url, variables):
        # apply (key, value) pairs to the url template one at a time so that
        # each value gets url-encoded independently by UrlTmpl
        for k, v in variables:
            url = UrlTmpl(url).safe_substitute(**{k: v})
        return url

    def repo_push_event_handler(self, event, data):
        """Expand push events, fanning out per branch/commit when requested."""
        url = self.get_base_parsed_template(data)
        url_calls = []

        branches_commits = self.aggregate_branch_data(
            data['push']['branches'], data['push']['commits'])
        if '${branch}' in url or '${branch_head}' in url or '${commit_id}' in url:
            # call it multiple times, for each branch if used in variables
            for branch, commit_ids in branches_commits.items():
                branch_url = UrlTmpl(url).safe_substitute(branch=branch)

                if '${branch_head}' in branch_url:
                    # last commit in the aggregate is the head of the branch
                    branch_head = commit_ids['branch_head']
                    branch_url = UrlTmpl(branch_url).safe_substitute(branch_head=branch_head)

                # call further down for each commit if used
                if '${commit_id}' in branch_url:
                    for commit_data in commit_ids['commits']:
                        commit_id = commit_data['raw_id']
                        commit_url = UrlTmpl(branch_url).safe_substitute(commit_id=commit_id)
                        # register per-commit call
                        log.debug(
                            'register %s call(%s) to url %s',
                            self.name, event, commit_url)
                        url_calls.append(
                            (commit_url, self.headers, data))

                else:
                    # register per-branch call
                    log.debug('register %s call(%s) to url %s',
                              self.name, event, branch_url)
                    url_calls.append((branch_url, self.headers, data))

        else:
            log.debug('register %s call(%s) to url %s', self.name, event, url)
            url_calls.append((url, self.headers, data))

        return url_calls

    def _comment_vars(self, data):
        # template variables shared by comment-created and comment-edited events
        return [
            ('commit_comment_id', data['comment']['comment_id']),
            ('commit_comment_text', data['comment']['comment_text']),
            ('commit_comment_type', data['comment']['comment_type']),

            ('commit_comment_f_path', data['comment']['comment_f_path']),
            ('commit_comment_line_no', data['comment']['comment_line_no']),

            ('commit_comment_commit_id', data['commit']['commit_id']),
            ('commit_comment_commit_branch', data['commit']['commit_branch']),
            ('commit_comment_commit_message', data['commit']['commit_message']),
        ]

    def repo_commit_comment_handler(self, event, data):
        """Expand a commit-comment event into a single call."""
        url = self.get_base_parsed_template(data)
        log.debug('register %s call(%s) to url %s', self.name, event, url)
        url = self._substitute_vars(url, self._comment_vars(data))
        return [(url, self.headers, data)]

    def repo_commit_comment_edit_handler(self, event, data):
        # an edited comment exposes exactly the same template variables as a
        # freshly created one (previously this was a copy-pasted duplicate)
        return self.repo_commit_comment_handler(event, data)

    def repo_create_event_handler(self, event, data):
        """Expand a repo-create event into a single call (common vars only)."""
        url = self.get_base_parsed_template(data)
        log.debug('register %s call(%s) to url %s', self.name, event, url)
        return [(url, self.headers, data)]

    def pull_request_event_handler(self, event, data):
        """Expand any pull-request event into a single call."""
        url = self.get_base_parsed_template(data)
        log.debug('register %s call(%s) to url %s', self.name, event, url)
        pr_vars = [
            ('pull_request_id', data['pullrequest']['pull_request_id']),
            ('pull_request_title', data['pullrequest']['title']),
            ('pull_request_url', data['pullrequest']['url']),
            ('pull_request_shadow_url', data['pullrequest']['shadow_url']),
            ('pull_request_commits_uid', data['pullrequest']['commits_uid']),
        ]
        url = self._substitute_vars(url, pr_vars)
        return [(url, self.headers, data)]

    def __call__(self, event, data):
        """Dispatch *event* to the matching handler; raise for unknown types."""
        from rhodecode import events

        if isinstance(event, events.RepoPushEvent):
            return self.repo_push_event_handler(event, data)
        elif isinstance(event, events.RepoCreateEvent):
            return self.repo_create_event_handler(event, data)
        elif isinstance(event, events.RepoCommitCommentEvent):
            return self.repo_commit_comment_handler(event, data)
        elif isinstance(event, events.RepoCommitCommentEditEvent):
            return self.repo_commit_comment_edit_handler(event, data)
        elif isinstance(event, events.PullRequestEvent):
            return self.pull_request_event_handler(event, data)
        else:
            raise ValueError(
                'event type `{}` has no handler defined'.format(event.__class__))
|
368 | 390 | |
|
369 | 391 | |
|
def get_auth(settings):
    """Return basic-auth credentials from *settings*, or None when either
    the username or the password is missing/empty."""
    from requests.auth import HTTPBasicAuth
    username = settings.get('username')
    password = settings.get('password')
    if not (username and password):
        return None
    return HTTPBasicAuth(username, password)
|
377 | 399 | |
|
378 | 400 | |
|
def get_web_token(settings):
    """Return the configured secret token; raises KeyError when absent."""
    secret = settings['secret_token']
    return secret
|
381 | 403 | |
|
382 | 404 | |
|
def get_url_vars(url_vars):
    """Render grouped ``(section, [(key, explanation), ...])`` pairs as a
    human-readable help text block listing the available ``${var}`` names."""
    lines = []
    for section, entries in url_vars:
        lines.append('\n*{}*'.format(section))
        lines.extend(
            ' {} - {}'.format('${' + key + '}', explanation)
            for key, explanation in entries)
    return '\n'.join(lines)
|
391 | 413 | |
|
392 | 414 | |
|
def render_with_traceback(template, *args, **kwargs):
    """Render a mako *template*, logging mako's rich error template on failure.

    Re-raises the original exception after logging, so callers still see it.
    """
    try:
        return template.render(*args, **kwargs)
    except Exception:
        # plain tracebacks through mako-generated code are unreadable;
        # log mako's annotated version before propagating
        log.error(exceptions.text_error_template().render())
        raise
|
399 | 421 | |
|
400 | 422 | |
|
# status codes that force a retry in `requests_retry_call`
STATUS_400 = (400, 401, 403)  # auth/permission failures, possibly transient
STATUS_500 = (500, 502, 504)  # server/gateway failures
|
403 | 425 | |
|
404 | 426 | |
|
def requests_retry_call(
        retries=3, backoff_factor=0.3, status_forcelist=STATUS_400+STATUS_500,
        session=None):
    """
    Build a requests session that automatically retries failed calls.

    Usage::

        session = requests_retry_call()
        response = session.get('http://example.com')

    :param retries: maximum number of connect/read/total retries
    :param backoff_factor: exponential backoff multiplier between attempts
    :param status_forcelist: HTTP status codes that trigger a retry
    :param session: optional existing session to mount the retry adapter on
    """
    http_session = session or requests.Session()
    retry_policy = Retry(
        total=retries,
        read=retries,
        connect=retries,
        backoff_factor=backoff_factor,
        status_forcelist=status_forcelist,
    )
    adapter = HTTPAdapter(max_retries=retry_policy)
    for scheme in ('http://', 'https://'):
        http_session.mount(scheme, adapter)
    return http_session
@@ -1,251 +1,253 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | from __future__ import unicode_literals |
|
22 | 22 | import deform |
|
23 | 23 | import logging |
|
24 | 24 | import requests |
|
25 | 25 | import colander |
|
26 | 26 | import textwrap |
|
27 | 27 | from mako.template import Template |
|
28 | 28 | from rhodecode import events |
|
29 | 29 | from rhodecode.model.validation_schema.widgets import CheckboxChoiceWidgetDesc |
|
30 | 30 | from rhodecode.translation import _ |
|
31 | 31 | from rhodecode.lib import helpers as h |
|
32 | 32 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask |
|
33 | 33 | from rhodecode.lib.colander_utils import strip_whitespace |
|
34 | 34 | from rhodecode.integrations.types.base import ( |
|
35 | 35 | IntegrationTypeBase, CommitParsingDataHandler, render_with_traceback, |
|
36 | 36 | requests_retry_call) |
|
37 | 37 | |
|
38 | 38 | log = logging.getLogger(__name__) |
|
39 | 39 | |
|
# mako template producing the HTML body of a hipchat push notification;
# rendered via `render_with_traceback` with `data` and `branches_commits`
REPO_PUSH_TEMPLATE = Template('''
<b>${data['actor']['username']}</b> pushed to repo <a href="${data['repo']['url']}">${data['repo']['repo_name']}</a>:
<br>
<ul>
%for branch, branch_commits in branches_commits.items():
<li>
% if branch:
<a href="${branch_commits['branch']['url']}">branch: ${branch_commits['branch']['name']}</a>
% else:
to trunk
% endif
<ul>
% for commit in branch_commits['commits']:
<li><a href="${commit['url']}">${commit['short_id']}</a> - ${commit['message_html']}</li>
% endfor
</ul>
</li>
%endfor
''')
|
59 | 59 | |
|
60 | 60 | |
|
class HipchatSettingsSchema(colander.Schema):
    """Colander schema for the hipchat integration settings form."""
    # background colors accepted by the hipchat notification API
    color_choices = [
        ('yellow', _('Yellow')),
        ('red', _('Red')),
        ('green', _('Green')),
        ('purple', _('Purple')),
        ('gray', _('Gray')),
    ]

    # full notification endpoint, including room id and auth token
    server_url = colander.SchemaNode(
        colander.String(),
        title=_('Hipchat server URL'),
        description=_('Hipchat integration url.'),
        default='',
        preparer=strip_whitespace,
        validator=colander.url,
        widget=deform.widget.TextInputWidget(
            placeholder='https://?.hipchat.com/v2/room/?/notification?auth_token=?',
        ),
    )
    # whether the message should ping users in the room
    notify = colander.SchemaNode(
        colander.Bool(),
        title=_('Notify'),
        description=_('Make a notification to the users in room.'),
        missing=False,
        default=False,
    )
    color = colander.SchemaNode(
        colander.String(),
        title=_('Color'),
        description=_('Background color of message.'),
        missing='',
        validator=colander.OneOf([x[0] for x in color_choices]),
        widget=deform.widget.Select2Widget(
            values=color_choices,
        ),
    )
97 | 97 | ) |
|
98 | 98 | |
|
99 | 99 | |
|
class HipchatIntegrationType(IntegrationTypeBase, CommitParsingDataHandler):
    """Integration that posts repo/pull-request events to a hipchat room."""
    key = 'hipchat'
    display_name = _('Hipchat')
    description = _('Send events such as repo pushes and pull requests to '
                    'your hipchat channel.')

    @classmethod
    def icon(cls):
        # hipchat logo, inlined as SVG
        return '''<?xml version="1.0" encoding="utf-8"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 1000 1000" enable-background="new 0 0 1000 1000" xml:space="preserve"><g><g transform="translate(0.000000,511.000000) scale(0.100000,-0.100000)"><path fill="#205281" d="M4197.1,4662.4c-1661.5-260.4-3018-1171.6-3682.6-2473.3C219.9,1613.6,100,1120.3,100,462.6c0-1014,376.8-1918.4,1127-2699.4C2326.7-3377.6,3878.5-3898.3,5701-3730.5l486.5,44.5l208.9-123.3c637.2-373.4,1551.8-640.6,2240.4-650.9c304.9-6.9,335.7,0,417.9,75.4c185,174.7,147.3,411.1-89.1,548.1c-315.2,181.6-620,544.7-733.1,870.1l-51.4,157.6l472.7,472.7c349.4,349.4,520.7,551.5,657.7,774.2c784.5,1281.2,784.5,2788.5,0,4052.6c-236.4,376.8-794.8,966-1178.4,1236.7c-572.1,407.7-1264.1,709.1-1993.7,870.1c-267.2,58.2-479.6,75.4-1038,82.2C4714.4,4686.4,4310.2,4679.6,4197.1,4662.4z M5947.6,3740.9c1856.7-380.3,3127.6-1709.4,3127.6-3275c0-1000.3-534.4-1949.2-1466.2-2600.1c-188.4-133.6-287.8-226.1-301.5-284.4c-41.1-157.6,263.8-938.6,397.4-1020.8c20.5-10.3,34.3-44.5,34.3-75.4c0-167.8-811.9,195.3-1363.4,609.8l-181.6,137l-332.3-58.2c-445.3-78.8-1281.2-78.8-1702.6,0C2796-2569.2,1734.1-1832.6,1220.2-801.5C983.8-318.5,905,51.5,929,613.3c27.4,640.6,243.2,1192.1,685.1,1740.3c620,770.8,1661.5,1305.2,2822.8,1452.5C4806.9,3854,5553.7,3819.7,5947.6,3740.9z"/><path fill="#205281" d="M2381.5-345.9c-75.4-106.2-68.5-167.8,34.3-322c332.3-500.2,1010.6-928.4,1760.8-1120.2c417.9-106.2,1226.4-106.2,1644.3,0c712.5,181.6,1270.9,517.3,1685.4,1014C7681-561.7,7715.3-424.7,7616-325.4c-89.1,89.1-167.9,65.1-431.7-133.6c-835.8-630.3-2028-856.4-3086.5-585.8C3683.3-938.6,3142-685,2830.3-448.7C2576.8-253.4,2463.7-229.4,2381.5-345.9z"/></g></g><!-- Svg Vector Icons : http://www.onlinewebfonts.com/icon --></svg>'''

    valid_events = [
        events.PullRequestCloseEvent,
        events.PullRequestMergeEvent,
        events.PullRequestUpdateEvent,
        events.PullRequestCommentEvent,
        events.PullRequestReviewEvent,
        events.PullRequestCreateEvent,
        events.RepoPushEvent,
        events.RepoCreateEvent,
    ]

    def send_event(self, event):
        """Format *event* into an HTML message and post it asynchronously."""
        if event.__class__ not in self.valid_events:
            log.debug('event %r not present in valid event list (%s)', event, self.valid_events)
            return

        if not self.event_enabled(event):
            return

        data = event.as_dict()

        # generic fallback text (fixed: original emitted an unclosed `<b>` tag)
        text = '<b>%s</b> caused a <b>%s</b> event' % (
            data['actor']['username'], event.name)

        if isinstance(event, (events.PullRequestCommentEvent,
                              events.PullRequestCommentEditEvent)):
            # new and edited comments render identically
            text = self.format_pull_request_comment_event(event, data)
        elif isinstance(event, events.PullRequestReviewEvent):
            text = self.format_pull_request_review_event(event, data)
        elif isinstance(event, events.PullRequestEvent):
            text = self.format_pull_request_event(event, data)
        elif isinstance(event, events.RepoPushEvent):
            text = self.format_repo_push_event(data)
        elif isinstance(event, events.RepoCreateEvent):
            text = self.format_repo_create_event(data)
        else:
            # fall through with the generic text rather than dropping the event
            log.error('unhandled event type: %r', event)

        run_task(post_text_to_hipchat, self.settings, text)

    def settings_schema(self):
        """Return the hipchat settings schema plus the enabled-events field."""
        schema = HipchatSettingsSchema()
        schema.add(colander.SchemaNode(
            colander.Set(),
            widget=CheckboxChoiceWidgetDesc(
                values=sorted(
                    [(e.name, e.display_name, e.description) for e in self.valid_events]
                ),
            ),
            description="List of events activated for this integration",
            name='events'
        ))

        return schema

    def format_pull_request_comment_event(self, event, data):
        """Render a PR comment event, truncating long comment bodies."""
        comment_text = data['comment']['text']
        if len(comment_text) > 200:
            # escape BEFORE appending the "read more" anchor so the anchor
            # markup survives. The original escaped the truncated text here
            # AND re-escaped the whole string below, double-escaping the
            # comment and mangling the (already malformed `<a/>`) link.
            comment_text = '{comment_text}<a href="{comment_url}">...</a>'.format(
                comment_text=h.html_escape(comment_text[:200]),
                comment_url=data['comment']['url'],
            )
        else:
            comment_text = h.html_escape(comment_text)

        comment_status = ''
        if data['comment']['status']:
            comment_status = '[{}]: '.format(data['comment']['status'])

        return (textwrap.dedent(
            '''
            {user} commented on pull request <a href="{pr_url}">{number}</a> - {pr_title}:
            >>> {comment_status}{comment_text}
            ''').format(
                comment_status=comment_status,
                user=data['actor']['username'],
                number=data['pullrequest']['pull_request_id'],
                pr_url=data['pullrequest']['url'],
                pr_title=h.html_escape(data['pullrequest']['title']),
                comment_text=comment_text
            )
        )

    def format_pull_request_review_event(self, event, data):
        """Render a PR review-status-change event."""
        return (textwrap.dedent(
            '''
            Status changed to {pr_status} for pull request <a href="{pr_url}">#{number}</a> - {pr_title}
            ''').format(
                number=data['pullrequest']['pull_request_id'],
                pr_url=data['pullrequest']['url'],
                pr_status=data['pullrequest']['status'],
                pr_title=h.html_escape(data['pullrequest']['title']),
            )
        )

    def format_pull_request_event(self, event, data):
        """Render create/update/merge/close PR events."""
        action = {
            events.PullRequestCloseEvent: 'closed',
            events.PullRequestMergeEvent: 'merged',
            events.PullRequestUpdateEvent: 'updated',
            events.PullRequestCreateEvent: 'created',
        }.get(event.__class__, str(event.__class__))

        return ('Pull request <a href="{url}">#{number}</a> - {title} '
                '{action} by <b>{user}</b>').format(
            user=data['actor']['username'],
            number=data['pullrequest']['pull_request_id'],
            url=data['pullrequest']['url'],
            title=h.html_escape(data['pullrequest']['title']),
            action=action
        )

    def format_repo_push_event(self, data):
        """Render a push event via the mako REPO_PUSH_TEMPLATE."""
        branches_commits = self.aggregate_branch_data(
            data['push']['branches'], data['push']['commits'])

        result = render_with_traceback(
            REPO_PUSH_TEMPLATE,
            data=data,
            branches_commits=branches_commits,
        )
        return result

    def format_repo_create_event(self, data):
        """Render a repo-created event."""
        return '<a href="{}">{}</a> ({}) repository created by <b>{}</b>'.format(
            data['repo']['url'],
            h.html_escape(data['repo']['repo_name']),
            data['repo']['repo_type'],
            data['actor']['username'],
        )
238 | 240 | ) |
|
239 | 241 | |
|
240 | 242 | |
|
@async_task(ignore_result=True, base=RequestContextTask)
def post_text_to_hipchat(settings, text):
    """Celery task: POST *text* as a room notification to the configured
    hipchat server url, retrying transient failures."""
    log.debug('sending %s to hipchat %s', text, settings['server_url'])
    payload = {
        "message": text,
        "color": settings.get('color', 'yellow'),
        "notify": settings.get('notify', False),
    }
    session = requests_retry_call()
    response = session.post(settings['server_url'], json=payload, timeout=60)
    response.raise_for_status()  # raise exception on a failed request
@@ -1,351 +1,354 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | from __future__ import unicode_literals |
|
22 | 22 | import re |
|
23 | 23 | import time |
|
24 | 24 | import textwrap |
|
25 | 25 | import logging |
|
26 | 26 | |
|
27 | 27 | import deform |
|
28 | 28 | import requests |
|
29 | 29 | import colander |
|
30 | 30 | from mako.template import Template |
|
31 | 31 | |
|
32 | 32 | from rhodecode import events |
|
33 | 33 | from rhodecode.model.validation_schema.widgets import CheckboxChoiceWidgetDesc |
|
34 | 34 | from rhodecode.translation import _ |
|
35 | 35 | from rhodecode.lib import helpers as h |
|
36 | 36 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask |
|
37 | 37 | from rhodecode.lib.colander_utils import strip_whitespace |
|
38 | 38 | from rhodecode.integrations.types.base import ( |
|
39 | 39 | IntegrationTypeBase, CommitParsingDataHandler, render_with_traceback, |
|
40 | 40 | requests_retry_call) |
|
41 | 41 | |
|
42 | 42 | log = logging.getLogger(__name__) |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | def html_to_slack_links(message): |
|
46 | 46 | return re.compile(r'<a .*?href=["\'](.+?)".*?>(.+?)</a>').sub( |
|
47 | 47 | r'<\1|\2>', message) |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | REPO_PUSH_TEMPLATE = Template(''' |
|
51 | 51 | <% |
|
52 | 52 | def branch_text(branch): |
|
53 | 53 | if branch: |
|
54 | 54 | return 'on branch: <{}|{}>'.format(branch_commits['branch']['url'], branch_commits['branch']['name']) |
|
55 | 55 | else: |
|
56 | 56 | ## case for SVN no branch push... |
|
57 | 57 | return 'to trunk' |
|
58 | 58 | %> \ |
|
59 | 59 | |
|
60 | 60 | % for branch, branch_commits in branches_commits.items(): |
|
61 | 61 | ${len(branch_commits['commits'])} ${'commit' if len(branch_commits['commits']) == 1 else 'commits'} ${branch_text(branch)} |
|
62 | 62 | % for commit in branch_commits['commits']: |
|
63 | 63 | `<${commit['url']}|${commit['short_id']}>` - ${commit['message_html']|html_to_slack_links} |
|
64 | 64 | % endfor |
|
65 | 65 | % endfor |
|
66 | 66 | ''') |
|
67 | 67 | |
|
68 | 68 | |
|
69 | 69 | class SlackSettingsSchema(colander.Schema): |
|
70 | 70 | service = colander.SchemaNode( |
|
71 | 71 | colander.String(), |
|
72 | 72 | title=_('Slack service URL'), |
|
73 | 73 | description=h.literal(_( |
|
74 | 74 | 'This can be setup at the ' |
|
75 | 75 | '<a href="https://my.slack.com/services/new/incoming-webhook/">' |
|
76 | 76 | 'slack app manager</a>')), |
|
77 | 77 | default='', |
|
78 | 78 | preparer=strip_whitespace, |
|
79 | 79 | validator=colander.url, |
|
80 | 80 | widget=deform.widget.TextInputWidget( |
|
81 | 81 | placeholder='https://hooks.slack.com/services/...', |
|
82 | 82 | ), |
|
83 | 83 | ) |
|
84 | 84 | username = colander.SchemaNode( |
|
85 | 85 | colander.String(), |
|
86 | 86 | title=_('Username'), |
|
87 | 87 | description=_('Username to show notifications coming from.'), |
|
88 | 88 | missing='Rhodecode', |
|
89 | 89 | preparer=strip_whitespace, |
|
90 | 90 | widget=deform.widget.TextInputWidget( |
|
91 | 91 | placeholder='Rhodecode' |
|
92 | 92 | ), |
|
93 | 93 | ) |
|
94 | 94 | channel = colander.SchemaNode( |
|
95 | 95 | colander.String(), |
|
96 | 96 | title=_('Channel'), |
|
97 | 97 | description=_('Channel to send notifications to.'), |
|
98 | 98 | missing='', |
|
99 | 99 | preparer=strip_whitespace, |
|
100 | 100 | widget=deform.widget.TextInputWidget( |
|
101 | 101 | placeholder='#general' |
|
102 | 102 | ), |
|
103 | 103 | ) |
|
104 | 104 | icon_emoji = colander.SchemaNode( |
|
105 | 105 | colander.String(), |
|
106 | 106 | title=_('Emoji'), |
|
107 | 107 | description=_('Emoji to use eg. :studio_microphone:'), |
|
108 | 108 | missing='', |
|
109 | 109 | preparer=strip_whitespace, |
|
110 | 110 | widget=deform.widget.TextInputWidget( |
|
111 | 111 | placeholder=':studio_microphone:' |
|
112 | 112 | ), |
|
113 | 113 | ) |
|
114 | 114 | |
|
115 | 115 | |
|
116 | 116 | class SlackIntegrationType(IntegrationTypeBase, CommitParsingDataHandler): |
|
117 | 117 | key = 'slack' |
|
118 | 118 | display_name = _('Slack') |
|
119 | 119 | description = _('Send events such as repo pushes and pull requests to ' |
|
120 | 120 | 'your slack channel.') |
|
121 | 121 | |
|
122 | 122 | @classmethod |
|
123 | 123 | def icon(cls): |
|
124 | 124 | return '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 256" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M165.963541,15.8384262 C162.07318,3.86308197 149.212328,-2.69009836 137.239082,1.20236066 C125.263738,5.09272131 118.710557,17.9535738 122.603016,29.9268197 L181.550164,211.292328 C185.597902,222.478689 197.682361,228.765377 209.282098,225.426885 C221.381246,221.943607 228.756984,209.093246 224.896,197.21023 C224.749115,196.756984 165.963541,15.8384262 165.963541,15.8384262" fill="#DFA22F"></path><path d="M74.6260984,45.515541 C70.7336393,33.5422951 57.8727869,26.9891148 45.899541,30.8794754 C33.9241967,34.7698361 27.3710164,47.6306885 31.2634754,59.6060328 L90.210623,240.971541 C94.2583607,252.157902 106.34282,258.44459 117.942557,255.104 C130.041705,251.62282 137.417443,238.772459 133.556459,226.887344 C133.409574,226.436197 74.6260984,45.515541 74.6260984,45.515541" fill="#3CB187"></path><path d="M240.161574,166.045377 C252.136918,162.155016 258.688,149.294164 254.797639,137.31882 C250.907279,125.345574 238.046426,118.792393 226.07318,122.682754 L44.7076721,181.632 C33.5213115,185.677639 27.234623,197.762098 30.5731148,209.361836 C34.0563934,221.460984 46.9067541,228.836721 58.7897705,224.975738 C59.2430164,224.828852 240.161574,166.045377 240.161574,166.045377" fill="#CE1E5B"></path><path d="M82.507541,217.270557 C94.312918,213.434754 109.528131,208.491016 125.855475,203.186361 C122.019672,191.380984 117.075934,176.163672 111.76918,159.83423 L68.4191475,173.924721 L82.507541,217.270557" fill="#392538"></path><path d="M173.847082,187.591344 C190.235279,182.267803 205.467279,177.31777 217.195016,173.507148 C213.359213,161.70177 208.413377,146.480262 203.106623,130.146623 L159.75659,144.237115 L173.847082,187.591344" fill="#BB242A"></path><path d="M210.484459,74.7058361 C222.457705,70.8154754 229.010885,57.954623 
225.120525,45.9792787 C221.230164,34.0060328 208.369311,27.4528525 196.393967,31.3432131 L15.028459,90.292459 C3.84209836,94.3380984 -2.44459016,106.422557 0.896,118.022295 C4.37718033,130.121443 17.227541,137.49718 29.1126557,133.636197 C29.5638033,133.489311 210.484459,74.7058361 210.484459,74.7058361" fill="#72C5CD"></path><path d="M52.8220328,125.933115 C64.6274098,122.097311 79.8468197,117.151475 96.1762623,111.84682 C90.8527213,95.4565246 85.9026885,80.2245246 82.0920656,68.4946885 L38.731541,82.5872787 L52.8220328,125.933115" fill="#248C73"></path><path d="M144.159475,96.256 C160.551869,90.9303607 175.785967,85.9803279 187.515803,82.1676066 C182.190164,65.7752131 177.240131,50.5390164 173.42741,38.807082 L130.068984,52.8996721 L144.159475,96.256" fill="#62803A"></path></g></svg>''' |
|
125 | 125 | |
|
126 | 126 | valid_events = [ |
|
127 | 127 | events.PullRequestCloseEvent, |
|
128 | 128 | events.PullRequestMergeEvent, |
|
129 | 129 | events.PullRequestUpdateEvent, |
|
130 | 130 | events.PullRequestCommentEvent, |
|
131 | 131 | events.PullRequestReviewEvent, |
|
132 | 132 | events.PullRequestCreateEvent, |
|
133 | 133 | events.RepoPushEvent, |
|
134 | 134 | events.RepoCreateEvent, |
|
135 | 135 | ] |
|
136 | 136 | |
|
137 | 137 | def send_event(self, event): |
|
138 | 138 | log.debug('handling event %s with integration %s', event.name, self) |
|
139 | 139 | |
|
140 | 140 | if event.__class__ not in self.valid_events: |
|
141 | 141 | log.debug('event %r not present in valid event list (%s)', event, self.valid_events) |
|
142 | 142 | return |
|
143 | 143 | |
|
144 | 144 | if not self.event_enabled(event): |
|
145 | 145 | return |
|
146 | 146 | |
|
147 | 147 | data = event.as_dict() |
|
148 | 148 | |
|
149 | 149 | # defaults |
|
150 | 150 | title = '*%s* caused a *%s* event' % ( |
|
151 | 151 | data['actor']['username'], event.name) |
|
152 | 152 | text = '*%s* caused a *%s* event' % ( |
|
153 | 153 | data['actor']['username'], event.name) |
|
154 | 154 | fields = None |
|
155 | 155 | overrides = None |
|
156 | 156 | |
|
157 | 157 | if isinstance(event, events.PullRequestCommentEvent): |
|
158 | 158 | (title, text, fields, overrides) \ |
|
159 | 159 | = self.format_pull_request_comment_event(event, data) |
|
160 | elif isinstance(event, events.PullRequestCommentEditEvent): | |
|
161 | (title, text, fields, overrides) \ | |
|
162 | = self.format_pull_request_comment_event(event, data) | |
|
160 | 163 | elif isinstance(event, events.PullRequestReviewEvent): |
|
161 | 164 | title, text = self.format_pull_request_review_event(event, data) |
|
162 | 165 | elif isinstance(event, events.PullRequestEvent): |
|
163 | 166 | title, text = self.format_pull_request_event(event, data) |
|
164 | 167 | elif isinstance(event, events.RepoPushEvent): |
|
165 | 168 | title, text = self.format_repo_push_event(data) |
|
166 | 169 | elif isinstance(event, events.RepoCreateEvent): |
|
167 | 170 | title, text = self.format_repo_create_event(data) |
|
168 | 171 | else: |
|
169 | 172 | log.error('unhandled event type: %r', event) |
|
170 | 173 | |
|
171 | 174 | run_task(post_text_to_slack, self.settings, title, text, fields, overrides) |
|
172 | 175 | |
|
173 | 176 | def settings_schema(self): |
|
174 | 177 | schema = SlackSettingsSchema() |
|
175 | 178 | schema.add(colander.SchemaNode( |
|
176 | 179 | colander.Set(), |
|
177 | 180 | widget=CheckboxChoiceWidgetDesc( |
|
178 | 181 | values=sorted( |
|
179 | 182 | [(e.name, e.display_name, e.description) for e in self.valid_events] |
|
180 | 183 | ), |
|
181 | 184 | ), |
|
182 | 185 | description="List of events activated for this integration", |
|
183 | 186 | name='events' |
|
184 | 187 | )) |
|
185 | 188 | |
|
186 | 189 | return schema |
|
187 | 190 | |
|
188 | 191 | def format_pull_request_comment_event(self, event, data): |
|
189 | 192 | comment_text = data['comment']['text'] |
|
190 | 193 | if len(comment_text) > 200: |
|
191 | 194 | comment_text = '<{comment_url}|{comment_text}...>'.format( |
|
192 | 195 | comment_text=comment_text[:200], |
|
193 | 196 | comment_url=data['comment']['url'], |
|
194 | 197 | ) |
|
195 | 198 | |
|
196 | 199 | fields = None |
|
197 | 200 | overrides = None |
|
198 | 201 | status_text = None |
|
199 | 202 | |
|
200 | 203 | if data['comment']['status']: |
|
201 | 204 | status_color = { |
|
202 | 205 | 'approved': '#0ac878', |
|
203 | 206 | 'rejected': '#e85e4d'}.get(data['comment']['status']) |
|
204 | 207 | |
|
205 | 208 | if status_color: |
|
206 | 209 | overrides = {"color": status_color} |
|
207 | 210 | |
|
208 | 211 | status_text = data['comment']['status'] |
|
209 | 212 | |
|
210 | 213 | if data['comment']['file']: |
|
211 | 214 | fields = [ |
|
212 | 215 | { |
|
213 | 216 | "title": "file", |
|
214 | 217 | "value": data['comment']['file'] |
|
215 | 218 | }, |
|
216 | 219 | { |
|
217 | 220 | "title": "line", |
|
218 | 221 | "value": data['comment']['line'] |
|
219 | 222 | } |
|
220 | 223 | ] |
|
221 | 224 | |
|
222 | 225 | template = Template(textwrap.dedent(r''' |
|
223 | 226 | *${data['actor']['username']}* left ${data['comment']['type']} on pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']}>: |
|
224 | 227 | ''')) |
|
225 | 228 | title = render_with_traceback( |
|
226 | 229 | template, data=data, comment=event.comment) |
|
227 | 230 | |
|
228 | 231 | template = Template(textwrap.dedent(r''' |
|
229 | 232 | *pull request title*: ${pr_title} |
|
230 | 233 | % if status_text: |
|
231 | 234 | *submitted status*: `${status_text}` |
|
232 | 235 | % endif |
|
233 | 236 | >>> ${comment_text} |
|
234 | 237 | ''')) |
|
235 | 238 | text = render_with_traceback( |
|
236 | 239 | template, |
|
237 | 240 | comment_text=comment_text, |
|
238 | 241 | pr_title=data['pullrequest']['title'], |
|
239 | 242 | status_text=status_text) |
|
240 | 243 | |
|
241 | 244 | return title, text, fields, overrides |
|
242 | 245 | |
|
243 | 246 | def format_pull_request_review_event(self, event, data): |
|
244 | 247 | template = Template(textwrap.dedent(r''' |
|
245 | 248 | *${data['actor']['username']}* changed status of pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']} to `${data['pullrequest']['status']}`>: |
|
246 | 249 | ''')) |
|
247 | 250 | title = render_with_traceback(template, data=data) |
|
248 | 251 | |
|
249 | 252 | template = Template(textwrap.dedent(r''' |
|
250 | 253 | *pull request title*: ${pr_title} |
|
251 | 254 | ''')) |
|
252 | 255 | text = render_with_traceback( |
|
253 | 256 | template, |
|
254 | 257 | pr_title=data['pullrequest']['title']) |
|
255 | 258 | |
|
256 | 259 | return title, text |
|
257 | 260 | |
|
258 | 261 | def format_pull_request_event(self, event, data): |
|
259 | 262 | action = { |
|
260 | 263 | events.PullRequestCloseEvent: 'closed', |
|
261 | 264 | events.PullRequestMergeEvent: 'merged', |
|
262 | 265 | events.PullRequestUpdateEvent: 'updated', |
|
263 | 266 | events.PullRequestCreateEvent: 'created', |
|
264 | 267 | }.get(event.__class__, str(event.__class__)) |
|
265 | 268 | |
|
266 | 269 | template = Template(textwrap.dedent(r''' |
|
267 | 270 | *${data['actor']['username']}* `${action}` pull request <${data['pullrequest']['url']}|#${data['pullrequest']['pull_request_id']}>: |
|
268 | 271 | ''')) |
|
269 | 272 | title = render_with_traceback(template, data=data, action=action) |
|
270 | 273 | |
|
271 | 274 | template = Template(textwrap.dedent(r''' |
|
272 | 275 | *pull request title*: ${pr_title} |
|
273 | 276 | %if data['pullrequest']['commits']: |
|
274 | 277 | *commits*: ${len(data['pullrequest']['commits'])} |
|
275 | 278 | %endif |
|
276 | 279 | ''')) |
|
277 | 280 | text = render_with_traceback( |
|
278 | 281 | template, |
|
279 | 282 | pr_title=data['pullrequest']['title'], |
|
280 | 283 | data=data) |
|
281 | 284 | |
|
282 | 285 | return title, text |
|
283 | 286 | |
|
284 | 287 | def format_repo_push_event(self, data): |
|
285 | 288 | branches_commits = self.aggregate_branch_data( |
|
286 | 289 | data['push']['branches'], data['push']['commits']) |
|
287 | 290 | |
|
288 | 291 | template = Template(r''' |
|
289 | 292 | *${data['actor']['username']}* pushed to repo <${data['repo']['url']}|${data['repo']['repo_name']}>: |
|
290 | 293 | ''') |
|
291 | 294 | title = render_with_traceback(template, data=data) |
|
292 | 295 | |
|
293 | 296 | text = render_with_traceback( |
|
294 | 297 | REPO_PUSH_TEMPLATE, |
|
295 | 298 | data=data, |
|
296 | 299 | branches_commits=branches_commits, |
|
297 | 300 | html_to_slack_links=html_to_slack_links, |
|
298 | 301 | ) |
|
299 | 302 | |
|
300 | 303 | return title, text |
|
301 | 304 | |
|
302 | 305 | def format_repo_create_event(self, data): |
|
303 | 306 | template = Template(r''' |
|
304 | 307 | *${data['actor']['username']}* created new repository ${data['repo']['repo_name']}: |
|
305 | 308 | ''') |
|
306 | 309 | title = render_with_traceback(template, data=data) |
|
307 | 310 | |
|
308 | 311 | template = Template(textwrap.dedent(r''' |
|
309 | 312 | repo_url: ${data['repo']['url']} |
|
310 | 313 | repo_type: ${data['repo']['repo_type']} |
|
311 | 314 | ''')) |
|
312 | 315 | text = render_with_traceback(template, data=data) |
|
313 | 316 | |
|
314 | 317 | return title, text |
|
315 | 318 | |
|
316 | 319 | |
|
317 | 320 | @async_task(ignore_result=True, base=RequestContextTask) |
|
318 | 321 | def post_text_to_slack(settings, title, text, fields=None, overrides=None): |
|
319 | 322 | log.debug('sending %s (%s) to slack %s', title, text, settings['service']) |
|
320 | 323 | |
|
321 | 324 | fields = fields or [] |
|
322 | 325 | overrides = overrides or {} |
|
323 | 326 | |
|
324 | 327 | message_data = { |
|
325 | 328 | "fallback": text, |
|
326 | 329 | "color": "#427cc9", |
|
327 | 330 | "pretext": title, |
|
328 | 331 | #"author_name": "Bobby Tables", |
|
329 | 332 | #"author_link": "http://flickr.com/bobby/", |
|
330 | 333 | #"author_icon": "http://flickr.com/icons/bobby.jpg", |
|
331 | 334 | #"title": "Slack API Documentation", |
|
332 | 335 | #"title_link": "https://api.slack.com/", |
|
333 | 336 | "text": text, |
|
334 | 337 | "fields": fields, |
|
335 | 338 | #"image_url": "http://my-website.com/path/to/image.jpg", |
|
336 | 339 | #"thumb_url": "http://example.com/path/to/thumb.png", |
|
337 | 340 | "footer": "RhodeCode", |
|
338 | 341 | #"footer_icon": "", |
|
339 | 342 | "ts": time.time(), |
|
340 | 343 | "mrkdwn_in": ["pretext", "text"] |
|
341 | 344 | } |
|
342 | 345 | message_data.update(overrides) |
|
343 | 346 | json_message = { |
|
344 | 347 | "icon_emoji": settings.get('icon_emoji', ':studio_microphone:'), |
|
345 | 348 | "channel": settings.get('channel', ''), |
|
346 | 349 | "username": settings.get('username', 'Rhodecode'), |
|
347 | 350 | "attachments": [message_data] |
|
348 | 351 | } |
|
349 | 352 | req_session = requests_retry_call() |
|
350 | 353 | resp = req_session.post(settings['service'], json=json_message, timeout=60) |
|
351 | 354 | resp.raise_for_status() # raise exception on a failed request |
@@ -1,264 +1,266 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | from __future__ import unicode_literals |
|
22 | 22 | |
|
23 | 23 | import deform.widget |
|
24 | 24 | import logging |
|
25 | 25 | import colander |
|
26 | 26 | |
|
27 | 27 | import rhodecode |
|
28 | 28 | from rhodecode import events |
|
29 | 29 | from rhodecode.lib.colander_utils import strip_whitespace |
|
30 | 30 | from rhodecode.model.validation_schema.widgets import CheckboxChoiceWidgetDesc |
|
31 | 31 | from rhodecode.translation import _ |
|
32 | 32 | from rhodecode.integrations.types.base import ( |
|
33 | 33 | IntegrationTypeBase, get_auth, get_web_token, get_url_vars, |
|
34 | 34 | WebhookDataHandler, WEBHOOK_URL_VARS, requests_retry_call) |
|
35 | 35 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask |
|
36 | 36 | from rhodecode.model.validation_schema import widgets |
|
37 | 37 | |
|
38 | 38 | log = logging.getLogger(__name__) |
|
39 | 39 | |
|
40 | 40 | |
|
41 | 41 | # updating this required to update the `common_vars` passed in url calling func |
|
42 | 42 | |
|
43 | 43 | URL_VARS = get_url_vars(WEBHOOK_URL_VARS) |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | class WebhookSettingsSchema(colander.Schema): |
|
47 | 47 | url = colander.SchemaNode( |
|
48 | 48 | colander.String(), |
|
49 | 49 | title=_('Webhook URL'), |
|
50 | 50 | description= |
|
51 | 51 | _('URL to which Webhook should submit data. If used some of the ' |
|
52 | 52 | 'variables would trigger multiple calls, like ${branch} or ' |
|
53 | 53 | '${commit_id}. Webhook will be called as many times as unique ' |
|
54 | 54 | 'objects in data in such cases.'), |
|
55 | 55 | missing=colander.required, |
|
56 | 56 | required=True, |
|
57 | 57 | preparer=strip_whitespace, |
|
58 | 58 | validator=colander.url, |
|
59 | 59 | widget=widgets.CodeMirrorWidget( |
|
60 | 60 | help_block_collapsable_name='Show url variables', |
|
61 | 61 | help_block_collapsable=( |
|
62 | 62 | 'E.g http://my-serv.com/trigger_job/${{event_name}}' |
|
63 | 63 | '?PR_ID=${{pull_request_id}}' |
|
64 | 64 | '\nFull list of vars:\n{}'.format(URL_VARS)), |
|
65 | 65 | codemirror_mode='text', |
|
66 | 66 | codemirror_options='{"lineNumbers": false, "lineWrapping": true}'), |
|
67 | 67 | ) |
|
68 | 68 | secret_token = colander.SchemaNode( |
|
69 | 69 | colander.String(), |
|
70 | 70 | title=_('Secret Token'), |
|
71 | 71 | description=_('Optional string used to validate received payloads. ' |
|
72 | 72 | 'It will be sent together with event data in JSON'), |
|
73 | 73 | default='', |
|
74 | 74 | missing='', |
|
75 | 75 | widget=deform.widget.TextInputWidget( |
|
76 | 76 | placeholder='e.g. secret_token' |
|
77 | 77 | ), |
|
78 | 78 | ) |
|
79 | 79 | username = colander.SchemaNode( |
|
80 | 80 | colander.String(), |
|
81 | 81 | title=_('Username'), |
|
82 | 82 | description=_('Optional username to authenticate the call.'), |
|
83 | 83 | default='', |
|
84 | 84 | missing='', |
|
85 | 85 | widget=deform.widget.TextInputWidget( |
|
86 | 86 | placeholder='e.g. admin' |
|
87 | 87 | ), |
|
88 | 88 | ) |
|
89 | 89 | password = colander.SchemaNode( |
|
90 | 90 | colander.String(), |
|
91 | 91 | title=_('Password'), |
|
92 | 92 | description=_('Optional password to authenticate the call.'), |
|
93 | 93 | default='', |
|
94 | 94 | missing='', |
|
95 | 95 | widget=deform.widget.PasswordWidget( |
|
96 | 96 | placeholder='e.g. secret.', |
|
97 | 97 | redisplay=True, |
|
98 | 98 | ), |
|
99 | 99 | ) |
|
100 | 100 | custom_header_key = colander.SchemaNode( |
|
101 | 101 | colander.String(), |
|
102 | 102 | title=_('Custom Header Key'), |
|
103 | 103 | description=_('Custom Header name to be set when calling endpoint.'), |
|
104 | 104 | default='', |
|
105 | 105 | missing='', |
|
106 | 106 | widget=deform.widget.TextInputWidget( |
|
107 | 107 | placeholder='e.g: Authorization' |
|
108 | 108 | ), |
|
109 | 109 | ) |
|
110 | 110 | custom_header_val = colander.SchemaNode( |
|
111 | 111 | colander.String(), |
|
112 | 112 | title=_('Custom Header Value'), |
|
113 | 113 | description=_('Custom Header value to be set when calling endpoint.'), |
|
114 | 114 | default='', |
|
115 | 115 | missing='', |
|
116 | 116 | widget=deform.widget.TextInputWidget( |
|
117 | 117 | placeholder='e.g. Basic XxXxXx' |
|
118 | 118 | ), |
|
119 | 119 | ) |
|
120 | 120 | method_type = colander.SchemaNode( |
|
121 | 121 | colander.String(), |
|
122 | 122 | title=_('Call Method'), |
|
123 | 123 | description=_('Select a HTTP method to use when calling the Webhook.'), |
|
124 | 124 | default='post', |
|
125 | 125 | missing='', |
|
126 | 126 | widget=deform.widget.RadioChoiceWidget( |
|
127 | 127 | values=[('get', 'GET'), ('post', 'POST'), ('put', 'PUT')], |
|
128 | 128 | inline=True |
|
129 | 129 | ), |
|
130 | 130 | ) |
|
131 | 131 | |
|
132 | 132 | |
|
133 | 133 | class WebhookIntegrationType(IntegrationTypeBase): |
|
134 | 134 | key = 'webhook' |
|
135 | 135 | display_name = _('Webhook') |
|
136 | 136 | description = _('send JSON data to a url endpoint') |
|
137 | 137 | |
|
138 | 138 | @classmethod |
|
139 | 139 | def icon(cls): |
|
140 | 140 | return '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 239" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M119.540432,100.502743 C108.930124,118.338815 98.7646301,135.611455 88.3876025,152.753617 C85.7226696,157.154315 84.4040417,160.738531 86.5332204,166.333309 C92.4107024,181.787152 84.1193605,196.825836 68.5350381,200.908244 C53.8383677,204.759349 39.5192953,195.099955 36.6032893,179.365384 C34.0194114,165.437749 44.8274148,151.78491 60.1824106,149.608284 C61.4694072,149.424428 62.7821041,149.402681 64.944891,149.240571 C72.469175,136.623655 80.1773157,123.700312 88.3025935,110.073173 C73.611854,95.4654658 64.8677898,78.3885437 66.803227,57.2292132 C68.1712787,42.2715849 74.0527146,29.3462646 84.8033863,18.7517722 C105.393354,-1.53572199 136.805164,-4.82141828 161.048542,10.7510424 C184.333097,25.7086706 194.996783,54.8450075 185.906752,79.7822957 C179.052655,77.9239597 172.151111,76.049808 164.563565,73.9917997 C167.418285,60.1274266 165.306899,47.6765751 155.95591,37.0109123 C149.777932,29.9690049 141.850349,26.2780332 132.835442,24.9178894 C114.764113,22.1877169 97.0209573,33.7983633 91.7563309,51.5355878 C85.7800012,71.6669027 94.8245623,88.1111998 119.540432,100.502743 L119.540432,100.502743 Z" fill="#C73A63"></path><path d="M149.841194,79.4106285 C157.316054,92.5969067 164.905578,105.982857 172.427885,119.246236 C210.44865,107.483365 239.114472,128.530009 249.398582,151.063322 C261.81978,178.282014 253.328765,210.520191 228.933162,227.312431 C203.893073,244.551464 172.226236,241.605803 150.040866,219.46195 C155.694953,214.729124 161.376716,209.974552 167.44794,204.895759 C189.360489,219.088306 208.525074,218.420096 222.753207,201.614016 C234.885769,187.277151 234.622834,165.900356 222.138374,151.863988 C207.730339,135.66681 188.431321,135.172572 165.103273,150.721309 C155.426087,133.553447 145.58086,116.521995 
136.210101,99.2295848 C133.05093,93.4015266 129.561608,90.0209366 122.440622,88.7873178 C110.547271,86.7253555 102.868785,76.5124151 102.408155,65.0698097 C101.955433,53.7537294 108.621719,43.5249733 119.04224,39.5394355 C129.363912,35.5914599 141.476705,38.7783085 148.419765,47.554004 C154.093621,54.7244134 155.896602,62.7943365 152.911402,71.6372484 C152.081082,74.1025091 151.00562,76.4886916 149.841194,79.4106285 L149.841194,79.4106285 Z" fill="#4B4B4B"></path><path d="M167.706921,187.209935 L121.936499,187.209935 C117.54964,205.253587 108.074103,219.821756 91.7464461,229.085759 C79.0544063,236.285822 65.3738898,238.72736 50.8136292,236.376762 C24.0061432,232.053165 2.08568567,207.920497 0.156179306,180.745298 C-2.02835403,149.962159 19.1309765,122.599149 47.3341915,116.452801 C49.2814904,123.524363 51.2485589,130.663141 53.1958579,137.716911 C27.3195169,150.919004 18.3639187,167.553089 25.6054984,188.352614 C31.9811726,206.657224 50.0900643,216.690262 69.7528413,212.809503 C89.8327554,208.847688 99.9567329,192.160226 98.7211371,165.37844 C117.75722,165.37844 136.809118,165.180745 155.847178,165.475311 C163.280522,165.591951 169.019617,164.820939 174.620326,158.267339 C183.840836,147.48306 200.811003,148.455721 210.741239,158.640984 C220.88894,169.049642 220.402609,185.79839 209.663799,195.768166 C199.302587,205.38802 182.933414,204.874012 173.240413,194.508846 C171.247644,192.37176 169.677943,189.835329 167.706921,187.209935 L167.706921,187.209935 Z" fill="#4A4A4A"></path></g></svg>''' |
|
141 | 141 | |
|
142 | 142 | valid_events = [ |
|
143 | 143 | events.PullRequestCloseEvent, |
|
144 | 144 | events.PullRequestMergeEvent, |
|
145 | 145 | events.PullRequestUpdateEvent, |
|
146 | 146 | events.PullRequestCommentEvent, |
|
147 | events.PullRequestCommentEditEvent, | |
|
147 | 148 | events.PullRequestReviewEvent, |
|
148 | 149 | events.PullRequestCreateEvent, |
|
149 | 150 | events.RepoPushEvent, |
|
150 | 151 | events.RepoCreateEvent, |
|
151 | 152 | events.RepoCommitCommentEvent, |
|
153 | events.RepoCommitCommentEditEvent, | |
|
152 | 154 | ] |
|
153 | 155 | |
|
154 | 156 | def settings_schema(self): |
|
155 | 157 | schema = WebhookSettingsSchema() |
|
156 | 158 | schema.add(colander.SchemaNode( |
|
157 | 159 | colander.Set(), |
|
158 | 160 | widget=CheckboxChoiceWidgetDesc( |
|
159 | 161 | values=sorted( |
|
160 | 162 | [(e.name, e.display_name, e.description) for e in self.valid_events] |
|
161 | 163 | ), |
|
162 | 164 | ), |
|
163 | 165 | description="List of events activated for this integration", |
|
164 | 166 | name='events' |
|
165 | 167 | )) |
|
166 | 168 | return schema |
|
167 | 169 | |
|
168 | 170 | def send_event(self, event): |
|
169 | 171 | log.debug('handling event %s with integration %s', event.name, self) |
|
170 | 172 | |
|
171 | 173 | if event.__class__ not in self.valid_events: |
|
172 | 174 | log.debug('event %r not present in valid event list (%s)', event, self.valid_events) |
|
173 | 175 | return |
|
174 | 176 | |
|
175 | 177 | if not self.event_enabled(event): |
|
176 | 178 | return |
|
177 | 179 | |
|
178 | 180 | data = event.as_dict() |
|
179 | 181 | template_url = self.settings['url'] |
|
180 | 182 | |
|
181 | 183 | headers = {} |
|
182 | 184 | head_key = self.settings.get('custom_header_key') |
|
183 | 185 | head_val = self.settings.get('custom_header_val') |
|
184 | 186 | if head_key and head_val: |
|
185 | 187 | headers = {head_key: head_val} |
|
186 | 188 | |
|
187 | 189 | handler = WebhookDataHandler(template_url, headers) |
|
188 | 190 | |
|
189 | 191 | url_calls = handler(event, data) |
|
190 | 192 | log.debug('Webhook: calling following urls: %s', [x[0] for x in url_calls]) |
|
191 | 193 | |
|
192 | 194 | run_task(post_to_webhook, url_calls, self.settings) |
|
193 | 195 | |
|
194 | 196 | |
|
195 | 197 | @async_task(ignore_result=True, base=RequestContextTask) |
|
196 | 198 | def post_to_webhook(url_calls, settings): |
|
197 | 199 | """ |
|
198 | 200 | Example data:: |
|
199 | 201 | |
|
200 | 202 | {'actor': {'user_id': 2, 'username': u'admin'}, |
|
201 | 203 | 'actor_ip': u'192.168.157.1', |
|
202 | 204 | 'name': 'repo-push', |
|
203 | 205 | 'push': {'branches': [{'name': u'default', |
|
204 | 206 | 'url': 'http://rc.local:8080/hg-repo/changelog?branch=default'}], |
|
205 | 207 | 'commits': [{'author': u'Marcin Kuzminski <marcin@rhodecode.com>', |
|
206 | 208 | 'branch': u'default', |
|
207 | 209 | 'date': datetime.datetime(2017, 11, 30, 12, 59, 48), |
|
208 | 210 | 'issues': [], |
|
209 | 211 | 'mentions': [], |
|
210 | 212 | 'message': u'commit Thu 30 Nov 2017 13:59:48 CET', |
|
211 | 213 | 'message_html': u'commit Thu 30 Nov 2017 13:59:48 CET', |
|
212 | 214 | 'message_html_title': u'commit Thu 30 Nov 2017 13:59:48 CET', |
|
213 | 215 | 'parents': [{'raw_id': '431b772a5353dad9974b810dd3707d79e3a7f6e0'}], |
|
214 | 216 | 'permalink_url': u'http://rc.local:8080/_7/changeset/a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf', |
|
215 | 217 | 'raw_id': 'a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf', |
|
216 | 218 | 'refs': {'bookmarks': [], |
|
217 | 219 | 'branches': [u'default'], |
|
218 | 220 | 'tags': [u'tip']}, |
|
219 | 221 | 'reviewers': [], |
|
220 | 222 | 'revision': 9L, |
|
221 | 223 | 'short_id': 'a815cc738b96', |
|
222 | 224 | 'url': u'http://rc.local:8080/hg-repo/changeset/a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf'}], |
|
223 | 225 | 'issues': {}}, |
|
224 | 226 | 'repo': {'extra_fields': '', |
|
225 | 227 | 'permalink_url': u'http://rc.local:8080/_7', |
|
226 | 228 | 'repo_id': 7, |
|
227 | 229 | 'repo_name': u'hg-repo', |
|
228 | 230 | 'repo_type': u'hg', |
|
229 | 231 | 'url': u'http://rc.local:8080/hg-repo'}, |
|
230 | 232 | 'server_url': u'http://rc.local:8080', |
|
231 | 233 | 'utc_timestamp': datetime.datetime(2017, 11, 30, 13, 0, 1, 569276) |
|
232 | 234 | } |
|
233 | 235 | """ |
|
234 | 236 | |
|
235 | 237 | call_headers = { |
|
236 | 238 | 'User-Agent': 'RhodeCode-webhook-caller/{}'.format(rhodecode.__version__) |
|
237 | 239 | } # updated below with custom ones, allows override |
|
238 | 240 | |
|
239 | 241 | auth = get_auth(settings) |
|
240 | 242 | token = get_web_token(settings) |
|
241 | 243 | |
|
242 | 244 | for url, headers, data in url_calls: |
|
243 | 245 | req_session = requests_retry_call() |
|
244 | 246 | |
|
245 | 247 | method = settings.get('method_type') or 'post' |
|
246 | 248 | call_method = getattr(req_session, method) |
|
247 | 249 | |
|
248 | 250 | headers = headers or {} |
|
249 | 251 | call_headers.update(headers) |
|
250 | 252 | |
|
251 | 253 | log.debug('calling Webhook with method: %s, and auth:%s', call_method, auth) |
|
252 | 254 | if settings.get('log_data'): |
|
253 | 255 | log.debug('calling webhook with data: %s', data) |
|
254 | 256 | resp = call_method(url, json={ |
|
255 | 257 | 'token': token, |
|
256 | 258 | 'event': data |
|
257 | 259 | }, headers=call_headers, auth=auth, timeout=60) |
|
258 | 260 | log.debug('Got Webhook response: %s', resp) |
|
259 | 261 | |
|
260 | 262 | try: |
|
261 | 263 | resp.raise_for_status() # raise exception on a failed request |
|
262 | 264 | except Exception: |
|
263 | 265 | log.error(resp.text) |
|
264 | 266 | raise |
@@ -1,215 +1,266 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import webob |
|
22 | 22 | from pyramid.threadlocal import get_current_request |
|
23 | 23 | |
|
24 | 24 | from rhodecode import events |
|
25 | 25 | from rhodecode.lib import hooks_base |
|
26 | 26 | from rhodecode.lib import utils2 |
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | def _supports_repo_type(repo_type): |
|
30 | 30 | if repo_type in ('hg', 'git'): |
|
31 | 31 | return True |
|
32 | 32 | return False |
|
33 | 33 | |
|
34 | 34 | |
|
35 | 35 | def _get_vcs_operation_context(username, repo_name, repo_type, action): |
|
36 | 36 | # NOTE(dan): import loop |
|
37 | 37 | from rhodecode.lib.base import vcs_operation_context |
|
38 | 38 | |
|
39 | 39 | check_locking = action in ('pull', 'push') |
|
40 | 40 | |
|
41 | 41 | request = get_current_request() |
|
42 | 42 | |
|
43 | 43 | try: |
|
44 | 44 | environ = request.environ |
|
45 | 45 | except TypeError: |
|
46 | 46 | # we might use this outside of request context |
|
47 | 47 | environ = {} |
|
48 | 48 | |
|
49 | 49 | if not environ: |
|
50 | 50 | environ = webob.Request.blank('').environ |
|
51 | 51 | |
|
52 | 52 | extras = vcs_operation_context(environ, repo_name, username, action, repo_type, check_locking) |
|
53 | 53 | return utils2.AttributeDict(extras) |
|
54 | 54 | |
|
55 | 55 | |
|
56 | 56 | def trigger_post_push_hook(username, action, hook_type, repo_name, repo_type, commit_ids): |
|
57 | 57 | """ |
|
58 | 58 | Triggers push action hooks |
|
59 | 59 | |
|
60 | 60 | :param username: username who pushes |
|
61 | 61 | :param action: push/push_local/push_remote |
|
62 | 62 | :param hook_type: type of hook executed |
|
63 | 63 | :param repo_name: name of repo |
|
64 | 64 | :param repo_type: the type of SCM repo |
|
65 | 65 | :param commit_ids: list of commit ids that we pushed |
|
66 | 66 | """ |
|
67 | 67 | extras = _get_vcs_operation_context(username, repo_name, repo_type, action) |
|
68 | 68 | extras.commit_ids = commit_ids |
|
69 | 69 | extras.hook_type = hook_type |
|
70 | 70 | hooks_base.post_push(extras) |
|
71 | 71 | |
|
72 | 72 | |
|
73 | 73 | def trigger_comment_commit_hooks(username, repo_name, repo_type, repo, data=None): |
|
74 | 74 | """ |
|
75 | 75 | Triggers when a comment is made on a commit |
|
76 | 76 | |
|
77 | 77 | :param username: username who creates the comment |
|
78 | 78 | :param repo_name: name of target repo |
|
79 | 79 | :param repo_type: the type of SCM target repo |
|
80 | 80 | :param repo: the repo object we trigger the event for |
|
81 | 81 | :param data: extra data for specific events e.g {'comment': comment_obj, 'commit': commit_obj} |
|
82 | 82 | """ |
|
83 | 83 | if not _supports_repo_type(repo_type): |
|
84 | 84 | return |
|
85 | 85 | |
|
86 | 86 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_commit') |
|
87 | 87 | |
|
88 | 88 | comment = data['comment'] |
|
89 | 89 | commit = data['commit'] |
|
90 | 90 | |
|
91 | 91 | events.trigger(events.RepoCommitCommentEvent(repo, commit, comment)) |
|
92 | 92 | extras.update(repo.get_dict()) |
|
93 | 93 | |
|
94 | 94 | extras.commit = commit.serialize() |
|
95 | 95 | extras.comment = comment.get_api_data() |
|
96 | 96 | extras.created_by = username |
|
97 | 97 | hooks_base.log_comment_commit_repository(**extras) |
|
98 | 98 | |
|
99 | 99 | |
|
100 | def trigger_comment_commit_edit_hooks(username, repo_name, repo_type, repo, data=None): | |
|
101 | """ | |
|
102 | Triggers when a comment is edited on a commit | |
|
103 | ||
|
104 | :param username: username who edits the comment | |
|
105 | :param repo_name: name of target repo | |
|
106 | :param repo_type: the type of SCM target repo | |
|
107 | :param repo: the repo object we trigger the event for | |
|
108 | :param data: extra data for specific events e.g {'comment': comment_obj, 'commit': commit_obj} | |
|
109 | """ | |
|
110 | if not _supports_repo_type(repo_type): | |
|
111 | return | |
|
112 | ||
|
113 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_commit') | |
|
114 | ||
|
115 | comment = data['comment'] | |
|
116 | commit = data['commit'] | |
|
117 | ||
|
118 | events.trigger(events.RepoCommitCommentEditEvent(repo, commit, comment)) | |
|
119 | extras.update(repo.get_dict()) | |
|
120 | ||
|
121 | extras.commit = commit.serialize() | |
|
122 | extras.comment = comment.get_api_data() | |
|
123 | extras.created_by = username | |
|
124 | # TODO(marcink): rcextensions handlers ?? | |
|
125 | hooks_base.log_comment_commit_repository(**extras) | |
|
126 | ||
|
127 | ||
|
100 | 128 | def trigger_create_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): |
|
101 | 129 | """ |
|
102 | 130 | Triggers create pull request action hooks |
|
103 | 131 | |
|
104 | 132 | :param username: username who creates the pull request |
|
105 | 133 | :param repo_name: name of target repo |
|
106 | 134 | :param repo_type: the type of SCM target repo |
|
107 | 135 | :param pull_request: the pull request that was created |
|
108 | 136 | :param data: extra data for specific events e.g {'comment': comment_obj} |
|
109 | 137 | """ |
|
110 | 138 | if not _supports_repo_type(repo_type): |
|
111 | 139 | return |
|
112 | 140 | |
|
113 | 141 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'create_pull_request') |
|
114 | 142 | events.trigger(events.PullRequestCreateEvent(pull_request)) |
|
115 | 143 | extras.update(pull_request.get_api_data(with_merge_state=False)) |
|
116 | 144 | hooks_base.log_create_pull_request(**extras) |
|
117 | 145 | |
|
118 | 146 | |
|
119 | 147 | def trigger_merge_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): |
|
120 | 148 | """ |
|
121 | 149 | Triggers merge pull request action hooks |
|
122 | 150 | |
|
123 | 151 | :param username: username who creates the pull request |
|
124 | 152 | :param repo_name: name of target repo |
|
125 | 153 | :param repo_type: the type of SCM target repo |
|
126 | 154 | :param pull_request: the pull request that was merged |
|
127 | 155 | :param data: extra data for specific events e.g {'comment': comment_obj} |
|
128 | 156 | """ |
|
129 | 157 | if not _supports_repo_type(repo_type): |
|
130 | 158 | return |
|
131 | 159 | |
|
132 | 160 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'merge_pull_request') |
|
133 | 161 | events.trigger(events.PullRequestMergeEvent(pull_request)) |
|
134 | 162 | extras.update(pull_request.get_api_data()) |
|
135 | 163 | hooks_base.log_merge_pull_request(**extras) |
|
136 | 164 | |
|
137 | 165 | |
|
138 | 166 | def trigger_close_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): |
|
139 | 167 | """ |
|
140 | 168 | Triggers close pull request action hooks |
|
141 | 169 | |
|
142 | 170 | :param username: username who creates the pull request |
|
143 | 171 | :param repo_name: name of target repo |
|
144 | 172 | :param repo_type: the type of SCM target repo |
|
145 | 173 | :param pull_request: the pull request that was closed |
|
146 | 174 | :param data: extra data for specific events e.g {'comment': comment_obj} |
|
147 | 175 | """ |
|
148 | 176 | if not _supports_repo_type(repo_type): |
|
149 | 177 | return |
|
150 | 178 | |
|
151 | 179 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'close_pull_request') |
|
152 | 180 | events.trigger(events.PullRequestCloseEvent(pull_request)) |
|
153 | 181 | extras.update(pull_request.get_api_data()) |
|
154 | 182 | hooks_base.log_close_pull_request(**extras) |
|
155 | 183 | |
|
156 | 184 | |
|
157 | 185 | def trigger_review_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): |
|
158 | 186 | """ |
|
159 | 187 | Triggers review status change pull request action hooks |
|
160 | 188 | |
|
161 | 189 | :param username: username who creates the pull request |
|
162 | 190 | :param repo_name: name of target repo |
|
163 | 191 | :param repo_type: the type of SCM target repo |
|
164 | 192 | :param pull_request: the pull request that review status changed |
|
165 | 193 | :param data: extra data for specific events e.g {'comment': comment_obj} |
|
166 | 194 | """ |
|
167 | 195 | if not _supports_repo_type(repo_type): |
|
168 | 196 | return |
|
169 | 197 | |
|
170 | 198 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'review_pull_request') |
|
171 | 199 | status = data.get('status') |
|
172 | 200 | events.trigger(events.PullRequestReviewEvent(pull_request, status)) |
|
173 | 201 | extras.update(pull_request.get_api_data()) |
|
174 | 202 | hooks_base.log_review_pull_request(**extras) |
|
175 | 203 | |
|
176 | 204 | |
|
177 | 205 | def trigger_comment_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): |
|
178 | 206 | """ |
|
179 | 207 | Triggers when a comment is made on a pull request |
|
180 | 208 | |
|
181 | 209 | :param username: username who creates the pull request |
|
182 | 210 | :param repo_name: name of target repo |
|
183 | 211 | :param repo_type: the type of SCM target repo |
|
184 | 212 | :param pull_request: the pull request that comment was made on |
|
185 | 213 | :param data: extra data for specific events e.g {'comment': comment_obj} |
|
186 | 214 | """ |
|
187 | 215 | if not _supports_repo_type(repo_type): |
|
188 | 216 | return |
|
189 | 217 | |
|
190 | 218 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_pull_request') |
|
191 | 219 | |
|
192 | 220 | comment = data['comment'] |
|
193 | 221 | events.trigger(events.PullRequestCommentEvent(pull_request, comment)) |
|
194 | 222 | extras.update(pull_request.get_api_data()) |
|
195 | 223 | extras.comment = comment.get_api_data() |
|
196 | 224 | hooks_base.log_comment_pull_request(**extras) |
|
197 | 225 | |
|
198 | 226 | |
|
227 | def trigger_comment_pull_request_edit_hook(username, repo_name, repo_type, pull_request, data=None): | |
|
228 | """ | |
|
229 | Triggers when a comment was edited on a pull request | |
|
230 | ||
|
231 | :param username: username who made the edit | |
|
232 | :param repo_name: name of target repo | |
|
233 | :param repo_type: the type of SCM target repo | |
|
234 | :param pull_request: the pull request that comment was made on | |
|
235 | :param data: extra data for specific events e.g {'comment': comment_obj} | |
|
236 | """ | |
|
237 | if not _supports_repo_type(repo_type): | |
|
238 | return | |
|
239 | ||
|
240 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'comment_pull_request') | |
|
241 | ||
|
242 | comment = data['comment'] | |
|
243 | events.trigger(events.PullRequestCommentEditEvent(pull_request, comment)) | |
|
244 | extras.update(pull_request.get_api_data()) | |
|
245 | extras.comment = comment.get_api_data() | |
|
246 | # TODO(marcink): handle rcextensions... | |
|
247 | hooks_base.log_comment_pull_request(**extras) | |
|
248 | ||
|
249 | ||
|
199 | 250 | def trigger_update_pull_request_hook(username, repo_name, repo_type, pull_request, data=None): |
|
200 | 251 | """ |
|
201 | 252 | Triggers update pull request action hooks |
|
202 | 253 | |
|
203 | 254 | :param username: username who creates the pull request |
|
204 | 255 | :param repo_name: name of target repo |
|
205 | 256 | :param repo_type: the type of SCM target repo |
|
206 | 257 | :param pull_request: the pull request that was updated |
|
207 | 258 | :param data: extra data for specific events e.g {'comment': comment_obj} |
|
208 | 259 | """ |
|
209 | 260 | if not _supports_repo_type(repo_type): |
|
210 | 261 | return |
|
211 | 262 | |
|
212 | 263 | extras = _get_vcs_operation_context(username, repo_name, repo_type, 'update_pull_request') |
|
213 | 264 | events.trigger(events.PullRequestUpdateEvent(pull_request)) |
|
214 | 265 | extras.update(pull_request.get_api_data()) |
|
215 | 266 | hooks_base.log_update_pull_request(**extras) |
@@ -1,837 +1,836 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | comments model for RhodeCode |
|
23 | 23 | """ |
|
24 | 24 | import datetime |
|
25 | 25 | |
|
26 | 26 | import logging |
|
27 | 27 | import traceback |
|
28 | 28 | import collections |
|
29 | 29 | |
|
30 | 30 | from pyramid.threadlocal import get_current_registry, get_current_request |
|
31 | 31 | from sqlalchemy.sql.expression import null |
|
32 | 32 | from sqlalchemy.sql.functions import coalesce |
|
33 | 33 | |
|
34 | 34 | from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils |
|
35 | 35 | from rhodecode.lib import audit_logger |
|
36 | 36 | from rhodecode.lib.exceptions import CommentVersionMismatch |
|
37 | 37 | from rhodecode.lib.utils2 import extract_mentioned_users, safe_str, safe_int |
|
38 | 38 | from rhodecode.model import BaseModel |
|
39 | 39 | from rhodecode.model.db import ( |
|
40 | 40 | ChangesetComment, |
|
41 | 41 | User, |
|
42 | 42 | Notification, |
|
43 | 43 | PullRequest, |
|
44 | 44 | AttributeDict, |
|
45 | 45 | ChangesetCommentHistory, |
|
46 | 46 | ) |
|
47 | 47 | from rhodecode.model.notification import NotificationModel |
|
48 | 48 | from rhodecode.model.meta import Session |
|
49 | 49 | from rhodecode.model.settings import VcsSettingsModel |
|
50 | 50 | from rhodecode.model.notification import EmailNotificationModel |
|
51 | 51 | from rhodecode.model.validation_schema.schemas import comment_schema |
|
52 | 52 | |
|
53 | 53 | |
|
54 | 54 | log = logging.getLogger(__name__) |
|
55 | 55 | |
|
56 | 56 | |
|
57 | 57 | class CommentsModel(BaseModel): |
|
58 | 58 | |
|
59 | 59 | cls = ChangesetComment |
|
60 | 60 | |
|
61 | 61 | DIFF_CONTEXT_BEFORE = 3 |
|
62 | 62 | DIFF_CONTEXT_AFTER = 3 |
|
63 | 63 | |
|
64 | 64 | def __get_commit_comment(self, changeset_comment): |
|
65 | 65 | return self._get_instance(ChangesetComment, changeset_comment) |
|
66 | 66 | |
|
67 | 67 | def __get_pull_request(self, pull_request): |
|
68 | 68 | return self._get_instance(PullRequest, pull_request) |
|
69 | 69 | |
|
70 | 70 | def _extract_mentions(self, s): |
|
71 | 71 | user_objects = [] |
|
72 | 72 | for username in extract_mentioned_users(s): |
|
73 | 73 | user_obj = User.get_by_username(username, case_insensitive=True) |
|
74 | 74 | if user_obj: |
|
75 | 75 | user_objects.append(user_obj) |
|
76 | 76 | return user_objects |
|
77 | 77 | |
|
78 | 78 | def _get_renderer(self, global_renderer='rst', request=None): |
|
79 | 79 | request = request or get_current_request() |
|
80 | 80 | |
|
81 | 81 | try: |
|
82 | 82 | global_renderer = request.call_context.visual.default_renderer |
|
83 | 83 | except AttributeError: |
|
84 | 84 | log.debug("Renderer not set, falling back " |
|
85 | 85 | "to default renderer '%s'", global_renderer) |
|
86 | 86 | except Exception: |
|
87 | 87 | log.error(traceback.format_exc()) |
|
88 | 88 | return global_renderer |
|
89 | 89 | |
|
90 | 90 | def aggregate_comments(self, comments, versions, show_version, inline=False): |
|
91 | 91 | # group by versions, and count until, and display objects |
|
92 | 92 | |
|
93 | 93 | comment_groups = collections.defaultdict(list) |
|
94 | 94 | [comment_groups[ |
|
95 | 95 | _co.pull_request_version_id].append(_co) for _co in comments] |
|
96 | 96 | |
|
97 | 97 | def yield_comments(pos): |
|
98 | 98 | for co in comment_groups[pos]: |
|
99 | 99 | yield co |
|
100 | 100 | |
|
101 | 101 | comment_versions = collections.defaultdict( |
|
102 | 102 | lambda: collections.defaultdict(list)) |
|
103 | 103 | prev_prvid = -1 |
|
104 | 104 | # fake last entry with None, to aggregate on "latest" version which |
|
105 | 105 | # doesn't have an pull_request_version_id |
|
106 | 106 | for ver in versions + [AttributeDict({'pull_request_version_id': None})]: |
|
107 | 107 | prvid = ver.pull_request_version_id |
|
108 | 108 | if prev_prvid == -1: |
|
109 | 109 | prev_prvid = prvid |
|
110 | 110 | |
|
111 | 111 | for co in yield_comments(prvid): |
|
112 | 112 | comment_versions[prvid]['at'].append(co) |
|
113 | 113 | |
|
114 | 114 | # save until |
|
115 | 115 | current = comment_versions[prvid]['at'] |
|
116 | 116 | prev_until = comment_versions[prev_prvid]['until'] |
|
117 | 117 | cur_until = prev_until + current |
|
118 | 118 | comment_versions[prvid]['until'].extend(cur_until) |
|
119 | 119 | |
|
120 | 120 | # save outdated |
|
121 | 121 | if inline: |
|
122 | 122 | outdated = [x for x in cur_until |
|
123 | 123 | if x.outdated_at_version(show_version)] |
|
124 | 124 | else: |
|
125 | 125 | outdated = [x for x in cur_until |
|
126 | 126 | if x.older_than_version(show_version)] |
|
127 | 127 | display = [x for x in cur_until if x not in outdated] |
|
128 | 128 | |
|
129 | 129 | comment_versions[prvid]['outdated'] = outdated |
|
130 | 130 | comment_versions[prvid]['display'] = display |
|
131 | 131 | |
|
132 | 132 | prev_prvid = prvid |
|
133 | 133 | |
|
134 | 134 | return comment_versions |
|
135 | 135 | |
|
136 | 136 | def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None): |
|
137 | 137 | qry = Session().query(ChangesetComment) \ |
|
138 | 138 | .filter(ChangesetComment.repo == repo) |
|
139 | 139 | |
|
140 | 140 | if comment_type and comment_type in ChangesetComment.COMMENT_TYPES: |
|
141 | 141 | qry = qry.filter(ChangesetComment.comment_type == comment_type) |
|
142 | 142 | |
|
143 | 143 | if user: |
|
144 | 144 | user = self._get_user(user) |
|
145 | 145 | if user: |
|
146 | 146 | qry = qry.filter(ChangesetComment.user_id == user.user_id) |
|
147 | 147 | |
|
148 | 148 | if commit_id: |
|
149 | 149 | qry = qry.filter(ChangesetComment.revision == commit_id) |
|
150 | 150 | |
|
151 | 151 | qry = qry.order_by(ChangesetComment.created_on) |
|
152 | 152 | return qry.all() |
|
153 | 153 | |
|
154 | 154 | def get_repository_unresolved_todos(self, repo): |
|
155 | 155 | todos = Session().query(ChangesetComment) \ |
|
156 | 156 | .filter(ChangesetComment.repo == repo) \ |
|
157 | 157 | .filter(ChangesetComment.resolved_by == None) \ |
|
158 | 158 | .filter(ChangesetComment.comment_type |
|
159 | 159 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
160 | 160 | todos = todos.all() |
|
161 | 161 | |
|
162 | 162 | return todos |
|
163 | 163 | |
|
164 | 164 | def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True): |
|
165 | 165 | |
|
166 | 166 | todos = Session().query(ChangesetComment) \ |
|
167 | 167 | .filter(ChangesetComment.pull_request == pull_request) \ |
|
168 | 168 | .filter(ChangesetComment.resolved_by == None) \ |
|
169 | 169 | .filter(ChangesetComment.comment_type |
|
170 | 170 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
171 | 171 | |
|
172 | 172 | if not show_outdated: |
|
173 | 173 | todos = todos.filter( |
|
174 | 174 | coalesce(ChangesetComment.display_state, '') != |
|
175 | 175 | ChangesetComment.COMMENT_OUTDATED) |
|
176 | 176 | |
|
177 | 177 | todos = todos.all() |
|
178 | 178 | |
|
179 | 179 | return todos |
|
180 | 180 | |
|
181 | 181 | def get_pull_request_resolved_todos(self, pull_request, show_outdated=True): |
|
182 | 182 | |
|
183 | 183 | todos = Session().query(ChangesetComment) \ |
|
184 | 184 | .filter(ChangesetComment.pull_request == pull_request) \ |
|
185 | 185 | .filter(ChangesetComment.resolved_by != None) \ |
|
186 | 186 | .filter(ChangesetComment.comment_type |
|
187 | 187 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
188 | 188 | |
|
189 | 189 | if not show_outdated: |
|
190 | 190 | todos = todos.filter( |
|
191 | 191 | coalesce(ChangesetComment.display_state, '') != |
|
192 | 192 | ChangesetComment.COMMENT_OUTDATED) |
|
193 | 193 | |
|
194 | 194 | todos = todos.all() |
|
195 | 195 | |
|
196 | 196 | return todos |
|
197 | 197 | |
|
198 | 198 | def get_commit_unresolved_todos(self, commit_id, show_outdated=True): |
|
199 | 199 | |
|
200 | 200 | todos = Session().query(ChangesetComment) \ |
|
201 | 201 | .filter(ChangesetComment.revision == commit_id) \ |
|
202 | 202 | .filter(ChangesetComment.resolved_by == None) \ |
|
203 | 203 | .filter(ChangesetComment.comment_type |
|
204 | 204 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
205 | 205 | |
|
206 | 206 | if not show_outdated: |
|
207 | 207 | todos = todos.filter( |
|
208 | 208 | coalesce(ChangesetComment.display_state, '') != |
|
209 | 209 | ChangesetComment.COMMENT_OUTDATED) |
|
210 | 210 | |
|
211 | 211 | todos = todos.all() |
|
212 | 212 | |
|
213 | 213 | return todos |
|
214 | 214 | |
|
215 | 215 | def get_commit_resolved_todos(self, commit_id, show_outdated=True): |
|
216 | 216 | |
|
217 | 217 | todos = Session().query(ChangesetComment) \ |
|
218 | 218 | .filter(ChangesetComment.revision == commit_id) \ |
|
219 | 219 | .filter(ChangesetComment.resolved_by != None) \ |
|
220 | 220 | .filter(ChangesetComment.comment_type |
|
221 | 221 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
222 | 222 | |
|
223 | 223 | if not show_outdated: |
|
224 | 224 | todos = todos.filter( |
|
225 | 225 | coalesce(ChangesetComment.display_state, '') != |
|
226 | 226 | ChangesetComment.COMMENT_OUTDATED) |
|
227 | 227 | |
|
228 | 228 | todos = todos.all() |
|
229 | 229 | |
|
230 | 230 | return todos |
|
231 | 231 | |
|
232 | 232 | def _log_audit_action(self, action, action_data, auth_user, comment): |
|
233 | 233 | audit_logger.store( |
|
234 | 234 | action=action, |
|
235 | 235 | action_data=action_data, |
|
236 | 236 | user=auth_user, |
|
237 | 237 | repo=comment.repo) |
|
238 | 238 | |
|
239 | 239 | def create(self, text, repo, user, commit_id=None, pull_request=None, |
|
240 | 240 | f_path=None, line_no=None, status_change=None, |
|
241 | 241 | status_change_type=None, comment_type=None, |
|
242 | 242 | resolves_comment_id=None, closing_pr=False, send_email=True, |
|
243 | 243 | renderer=None, auth_user=None, extra_recipients=None): |
|
244 | 244 | """ |
|
245 | 245 | Creates new comment for commit or pull request. |
|
246 | 246 | IF status_change is not none this comment is associated with a |
|
247 | 247 | status change of commit or commit associated with pull request |
|
248 | 248 | |
|
249 | 249 | :param text: |
|
250 | 250 | :param repo: |
|
251 | 251 | :param user: |
|
252 | 252 | :param commit_id: |
|
253 | 253 | :param pull_request: |
|
254 | 254 | :param f_path: |
|
255 | 255 | :param line_no: |
|
256 | 256 | :param status_change: Label for status change |
|
257 | 257 | :param comment_type: Type of comment |
|
258 | 258 | :param resolves_comment_id: id of comment which this one will resolve |
|
259 | 259 | :param status_change_type: type of status change |
|
260 | 260 | :param closing_pr: |
|
261 | 261 | :param send_email: |
|
262 | 262 | :param renderer: pick renderer for this comment |
|
263 | 263 | :param auth_user: current authenticated user calling this method |
|
264 | 264 | :param extra_recipients: list of extra users to be added to recipients |
|
265 | 265 | """ |
|
266 | 266 | |
|
267 | 267 | if not text: |
|
268 | 268 | log.warning('Missing text for comment, skipping...') |
|
269 | 269 | return |
|
270 | 270 | request = get_current_request() |
|
271 | 271 | _ = request.translate |
|
272 | 272 | |
|
273 | 273 | if not renderer: |
|
274 | 274 | renderer = self._get_renderer(request=request) |
|
275 | 275 | |
|
276 | 276 | repo = self._get_repo(repo) |
|
277 | 277 | user = self._get_user(user) |
|
278 | 278 | auth_user = auth_user or user |
|
279 | 279 | |
|
280 | 280 | schema = comment_schema.CommentSchema() |
|
281 | 281 | validated_kwargs = schema.deserialize(dict( |
|
282 | 282 | comment_body=text, |
|
283 | 283 | comment_type=comment_type, |
|
284 | 284 | comment_file=f_path, |
|
285 | 285 | comment_line=line_no, |
|
286 | 286 | renderer_type=renderer, |
|
287 | 287 | status_change=status_change_type, |
|
288 | 288 | resolves_comment_id=resolves_comment_id, |
|
289 | 289 | repo=repo.repo_id, |
|
290 | 290 | user=user.user_id, |
|
291 | 291 | )) |
|
292 | 292 | |
|
293 | 293 | comment = ChangesetComment() |
|
294 | 294 | comment.renderer = validated_kwargs['renderer_type'] |
|
295 | 295 | comment.text = validated_kwargs['comment_body'] |
|
296 | 296 | comment.f_path = validated_kwargs['comment_file'] |
|
297 | 297 | comment.line_no = validated_kwargs['comment_line'] |
|
298 | 298 | comment.comment_type = validated_kwargs['comment_type'] |
|
299 | 299 | |
|
300 | 300 | comment.repo = repo |
|
301 | 301 | comment.author = user |
|
302 | 302 | resolved_comment = self.__get_commit_comment( |
|
303 | 303 | validated_kwargs['resolves_comment_id']) |
|
304 | 304 | # check if the comment actually belongs to this PR |
|
305 | 305 | if resolved_comment and resolved_comment.pull_request and \ |
|
306 | 306 | resolved_comment.pull_request != pull_request: |
|
307 | 307 | log.warning('Comment tried to resolved unrelated todo comment: %s', |
|
308 | 308 | resolved_comment) |
|
309 | 309 | # comment not bound to this pull request, forbid |
|
310 | 310 | resolved_comment = None |
|
311 | 311 | |
|
312 | 312 | elif resolved_comment and resolved_comment.repo and \ |
|
313 | 313 | resolved_comment.repo != repo: |
|
314 | 314 | log.warning('Comment tried to resolved unrelated todo comment: %s', |
|
315 | 315 | resolved_comment) |
|
316 | 316 | # comment not bound to this repo, forbid |
|
317 | 317 | resolved_comment = None |
|
318 | 318 | |
|
319 | 319 | comment.resolved_comment = resolved_comment |
|
320 | 320 | |
|
321 | 321 | pull_request_id = pull_request |
|
322 | 322 | |
|
323 | 323 | commit_obj = None |
|
324 | 324 | pull_request_obj = None |
|
325 | 325 | |
|
326 | 326 | if commit_id: |
|
327 | 327 | notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT |
|
328 | 328 | # do a lookup, so we don't pass something bad here |
|
329 | 329 | commit_obj = repo.scm_instance().get_commit(commit_id=commit_id) |
|
330 | 330 | comment.revision = commit_obj.raw_id |
|
331 | 331 | |
|
332 | 332 | elif pull_request_id: |
|
333 | 333 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT |
|
334 | 334 | pull_request_obj = self.__get_pull_request(pull_request_id) |
|
335 | 335 | comment.pull_request = pull_request_obj |
|
336 | 336 | else: |
|
337 | 337 | raise Exception('Please specify commit or pull_request_id') |
|
338 | 338 | |
|
339 | 339 | Session().add(comment) |
|
340 | 340 | Session().flush() |
|
341 | 341 | kwargs = { |
|
342 | 342 | 'user': user, |
|
343 | 343 | 'renderer_type': renderer, |
|
344 | 344 | 'repo_name': repo.repo_name, |
|
345 | 345 | 'status_change': status_change, |
|
346 | 346 | 'status_change_type': status_change_type, |
|
347 | 347 | 'comment_body': text, |
|
348 | 348 | 'comment_file': f_path, |
|
349 | 349 | 'comment_line': line_no, |
|
350 | 350 | 'comment_type': comment_type or 'note', |
|
351 | 351 | 'comment_id': comment.comment_id |
|
352 | 352 | } |
|
353 | 353 | |
|
354 | 354 | if commit_obj: |
|
355 | 355 | recipients = ChangesetComment.get_users( |
|
356 | 356 | revision=commit_obj.raw_id) |
|
357 | 357 | # add commit author if it's in RhodeCode system |
|
358 | 358 | cs_author = User.get_from_cs_author(commit_obj.author) |
|
359 | 359 | if not cs_author: |
|
360 | 360 | # use repo owner if we cannot extract the author correctly |
|
361 | 361 | cs_author = repo.user |
|
362 | 362 | recipients += [cs_author] |
|
363 | 363 | |
|
364 | 364 | commit_comment_url = self.get_url(comment, request=request) |
|
365 | 365 | commit_comment_reply_url = self.get_url( |
|
366 | 366 | comment, request=request, |
|
367 | 367 | anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id)) |
|
368 | 368 | |
|
369 | 369 | target_repo_url = h.link_to( |
|
370 | 370 | repo.repo_name, |
|
371 | 371 | h.route_url('repo_summary', repo_name=repo.repo_name)) |
|
372 | 372 | |
|
373 | 373 | # commit specifics |
|
374 | 374 | kwargs.update({ |
|
375 | 375 | 'commit': commit_obj, |
|
376 | 376 | 'commit_message': commit_obj.message, |
|
377 | 377 | 'commit_target_repo_url': target_repo_url, |
|
378 | 378 | 'commit_comment_url': commit_comment_url, |
|
379 | 379 | 'commit_comment_reply_url': commit_comment_reply_url |
|
380 | 380 | }) |
|
381 | 381 | |
|
382 | 382 | elif pull_request_obj: |
|
383 | 383 | # get the current participants of this pull request |
|
384 | 384 | recipients = ChangesetComment.get_users( |
|
385 | 385 | pull_request_id=pull_request_obj.pull_request_id) |
|
386 | 386 | # add pull request author |
|
387 | 387 | recipients += [pull_request_obj.author] |
|
388 | 388 | |
|
389 | 389 | # add the reviewers to notification |
|
390 | 390 | recipients += [x.user for x in pull_request_obj.reviewers] |
|
391 | 391 | |
|
392 | 392 | pr_target_repo = pull_request_obj.target_repo |
|
393 | 393 | pr_source_repo = pull_request_obj.source_repo |
|
394 | 394 | |
|
395 | 395 | pr_comment_url = self.get_url(comment, request=request) |
|
396 | 396 | pr_comment_reply_url = self.get_url( |
|
397 | 397 | comment, request=request, |
|
398 | 398 | anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id)) |
|
399 | 399 | |
|
400 | 400 | pr_url = h.route_url( |
|
401 | 401 | 'pullrequest_show', |
|
402 | 402 | repo_name=pr_target_repo.repo_name, |
|
403 | 403 | pull_request_id=pull_request_obj.pull_request_id, ) |
|
404 | 404 | |
|
405 | 405 | # set some variables for email notification |
|
406 | 406 | pr_target_repo_url = h.route_url( |
|
407 | 407 | 'repo_summary', repo_name=pr_target_repo.repo_name) |
|
408 | 408 | |
|
409 | 409 | pr_source_repo_url = h.route_url( |
|
410 | 410 | 'repo_summary', repo_name=pr_source_repo.repo_name) |
|
411 | 411 | |
|
412 | 412 | # pull request specifics |
|
413 | 413 | kwargs.update({ |
|
414 | 414 | 'pull_request': pull_request_obj, |
|
415 | 415 | 'pr_id': pull_request_obj.pull_request_id, |
|
416 | 416 | 'pull_request_url': pr_url, |
|
417 | 417 | 'pull_request_target_repo': pr_target_repo, |
|
418 | 418 | 'pull_request_target_repo_url': pr_target_repo_url, |
|
419 | 419 | 'pull_request_source_repo': pr_source_repo, |
|
420 | 420 | 'pull_request_source_repo_url': pr_source_repo_url, |
|
421 | 421 | 'pr_comment_url': pr_comment_url, |
|
422 | 422 | 'pr_comment_reply_url': pr_comment_reply_url, |
|
423 | 423 | 'pr_closing': closing_pr, |
|
424 | 424 | }) |
|
425 | 425 | |
|
426 | 426 | recipients += [self._get_user(u) for u in (extra_recipients or [])] |
|
427 | 427 | |
|
428 | 428 | if send_email: |
|
429 | 429 | # pre-generate the subject for notification itself |
|
430 | 430 | (subject, |
|
431 | 431 | _h, _e, # we don't care about those |
|
432 | 432 | body_plaintext) = EmailNotificationModel().render_email( |
|
433 | 433 | notification_type, **kwargs) |
|
434 | 434 | |
|
435 | 435 | mention_recipients = set( |
|
436 | 436 | self._extract_mentions(text)).difference(recipients) |
|
437 | 437 | |
|
438 | 438 | # create notification objects, and emails |
|
439 | 439 | NotificationModel().create( |
|
440 | 440 | created_by=user, |
|
441 | 441 | notification_subject=subject, |
|
442 | 442 | notification_body=body_plaintext, |
|
443 | 443 | notification_type=notification_type, |
|
444 | 444 | recipients=recipients, |
|
445 | 445 | mention_recipients=mention_recipients, |
|
446 | 446 | email_kwargs=kwargs, |
|
447 | 447 | ) |
|
448 | 448 | |
|
449 | 449 | Session().flush() |
|
450 | 450 | if comment.pull_request: |
|
451 | 451 | action = 'repo.pull_request.comment.create' |
|
452 | 452 | else: |
|
453 | 453 | action = 'repo.commit.comment.create' |
|
454 | 454 | |
|
455 | 455 | comment_data = comment.get_api_data() |
|
456 | 456 | self._log_audit_action( |
|
457 | 457 | action, {'data': comment_data}, auth_user, comment) |
|
458 | 458 | |
|
459 | 459 | msg_url = '' |
|
460 | 460 | channel = None |
|
461 | 461 | if commit_obj: |
|
462 | 462 | msg_url = commit_comment_url |
|
463 | 463 | repo_name = repo.repo_name |
|
464 | 464 | channel = u'/repo${}$/commit/{}'.format( |
|
465 | 465 | repo_name, |
|
466 | 466 | commit_obj.raw_id |
|
467 | 467 | ) |
|
468 | 468 | elif pull_request_obj: |
|
469 | 469 | msg_url = pr_comment_url |
|
470 | 470 | repo_name = pr_target_repo.repo_name |
|
471 | 471 | channel = u'/repo${}$/pr/{}'.format( |
|
472 | 472 | repo_name, |
|
473 | 473 | pull_request_id |
|
474 | 474 | ) |
|
475 | 475 | |
|
476 | 476 | message = '<strong>{}</strong> {} - ' \ |
|
477 | 477 | '<a onclick="window.location=\'{}\';' \ |
|
478 | 478 | 'window.location.reload()">' \ |
|
479 | 479 | '<strong>{}</strong></a>' |
|
480 | 480 | message = message.format( |
|
481 | 481 | user.username, _('made a comment'), msg_url, |
|
482 | 482 | _('Show it now')) |
|
483 | 483 | |
|
484 | 484 | channelstream.post_message( |
|
485 | 485 | channel, message, user.username, |
|
486 | 486 | registry=get_current_registry()) |
|
487 | 487 | |
|
488 | 488 | return comment |
|
489 | 489 | |
|
490 | 490 | def edit(self, comment_id, text, auth_user, version): |
|
491 | 491 | """ |
|
492 | 492 | Change existing comment for commit or pull request. |
|
493 | 493 | |
|
494 | 494 | :param comment_id: |
|
495 | 495 | :param text: |
|
496 | 496 | :param auth_user: current authenticated user calling this method |
|
497 | 497 | :param version: last comment version |
|
498 | 498 | """ |
|
499 | 499 | if not text: |
|
500 | 500 | log.warning('Missing text for comment, skipping...') |
|
501 | 501 | return |
|
502 | 502 | |
|
503 | 503 | comment = ChangesetComment.get(comment_id) |
|
504 | 504 | old_comment_text = comment.text |
|
505 | 505 | comment.text = text |
|
506 | 506 | comment.modified_at = datetime.datetime.now() |
|
507 | 507 | version = safe_int(version) |
|
508 | 508 | |
|
509 | 509 | # NOTE(marcink): this returns initial comment + edits, so v2 from ui |
|
510 | 510 | # would return 3 here |
|
511 | 511 | comment_version = ChangesetCommentHistory.get_version(comment_id) |
|
512 | 512 | |
|
513 | 513 | if isinstance(version, (int, long)) and (comment_version - version) != 1: |
|
514 | 514 | log.warning( |
|
515 | 515 | 'Version mismatch comment_version {} submitted {}, skipping'.format( |
|
516 | 516 | comment_version-1, # -1 since note above |
|
517 | 517 | version |
|
518 | 518 | ) |
|
519 | 519 | ) |
|
520 | 520 | raise CommentVersionMismatch() |
|
521 | 521 | |
|
522 | 522 | comment_history = ChangesetCommentHistory() |
|
523 | 523 | comment_history.comment_id = comment_id |
|
524 | 524 | comment_history.version = comment_version |
|
525 | 525 | comment_history.created_by_user_id = auth_user.user_id |
|
526 | 526 | comment_history.text = old_comment_text |
|
527 | 527 | # TODO add email notification |
|
528 | 528 | Session().add(comment_history) |
|
529 | 529 | Session().add(comment) |
|
530 | 530 | Session().flush() |
|
531 | 531 | |
|
532 | 532 | if comment.pull_request: |
|
533 | 533 | action = 'repo.pull_request.comment.edit' |
|
534 | 534 | else: |
|
535 | 535 | action = 'repo.commit.comment.edit' |
|
536 | 536 | |
|
537 | 537 | comment_data = comment.get_api_data() |
|
538 | 538 | comment_data['old_comment_text'] = old_comment_text |
|
539 | 539 | self._log_audit_action( |
|
540 | 540 | action, {'data': comment_data}, auth_user, comment) |
|
541 | 541 | |
|
542 | 542 | return comment_history |
|
543 | 543 | |
|
544 | 544 | def delete(self, comment, auth_user): |
|
545 | 545 | """ |
|
546 | 546 | Deletes given comment |
|
547 | 547 | """ |
|
548 | 548 | comment = self.__get_commit_comment(comment) |
|
549 | 549 | old_data = comment.get_api_data() |
|
550 | 550 | Session().delete(comment) |
|
551 | 551 | |
|
552 | 552 | if comment.pull_request: |
|
553 | 553 | action = 'repo.pull_request.comment.delete' |
|
554 | 554 | else: |
|
555 | 555 | action = 'repo.commit.comment.delete' |
|
556 | 556 | |
|
557 | 557 | self._log_audit_action( |
|
558 | 558 | action, {'old_data': old_data}, auth_user, comment) |
|
559 | 559 | |
|
560 | 560 | return comment |
|
561 | 561 | |
|
562 | 562 | def get_all_comments(self, repo_id, revision=None, pull_request=None): |
|
563 | 563 | q = ChangesetComment.query()\ |
|
564 | 564 | .filter(ChangesetComment.repo_id == repo_id) |
|
565 | 565 | if revision: |
|
566 | 566 | q = q.filter(ChangesetComment.revision == revision) |
|
567 | 567 | elif pull_request: |
|
568 | 568 | pull_request = self.__get_pull_request(pull_request) |
|
569 | 569 | q = q.filter(ChangesetComment.pull_request == pull_request) |
|
570 | 570 | else: |
|
571 | 571 | raise Exception('Please specify commit or pull_request') |
|
572 | 572 | q = q.order_by(ChangesetComment.created_on) |
|
573 | 573 | return q.all() |
|
574 | 574 | |
|
575 | 575 | def get_url(self, comment, request=None, permalink=False, anchor=None): |
|
576 | 576 | if not request: |
|
577 | 577 | request = get_current_request() |
|
578 | 578 | |
|
579 | 579 | comment = self.__get_commit_comment(comment) |
|
580 | 580 | if anchor is None: |
|
581 | 581 | anchor = 'comment-{}'.format(comment.comment_id) |
|
582 | 582 | |
|
583 | 583 | if comment.pull_request: |
|
584 | 584 | pull_request = comment.pull_request |
|
585 | 585 | if permalink: |
|
586 | 586 | return request.route_url( |
|
587 | 587 | 'pull_requests_global', |
|
588 | 588 | pull_request_id=pull_request.pull_request_id, |
|
589 | 589 | _anchor=anchor) |
|
590 | 590 | else: |
|
591 | 591 | return request.route_url( |
|
592 | 592 | 'pullrequest_show', |
|
593 | 593 | repo_name=safe_str(pull_request.target_repo.repo_name), |
|
594 | 594 | pull_request_id=pull_request.pull_request_id, |
|
595 | 595 | _anchor=anchor) |
|
596 | 596 | |
|
597 | 597 | else: |
|
598 | 598 | repo = comment.repo |
|
599 | 599 | commit_id = comment.revision |
|
600 | 600 | |
|
601 | 601 | if permalink: |
|
602 | 602 | return request.route_url( |
|
603 | 603 | 'repo_commit', repo_name=safe_str(repo.repo_id), |
|
604 | 604 | commit_id=commit_id, |
|
605 | 605 | _anchor=anchor) |
|
606 | 606 | |
|
607 | 607 | else: |
|
608 | 608 | return request.route_url( |
|
609 | 609 | 'repo_commit', repo_name=safe_str(repo.repo_name), |
|
610 | 610 | commit_id=commit_id, |
|
611 | 611 | _anchor=anchor) |
|
612 | 612 | |
|
613 | 613 | def get_comments(self, repo_id, revision=None, pull_request=None): |
|
614 | 614 | """ |
|
615 | 615 | Gets main comments based on revision or pull_request_id |
|
616 | 616 | |
|
617 | 617 | :param repo_id: |
|
618 | 618 | :param revision: |
|
619 | 619 | :param pull_request: |
|
620 | 620 | """ |
|
621 | 621 | |
|
622 | 622 | q = ChangesetComment.query()\ |
|
623 | 623 | .filter(ChangesetComment.repo_id == repo_id)\ |
|
624 | 624 | .filter(ChangesetComment.line_no == None)\ |
|
625 | 625 | .filter(ChangesetComment.f_path == None) |
|
626 | 626 | if revision: |
|
627 | 627 | q = q.filter(ChangesetComment.revision == revision) |
|
628 | 628 | elif pull_request: |
|
629 | 629 | pull_request = self.__get_pull_request(pull_request) |
|
630 | 630 | q = q.filter(ChangesetComment.pull_request == pull_request) |
|
631 | 631 | else: |
|
632 | 632 | raise Exception('Please specify commit or pull_request') |
|
633 | 633 | q = q.order_by(ChangesetComment.created_on) |
|
634 | 634 | return q.all() |
|
635 | 635 | |
|
636 | 636 | def get_inline_comments(self, repo_id, revision=None, pull_request=None): |
|
637 | 637 | q = self._get_inline_comments_query(repo_id, revision, pull_request) |
|
638 | 638 | return self._group_comments_by_path_and_line_number(q) |
|
639 | 639 | |
|
640 | 640 | def get_inline_comments_count(self, inline_comments, skip_outdated=True, |
|
641 | 641 | version=None): |
|
642 | 642 | inline_cnt = 0 |
|
643 | 643 | for fname, per_line_comments in inline_comments.iteritems(): |
|
644 | 644 | for lno, comments in per_line_comments.iteritems(): |
|
645 | 645 | for comm in comments: |
|
646 | 646 | if not comm.outdated_at_version(version) and skip_outdated: |
|
647 | 647 | inline_cnt += 1 |
|
648 | 648 | |
|
649 | 649 | return inline_cnt |
|
650 | 650 | |
|
651 | 651 | def get_outdated_comments(self, repo_id, pull_request): |
|
652 | 652 | # TODO: johbo: Remove `repo_id`, it is not needed to find the comments |
|
653 | 653 | # of a pull request. |
|
654 | 654 | q = self._all_inline_comments_of_pull_request(pull_request) |
|
655 | 655 | q = q.filter( |
|
656 | 656 | ChangesetComment.display_state == |
|
657 | 657 | ChangesetComment.COMMENT_OUTDATED |
|
658 | 658 | ).order_by(ChangesetComment.comment_id.asc()) |
|
659 | 659 | |
|
660 | 660 | return self._group_comments_by_path_and_line_number(q) |
|
661 | 661 | |
|
662 | 662 | def _get_inline_comments_query(self, repo_id, revision, pull_request): |
|
663 | 663 | # TODO: johbo: Split this into two methods: One for PR and one for |
|
664 | 664 | # commit. |
|
665 | 665 | if revision: |
|
666 | 666 | q = Session().query(ChangesetComment).filter( |
|
667 | 667 | ChangesetComment.repo_id == repo_id, |
|
668 | 668 | ChangesetComment.line_no != null(), |
|
669 | 669 | ChangesetComment.f_path != null(), |
|
670 | 670 | ChangesetComment.revision == revision) |
|
671 | 671 | |
|
672 | 672 | elif pull_request: |
|
673 | 673 | pull_request = self.__get_pull_request(pull_request) |
|
674 | 674 | if not CommentsModel.use_outdated_comments(pull_request): |
|
675 | 675 | q = self._visible_inline_comments_of_pull_request(pull_request) |
|
676 | 676 | else: |
|
677 | 677 | q = self._all_inline_comments_of_pull_request(pull_request) |
|
678 | 678 | |
|
679 | 679 | else: |
|
680 | 680 | raise Exception('Please specify commit or pull_request_id') |
|
681 | 681 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
682 | 682 | return q |
|
683 | 683 | |
|
684 | 684 | def _group_comments_by_path_and_line_number(self, q): |
|
685 | 685 | comments = q.all() |
|
686 | 686 | paths = collections.defaultdict(lambda: collections.defaultdict(list)) |
|
687 | 687 | for co in comments: |
|
688 | 688 | paths[co.f_path][co.line_no].append(co) |
|
689 | 689 | return paths |
|
690 | 690 | |
|
691 | 691 | @classmethod |
|
692 | 692 | def needed_extra_diff_context(cls): |
|
693 | 693 | return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER) |
|
694 | 694 | |
|
695 | 695 | def outdate_comments(self, pull_request, old_diff_data, new_diff_data): |
|
696 | 696 | if not CommentsModel.use_outdated_comments(pull_request): |
|
697 | 697 | return |
|
698 | 698 | |
|
699 | 699 | comments = self._visible_inline_comments_of_pull_request(pull_request) |
|
700 | 700 | comments_to_outdate = comments.all() |
|
701 | 701 | |
|
702 | 702 | for comment in comments_to_outdate: |
|
703 | 703 | self._outdate_one_comment(comment, old_diff_data, new_diff_data) |
|
704 | 704 | |
|
705 | 705 | def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc): |
|
706 | 706 | diff_line = _parse_comment_line_number(comment.line_no) |
|
707 | 707 | |
|
708 | 708 | try: |
|
709 | 709 | old_context = old_diff_proc.get_context_of_line( |
|
710 | 710 | path=comment.f_path, diff_line=diff_line) |
|
711 | 711 | new_context = new_diff_proc.get_context_of_line( |
|
712 | 712 | path=comment.f_path, diff_line=diff_line) |
|
713 | 713 | except (diffs.LineNotInDiffException, |
|
714 | 714 | diffs.FileNotInDiffException): |
|
715 | 715 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
716 | 716 | return |
|
717 | 717 | |
|
718 | 718 | if old_context == new_context: |
|
719 | 719 | return |
|
720 | 720 | |
|
721 | 721 | if self._should_relocate_diff_line(diff_line): |
|
722 | 722 | new_diff_lines = new_diff_proc.find_context( |
|
723 | 723 | path=comment.f_path, context=old_context, |
|
724 | 724 | offset=self.DIFF_CONTEXT_BEFORE) |
|
725 | 725 | if not new_diff_lines: |
|
726 | 726 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
727 | 727 | else: |
|
728 | 728 | new_diff_line = self._choose_closest_diff_line( |
|
729 | 729 | diff_line, new_diff_lines) |
|
730 | 730 | comment.line_no = _diff_to_comment_line_number(new_diff_line) |
|
731 | 731 | else: |
|
732 | 732 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
733 | 733 | |
|
734 | 734 | def _should_relocate_diff_line(self, diff_line): |
|
735 | 735 | """ |
|
736 | 736 | Checks if relocation shall be tried for the given `diff_line`. |
|
737 | 737 | |
|
738 | 738 | If a comment points into the first lines, then we can have a situation |
|
739 | 739 | that after an update another line has been added on top. In this case |
|
740 | 740 | we would find the context still and move the comment around. This |
|
741 | 741 | would be wrong. |
|
742 | 742 | """ |
|
743 | 743 | should_relocate = ( |
|
744 | 744 | (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or |
|
745 | 745 | (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE)) |
|
746 | 746 | return should_relocate |
|
747 | 747 | |
|
748 | 748 | def _choose_closest_diff_line(self, diff_line, new_diff_lines): |
|
749 | 749 | candidate = new_diff_lines[0] |
|
750 | 750 | best_delta = _diff_line_delta(diff_line, candidate) |
|
751 | 751 | for new_diff_line in new_diff_lines[1:]: |
|
752 | 752 | delta = _diff_line_delta(diff_line, new_diff_line) |
|
753 | 753 | if delta < best_delta: |
|
754 | 754 | candidate = new_diff_line |
|
755 | 755 | best_delta = delta |
|
756 | 756 | return candidate |
|
757 | 757 | |
|
758 | 758 | def _visible_inline_comments_of_pull_request(self, pull_request): |
|
759 | 759 | comments = self._all_inline_comments_of_pull_request(pull_request) |
|
760 | 760 | comments = comments.filter( |
|
761 | 761 | coalesce(ChangesetComment.display_state, '') != |
|
762 | 762 | ChangesetComment.COMMENT_OUTDATED) |
|
763 | 763 | return comments |
|
764 | 764 | |
|
765 | 765 | def _all_inline_comments_of_pull_request(self, pull_request): |
|
766 | 766 | comments = Session().query(ChangesetComment)\ |
|
767 | 767 | .filter(ChangesetComment.line_no != None)\ |
|
768 | 768 | .filter(ChangesetComment.f_path != None)\ |
|
769 | 769 | .filter(ChangesetComment.pull_request == pull_request) |
|
770 | 770 | return comments |
|
771 | 771 | |
|
772 | 772 | def _all_general_comments_of_pull_request(self, pull_request): |
|
773 | 773 | comments = Session().query(ChangesetComment)\ |
|
774 | 774 | .filter(ChangesetComment.line_no == None)\ |
|
775 | 775 | .filter(ChangesetComment.f_path == None)\ |
|
776 | 776 | .filter(ChangesetComment.pull_request == pull_request) |
|
777 | 777 | |
|
778 | 778 | return comments |
|
779 | 779 | |
|
780 | 780 | @staticmethod |
|
781 | 781 | def use_outdated_comments(pull_request): |
|
782 | 782 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
783 | 783 | settings = settings_model.get_general_settings() |
|
784 | 784 | return settings.get('rhodecode_use_outdated_comments', False) |
|
785 | 785 | |
|
786 | 786 | def trigger_commit_comment_hook(self, repo, user, action, data=None): |
|
787 | 787 | repo = self._get_repo(repo) |
|
788 | 788 | target_scm = repo.scm_instance() |
|
789 | 789 | if action == 'create': |
|
790 | 790 | trigger_hook = hooks_utils.trigger_comment_commit_hooks |
|
791 | 791 | elif action == 'edit': |
|
792 | # TODO(dan): when this is supported we trigger edit hook too | |
|
793 | return | |
|
792 | trigger_hook = hooks_utils.trigger_comment_commit_edit_hooks | |
|
794 | 793 | else: |
|
795 | 794 | return |
|
796 | 795 | |
|
797 | 796 | log.debug('Handling repo %s trigger_commit_comment_hook with action %s: %s', |
|
798 | 797 | repo, action, trigger_hook) |
|
799 | 798 | trigger_hook( |
|
800 | 799 | username=user.username, |
|
801 | 800 | repo_name=repo.repo_name, |
|
802 | 801 | repo_type=target_scm.alias, |
|
803 | 802 | repo=repo, |
|
804 | 803 | data=data) |
|
805 | 804 | |
|
806 | 805 | |
|
807 | 806 | def _parse_comment_line_number(line_no): |
|
808 | 807 | """ |
|
809 | 808 | Parses line numbers of the form "(o|n)\d+" and returns them in a tuple. |
|
810 | 809 | """ |
|
811 | 810 | old_line = None |
|
812 | 811 | new_line = None |
|
813 | 812 | if line_no.startswith('o'): |
|
814 | 813 | old_line = int(line_no[1:]) |
|
815 | 814 | elif line_no.startswith('n'): |
|
816 | 815 | new_line = int(line_no[1:]) |
|
817 | 816 | else: |
|
818 | 817 | raise ValueError("Comment lines have to start with either 'o' or 'n'.") |
|
819 | 818 | return diffs.DiffLineNumber(old_line, new_line) |
|
820 | 819 | |
|
821 | 820 | |
|
822 | 821 | def _diff_to_comment_line_number(diff_line): |
|
823 | 822 | if diff_line.new is not None: |
|
824 | 823 | return u'n{}'.format(diff_line.new) |
|
825 | 824 | elif diff_line.old is not None: |
|
826 | 825 | return u'o{}'.format(diff_line.old) |
|
827 | 826 | return u'' |
|
828 | 827 | |
|
829 | 828 | |
|
830 | 829 | def _diff_line_delta(a, b): |
|
831 | 830 | if None not in (a.new, b.new): |
|
832 | 831 | return abs(a.new - b.new) |
|
833 | 832 | elif None not in (a.old, b.old): |
|
834 | 833 | return abs(a.old - b.old) |
|
835 | 834 | else: |
|
836 | 835 | raise ValueError( |
|
837 | 836 | "Cannot compute delta between {} and {}".format(a, b)) |
@@ -1,2072 +1,2074 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | """ |
|
23 | 23 | pull request model for RhodeCode |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | import json |
|
28 | 28 | import logging |
|
29 | 29 | import os |
|
30 | 30 | |
|
31 | 31 | import datetime |
|
32 | 32 | import urllib |
|
33 | 33 | import collections |
|
34 | 34 | |
|
35 | 35 | from pyramid import compat |
|
36 | 36 | from pyramid.threadlocal import get_current_request |
|
37 | 37 | |
|
38 | 38 | from rhodecode.lib.vcs.nodes import FileNode |
|
39 | 39 | from rhodecode.translation import lazy_ugettext |
|
40 | 40 | from rhodecode.lib import helpers as h, hooks_utils, diffs |
|
41 | 41 | from rhodecode.lib import audit_logger |
|
42 | 42 | from rhodecode.lib.compat import OrderedDict |
|
43 | 43 | from rhodecode.lib.hooks_daemon import prepare_callback_daemon |
|
44 | 44 | from rhodecode.lib.markup_renderer import ( |
|
45 | 45 | DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer) |
|
46 | 46 | from rhodecode.lib.utils2 import ( |
|
47 | 47 | safe_unicode, safe_str, md5_safe, AttributeDict, safe_int, |
|
48 | 48 | get_current_rhodecode_user) |
|
49 | 49 | from rhodecode.lib.vcs.backends.base import ( |
|
50 | 50 | Reference, MergeResponse, MergeFailureReason, UpdateFailureReason, |
|
51 | 51 | TargetRefMissing, SourceRefMissing) |
|
52 | 52 | from rhodecode.lib.vcs.conf import settings as vcs_settings |
|
53 | 53 | from rhodecode.lib.vcs.exceptions import ( |
|
54 | 54 | CommitDoesNotExistError, EmptyRepositoryError) |
|
55 | 55 | from rhodecode.model import BaseModel |
|
56 | 56 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
57 | 57 | from rhodecode.model.comment import CommentsModel |
|
58 | 58 | from rhodecode.model.db import ( |
|
59 | 59 | or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus, |
|
60 | 60 | PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User) |
|
61 | 61 | from rhodecode.model.meta import Session |
|
62 | 62 | from rhodecode.model.notification import NotificationModel, \ |
|
63 | 63 | EmailNotificationModel |
|
64 | 64 | from rhodecode.model.scm import ScmModel |
|
65 | 65 | from rhodecode.model.settings import VcsSettingsModel |
|
66 | 66 | |
|
67 | 67 | |
|
68 | 68 | log = logging.getLogger(__name__) |
|
69 | 69 | |
|
70 | 70 | |
|
71 | 71 | # Data structure to hold the response data when updating commits during a pull |
|
72 | 72 | # request update. |
|
73 | 73 | class UpdateResponse(object): |
|
74 | 74 | |
|
75 | 75 | def __init__(self, executed, reason, new, old, common_ancestor_id, |
|
76 | 76 | commit_changes, source_changed, target_changed): |
|
77 | 77 | |
|
78 | 78 | self.executed = executed |
|
79 | 79 | self.reason = reason |
|
80 | 80 | self.new = new |
|
81 | 81 | self.old = old |
|
82 | 82 | self.common_ancestor_id = common_ancestor_id |
|
83 | 83 | self.changes = commit_changes |
|
84 | 84 | self.source_changed = source_changed |
|
85 | 85 | self.target_changed = target_changed |
|
86 | 86 | |
|
87 | 87 | |
|
88 | 88 | def get_diff_info( |
|
89 | 89 | source_repo, source_ref, target_repo, target_ref, get_authors=False, |
|
90 | 90 | get_commit_authors=True): |
|
91 | 91 | """ |
|
92 | 92 | Calculates detailed diff information for usage in preview of creation of a pull-request. |
|
93 | 93 | This is also used for default reviewers logic |
|
94 | 94 | """ |
|
95 | 95 | |
|
96 | 96 | source_scm = source_repo.scm_instance() |
|
97 | 97 | target_scm = target_repo.scm_instance() |
|
98 | 98 | |
|
99 | 99 | ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm) |
|
100 | 100 | if not ancestor_id: |
|
101 | 101 | raise ValueError( |
|
102 | 102 | 'cannot calculate diff info without a common ancestor. ' |
|
103 | 103 | 'Make sure both repositories are related, and have a common forking commit.') |
|
104 | 104 | |
|
105 | 105 | # case here is that want a simple diff without incoming commits, |
|
106 | 106 | # previewing what will be merged based only on commits in the source. |
|
107 | 107 | log.debug('Using ancestor %s as source_ref instead of %s', |
|
108 | 108 | ancestor_id, source_ref) |
|
109 | 109 | |
|
110 | 110 | # source of changes now is the common ancestor |
|
111 | 111 | source_commit = source_scm.get_commit(commit_id=ancestor_id) |
|
112 | 112 | # target commit becomes the source ref as it is the last commit |
|
113 | 113 | # for diff generation this logic gives proper diff |
|
114 | 114 | target_commit = source_scm.get_commit(commit_id=source_ref) |
|
115 | 115 | |
|
116 | 116 | vcs_diff = \ |
|
117 | 117 | source_scm.get_diff(commit1=source_commit, commit2=target_commit, |
|
118 | 118 | ignore_whitespace=False, context=3) |
|
119 | 119 | |
|
120 | 120 | diff_processor = diffs.DiffProcessor( |
|
121 | 121 | vcs_diff, format='newdiff', diff_limit=None, |
|
122 | 122 | file_limit=None, show_full_diff=True) |
|
123 | 123 | |
|
124 | 124 | _parsed = diff_processor.prepare() |
|
125 | 125 | |
|
126 | 126 | all_files = [] |
|
127 | 127 | all_files_changes = [] |
|
128 | 128 | changed_lines = {} |
|
129 | 129 | stats = [0, 0] |
|
130 | 130 | for f in _parsed: |
|
131 | 131 | all_files.append(f['filename']) |
|
132 | 132 | all_files_changes.append({ |
|
133 | 133 | 'filename': f['filename'], |
|
134 | 134 | 'stats': f['stats'] |
|
135 | 135 | }) |
|
136 | 136 | stats[0] += f['stats']['added'] |
|
137 | 137 | stats[1] += f['stats']['deleted'] |
|
138 | 138 | |
|
139 | 139 | changed_lines[f['filename']] = [] |
|
140 | 140 | if len(f['chunks']) < 2: |
|
141 | 141 | continue |
|
142 | 142 | # first line is "context" information |
|
143 | 143 | for chunks in f['chunks'][1:]: |
|
144 | 144 | for chunk in chunks['lines']: |
|
145 | 145 | if chunk['action'] not in ('del', 'mod'): |
|
146 | 146 | continue |
|
147 | 147 | changed_lines[f['filename']].append(chunk['old_lineno']) |
|
148 | 148 | |
|
149 | 149 | commit_authors = [] |
|
150 | 150 | user_counts = {} |
|
151 | 151 | email_counts = {} |
|
152 | 152 | author_counts = {} |
|
153 | 153 | _commit_cache = {} |
|
154 | 154 | |
|
155 | 155 | commits = [] |
|
156 | 156 | if get_commit_authors: |
|
157 | 157 | commits = target_scm.compare( |
|
158 | 158 | target_ref, source_ref, source_scm, merge=True, |
|
159 | 159 | pre_load=["author"]) |
|
160 | 160 | |
|
161 | 161 | for commit in commits: |
|
162 | 162 | user = User.get_from_cs_author(commit.author) |
|
163 | 163 | if user and user not in commit_authors: |
|
164 | 164 | commit_authors.append(user) |
|
165 | 165 | |
|
166 | 166 | # lines |
|
167 | 167 | if get_authors: |
|
168 | 168 | target_commit = source_repo.get_commit(ancestor_id) |
|
169 | 169 | |
|
170 | 170 | for fname, lines in changed_lines.items(): |
|
171 | 171 | try: |
|
172 | 172 | node = target_commit.get_node(fname) |
|
173 | 173 | except Exception: |
|
174 | 174 | continue |
|
175 | 175 | |
|
176 | 176 | if not isinstance(node, FileNode): |
|
177 | 177 | continue |
|
178 | 178 | |
|
179 | 179 | for annotation in node.annotate: |
|
180 | 180 | line_no, commit_id, get_commit_func, line_text = annotation |
|
181 | 181 | if line_no in lines: |
|
182 | 182 | if commit_id not in _commit_cache: |
|
183 | 183 | _commit_cache[commit_id] = get_commit_func() |
|
184 | 184 | commit = _commit_cache[commit_id] |
|
185 | 185 | author = commit.author |
|
186 | 186 | email = commit.author_email |
|
187 | 187 | user = User.get_from_cs_author(author) |
|
188 | 188 | if user: |
|
189 | 189 | user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1 |
|
190 | 190 | author_counts[author] = author_counts.get(author, 0) + 1 |
|
191 | 191 | email_counts[email] = email_counts.get(email, 0) + 1 |
|
192 | 192 | |
|
193 | 193 | return { |
|
194 | 194 | 'commits': commits, |
|
195 | 195 | 'files': all_files_changes, |
|
196 | 196 | 'stats': stats, |
|
197 | 197 | 'ancestor': ancestor_id, |
|
198 | 198 | # original authors of modified files |
|
199 | 199 | 'original_authors': { |
|
200 | 200 | 'users': user_counts, |
|
201 | 201 | 'authors': author_counts, |
|
202 | 202 | 'emails': email_counts, |
|
203 | 203 | }, |
|
204 | 204 | 'commit_authors': commit_authors |
|
205 | 205 | } |
|
206 | 206 | |
|
207 | 207 | |
|
208 | 208 | class PullRequestModel(BaseModel): |
|
209 | 209 | |
|
210 | 210 | cls = PullRequest |
|
211 | 211 | |
|
212 | 212 | DIFF_CONTEXT = diffs.DEFAULT_CONTEXT |
|
213 | 213 | |
|
214 | 214 | UPDATE_STATUS_MESSAGES = { |
|
215 | 215 | UpdateFailureReason.NONE: lazy_ugettext( |
|
216 | 216 | 'Pull request update successful.'), |
|
217 | 217 | UpdateFailureReason.UNKNOWN: lazy_ugettext( |
|
218 | 218 | 'Pull request update failed because of an unknown error.'), |
|
219 | 219 | UpdateFailureReason.NO_CHANGE: lazy_ugettext( |
|
220 | 220 | 'No update needed because the source and target have not changed.'), |
|
221 | 221 | UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext( |
|
222 | 222 | 'Pull request cannot be updated because the reference type is ' |
|
223 | 223 | 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'), |
|
224 | 224 | UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext( |
|
225 | 225 | 'This pull request cannot be updated because the target ' |
|
226 | 226 | 'reference is missing.'), |
|
227 | 227 | UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext( |
|
228 | 228 | 'This pull request cannot be updated because the source ' |
|
229 | 229 | 'reference is missing.'), |
|
230 | 230 | } |
|
231 | 231 | REF_TYPES = ['bookmark', 'book', 'tag', 'branch'] |
|
232 | 232 | UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch'] |
|
233 | 233 | |
|
234 | 234 | def __get_pull_request(self, pull_request): |
|
235 | 235 | return self._get_instance(( |
|
236 | 236 | PullRequest, PullRequestVersion), pull_request) |
|
237 | 237 | |
|
238 | 238 | def _check_perms(self, perms, pull_request, user, api=False): |
|
239 | 239 | if not api: |
|
240 | 240 | return h.HasRepoPermissionAny(*perms)( |
|
241 | 241 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
242 | 242 | else: |
|
243 | 243 | return h.HasRepoPermissionAnyApi(*perms)( |
|
244 | 244 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
245 | 245 | |
|
246 | 246 | def check_user_read(self, pull_request, user, api=False): |
|
247 | 247 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
248 | 248 | return self._check_perms(_perms, pull_request, user, api) |
|
249 | 249 | |
|
250 | 250 | def check_user_merge(self, pull_request, user, api=False): |
|
251 | 251 | _perms = ('repository.admin', 'repository.write', 'hg.admin',) |
|
252 | 252 | return self._check_perms(_perms, pull_request, user, api) |
|
253 | 253 | |
|
254 | 254 | def check_user_update(self, pull_request, user, api=False): |
|
255 | 255 | owner = user.user_id == pull_request.user_id |
|
256 | 256 | return self.check_user_merge(pull_request, user, api) or owner |
|
257 | 257 | |
|
258 | 258 | def check_user_delete(self, pull_request, user): |
|
259 | 259 | owner = user.user_id == pull_request.user_id |
|
260 | 260 | _perms = ('repository.admin',) |
|
261 | 261 | return self._check_perms(_perms, pull_request, user) or owner |
|
262 | 262 | |
|
263 | 263 | def check_user_change_status(self, pull_request, user, api=False): |
|
264 | 264 | reviewer = user.user_id in [x.user_id for x in |
|
265 | 265 | pull_request.reviewers] |
|
266 | 266 | return self.check_user_update(pull_request, user, api) or reviewer |
|
267 | 267 | |
|
268 | 268 | def check_user_comment(self, pull_request, user): |
|
269 | 269 | owner = user.user_id == pull_request.user_id |
|
270 | 270 | return self.check_user_read(pull_request, user) or owner |
|
271 | 271 | |
|
272 | 272 | def get(self, pull_request): |
|
273 | 273 | return self.__get_pull_request(pull_request) |
|
274 | 274 | |
|
275 | 275 | def _prepare_get_all_query(self, repo_name, search_q=None, source=False, |
|
276 | 276 | statuses=None, opened_by=None, order_by=None, |
|
277 | 277 | order_dir='desc', only_created=False): |
|
278 | 278 | repo = None |
|
279 | 279 | if repo_name: |
|
280 | 280 | repo = self._get_repo(repo_name) |
|
281 | 281 | |
|
282 | 282 | q = PullRequest.query() |
|
283 | 283 | |
|
284 | 284 | if search_q: |
|
285 | 285 | like_expression = u'%{}%'.format(safe_unicode(search_q)) |
|
286 | 286 | q = q.join(User) |
|
287 | 287 | q = q.filter(or_( |
|
288 | 288 | cast(PullRequest.pull_request_id, String).ilike(like_expression), |
|
289 | 289 | User.username.ilike(like_expression), |
|
290 | 290 | PullRequest.title.ilike(like_expression), |
|
291 | 291 | PullRequest.description.ilike(like_expression), |
|
292 | 292 | )) |
|
293 | 293 | |
|
294 | 294 | # source or target |
|
295 | 295 | if repo and source: |
|
296 | 296 | q = q.filter(PullRequest.source_repo == repo) |
|
297 | 297 | elif repo: |
|
298 | 298 | q = q.filter(PullRequest.target_repo == repo) |
|
299 | 299 | |
|
300 | 300 | # closed,opened |
|
301 | 301 | if statuses: |
|
302 | 302 | q = q.filter(PullRequest.status.in_(statuses)) |
|
303 | 303 | |
|
304 | 304 | # opened by filter |
|
305 | 305 | if opened_by: |
|
306 | 306 | q = q.filter(PullRequest.user_id.in_(opened_by)) |
|
307 | 307 | |
|
308 | 308 | # only get those that are in "created" state |
|
309 | 309 | if only_created: |
|
310 | 310 | q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED) |
|
311 | 311 | |
|
312 | 312 | if order_by: |
|
313 | 313 | order_map = { |
|
314 | 314 | 'name_raw': PullRequest.pull_request_id, |
|
315 | 315 | 'id': PullRequest.pull_request_id, |
|
316 | 316 | 'title': PullRequest.title, |
|
317 | 317 | 'updated_on_raw': PullRequest.updated_on, |
|
318 | 318 | 'target_repo': PullRequest.target_repo_id |
|
319 | 319 | } |
|
320 | 320 | if order_dir == 'asc': |
|
321 | 321 | q = q.order_by(order_map[order_by].asc()) |
|
322 | 322 | else: |
|
323 | 323 | q = q.order_by(order_map[order_by].desc()) |
|
324 | 324 | |
|
325 | 325 | return q |
|
326 | 326 | |
|
327 | 327 | def count_all(self, repo_name, search_q=None, source=False, statuses=None, |
|
328 | 328 | opened_by=None): |
|
329 | 329 | """ |
|
330 | 330 | Count the number of pull requests for a specific repository. |
|
331 | 331 | |
|
332 | 332 | :param repo_name: target or source repo |
|
333 | 333 | :param search_q: filter by text |
|
334 | 334 | :param source: boolean flag to specify if repo_name refers to source |
|
335 | 335 | :param statuses: list of pull request statuses |
|
336 | 336 | :param opened_by: author user of the pull request |
|
337 | 337 | :returns: int number of pull requests |
|
338 | 338 | """ |
|
339 | 339 | q = self._prepare_get_all_query( |
|
340 | 340 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
341 | 341 | opened_by=opened_by) |
|
342 | 342 | |
|
343 | 343 | return q.count() |
|
344 | 344 | |
|
345 | 345 | def get_all(self, repo_name, search_q=None, source=False, statuses=None, |
|
346 | 346 | opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'): |
|
347 | 347 | """ |
|
348 | 348 | Get all pull requests for a specific repository. |
|
349 | 349 | |
|
350 | 350 | :param repo_name: target or source repo |
|
351 | 351 | :param search_q: filter by text |
|
352 | 352 | :param source: boolean flag to specify if repo_name refers to source |
|
353 | 353 | :param statuses: list of pull request statuses |
|
354 | 354 | :param opened_by: author user of the pull request |
|
355 | 355 | :param offset: pagination offset |
|
356 | 356 | :param length: length of returned list |
|
357 | 357 | :param order_by: order of the returned list |
|
358 | 358 | :param order_dir: 'asc' or 'desc' ordering direction |
|
359 | 359 | :returns: list of pull requests |
|
360 | 360 | """ |
|
361 | 361 | q = self._prepare_get_all_query( |
|
362 | 362 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
363 | 363 | opened_by=opened_by, order_by=order_by, order_dir=order_dir) |
|
364 | 364 | |
|
365 | 365 | if length: |
|
366 | 366 | pull_requests = q.limit(length).offset(offset).all() |
|
367 | 367 | else: |
|
368 | 368 | pull_requests = q.all() |
|
369 | 369 | |
|
370 | 370 | return pull_requests |
|
371 | 371 | |
|
372 | 372 | def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None, |
|
373 | 373 | opened_by=None): |
|
374 | 374 | """ |
|
375 | 375 | Count the number of pull requests for a specific repository that are |
|
376 | 376 | awaiting review. |
|
377 | 377 | |
|
378 | 378 | :param repo_name: target or source repo |
|
379 | 379 | :param search_q: filter by text |
|
380 | 380 | :param source: boolean flag to specify if repo_name refers to source |
|
381 | 381 | :param statuses: list of pull request statuses |
|
382 | 382 | :param opened_by: author user of the pull request |
|
383 | 383 | :returns: int number of pull requests |
|
384 | 384 | """ |
|
385 | 385 | pull_requests = self.get_awaiting_review( |
|
386 | 386 | repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by) |
|
387 | 387 | |
|
388 | 388 | return len(pull_requests) |
|
389 | 389 | |
|
390 | 390 | def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None, |
|
391 | 391 | opened_by=None, offset=0, length=None, |
|
392 | 392 | order_by=None, order_dir='desc'): |
|
393 | 393 | """ |
|
394 | 394 | Get all pull requests for a specific repository that are awaiting |
|
395 | 395 | review. |
|
396 | 396 | |
|
397 | 397 | :param repo_name: target or source repo |
|
398 | 398 | :param search_q: filter by text |
|
399 | 399 | :param source: boolean flag to specify if repo_name refers to source |
|
400 | 400 | :param statuses: list of pull request statuses |
|
401 | 401 | :param opened_by: author user of the pull request |
|
402 | 402 | :param offset: pagination offset |
|
403 | 403 | :param length: length of returned list |
|
404 | 404 | :param order_by: order of the returned list |
|
405 | 405 | :param order_dir: 'asc' or 'desc' ordering direction |
|
406 | 406 | :returns: list of pull requests |
|
407 | 407 | """ |
|
408 | 408 | pull_requests = self.get_all( |
|
409 | 409 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
410 | 410 | opened_by=opened_by, order_by=order_by, order_dir=order_dir) |
|
411 | 411 | |
|
412 | 412 | _filtered_pull_requests = [] |
|
413 | 413 | for pr in pull_requests: |
|
414 | 414 | status = pr.calculated_review_status() |
|
415 | 415 | if status in [ChangesetStatus.STATUS_NOT_REVIEWED, |
|
416 | 416 | ChangesetStatus.STATUS_UNDER_REVIEW]: |
|
417 | 417 | _filtered_pull_requests.append(pr) |
|
418 | 418 | if length: |
|
419 | 419 | return _filtered_pull_requests[offset:offset+length] |
|
420 | 420 | else: |
|
421 | 421 | return _filtered_pull_requests |
|
422 | 422 | |
|
423 | 423 | def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None, |
|
424 | 424 | opened_by=None, user_id=None): |
|
425 | 425 | """ |
|
426 | 426 | Count the number of pull requests for a specific repository that are |
|
427 | 427 | awaiting review from a specific user. |
|
428 | 428 | |
|
429 | 429 | :param repo_name: target or source repo |
|
430 | 430 | :param search_q: filter by text |
|
431 | 431 | :param source: boolean flag to specify if repo_name refers to source |
|
432 | 432 | :param statuses: list of pull request statuses |
|
433 | 433 | :param opened_by: author user of the pull request |
|
434 | 434 | :param user_id: reviewer user of the pull request |
|
435 | 435 | :returns: int number of pull requests |
|
436 | 436 | """ |
|
437 | 437 | pull_requests = self.get_awaiting_my_review( |
|
438 | 438 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
439 | 439 | opened_by=opened_by, user_id=user_id) |
|
440 | 440 | |
|
441 | 441 | return len(pull_requests) |
|
442 | 442 | |
|
443 | 443 | def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None, |
|
444 | 444 | opened_by=None, user_id=None, offset=0, |
|
445 | 445 | length=None, order_by=None, order_dir='desc'): |
|
446 | 446 | """ |
|
447 | 447 | Get all pull requests for a specific repository that are awaiting |
|
448 | 448 | review from a specific user. |
|
449 | 449 | |
|
450 | 450 | :param repo_name: target or source repo |
|
451 | 451 | :param search_q: filter by text |
|
452 | 452 | :param source: boolean flag to specify if repo_name refers to source |
|
453 | 453 | :param statuses: list of pull request statuses |
|
454 | 454 | :param opened_by: author user of the pull request |
|
455 | 455 | :param user_id: reviewer user of the pull request |
|
456 | 456 | :param offset: pagination offset |
|
457 | 457 | :param length: length of returned list |
|
458 | 458 | :param order_by: order of the returned list |
|
459 | 459 | :param order_dir: 'asc' or 'desc' ordering direction |
|
460 | 460 | :returns: list of pull requests |
|
461 | 461 | """ |
|
462 | 462 | pull_requests = self.get_all( |
|
463 | 463 | repo_name, search_q=search_q, source=source, statuses=statuses, |
|
464 | 464 | opened_by=opened_by, order_by=order_by, order_dir=order_dir) |
|
465 | 465 | |
|
466 | 466 | _my = PullRequestModel().get_not_reviewed(user_id) |
|
467 | 467 | my_participation = [] |
|
468 | 468 | for pr in pull_requests: |
|
469 | 469 | if pr in _my: |
|
470 | 470 | my_participation.append(pr) |
|
471 | 471 | _filtered_pull_requests = my_participation |
|
472 | 472 | if length: |
|
473 | 473 | return _filtered_pull_requests[offset:offset+length] |
|
474 | 474 | else: |
|
475 | 475 | return _filtered_pull_requests |
|
476 | 476 | |
|
477 | 477 | def get_not_reviewed(self, user_id): |
|
478 | 478 | return [ |
|
479 | 479 | x.pull_request for x in PullRequestReviewers.query().filter( |
|
480 | 480 | PullRequestReviewers.user_id == user_id).all() |
|
481 | 481 | ] |
|
482 | 482 | |
|
483 | 483 | def _prepare_participating_query(self, user_id=None, statuses=None, query='', |
|
484 | 484 | order_by=None, order_dir='desc'): |
|
485 | 485 | q = PullRequest.query() |
|
486 | 486 | if user_id: |
|
487 | 487 | reviewers_subquery = Session().query( |
|
488 | 488 | PullRequestReviewers.pull_request_id).filter( |
|
489 | 489 | PullRequestReviewers.user_id == user_id).subquery() |
|
490 | 490 | user_filter = or_( |
|
491 | 491 | PullRequest.user_id == user_id, |
|
492 | 492 | PullRequest.pull_request_id.in_(reviewers_subquery) |
|
493 | 493 | ) |
|
494 | 494 | q = PullRequest.query().filter(user_filter) |
|
495 | 495 | |
|
496 | 496 | # closed,opened |
|
497 | 497 | if statuses: |
|
498 | 498 | q = q.filter(PullRequest.status.in_(statuses)) |
|
499 | 499 | |
|
500 | 500 | if query: |
|
501 | 501 | like_expression = u'%{}%'.format(safe_unicode(query)) |
|
502 | 502 | q = q.join(User) |
|
503 | 503 | q = q.filter(or_( |
|
504 | 504 | cast(PullRequest.pull_request_id, String).ilike(like_expression), |
|
505 | 505 | User.username.ilike(like_expression), |
|
506 | 506 | PullRequest.title.ilike(like_expression), |
|
507 | 507 | PullRequest.description.ilike(like_expression), |
|
508 | 508 | )) |
|
509 | 509 | if order_by: |
|
510 | 510 | order_map = { |
|
511 | 511 | 'name_raw': PullRequest.pull_request_id, |
|
512 | 512 | 'title': PullRequest.title, |
|
513 | 513 | 'updated_on_raw': PullRequest.updated_on, |
|
514 | 514 | 'target_repo': PullRequest.target_repo_id |
|
515 | 515 | } |
|
516 | 516 | if order_dir == 'asc': |
|
517 | 517 | q = q.order_by(order_map[order_by].asc()) |
|
518 | 518 | else: |
|
519 | 519 | q = q.order_by(order_map[order_by].desc()) |
|
520 | 520 | |
|
521 | 521 | return q |
|
522 | 522 | |
|
523 | 523 | def count_im_participating_in(self, user_id=None, statuses=None, query=''): |
|
524 | 524 | q = self._prepare_participating_query(user_id, statuses=statuses, query=query) |
|
525 | 525 | return q.count() |
|
526 | 526 | |
|
527 | 527 | def get_im_participating_in( |
|
528 | 528 | self, user_id=None, statuses=None, query='', offset=0, |
|
529 | 529 | length=None, order_by=None, order_dir='desc'): |
|
530 | 530 | """ |
|
531 | 531 | Get all Pull requests that i'm participating in, or i have opened |
|
532 | 532 | """ |
|
533 | 533 | |
|
534 | 534 | q = self._prepare_participating_query( |
|
535 | 535 | user_id, statuses=statuses, query=query, order_by=order_by, |
|
536 | 536 | order_dir=order_dir) |
|
537 | 537 | |
|
538 | 538 | if length: |
|
539 | 539 | pull_requests = q.limit(length).offset(offset).all() |
|
540 | 540 | else: |
|
541 | 541 | pull_requests = q.all() |
|
542 | 542 | |
|
543 | 543 | return pull_requests |
|
544 | 544 | |
|
545 | 545 | def get_versions(self, pull_request): |
|
546 | 546 | """ |
|
547 | 547 | returns version of pull request sorted by ID descending |
|
548 | 548 | """ |
|
549 | 549 | return PullRequestVersion.query()\ |
|
550 | 550 | .filter(PullRequestVersion.pull_request == pull_request)\ |
|
551 | 551 | .order_by(PullRequestVersion.pull_request_version_id.asc())\ |
|
552 | 552 | .all() |
|
553 | 553 | |
|
554 | 554 | def get_pr_version(self, pull_request_id, version=None): |
|
555 | 555 | at_version = None |
|
556 | 556 | |
|
557 | 557 | if version and version == 'latest': |
|
558 | 558 | pull_request_ver = PullRequest.get(pull_request_id) |
|
559 | 559 | pull_request_obj = pull_request_ver |
|
560 | 560 | _org_pull_request_obj = pull_request_obj |
|
561 | 561 | at_version = 'latest' |
|
562 | 562 | elif version: |
|
563 | 563 | pull_request_ver = PullRequestVersion.get_or_404(version) |
|
564 | 564 | pull_request_obj = pull_request_ver |
|
565 | 565 | _org_pull_request_obj = pull_request_ver.pull_request |
|
566 | 566 | at_version = pull_request_ver.pull_request_version_id |
|
567 | 567 | else: |
|
568 | 568 | _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404( |
|
569 | 569 | pull_request_id) |
|
570 | 570 | |
|
571 | 571 | pull_request_display_obj = PullRequest.get_pr_display_object( |
|
572 | 572 | pull_request_obj, _org_pull_request_obj) |
|
573 | 573 | |
|
574 | 574 | return _org_pull_request_obj, pull_request_obj, \ |
|
575 | 575 | pull_request_display_obj, at_version |
|
576 | 576 | |
|
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, title, description=None,
               common_ancestor_id=None,
               description_renderer=None,
               reviewer_data=None, translator=None, auth_user=None):
        """
        Create a new pull request together with its reviewer entries, set the
        initial review status, and run an initial merge simulation.

        :param created_by: user (id/username/object) creating the pull request
        :param source_repo: source repository (name or object)
        :param source_ref: source ref string
        :param target_repo: target repository (name or object)
        :param target_ref: target ref string
        :param revisions: list of commit ids included in the pull request
        :param reviewers: iterable of (user_id, reasons, mandatory, rules) tuples
        :param title: pull request title
        :param description: optional description text
        :param common_ancestor_id: optional pre-computed common ancestor commit
        :param description_renderer: renderer used for the description text
        :param reviewer_data: reviewer-rule data stored on the pull request
        :param translator: translation function; defaults to the current
            request's translator
        :param auth_user: AuthUser used for the audit log; defaults to the
            AuthUser of *created_by*
        :returns: the created PullRequest, re-fetched after commit
        """
        translator = translator or get_current_request().translate

        created_by_user = self._get_user(created_by)
        auth_user = auth_user or created_by_user.AuthUser()
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        # build the pull request object in the CREATING state; it is flipped
        # to CREATED only after the merge simulation below succeeds
        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.description_renderer = description_renderer
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data
        pull_request.pull_request_state = pull_request.STATE_CREATING
        pull_request.common_ancestor_id = common_ancestor_id

        # flush to obtain pull_request_id for the reviewer rows
        Session().add(pull_request)
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # we check if this particular reviewer is member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # can be that user is member of more but we pick the first same,
                    # same as default reviewers algo
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
            Session().flush()
        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early at this point. This has to do with a fact
        # that before queries do some row-locking. And because of that
        # we need to commit and finish transaction before below validate call
        # that for large repos could be long resulting in long row locks
        Session().commit()

        # prepare workspace, and run initial merge simulation. Set state during that
        # operation
        pull_request = PullRequest.get(pull_request.pull_request_id)

        # set as merging, for merge simulation, and if finished to created so we mark
        # simulation is working fine
        with pull_request.set_state(PullRequest.STATE_MERGING,
                                    final_state=PullRequest.STATE_CREATED) as state_obj:
            MergeCheck.validate(
                pull_request, auth_user=auth_user, translator=translator)

        # notify reviewers and fire the 'create' integration hook
        self.notify_reviewers(pull_request, reviewer_ids)
        self.trigger_pull_request_hook(pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request
|
690 | 690 | |
|
691 | 691 | def trigger_pull_request_hook(self, pull_request, user, action, data=None): |
|
692 | 692 | pull_request = self.__get_pull_request(pull_request) |
|
693 | 693 | target_scm = pull_request.target_repo.scm_instance() |
|
694 | 694 | if action == 'create': |
|
695 | 695 | trigger_hook = hooks_utils.trigger_create_pull_request_hook |
|
696 | 696 | elif action == 'merge': |
|
697 | 697 | trigger_hook = hooks_utils.trigger_merge_pull_request_hook |
|
698 | 698 | elif action == 'close': |
|
699 | 699 | trigger_hook = hooks_utils.trigger_close_pull_request_hook |
|
700 | 700 | elif action == 'review_status_change': |
|
701 | 701 | trigger_hook = hooks_utils.trigger_review_pull_request_hook |
|
702 | 702 | elif action == 'update': |
|
703 | 703 | trigger_hook = hooks_utils.trigger_update_pull_request_hook |
|
704 | 704 | elif action == 'comment': |
|
705 | 705 | trigger_hook = hooks_utils.trigger_comment_pull_request_hook |
|
706 | elif action == 'comment_edit': | |
|
707 | trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook | |
|
706 | 708 | else: |
|
707 | 709 | return |
|
708 | 710 | |
|
709 | 711 | log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s', |
|
710 | 712 | pull_request, action, trigger_hook) |
|
711 | 713 | trigger_hook( |
|
712 | 714 | username=user.username, |
|
713 | 715 | repo_name=pull_request.target_repo.repo_name, |
|
714 | 716 | repo_type=target_scm.alias, |
|
715 | 717 | pull_request=pull_request, |
|
716 | 718 | data=data) |
|
717 | 719 | |
|
718 | 720 | def _get_commit_ids(self, pull_request): |
|
719 | 721 | """ |
|
720 | 722 | Return the commit ids of the merged pull request. |
|
721 | 723 | |
|
722 | 724 | This method is not dealing correctly yet with the lack of autoupdates |
|
723 | 725 | nor with the implicit target updates. |
|
724 | 726 | For example: if a commit in the source repo is already in the target it |
|
725 | 727 | will be reported anyways. |
|
726 | 728 | """ |
|
727 | 729 | merge_rev = pull_request.merge_rev |
|
728 | 730 | if merge_rev is None: |
|
729 | 731 | raise ValueError('This pull request was not merged yet') |
|
730 | 732 | |
|
731 | 733 | commit_ids = list(pull_request.revisions) |
|
732 | 734 | if merge_rev not in commit_ids: |
|
733 | 735 | commit_ids.append(merge_rev) |
|
734 | 736 | |
|
735 | 737 | return commit_ids |
|
736 | 738 | |
|
737 | 739 | def merge_repo(self, pull_request, user, extras): |
|
738 | 740 | log.debug("Merging pull request %s", pull_request.pull_request_id) |
|
739 | 741 | extras['user_agent'] = 'internal-merge' |
|
740 | 742 | merge_state = self._merge_pull_request(pull_request, user, extras) |
|
741 | 743 | if merge_state.executed: |
|
742 | 744 | log.debug("Merge was successful, updating the pull request comments.") |
|
743 | 745 | self._comment_and_close_pr(pull_request, user, merge_state) |
|
744 | 746 | |
|
745 | 747 | self._log_audit_action( |
|
746 | 748 | 'repo.pull_request.merge', |
|
747 | 749 | {'merge_state': merge_state.__dict__}, |
|
748 | 750 | user, pull_request) |
|
749 | 751 | |
|
750 | 752 | else: |
|
751 | 753 | log.warn("Merge failed, not updating the pull request.") |
|
752 | 754 | return merge_state |
|
753 | 755 | |
|
754 | 756 | def _merge_pull_request(self, pull_request, user, extras, merge_msg=None): |
|
755 | 757 | target_vcs = pull_request.target_repo.scm_instance() |
|
756 | 758 | source_vcs = pull_request.source_repo.scm_instance() |
|
757 | 759 | |
|
758 | 760 | message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format( |
|
759 | 761 | pr_id=pull_request.pull_request_id, |
|
760 | 762 | pr_title=pull_request.title, |
|
761 | 763 | source_repo=source_vcs.name, |
|
762 | 764 | source_ref_name=pull_request.source_ref_parts.name, |
|
763 | 765 | target_repo=target_vcs.name, |
|
764 | 766 | target_ref_name=pull_request.target_ref_parts.name, |
|
765 | 767 | ) |
|
766 | 768 | |
|
767 | 769 | workspace_id = self._workspace_id(pull_request) |
|
768 | 770 | repo_id = pull_request.target_repo.repo_id |
|
769 | 771 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
770 | 772 | close_branch = self._close_branch_before_merging(pull_request) |
|
771 | 773 | user_name = self._user_name_for_merging(pull_request, user) |
|
772 | 774 | |
|
773 | 775 | target_ref = self._refresh_reference( |
|
774 | 776 | pull_request.target_ref_parts, target_vcs) |
|
775 | 777 | |
|
776 | 778 | callback_daemon, extras = prepare_callback_daemon( |
|
777 | 779 | extras, protocol=vcs_settings.HOOKS_PROTOCOL, |
|
778 | 780 | host=vcs_settings.HOOKS_HOST, |
|
779 | 781 | use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS) |
|
780 | 782 | |
|
781 | 783 | with callback_daemon: |
|
782 | 784 | # TODO: johbo: Implement a clean way to run a config_override |
|
783 | 785 | # for a single call. |
|
784 | 786 | target_vcs.config.set( |
|
785 | 787 | 'rhodecode', 'RC_SCM_DATA', json.dumps(extras)) |
|
786 | 788 | |
|
787 | 789 | merge_state = target_vcs.merge( |
|
788 | 790 | repo_id, workspace_id, target_ref, source_vcs, |
|
789 | 791 | pull_request.source_ref_parts, |
|
790 | 792 | user_name=user_name, user_email=user.email, |
|
791 | 793 | message=message, use_rebase=use_rebase, |
|
792 | 794 | close_branch=close_branch) |
|
793 | 795 | return merge_state |
|
794 | 796 | |
|
795 | 797 | def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None): |
|
796 | 798 | pull_request.merge_rev = merge_state.merge_ref.commit_id |
|
797 | 799 | pull_request.updated_on = datetime.datetime.now() |
|
798 | 800 | close_msg = close_msg or 'Pull request merged and closed' |
|
799 | 801 | |
|
800 | 802 | CommentsModel().create( |
|
801 | 803 | text=safe_unicode(close_msg), |
|
802 | 804 | repo=pull_request.target_repo.repo_id, |
|
803 | 805 | user=user.user_id, |
|
804 | 806 | pull_request=pull_request.pull_request_id, |
|
805 | 807 | f_path=None, |
|
806 | 808 | line_no=None, |
|
807 | 809 | closing_pr=True |
|
808 | 810 | ) |
|
809 | 811 | |
|
810 | 812 | Session().add(pull_request) |
|
811 | 813 | Session().flush() |
|
812 | 814 | # TODO: paris: replace invalidation with less radical solution |
|
813 | 815 | ScmModel().mark_for_invalidation( |
|
814 | 816 | pull_request.target_repo.repo_name) |
|
815 | 817 | self.trigger_pull_request_hook(pull_request, user, 'merge') |
|
816 | 818 | |
|
817 | 819 | def has_valid_update_type(self, pull_request): |
|
818 | 820 | source_ref_type = pull_request.source_ref_parts.type |
|
819 | 821 | return source_ref_type in self.REF_TYPES |
|
820 | 822 | |
|
821 | 823 | def get_flow_commits(self, pull_request): |
|
822 | 824 | |
|
823 | 825 | # source repo |
|
824 | 826 | source_ref_name = pull_request.source_ref_parts.name |
|
825 | 827 | source_ref_type = pull_request.source_ref_parts.type |
|
826 | 828 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
827 | 829 | source_repo = pull_request.source_repo.scm_instance() |
|
828 | 830 | |
|
829 | 831 | try: |
|
830 | 832 | if source_ref_type in self.REF_TYPES: |
|
831 | 833 | source_commit = source_repo.get_commit(source_ref_name) |
|
832 | 834 | else: |
|
833 | 835 | source_commit = source_repo.get_commit(source_ref_id) |
|
834 | 836 | except CommitDoesNotExistError: |
|
835 | 837 | raise SourceRefMissing() |
|
836 | 838 | |
|
837 | 839 | # target repo |
|
838 | 840 | target_ref_name = pull_request.target_ref_parts.name |
|
839 | 841 | target_ref_type = pull_request.target_ref_parts.type |
|
840 | 842 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
841 | 843 | target_repo = pull_request.target_repo.scm_instance() |
|
842 | 844 | |
|
843 | 845 | try: |
|
844 | 846 | if target_ref_type in self.REF_TYPES: |
|
845 | 847 | target_commit = target_repo.get_commit(target_ref_name) |
|
846 | 848 | else: |
|
847 | 849 | target_commit = target_repo.get_commit(target_ref_id) |
|
848 | 850 | except CommitDoesNotExistError: |
|
849 | 851 | raise TargetRefMissing() |
|
850 | 852 | |
|
851 | 853 | return source_commit, target_commit |
|
852 | 854 | |
|
853 | 855 | def update_commits(self, pull_request, updating_user): |
|
854 | 856 | """ |
|
855 | 857 | Get the updated list of commits for the pull request |
|
856 | 858 | and return the new pull request version and the list |
|
857 | 859 | of commits processed by this update action |
|
858 | 860 | |
|
859 | 861 | updating_user is the user_object who triggered the update |
|
860 | 862 | """ |
|
861 | 863 | pull_request = self.__get_pull_request(pull_request) |
|
862 | 864 | source_ref_type = pull_request.source_ref_parts.type |
|
863 | 865 | source_ref_name = pull_request.source_ref_parts.name |
|
864 | 866 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
865 | 867 | |
|
866 | 868 | target_ref_type = pull_request.target_ref_parts.type |
|
867 | 869 | target_ref_name = pull_request.target_ref_parts.name |
|
868 | 870 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
869 | 871 | |
|
870 | 872 | if not self.has_valid_update_type(pull_request): |
|
871 | 873 | log.debug("Skipping update of pull request %s due to ref type: %s", |
|
872 | 874 | pull_request, source_ref_type) |
|
873 | 875 | return UpdateResponse( |
|
874 | 876 | executed=False, |
|
875 | 877 | reason=UpdateFailureReason.WRONG_REF_TYPE, |
|
876 | 878 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, |
|
877 | 879 | source_changed=False, target_changed=False) |
|
878 | 880 | |
|
879 | 881 | try: |
|
880 | 882 | source_commit, target_commit = self.get_flow_commits(pull_request) |
|
881 | 883 | except SourceRefMissing: |
|
882 | 884 | return UpdateResponse( |
|
883 | 885 | executed=False, |
|
884 | 886 | reason=UpdateFailureReason.MISSING_SOURCE_REF, |
|
885 | 887 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, |
|
886 | 888 | source_changed=False, target_changed=False) |
|
887 | 889 | except TargetRefMissing: |
|
888 | 890 | return UpdateResponse( |
|
889 | 891 | executed=False, |
|
890 | 892 | reason=UpdateFailureReason.MISSING_TARGET_REF, |
|
891 | 893 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, |
|
892 | 894 | source_changed=False, target_changed=False) |
|
893 | 895 | |
|
894 | 896 | source_changed = source_ref_id != source_commit.raw_id |
|
895 | 897 | target_changed = target_ref_id != target_commit.raw_id |
|
896 | 898 | |
|
897 | 899 | if not (source_changed or target_changed): |
|
898 | 900 | log.debug("Nothing changed in pull request %s", pull_request) |
|
899 | 901 | return UpdateResponse( |
|
900 | 902 | executed=False, |
|
901 | 903 | reason=UpdateFailureReason.NO_CHANGE, |
|
902 | 904 | old=pull_request, new=None, common_ancestor_id=None, commit_changes=None, |
|
903 | 905 | source_changed=target_changed, target_changed=source_changed) |
|
904 | 906 | |
|
905 | 907 | change_in_found = 'target repo' if target_changed else 'source repo' |
|
906 | 908 | log.debug('Updating pull request because of change in %s detected', |
|
907 | 909 | change_in_found) |
|
908 | 910 | |
|
909 | 911 | # Finally there is a need for an update, in case of source change |
|
910 | 912 | # we create a new version, else just an update |
|
911 | 913 | if source_changed: |
|
912 | 914 | pull_request_version = self._create_version_from_snapshot(pull_request) |
|
913 | 915 | self._link_comments_to_version(pull_request_version) |
|
914 | 916 | else: |
|
915 | 917 | try: |
|
916 | 918 | ver = pull_request.versions[-1] |
|
917 | 919 | except IndexError: |
|
918 | 920 | ver = None |
|
919 | 921 | |
|
920 | 922 | pull_request.pull_request_version_id = \ |
|
921 | 923 | ver.pull_request_version_id if ver else None |
|
922 | 924 | pull_request_version = pull_request |
|
923 | 925 | |
|
924 | 926 | source_repo = pull_request.source_repo.scm_instance() |
|
925 | 927 | target_repo = pull_request.target_repo.scm_instance() |
|
926 | 928 | |
|
927 | 929 | # re-compute commit ids |
|
928 | 930 | old_commit_ids = pull_request.revisions |
|
929 | 931 | pre_load = ["author", "date", "message", "branch"] |
|
930 | 932 | commit_ranges = target_repo.compare( |
|
931 | 933 | target_commit.raw_id, source_commit.raw_id, source_repo, merge=True, |
|
932 | 934 | pre_load=pre_load) |
|
933 | 935 | |
|
934 | 936 | target_ref = target_commit.raw_id |
|
935 | 937 | source_ref = source_commit.raw_id |
|
936 | 938 | ancestor_commit_id = target_repo.get_common_ancestor( |
|
937 | 939 | target_ref, source_ref, source_repo) |
|
938 | 940 | |
|
939 | 941 | if not ancestor_commit_id: |
|
940 | 942 | raise ValueError( |
|
941 | 943 | 'cannot calculate diff info without a common ancestor. ' |
|
942 | 944 | 'Make sure both repositories are related, and have a common forking commit.') |
|
943 | 945 | |
|
944 | 946 | pull_request.common_ancestor_id = ancestor_commit_id |
|
945 | 947 | |
|
946 | 948 | pull_request.source_ref = '%s:%s:%s' % ( |
|
947 | 949 | source_ref_type, source_ref_name, source_commit.raw_id) |
|
948 | 950 | pull_request.target_ref = '%s:%s:%s' % ( |
|
949 | 951 | target_ref_type, target_ref_name, ancestor_commit_id) |
|
950 | 952 | |
|
951 | 953 | pull_request.revisions = [ |
|
952 | 954 | commit.raw_id for commit in reversed(commit_ranges)] |
|
953 | 955 | pull_request.updated_on = datetime.datetime.now() |
|
954 | 956 | Session().add(pull_request) |
|
955 | 957 | new_commit_ids = pull_request.revisions |
|
956 | 958 | |
|
957 | 959 | old_diff_data, new_diff_data = self._generate_update_diffs( |
|
958 | 960 | pull_request, pull_request_version) |
|
959 | 961 | |
|
960 | 962 | # calculate commit and file changes |
|
961 | 963 | commit_changes = self._calculate_commit_id_changes( |
|
962 | 964 | old_commit_ids, new_commit_ids) |
|
963 | 965 | file_changes = self._calculate_file_changes( |
|
964 | 966 | old_diff_data, new_diff_data) |
|
965 | 967 | |
|
966 | 968 | # set comments as outdated if DIFFS changed |
|
967 | 969 | CommentsModel().outdate_comments( |
|
968 | 970 | pull_request, old_diff_data=old_diff_data, |
|
969 | 971 | new_diff_data=new_diff_data) |
|
970 | 972 | |
|
971 | 973 | valid_commit_changes = (commit_changes.added or commit_changes.removed) |
|
972 | 974 | file_node_changes = ( |
|
973 | 975 | file_changes.added or file_changes.modified or file_changes.removed) |
|
974 | 976 | pr_has_changes = valid_commit_changes or file_node_changes |
|
975 | 977 | |
|
976 | 978 | # Add an automatic comment to the pull request, in case |
|
977 | 979 | # anything has changed |
|
978 | 980 | if pr_has_changes: |
|
979 | 981 | update_comment = CommentsModel().create( |
|
980 | 982 | text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes), |
|
981 | 983 | repo=pull_request.target_repo, |
|
982 | 984 | user=pull_request.author, |
|
983 | 985 | pull_request=pull_request, |
|
984 | 986 | send_email=False, renderer=DEFAULT_COMMENTS_RENDERER) |
|
985 | 987 | |
|
986 | 988 | # Update status to "Under Review" for added commits |
|
987 | 989 | for commit_id in commit_changes.added: |
|
988 | 990 | ChangesetStatusModel().set_status( |
|
989 | 991 | repo=pull_request.source_repo, |
|
990 | 992 | status=ChangesetStatus.STATUS_UNDER_REVIEW, |
|
991 | 993 | comment=update_comment, |
|
992 | 994 | user=pull_request.author, |
|
993 | 995 | pull_request=pull_request, |
|
994 | 996 | revision=commit_id) |
|
995 | 997 | |
|
996 | 998 | # send update email to users |
|
997 | 999 | try: |
|
998 | 1000 | self.notify_users(pull_request=pull_request, updating_user=updating_user, |
|
999 | 1001 | ancestor_commit_id=ancestor_commit_id, |
|
1000 | 1002 | commit_changes=commit_changes, |
|
1001 | 1003 | file_changes=file_changes) |
|
1002 | 1004 | except Exception: |
|
1003 | 1005 | log.exception('Failed to send email notification to users') |
|
1004 | 1006 | |
|
1005 | 1007 | log.debug( |
|
1006 | 1008 | 'Updated pull request %s, added_ids: %s, common_ids: %s, ' |
|
1007 | 1009 | 'removed_ids: %s', pull_request.pull_request_id, |
|
1008 | 1010 | commit_changes.added, commit_changes.common, commit_changes.removed) |
|
1009 | 1011 | log.debug( |
|
1010 | 1012 | 'Updated pull request with the following file changes: %s', |
|
1011 | 1013 | file_changes) |
|
1012 | 1014 | |
|
1013 | 1015 | log.info( |
|
1014 | 1016 | "Updated pull request %s from commit %s to commit %s, " |
|
1015 | 1017 | "stored new version %s of this pull request.", |
|
1016 | 1018 | pull_request.pull_request_id, source_ref_id, |
|
1017 | 1019 | pull_request.source_ref_parts.commit_id, |
|
1018 | 1020 | pull_request_version.pull_request_version_id) |
|
1019 | 1021 | Session().commit() |
|
1020 | 1022 | self.trigger_pull_request_hook(pull_request, pull_request.author, 'update') |
|
1021 | 1023 | |
|
1022 | 1024 | return UpdateResponse( |
|
1023 | 1025 | executed=True, reason=UpdateFailureReason.NONE, |
|
1024 | 1026 | old=pull_request, new=pull_request_version, |
|
1025 | 1027 | common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes, |
|
1026 | 1028 | source_changed=source_changed, target_changed=target_changed) |
|
1027 | 1029 | |
|
1028 | 1030 | def _create_version_from_snapshot(self, pull_request): |
|
1029 | 1031 | version = PullRequestVersion() |
|
1030 | 1032 | version.title = pull_request.title |
|
1031 | 1033 | version.description = pull_request.description |
|
1032 | 1034 | version.status = pull_request.status |
|
1033 | 1035 | version.pull_request_state = pull_request.pull_request_state |
|
1034 | 1036 | version.created_on = datetime.datetime.now() |
|
1035 | 1037 | version.updated_on = pull_request.updated_on |
|
1036 | 1038 | version.user_id = pull_request.user_id |
|
1037 | 1039 | version.source_repo = pull_request.source_repo |
|
1038 | 1040 | version.source_ref = pull_request.source_ref |
|
1039 | 1041 | version.target_repo = pull_request.target_repo |
|
1040 | 1042 | version.target_ref = pull_request.target_ref |
|
1041 | 1043 | |
|
1042 | 1044 | version._last_merge_source_rev = pull_request._last_merge_source_rev |
|
1043 | 1045 | version._last_merge_target_rev = pull_request._last_merge_target_rev |
|
1044 | 1046 | version.last_merge_status = pull_request.last_merge_status |
|
1045 | 1047 | version.last_merge_metadata = pull_request.last_merge_metadata |
|
1046 | 1048 | version.shadow_merge_ref = pull_request.shadow_merge_ref |
|
1047 | 1049 | version.merge_rev = pull_request.merge_rev |
|
1048 | 1050 | version.reviewer_data = pull_request.reviewer_data |
|
1049 | 1051 | |
|
1050 | 1052 | version.revisions = pull_request.revisions |
|
1051 | 1053 | version.common_ancestor_id = pull_request.common_ancestor_id |
|
1052 | 1054 | version.pull_request = pull_request |
|
1053 | 1055 | Session().add(version) |
|
1054 | 1056 | Session().flush() |
|
1055 | 1057 | |
|
1056 | 1058 | return version |
|
1057 | 1059 | |
|
1058 | 1060 | def _generate_update_diffs(self, pull_request, pull_request_version): |
|
1059 | 1061 | |
|
1060 | 1062 | diff_context = ( |
|
1061 | 1063 | self.DIFF_CONTEXT + |
|
1062 | 1064 | CommentsModel.needed_extra_diff_context()) |
|
1063 | 1065 | hide_whitespace_changes = False |
|
1064 | 1066 | source_repo = pull_request_version.source_repo |
|
1065 | 1067 | source_ref_id = pull_request_version.source_ref_parts.commit_id |
|
1066 | 1068 | target_ref_id = pull_request_version.target_ref_parts.commit_id |
|
1067 | 1069 | old_diff = self._get_diff_from_pr_or_version( |
|
1068 | 1070 | source_repo, source_ref_id, target_ref_id, |
|
1069 | 1071 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) |
|
1070 | 1072 | |
|
1071 | 1073 | source_repo = pull_request.source_repo |
|
1072 | 1074 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
1073 | 1075 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
1074 | 1076 | |
|
1075 | 1077 | new_diff = self._get_diff_from_pr_or_version( |
|
1076 | 1078 | source_repo, source_ref_id, target_ref_id, |
|
1077 | 1079 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) |
|
1078 | 1080 | |
|
1079 | 1081 | old_diff_data = diffs.DiffProcessor(old_diff) |
|
1080 | 1082 | old_diff_data.prepare() |
|
1081 | 1083 | new_diff_data = diffs.DiffProcessor(new_diff) |
|
1082 | 1084 | new_diff_data.prepare() |
|
1083 | 1085 | |
|
1084 | 1086 | return old_diff_data, new_diff_data |
|
1085 | 1087 | |
|
1086 | 1088 | def _link_comments_to_version(self, pull_request_version): |
|
1087 | 1089 | """ |
|
1088 | 1090 | Link all unlinked comments of this pull request to the given version. |
|
1089 | 1091 | |
|
1090 | 1092 | :param pull_request_version: The `PullRequestVersion` to which |
|
1091 | 1093 | the comments shall be linked. |
|
1092 | 1094 | |
|
1093 | 1095 | """ |
|
1094 | 1096 | pull_request = pull_request_version.pull_request |
|
1095 | 1097 | comments = ChangesetComment.query()\ |
|
1096 | 1098 | .filter( |
|
1097 | 1099 | # TODO: johbo: Should we query for the repo at all here? |
|
1098 | 1100 | # Pending decision on how comments of PRs are to be related |
|
1099 | 1101 | # to either the source repo, the target repo or no repo at all. |
|
1100 | 1102 | ChangesetComment.repo_id == pull_request.target_repo.repo_id, |
|
1101 | 1103 | ChangesetComment.pull_request == pull_request, |
|
1102 | 1104 | ChangesetComment.pull_request_version == None)\ |
|
1103 | 1105 | .order_by(ChangesetComment.comment_id.asc()) |
|
1104 | 1106 | |
|
1105 | 1107 | # TODO: johbo: Find out why this breaks if it is done in a bulk |
|
1106 | 1108 | # operation. |
|
1107 | 1109 | for comment in comments: |
|
1108 | 1110 | comment.pull_request_version_id = ( |
|
1109 | 1111 | pull_request_version.pull_request_version_id) |
|
1110 | 1112 | Session().add(comment) |
|
1111 | 1113 | |
|
1112 | 1114 | def _calculate_commit_id_changes(self, old_ids, new_ids): |
|
1113 | 1115 | added = [x for x in new_ids if x not in old_ids] |
|
1114 | 1116 | common = [x for x in new_ids if x in old_ids] |
|
1115 | 1117 | removed = [x for x in old_ids if x not in new_ids] |
|
1116 | 1118 | total = new_ids |
|
1117 | 1119 | return ChangeTuple(added, common, removed, total) |
|
1118 | 1120 | |
|
1119 | 1121 | def _calculate_file_changes(self, old_diff_data, new_diff_data): |
|
1120 | 1122 | |
|
1121 | 1123 | old_files = OrderedDict() |
|
1122 | 1124 | for diff_data in old_diff_data.parsed_diff: |
|
1123 | 1125 | old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff']) |
|
1124 | 1126 | |
|
1125 | 1127 | added_files = [] |
|
1126 | 1128 | modified_files = [] |
|
1127 | 1129 | removed_files = [] |
|
1128 | 1130 | for diff_data in new_diff_data.parsed_diff: |
|
1129 | 1131 | new_filename = diff_data['filename'] |
|
1130 | 1132 | new_hash = md5_safe(diff_data['raw_diff']) |
|
1131 | 1133 | |
|
1132 | 1134 | old_hash = old_files.get(new_filename) |
|
1133 | 1135 | if not old_hash: |
|
1134 | 1136 | # file is not present in old diff, we have to figure out from parsed diff |
|
1135 | 1137 | # operation ADD/REMOVE |
|
1136 | 1138 | operations_dict = diff_data['stats']['ops'] |
|
1137 | 1139 | if diffs.DEL_FILENODE in operations_dict: |
|
1138 | 1140 | removed_files.append(new_filename) |
|
1139 | 1141 | else: |
|
1140 | 1142 | added_files.append(new_filename) |
|
1141 | 1143 | else: |
|
1142 | 1144 | if new_hash != old_hash: |
|
1143 | 1145 | modified_files.append(new_filename) |
|
1144 | 1146 | # now remove a file from old, since we have seen it already |
|
1145 | 1147 | del old_files[new_filename] |
|
1146 | 1148 | |
|
1147 | 1149 | # removed files is when there are present in old, but not in NEW, |
|
1148 | 1150 | # since we remove old files that are present in new diff, left-overs |
|
1149 | 1151 | # if any should be the removed files |
|
1150 | 1152 | removed_files.extend(old_files.keys()) |
|
1151 | 1153 | |
|
1152 | 1154 | return FileChangeTuple(added_files, modified_files, removed_files) |
|
1153 | 1155 | |
|
1154 | 1156 | def _render_update_message(self, ancestor_commit_id, changes, file_changes): |
|
1155 | 1157 | """ |
|
1156 | 1158 | render the message using DEFAULT_COMMENTS_RENDERER (RST renderer), |
|
1157 | 1159 | so it's always looking the same disregarding on which default |
|
1158 | 1160 | renderer system is using. |
|
1159 | 1161 | |
|
1160 | 1162 | :param ancestor_commit_id: ancestor raw_id |
|
1161 | 1163 | :param changes: changes named tuple |
|
1162 | 1164 | :param file_changes: file changes named tuple |
|
1163 | 1165 | |
|
1164 | 1166 | """ |
|
1165 | 1167 | new_status = ChangesetStatus.get_status_lbl( |
|
1166 | 1168 | ChangesetStatus.STATUS_UNDER_REVIEW) |
|
1167 | 1169 | |
|
1168 | 1170 | changed_files = ( |
|
1169 | 1171 | file_changes.added + file_changes.modified + file_changes.removed) |
|
1170 | 1172 | |
|
1171 | 1173 | params = { |
|
1172 | 1174 | 'under_review_label': new_status, |
|
1173 | 1175 | 'added_commits': changes.added, |
|
1174 | 1176 | 'removed_commits': changes.removed, |
|
1175 | 1177 | 'changed_files': changed_files, |
|
1176 | 1178 | 'added_files': file_changes.added, |
|
1177 | 1179 | 'modified_files': file_changes.modified, |
|
1178 | 1180 | 'removed_files': file_changes.removed, |
|
1179 | 1181 | 'ancestor_commit_id': ancestor_commit_id |
|
1180 | 1182 | } |
|
1181 | 1183 | renderer = RstTemplateRenderer() |
|
1182 | 1184 | return renderer.render('pull_request_update.mako', **params) |
|
1183 | 1185 | |
|
    def edit(self, pull_request, title, description, description_renderer, user):
        """
        Edit title/description of an open pull request and write an audit
        log entry with the previous state.

        :param pull_request: pull request instance or id
        :param title: new title; a falsy value leaves the current title intact
        :param description: new description, set unconditionally
        :param description_renderer: renderer name stored for the description
        :param user: user performing the edit, recorded in the audit log
        :raises ValueError: if the pull request is already closed
        """
        pull_request = self.__get_pull_request(pull_request)
        # snapshot state before mutation, used in the audit entry below
        old_data = pull_request.get_api_data(with_merge_state=False)
        if pull_request.is_closed():
            raise ValueError('This pull request is closed')
        if title:
            pull_request.title = title
        pull_request.description = description
        pull_request.updated_on = datetime.datetime.now()
        pull_request.description_renderer = description_renderer
        Session().add(pull_request)
        self._log_audit_action(
            'repo.pull_request.edit', {'old_data': old_data},
            user, pull_request)
|
1198 | 1200 | |
|
1199 | 1201 | def update_reviewers(self, pull_request, reviewer_data, user): |
|
1200 | 1202 | """ |
|
1201 | 1203 | Update the reviewers in the pull request |
|
1202 | 1204 | |
|
1203 | 1205 | :param pull_request: the pr to update |
|
1204 | 1206 | :param reviewer_data: list of tuples |
|
1205 | 1207 | [(user, ['reason1', 'reason2'], mandatory_flag, [rules])] |
|
1206 | 1208 | """ |
|
1207 | 1209 | pull_request = self.__get_pull_request(pull_request) |
|
1208 | 1210 | if pull_request.is_closed(): |
|
1209 | 1211 | raise ValueError('This pull request is closed') |
|
1210 | 1212 | |
|
1211 | 1213 | reviewers = {} |
|
1212 | 1214 | for user_id, reasons, mandatory, rules in reviewer_data: |
|
1213 | 1215 | if isinstance(user_id, (int, compat.string_types)): |
|
1214 | 1216 | user_id = self._get_user(user_id).user_id |
|
1215 | 1217 | reviewers[user_id] = { |
|
1216 | 1218 | 'reasons': reasons, 'mandatory': mandatory} |
|
1217 | 1219 | |
|
1218 | 1220 | reviewers_ids = set(reviewers.keys()) |
|
1219 | 1221 | current_reviewers = PullRequestReviewers.query()\ |
|
1220 | 1222 | .filter(PullRequestReviewers.pull_request == |
|
1221 | 1223 | pull_request).all() |
|
1222 | 1224 | current_reviewers_ids = set([x.user.user_id for x in current_reviewers]) |
|
1223 | 1225 | |
|
1224 | 1226 | ids_to_add = reviewers_ids.difference(current_reviewers_ids) |
|
1225 | 1227 | ids_to_remove = current_reviewers_ids.difference(reviewers_ids) |
|
1226 | 1228 | |
|
1227 | 1229 | log.debug("Adding %s reviewers", ids_to_add) |
|
1228 | 1230 | log.debug("Removing %s reviewers", ids_to_remove) |
|
1229 | 1231 | changed = False |
|
1230 | 1232 | added_audit_reviewers = [] |
|
1231 | 1233 | removed_audit_reviewers = [] |
|
1232 | 1234 | |
|
1233 | 1235 | for uid in ids_to_add: |
|
1234 | 1236 | changed = True |
|
1235 | 1237 | _usr = self._get_user(uid) |
|
1236 | 1238 | reviewer = PullRequestReviewers() |
|
1237 | 1239 | reviewer.user = _usr |
|
1238 | 1240 | reviewer.pull_request = pull_request |
|
1239 | 1241 | reviewer.reasons = reviewers[uid]['reasons'] |
|
1240 | 1242 | # NOTE(marcink): mandatory shouldn't be changed now |
|
1241 | 1243 | # reviewer.mandatory = reviewers[uid]['reasons'] |
|
1242 | 1244 | Session().add(reviewer) |
|
1243 | 1245 | added_audit_reviewers.append(reviewer.get_dict()) |
|
1244 | 1246 | |
|
1245 | 1247 | for uid in ids_to_remove: |
|
1246 | 1248 | changed = True |
|
1247 | 1249 | # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case |
|
1248 | 1250 | # that prevents and fixes cases that we added the same reviewer twice. |
|
1249 | 1251 | # this CAN happen due to the lack of DB checks |
|
1250 | 1252 | reviewers = PullRequestReviewers.query()\ |
|
1251 | 1253 | .filter(PullRequestReviewers.user_id == uid, |
|
1252 | 1254 | PullRequestReviewers.pull_request == pull_request)\ |
|
1253 | 1255 | .all() |
|
1254 | 1256 | |
|
1255 | 1257 | for obj in reviewers: |
|
1256 | 1258 | added_audit_reviewers.append(obj.get_dict()) |
|
1257 | 1259 | Session().delete(obj) |
|
1258 | 1260 | |
|
1259 | 1261 | if changed: |
|
1260 | 1262 | Session().expire_all() |
|
1261 | 1263 | pull_request.updated_on = datetime.datetime.now() |
|
1262 | 1264 | Session().add(pull_request) |
|
1263 | 1265 | |
|
1264 | 1266 | # finally store audit logs |
|
1265 | 1267 | for user_data in added_audit_reviewers: |
|
1266 | 1268 | self._log_audit_action( |
|
1267 | 1269 | 'repo.pull_request.reviewer.add', {'data': user_data}, |
|
1268 | 1270 | user, pull_request) |
|
1269 | 1271 | for user_data in removed_audit_reviewers: |
|
1270 | 1272 | self._log_audit_action( |
|
1271 | 1273 | 'repo.pull_request.reviewer.delete', {'old_data': user_data}, |
|
1272 | 1274 | user, pull_request) |
|
1273 | 1275 | |
|
1274 | 1276 | self.notify_reviewers(pull_request, ids_to_add) |
|
1275 | 1277 | return ids_to_add, ids_to_remove |
|
1276 | 1278 | |
|
1277 | 1279 | def get_url(self, pull_request, request=None, permalink=False): |
|
1278 | 1280 | if not request: |
|
1279 | 1281 | request = get_current_request() |
|
1280 | 1282 | |
|
1281 | 1283 | if permalink: |
|
1282 | 1284 | return request.route_url( |
|
1283 | 1285 | 'pull_requests_global', |
|
1284 | 1286 | pull_request_id=pull_request.pull_request_id,) |
|
1285 | 1287 | else: |
|
1286 | 1288 | return request.route_url('pullrequest_show', |
|
1287 | 1289 | repo_name=safe_str(pull_request.target_repo.repo_name), |
|
1288 | 1290 | pull_request_id=pull_request.pull_request_id,) |
|
1289 | 1291 | |
|
1290 | 1292 | def get_shadow_clone_url(self, pull_request, request=None): |
|
1291 | 1293 | """ |
|
1292 | 1294 | Returns qualified url pointing to the shadow repository. If this pull |
|
1293 | 1295 | request is closed there is no shadow repository and ``None`` will be |
|
1294 | 1296 | returned. |
|
1295 | 1297 | """ |
|
1296 | 1298 | if pull_request.is_closed(): |
|
1297 | 1299 | return None |
|
1298 | 1300 | else: |
|
1299 | 1301 | pr_url = urllib.unquote(self.get_url(pull_request, request=request)) |
|
1300 | 1302 | return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url)) |
|
1301 | 1303 | |
|
    def notify_reviewers(self, pull_request, reviewers_ids):
        """
        Create a TYPE_PULL_REQUEST notification (and email) for the given
        reviewer user ids. No-op when *reviewers_ids* is empty.

        :param pull_request: the `PullRequest` the notification is about
        :param reviewers_ids: iterable of user ids to notify
        """
        # notification to reviewers
        if not reviewers_ids:
            return

        log.debug('Notify following reviewers about pull-request %s', reviewers_ids)

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = reviewers_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics: (commit_id, message) pairs for the email body
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        kwargs = {
            'user': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
        }

        # pre-generate the subject for notification itself
        (subject,
         _h, _e,  # we don't care about those
         body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=pull_request.author,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
|
1362 | 1364 | |
|
    def notify_users(self, pull_request, updating_user, ancestor_commit_id,
                     commit_changes, file_changes):
        """
        Create a TYPE_PULL_REQUEST_UPDATE notification (and email) for all
        reviewers of *pull_request* except the user who performed the update.

        :param pull_request: the updated `PullRequest`
        :param updating_user: user who performed the update (excluded from
            the recipients)
        :param ancestor_commit_id: common ancestor raw_id after the update
        :param commit_changes: commit changes named tuple
        :param file_changes: file changes named tuple
        """
        updating_user_id = updating_user.user_id
        reviewers = set([x.user.user_id for x in pull_request.reviewers])
        # NOTE(marcink): send notification to all other users except to
        # person who updated the PR
        recipients = reviewers.difference(set([updating_user_id]))

        log.debug('Notify following recipients about pull-request update %s', recipients)

        pull_request_obj = pull_request

        # send email about the update
        changed_files = (
            file_changes.added + file_changes.modified + file_changes.removed)

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        email_kwargs = {
            'date': datetime.datetime.now(),
            'updating_user': updating_user,

            'pull_request': pull_request_obj,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,

            'ancestor_commit_id': ancestor_commit_id,
            'added_commits': commit_changes.added,
            'removed_commits': commit_changes.removed,
            'changed_files': changed_files,
            'added_files': file_changes.added,
            'modified_files': file_changes.modified,
            'removed_files': file_changes.removed,
        }

        (subject,
         _h, _e,  # we don't care about those
         body_plaintext) = EmailNotificationModel().render_email(
            EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE, **email_kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=updating_user,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
            recipients=recipients,
            email_kwargs=email_kwargs,
        )
|
1431 | 1433 | |
|
    def delete(self, pull_request, user=None):
        """
        Delete a pull request: clean up its merge workspace, write an
        audit entry with the old state, and remove the DB row.

        :param pull_request: pull request instance or id
        :param user: acting username for the audit log; defaults to the
            current rhodecode user's username (may be None outside a request)
        """
        if not user:
            user = getattr(get_current_rhodecode_user(), 'username', None)

        pull_request = self.__get_pull_request(pull_request)
        # capture state before deletion for the audit entry
        old_data = pull_request.get_api_data(with_merge_state=False)
        self._cleanup_merge_workspace(pull_request)
        self._log_audit_action(
            'repo.pull_request.delete', {'old_data': old_data},
            user, pull_request)
        Session().delete(pull_request)
|
1443 | 1445 | |
|
    def close_pull_request(self, pull_request, user):
        """
        Mark the pull request closed: clean the merge workspace, set the
        CLOSED status, trigger the 'close' hook and audit-log the action.

        :param pull_request: pull request instance or id
        :param user: user performing the close, recorded in the audit log
        """
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        pull_request.status = PullRequest.STATUS_CLOSED
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')

        pr_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.close', {'data': pr_data}, user, pull_request)
|
1455 | 1457 | |
|
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):
        """
        Close a pull request together with a status-change comment.

        The final status is APPROVED only when the calculated review status
        already is approved (voting consent); otherwise REJECTED. A comment
        carrying the status change is created, the 'comment' hook fires,
        and if the closing vote changed the overall review status the
        'review_status_change' hook fires as well before the PR is closed.

        :param pull_request: the `PullRequest` to close
        :param user: user performing the close (comment author / voter)
        :param repo: repository the comment is attached to
        :param message: optional comment text; defaults to a generated
            "Closing with status change" message
        :param auth_user: authenticated user passed through to comment creation
        :return: tuple of (comment, status)
        """

        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        Session().flush()

        self.trigger_pull_request_hook(pull_request, user, 'comment',
                                       data={'comment': comment})

        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
                                           data={'status': calculated_status})

        # finally close the PR
        PullRequestModel().close_pull_request(pull_request.pull_request_id, user)

        return comment, status
|
1513 | 1515 | |
|
1514 | 1516 | def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False): |
|
1515 | 1517 | _ = translator or get_current_request().translate |
|
1516 | 1518 | |
|
1517 | 1519 | if not self._is_merge_enabled(pull_request): |
|
1518 | 1520 | return None, False, _('Server-side pull request merging is disabled.') |
|
1519 | 1521 | |
|
1520 | 1522 | if pull_request.is_closed(): |
|
1521 | 1523 | return None, False, _('This pull request is closed.') |
|
1522 | 1524 | |
|
1523 | 1525 | merge_possible, msg = self._check_repo_requirements( |
|
1524 | 1526 | target=pull_request.target_repo, source=pull_request.source_repo, |
|
1525 | 1527 | translator=_) |
|
1526 | 1528 | if not merge_possible: |
|
1527 | 1529 | return None, merge_possible, msg |
|
1528 | 1530 | |
|
1529 | 1531 | try: |
|
1530 | 1532 | merge_response = self._try_merge( |
|
1531 | 1533 | pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh) |
|
1532 | 1534 | log.debug("Merge response: %s", merge_response) |
|
1533 | 1535 | return merge_response, merge_response.possible, merge_response.merge_status_message |
|
1534 | 1536 | except NotImplementedError: |
|
1535 | 1537 | return None, False, _('Pull request merging is not supported.') |
|
1536 | 1538 | |
|
1537 | 1539 | def _check_repo_requirements(self, target, source, translator): |
|
1538 | 1540 | """ |
|
1539 | 1541 | Check if `target` and `source` have compatible requirements. |
|
1540 | 1542 | |
|
1541 | 1543 | Currently this is just checking for largefiles. |
|
1542 | 1544 | """ |
|
1543 | 1545 | _ = translator |
|
1544 | 1546 | target_has_largefiles = self._has_largefiles(target) |
|
1545 | 1547 | source_has_largefiles = self._has_largefiles(source) |
|
1546 | 1548 | merge_possible = True |
|
1547 | 1549 | message = u'' |
|
1548 | 1550 | |
|
1549 | 1551 | if target_has_largefiles != source_has_largefiles: |
|
1550 | 1552 | merge_possible = False |
|
1551 | 1553 | if source_has_largefiles: |
|
1552 | 1554 | message = _( |
|
1553 | 1555 | 'Target repository large files support is disabled.') |
|
1554 | 1556 | else: |
|
1555 | 1557 | message = _( |
|
1556 | 1558 | 'Source repository large files support is disabled.') |
|
1557 | 1559 | |
|
1558 | 1560 | return merge_possible, message |
|
1559 | 1561 | |
|
    def _has_largefiles(self, repo):
        """
        Check whether the `largefiles` vcs extension is active for *repo*.

        NOTE: returns the truthy result of the expression (not a strict
        bool); callers use it in truthiness and equality comparisons.
        """
        largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
            'extensions', 'largefiles')
        return largefiles_ui and largefiles_ui[0].active
|
1564 | 1566 | |
|
1565 | 1567 | def _try_merge(self, pull_request, force_shadow_repo_refresh=False): |
|
1566 | 1568 | """ |
|
1567 | 1569 | Try to merge the pull request and return the merge status. |
|
1568 | 1570 | """ |
|
1569 | 1571 | log.debug( |
|
1570 | 1572 | "Trying out if the pull request %s can be merged. Force_refresh=%s", |
|
1571 | 1573 | pull_request.pull_request_id, force_shadow_repo_refresh) |
|
1572 | 1574 | target_vcs = pull_request.target_repo.scm_instance() |
|
1573 | 1575 | # Refresh the target reference. |
|
1574 | 1576 | try: |
|
1575 | 1577 | target_ref = self._refresh_reference( |
|
1576 | 1578 | pull_request.target_ref_parts, target_vcs) |
|
1577 | 1579 | except CommitDoesNotExistError: |
|
1578 | 1580 | merge_state = MergeResponse( |
|
1579 | 1581 | False, False, None, MergeFailureReason.MISSING_TARGET_REF, |
|
1580 | 1582 | metadata={'target_ref': pull_request.target_ref_parts}) |
|
1581 | 1583 | return merge_state |
|
1582 | 1584 | |
|
1583 | 1585 | target_locked = pull_request.target_repo.locked |
|
1584 | 1586 | if target_locked and target_locked[0]: |
|
1585 | 1587 | locked_by = 'user:{}'.format(target_locked[0]) |
|
1586 | 1588 | log.debug("The target repository is locked by %s.", locked_by) |
|
1587 | 1589 | merge_state = MergeResponse( |
|
1588 | 1590 | False, False, None, MergeFailureReason.TARGET_IS_LOCKED, |
|
1589 | 1591 | metadata={'locked_by': locked_by}) |
|
1590 | 1592 | elif force_shadow_repo_refresh or self._needs_merge_state_refresh( |
|
1591 | 1593 | pull_request, target_ref): |
|
1592 | 1594 | log.debug("Refreshing the merge status of the repository.") |
|
1593 | 1595 | merge_state = self._refresh_merge_state( |
|
1594 | 1596 | pull_request, target_vcs, target_ref) |
|
1595 | 1597 | else: |
|
1596 | 1598 | possible = pull_request.last_merge_status == MergeFailureReason.NONE |
|
1597 | 1599 | metadata = { |
|
1598 | 1600 | 'unresolved_files': '', |
|
1599 | 1601 | 'target_ref': pull_request.target_ref_parts, |
|
1600 | 1602 | 'source_ref': pull_request.source_ref_parts, |
|
1601 | 1603 | } |
|
1602 | 1604 | if pull_request.last_merge_metadata: |
|
1603 | 1605 | metadata.update(pull_request.last_merge_metadata) |
|
1604 | 1606 | |
|
1605 | 1607 | if not possible and target_ref.type == 'branch': |
|
1606 | 1608 | # NOTE(marcink): case for mercurial multiple heads on branch |
|
1607 | 1609 | heads = target_vcs._heads(target_ref.name) |
|
1608 | 1610 | if len(heads) != 1: |
|
1609 | 1611 | heads = '\n,'.join(target_vcs._heads(target_ref.name)) |
|
1610 | 1612 | metadata.update({ |
|
1611 | 1613 | 'heads': heads |
|
1612 | 1614 | }) |
|
1613 | 1615 | |
|
1614 | 1616 | merge_state = MergeResponse( |
|
1615 | 1617 | possible, False, None, pull_request.last_merge_status, metadata=metadata) |
|
1616 | 1618 | |
|
1617 | 1619 | return merge_state |
|
1618 | 1620 | |
|
1619 | 1621 | def _refresh_reference(self, reference, vcs_repository): |
|
1620 | 1622 | if reference.type in self.UPDATABLE_REF_TYPES: |
|
1621 | 1623 | name_or_id = reference.name |
|
1622 | 1624 | else: |
|
1623 | 1625 | name_or_id = reference.commit_id |
|
1624 | 1626 | |
|
1625 | 1627 | refreshed_commit = vcs_repository.get_commit(name_or_id) |
|
1626 | 1628 | refreshed_reference = Reference( |
|
1627 | 1629 | reference.type, reference.name, refreshed_commit.raw_id) |
|
1628 | 1630 | return refreshed_reference |
|
1629 | 1631 | |
|
1630 | 1632 | def _needs_merge_state_refresh(self, pull_request, target_reference): |
|
1631 | 1633 | return not( |
|
1632 | 1634 | pull_request.revisions and |
|
1633 | 1635 | pull_request.revisions[0] == pull_request._last_merge_source_rev and |
|
1634 | 1636 | target_reference.commit_id == pull_request._last_merge_target_rev) |
|
1635 | 1637 | |
|
1636 | 1638 | def _refresh_merge_state(self, pull_request, target_vcs, target_reference): |
|
1637 | 1639 | workspace_id = self._workspace_id(pull_request) |
|
1638 | 1640 | source_vcs = pull_request.source_repo.scm_instance() |
|
1639 | 1641 | repo_id = pull_request.target_repo.repo_id |
|
1640 | 1642 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
1641 | 1643 | close_branch = self._close_branch_before_merging(pull_request) |
|
1642 | 1644 | merge_state = target_vcs.merge( |
|
1643 | 1645 | repo_id, workspace_id, |
|
1644 | 1646 | target_reference, source_vcs, pull_request.source_ref_parts, |
|
1645 | 1647 | dry_run=True, use_rebase=use_rebase, |
|
1646 | 1648 | close_branch=close_branch) |
|
1647 | 1649 | |
|
1648 | 1650 | # Do not store the response if there was an unknown error. |
|
1649 | 1651 | if merge_state.failure_reason != MergeFailureReason.UNKNOWN: |
|
1650 | 1652 | pull_request._last_merge_source_rev = \ |
|
1651 | 1653 | pull_request.source_ref_parts.commit_id |
|
1652 | 1654 | pull_request._last_merge_target_rev = target_reference.commit_id |
|
1653 | 1655 | pull_request.last_merge_status = merge_state.failure_reason |
|
1654 | 1656 | pull_request.last_merge_metadata = merge_state.metadata |
|
1655 | 1657 | |
|
1656 | 1658 | pull_request.shadow_merge_ref = merge_state.merge_ref |
|
1657 | 1659 | Session().add(pull_request) |
|
1658 | 1660 | Session().commit() |
|
1659 | 1661 | |
|
1660 | 1662 | return merge_state |
|
1661 | 1663 | |
|
1662 | 1664 | def _workspace_id(self, pull_request): |
|
1663 | 1665 | workspace_id = 'pr-%s' % pull_request.pull_request_id |
|
1664 | 1666 | return workspace_id |
|
1665 | 1667 | |
|
1666 | 1668 | def generate_repo_data(self, repo, commit_id=None, branch=None, |
|
1667 | 1669 | bookmark=None, translator=None): |
|
1668 | 1670 | from rhodecode.model.repo import RepoModel |
|
1669 | 1671 | |
|
1670 | 1672 | all_refs, selected_ref = \ |
|
1671 | 1673 | self._get_repo_pullrequest_sources( |
|
1672 | 1674 | repo.scm_instance(), commit_id=commit_id, |
|
1673 | 1675 | branch=branch, bookmark=bookmark, translator=translator) |
|
1674 | 1676 | |
|
1675 | 1677 | refs_select2 = [] |
|
1676 | 1678 | for element in all_refs: |
|
1677 | 1679 | children = [{'id': x[0], 'text': x[1]} for x in element[0]] |
|
1678 | 1680 | refs_select2.append({'text': element[1], 'children': children}) |
|
1679 | 1681 | |
|
1680 | 1682 | return { |
|
1681 | 1683 | 'user': { |
|
1682 | 1684 | 'user_id': repo.user.user_id, |
|
1683 | 1685 | 'username': repo.user.username, |
|
1684 | 1686 | 'firstname': repo.user.first_name, |
|
1685 | 1687 | 'lastname': repo.user.last_name, |
|
1686 | 1688 | 'gravatar_link': h.gravatar_url(repo.user.email, 14), |
|
1687 | 1689 | }, |
|
1688 | 1690 | 'name': repo.repo_name, |
|
1689 | 1691 | 'link': RepoModel().get_url(repo), |
|
1690 | 1692 | 'description': h.chop_at_smart(repo.description_safe, '\n'), |
|
1691 | 1693 | 'refs': { |
|
1692 | 1694 | 'all_refs': all_refs, |
|
1693 | 1695 | 'selected_ref': selected_ref, |
|
1694 | 1696 | 'select2_refs': refs_select2 |
|
1695 | 1697 | } |
|
1696 | 1698 | } |
|
1697 | 1699 | |
|
1698 | 1700 | def generate_pullrequest_title(self, source, source_ref, target): |
|
1699 | 1701 | return u'{source}#{at_ref} to {target}'.format( |
|
1700 | 1702 | source=source, |
|
1701 | 1703 | at_ref=source_ref, |
|
1702 | 1704 | target=target, |
|
1703 | 1705 | ) |
|
1704 | 1706 | |
|
1705 | 1707 | def _cleanup_merge_workspace(self, pull_request): |
|
1706 | 1708 | # Merging related cleanup |
|
1707 | 1709 | repo_id = pull_request.target_repo.repo_id |
|
1708 | 1710 | target_scm = pull_request.target_repo.scm_instance() |
|
1709 | 1711 | workspace_id = self._workspace_id(pull_request) |
|
1710 | 1712 | |
|
1711 | 1713 | try: |
|
1712 | 1714 | target_scm.cleanup_merge_workspace(repo_id, workspace_id) |
|
1713 | 1715 | except NotImplementedError: |
|
1714 | 1716 | pass |
|
1715 | 1717 | |
|
1716 | 1718 | def _get_repo_pullrequest_sources( |
|
1717 | 1719 | self, repo, commit_id=None, branch=None, bookmark=None, |
|
1718 | 1720 | translator=None): |
|
1719 | 1721 | """ |
|
1720 | 1722 | Return a structure with repo's interesting commits, suitable for |
|
1721 | 1723 | the selectors in pullrequest controller |
|
1722 | 1724 | |
|
1723 | 1725 | :param commit_id: a commit that must be in the list somehow |
|
1724 | 1726 | and selected by default |
|
1725 | 1727 | :param branch: a branch that must be in the list and selected |
|
1726 | 1728 | by default - even if closed |
|
1727 | 1729 | :param bookmark: a bookmark that must be in the list and selected |
|
1728 | 1730 | """ |
|
1729 | 1731 | _ = translator or get_current_request().translate |
|
1730 | 1732 | |
|
1731 | 1733 | commit_id = safe_str(commit_id) if commit_id else None |
|
1732 | 1734 | branch = safe_unicode(branch) if branch else None |
|
1733 | 1735 | bookmark = safe_unicode(bookmark) if bookmark else None |
|
1734 | 1736 | |
|
1735 | 1737 | selected = None |
|
1736 | 1738 | |
|
1737 | 1739 | # order matters: first source that has commit_id in it will be selected |
|
1738 | 1740 | sources = [] |
|
1739 | 1741 | sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark)) |
|
1740 | 1742 | sources.append(('branch', repo.branches.items(), _('Branches'), branch)) |
|
1741 | 1743 | |
|
1742 | 1744 | if commit_id: |
|
1743 | 1745 | ref_commit = (h.short_id(commit_id), commit_id) |
|
1744 | 1746 | sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id)) |
|
1745 | 1747 | |
|
1746 | 1748 | sources.append( |
|
1747 | 1749 | ('branch', repo.branches_closed.items(), _('Closed Branches'), branch), |
|
1748 | 1750 | ) |
|
1749 | 1751 | |
|
1750 | 1752 | groups = [] |
|
1751 | 1753 | |
|
1752 | 1754 | for group_key, ref_list, group_name, match in sources: |
|
1753 | 1755 | group_refs = [] |
|
1754 | 1756 | for ref_name, ref_id in ref_list: |
|
1755 | 1757 | ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id) |
|
1756 | 1758 | group_refs.append((ref_key, ref_name)) |
|
1757 | 1759 | |
|
1758 | 1760 | if not selected: |
|
1759 | 1761 | if set([commit_id, match]) & set([ref_id, ref_name]): |
|
1760 | 1762 | selected = ref_key |
|
1761 | 1763 | |
|
1762 | 1764 | if group_refs: |
|
1763 | 1765 | groups.append((group_refs, group_name)) |
|
1764 | 1766 | |
|
1765 | 1767 | if not selected: |
|
1766 | 1768 | ref = commit_id or branch or bookmark |
|
1767 | 1769 | if ref: |
|
1768 | 1770 | raise CommitDoesNotExistError( |
|
1769 | 1771 | u'No commit refs could be found matching: {}'.format(ref)) |
|
1770 | 1772 | elif repo.DEFAULT_BRANCH_NAME in repo.branches: |
|
1771 | 1773 | selected = u'branch:{}:{}'.format( |
|
1772 | 1774 | safe_unicode(repo.DEFAULT_BRANCH_NAME), |
|
1773 | 1775 | safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME]) |
|
1774 | 1776 | ) |
|
1775 | 1777 | elif repo.commit_ids: |
|
1776 | 1778 | # make the user select in this case |
|
1777 | 1779 | selected = None |
|
1778 | 1780 | else: |
|
1779 | 1781 | raise EmptyRepositoryError() |
|
1780 | 1782 | return groups, selected |
|
1781 | 1783 | |
|
1782 | 1784 | def get_diff(self, source_repo, source_ref_id, target_ref_id, |
|
1783 | 1785 | hide_whitespace_changes, diff_context): |
|
1784 | 1786 | |
|
1785 | 1787 | return self._get_diff_from_pr_or_version( |
|
1786 | 1788 | source_repo, source_ref_id, target_ref_id, |
|
1787 | 1789 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) |
|
1788 | 1790 | |
|
1789 | 1791 | def _get_diff_from_pr_or_version( |
|
1790 | 1792 | self, source_repo, source_ref_id, target_ref_id, |
|
1791 | 1793 | hide_whitespace_changes, diff_context): |
|
1792 | 1794 | |
|
1793 | 1795 | target_commit = source_repo.get_commit( |
|
1794 | 1796 | commit_id=safe_str(target_ref_id)) |
|
1795 | 1797 | source_commit = source_repo.get_commit( |
|
1796 | 1798 | commit_id=safe_str(source_ref_id), maybe_unreachable=True) |
|
1797 | 1799 | if isinstance(source_repo, Repository): |
|
1798 | 1800 | vcs_repo = source_repo.scm_instance() |
|
1799 | 1801 | else: |
|
1800 | 1802 | vcs_repo = source_repo |
|
1801 | 1803 | |
|
1802 | 1804 | # TODO: johbo: In the context of an update, we cannot reach |
|
1803 | 1805 | # the old commit anymore with our normal mechanisms. It needs |
|
1804 | 1806 | # some sort of special support in the vcs layer to avoid this |
|
1805 | 1807 | # workaround. |
|
1806 | 1808 | if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and |
|
1807 | 1809 | vcs_repo.alias == 'git'): |
|
1808 | 1810 | source_commit.raw_id = safe_str(source_ref_id) |
|
1809 | 1811 | |
|
1810 | 1812 | log.debug('calculating diff between ' |
|
1811 | 1813 | 'source_ref:%s and target_ref:%s for repo `%s`', |
|
1812 | 1814 | target_ref_id, source_ref_id, |
|
1813 | 1815 | safe_unicode(vcs_repo.path)) |
|
1814 | 1816 | |
|
1815 | 1817 | vcs_diff = vcs_repo.get_diff( |
|
1816 | 1818 | commit1=target_commit, commit2=source_commit, |
|
1817 | 1819 | ignore_whitespace=hide_whitespace_changes, context=diff_context) |
|
1818 | 1820 | return vcs_diff |
|
1819 | 1821 | |
|
1820 | 1822 | def _is_merge_enabled(self, pull_request): |
|
1821 | 1823 | return self._get_general_setting( |
|
1822 | 1824 | pull_request, 'rhodecode_pr_merge_enabled') |
|
1823 | 1825 | |
|
1824 | 1826 | def _use_rebase_for_merging(self, pull_request): |
|
1825 | 1827 | repo_type = pull_request.target_repo.repo_type |
|
1826 | 1828 | if repo_type == 'hg': |
|
1827 | 1829 | return self._get_general_setting( |
|
1828 | 1830 | pull_request, 'rhodecode_hg_use_rebase_for_merging') |
|
1829 | 1831 | elif repo_type == 'git': |
|
1830 | 1832 | return self._get_general_setting( |
|
1831 | 1833 | pull_request, 'rhodecode_git_use_rebase_for_merging') |
|
1832 | 1834 | |
|
1833 | 1835 | return False |
|
1834 | 1836 | |
|
1835 | 1837 | def _user_name_for_merging(self, pull_request, user): |
|
1836 | 1838 | env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '') |
|
1837 | 1839 | if env_user_name_attr and hasattr(user, env_user_name_attr): |
|
1838 | 1840 | user_name_attr = env_user_name_attr |
|
1839 | 1841 | else: |
|
1840 | 1842 | user_name_attr = 'short_contact' |
|
1841 | 1843 | |
|
1842 | 1844 | user_name = getattr(user, user_name_attr) |
|
1843 | 1845 | return user_name |
|
1844 | 1846 | |
|
1845 | 1847 | def _close_branch_before_merging(self, pull_request): |
|
1846 | 1848 | repo_type = pull_request.target_repo.repo_type |
|
1847 | 1849 | if repo_type == 'hg': |
|
1848 | 1850 | return self._get_general_setting( |
|
1849 | 1851 | pull_request, 'rhodecode_hg_close_branch_before_merging') |
|
1850 | 1852 | elif repo_type == 'git': |
|
1851 | 1853 | return self._get_general_setting( |
|
1852 | 1854 | pull_request, 'rhodecode_git_close_branch_before_merging') |
|
1853 | 1855 | |
|
1854 | 1856 | return False |
|
1855 | 1857 | |
|
1856 | 1858 | def _get_general_setting(self, pull_request, settings_key, default=False): |
|
1857 | 1859 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
1858 | 1860 | settings = settings_model.get_general_settings() |
|
1859 | 1861 | return settings.get(settings_key, default) |
|
1860 | 1862 | |
|
1861 | 1863 | def _log_audit_action(self, action, action_data, user, pull_request): |
|
1862 | 1864 | audit_logger.store( |
|
1863 | 1865 | action=action, |
|
1864 | 1866 | action_data=action_data, |
|
1865 | 1867 | user=user, |
|
1866 | 1868 | repo=pull_request.target_repo) |
|
1867 | 1869 | |
|
1868 | 1870 | def get_reviewer_functions(self): |
|
1869 | 1871 | """ |
|
1870 | 1872 | Fetches functions for validation and fetching default reviewers. |
|
1871 | 1873 | If available we use the EE package, else we fallback to CE |
|
1872 | 1874 | package functions |
|
1873 | 1875 | """ |
|
1874 | 1876 | try: |
|
1875 | 1877 | from rc_reviewers.utils import get_default_reviewers_data |
|
1876 | 1878 | from rc_reviewers.utils import validate_default_reviewers |
|
1877 | 1879 | except ImportError: |
|
1878 | 1880 | from rhodecode.apps.repository.utils import get_default_reviewers_data |
|
1879 | 1881 | from rhodecode.apps.repository.utils import validate_default_reviewers |
|
1880 | 1882 | |
|
1881 | 1883 | return get_default_reviewers_data, validate_default_reviewers |
|
1882 | 1884 | |
|
1883 | 1885 | |
|
1884 | 1886 | class MergeCheck(object): |
|
1885 | 1887 | """ |
|
1886 | 1888 | Perform Merge Checks and returns a check object which stores information |
|
1887 | 1889 | about merge errors, and merge conditions |
|
1888 | 1890 | """ |
|
1889 | 1891 | TODO_CHECK = 'todo' |
|
1890 | 1892 | PERM_CHECK = 'perm' |
|
1891 | 1893 | REVIEW_CHECK = 'review' |
|
1892 | 1894 | MERGE_CHECK = 'merge' |
|
1893 | 1895 | WIP_CHECK = 'wip' |
|
1894 | 1896 | |
|
1895 | 1897 | def __init__(self): |
|
1896 | 1898 | self.review_status = None |
|
1897 | 1899 | self.merge_possible = None |
|
1898 | 1900 | self.merge_msg = '' |
|
1899 | 1901 | self.merge_response = None |
|
1900 | 1902 | self.failed = None |
|
1901 | 1903 | self.errors = [] |
|
1902 | 1904 | self.error_details = OrderedDict() |
|
1903 | 1905 | self.source_commit = AttributeDict() |
|
1904 | 1906 | self.target_commit = AttributeDict() |
|
1905 | 1907 | |
|
1906 | 1908 | def __repr__(self): |
|
1907 | 1909 | return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format( |
|
1908 | 1910 | self.merge_possible, self.failed, self.errors) |
|
1909 | 1911 | |
|
1910 | 1912 | def push_error(self, error_type, message, error_key, details): |
|
1911 | 1913 | self.failed = True |
|
1912 | 1914 | self.errors.append([error_type, message]) |
|
1913 | 1915 | self.error_details[error_key] = dict( |
|
1914 | 1916 | details=details, |
|
1915 | 1917 | error_type=error_type, |
|
1916 | 1918 | message=message |
|
1917 | 1919 | ) |
|
1918 | 1920 | |
|
1919 | 1921 | @classmethod |
|
1920 | 1922 | def validate(cls, pull_request, auth_user, translator, fail_early=False, |
|
1921 | 1923 | force_shadow_repo_refresh=False): |
|
1922 | 1924 | _ = translator |
|
1923 | 1925 | merge_check = cls() |
|
1924 | 1926 | |
|
1925 | 1927 | # title has WIP: |
|
1926 | 1928 | if pull_request.work_in_progress: |
|
1927 | 1929 | log.debug("MergeCheck: cannot merge, title has wip: marker.") |
|
1928 | 1930 | |
|
1929 | 1931 | msg = _('WIP marker in title prevents from accidental merge.') |
|
1930 | 1932 | merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title) |
|
1931 | 1933 | if fail_early: |
|
1932 | 1934 | return merge_check |
|
1933 | 1935 | |
|
1934 | 1936 | # permissions to merge |
|
1935 | 1937 | user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user) |
|
1936 | 1938 | if not user_allowed_to_merge: |
|
1937 | 1939 | log.debug("MergeCheck: cannot merge, approval is pending.") |
|
1938 | 1940 | |
|
1939 | 1941 | msg = _('User `{}` not allowed to perform merge.').format(auth_user.username) |
|
1940 | 1942 | merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username) |
|
1941 | 1943 | if fail_early: |
|
1942 | 1944 | return merge_check |
|
1943 | 1945 | |
|
1944 | 1946 | # permission to merge into the target branch |
|
1945 | 1947 | target_commit_id = pull_request.target_ref_parts.commit_id |
|
1946 | 1948 | if pull_request.target_ref_parts.type == 'branch': |
|
1947 | 1949 | branch_name = pull_request.target_ref_parts.name |
|
1948 | 1950 | else: |
|
1949 | 1951 | # for mercurial we can always figure out the branch from the commit |
|
1950 | 1952 | # in case of bookmark |
|
1951 | 1953 | target_commit = pull_request.target_repo.get_commit(target_commit_id) |
|
1952 | 1954 | branch_name = target_commit.branch |
|
1953 | 1955 | |
|
1954 | 1956 | rule, branch_perm = auth_user.get_rule_and_branch_permission( |
|
1955 | 1957 | pull_request.target_repo.repo_name, branch_name) |
|
1956 | 1958 | if branch_perm and branch_perm == 'branch.none': |
|
1957 | 1959 | msg = _('Target branch `{}` changes rejected by rule {}.').format( |
|
1958 | 1960 | branch_name, rule) |
|
1959 | 1961 | merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username) |
|
1960 | 1962 | if fail_early: |
|
1961 | 1963 | return merge_check |
|
1962 | 1964 | |
|
1963 | 1965 | # review status, must be always present |
|
1964 | 1966 | review_status = pull_request.calculated_review_status() |
|
1965 | 1967 | merge_check.review_status = review_status |
|
1966 | 1968 | |
|
1967 | 1969 | status_approved = review_status == ChangesetStatus.STATUS_APPROVED |
|
1968 | 1970 | if not status_approved: |
|
1969 | 1971 | log.debug("MergeCheck: cannot merge, approval is pending.") |
|
1970 | 1972 | |
|
1971 | 1973 | msg = _('Pull request reviewer approval is pending.') |
|
1972 | 1974 | |
|
1973 | 1975 | merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status) |
|
1974 | 1976 | |
|
1975 | 1977 | if fail_early: |
|
1976 | 1978 | return merge_check |
|
1977 | 1979 | |
|
1978 | 1980 | # left over TODOs |
|
1979 | 1981 | todos = CommentsModel().get_pull_request_unresolved_todos(pull_request) |
|
1980 | 1982 | if todos: |
|
1981 | 1983 | log.debug("MergeCheck: cannot merge, {} " |
|
1982 | 1984 | "unresolved TODOs left.".format(len(todos))) |
|
1983 | 1985 | |
|
1984 | 1986 | if len(todos) == 1: |
|
1985 | 1987 | msg = _('Cannot merge, {} TODO still not resolved.').format( |
|
1986 | 1988 | len(todos)) |
|
1987 | 1989 | else: |
|
1988 | 1990 | msg = _('Cannot merge, {} TODOs still not resolved.').format( |
|
1989 | 1991 | len(todos)) |
|
1990 | 1992 | |
|
1991 | 1993 | merge_check.push_error('warning', msg, cls.TODO_CHECK, todos) |
|
1992 | 1994 | |
|
1993 | 1995 | if fail_early: |
|
1994 | 1996 | return merge_check |
|
1995 | 1997 | |
|
1996 | 1998 | # merge possible, here is the filesystem simulation + shadow repo |
|
1997 | 1999 | merge_response, merge_status, msg = PullRequestModel().merge_status( |
|
1998 | 2000 | pull_request, translator=translator, |
|
1999 | 2001 | force_shadow_repo_refresh=force_shadow_repo_refresh) |
|
2000 | 2002 | |
|
2001 | 2003 | merge_check.merge_possible = merge_status |
|
2002 | 2004 | merge_check.merge_msg = msg |
|
2003 | 2005 | merge_check.merge_response = merge_response |
|
2004 | 2006 | |
|
2005 | 2007 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
2006 | 2008 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
2007 | 2009 | |
|
2008 | 2010 | try: |
|
2009 | 2011 | source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request) |
|
2010 | 2012 | merge_check.source_commit.changed = source_ref_id != source_commit.raw_id |
|
2011 | 2013 | merge_check.source_commit.ref_spec = pull_request.source_ref_parts |
|
2012 | 2014 | merge_check.source_commit.current_raw_id = source_commit.raw_id |
|
2013 | 2015 | merge_check.source_commit.previous_raw_id = source_ref_id |
|
2014 | 2016 | |
|
2015 | 2017 | merge_check.target_commit.changed = target_ref_id != target_commit.raw_id |
|
2016 | 2018 | merge_check.target_commit.ref_spec = pull_request.target_ref_parts |
|
2017 | 2019 | merge_check.target_commit.current_raw_id = target_commit.raw_id |
|
2018 | 2020 | merge_check.target_commit.previous_raw_id = target_ref_id |
|
2019 | 2021 | except (SourceRefMissing, TargetRefMissing): |
|
2020 | 2022 | pass |
|
2021 | 2023 | |
|
2022 | 2024 | if not merge_status: |
|
2023 | 2025 | log.debug("MergeCheck: cannot merge, pull request merge not possible.") |
|
2024 | 2026 | merge_check.push_error('warning', msg, cls.MERGE_CHECK, None) |
|
2025 | 2027 | |
|
2026 | 2028 | if fail_early: |
|
2027 | 2029 | return merge_check |
|
2028 | 2030 | |
|
2029 | 2031 | log.debug('MergeCheck: is failed: %s', merge_check.failed) |
|
2030 | 2032 | return merge_check |
|
2031 | 2033 | |
|
2032 | 2034 | @classmethod |
|
2033 | 2035 | def get_merge_conditions(cls, pull_request, translator): |
|
2034 | 2036 | _ = translator |
|
2035 | 2037 | merge_details = {} |
|
2036 | 2038 | |
|
2037 | 2039 | model = PullRequestModel() |
|
2038 | 2040 | use_rebase = model._use_rebase_for_merging(pull_request) |
|
2039 | 2041 | |
|
2040 | 2042 | if use_rebase: |
|
2041 | 2043 | merge_details['merge_strategy'] = dict( |
|
2042 | 2044 | details={}, |
|
2043 | 2045 | message=_('Merge strategy: rebase') |
|
2044 | 2046 | ) |
|
2045 | 2047 | else: |
|
2046 | 2048 | merge_details['merge_strategy'] = dict( |
|
2047 | 2049 | details={}, |
|
2048 | 2050 | message=_('Merge strategy: explicit merge commit') |
|
2049 | 2051 | ) |
|
2050 | 2052 | |
|
2051 | 2053 | close_branch = model._close_branch_before_merging(pull_request) |
|
2052 | 2054 | if close_branch: |
|
2053 | 2055 | repo_type = pull_request.target_repo.repo_type |
|
2054 | 2056 | close_msg = '' |
|
2055 | 2057 | if repo_type == 'hg': |
|
2056 | 2058 | close_msg = _('Source branch will be closed before the merge.') |
|
2057 | 2059 | elif repo_type == 'git': |
|
2058 | 2060 | close_msg = _('Source branch will be deleted after the merge.') |
|
2059 | 2061 | |
|
2060 | 2062 | merge_details['close_branch'] = dict( |
|
2061 | 2063 | details={}, |
|
2062 | 2064 | message=close_msg |
|
2063 | 2065 | ) |
|
2064 | 2066 | |
|
2065 | 2067 | return merge_details |
|
2066 | 2068 | |
|
2067 | 2069 | |
|
2068 | 2070 | ChangeTuple = collections.namedtuple( |
|
2069 | 2071 | 'ChangeTuple', ['added', 'common', 'removed', 'total']) |
|
2070 | 2072 | |
|
2071 | 2073 | FileChangeTuple = collections.namedtuple( |
|
2072 | 2074 | 'FileChangeTuple', ['added', 'modified', 'removed']) |
@@ -1,100 +1,116 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import pytest |
|
22 | 22 | |
|
23 | 23 | from rhodecode.tests.events.conftest import EventCatcher |
|
24 | 24 | |
|
25 | 25 | from rhodecode.model.comment import CommentsModel |
|
26 | 26 | from rhodecode.model.pull_request import PullRequestModel |
|
27 | 27 | from rhodecode.events import ( |
|
28 | 28 | PullRequestCreateEvent, |
|
29 | 29 | PullRequestUpdateEvent, |
|
30 | 30 | PullRequestCommentEvent, |
|
31 | PullRequestCommentEditEvent, | |
|
31 | 32 | PullRequestReviewEvent, |
|
32 | 33 | PullRequestMergeEvent, |
|
33 | 34 | PullRequestCloseEvent, |
|
34 | 35 | ) |
|
35 | 36 | |
|
36 | 37 | # TODO: dan: make the serialization tests complete json comparisons |
|
37 | 38 | @pytest.mark.backends("git", "hg") |
|
38 | 39 | @pytest.mark.parametrize('EventClass', [ |
|
39 | 40 | PullRequestCreateEvent, |
|
40 | 41 | PullRequestUpdateEvent, |
|
41 | 42 | PullRequestReviewEvent, |
|
42 | 43 | PullRequestMergeEvent, |
|
43 | 44 | PullRequestCloseEvent |
|
44 | 45 | ]) |
|
45 | 46 | def test_pullrequest_events_serialized(EventClass, pr_util, config_stub): |
|
46 | 47 | pr = pr_util.create_pull_request() |
|
47 | 48 | if EventClass == PullRequestReviewEvent: |
|
48 | 49 | event = EventClass(pr, 'approved') |
|
49 | 50 | else: |
|
50 | 51 | event = EventClass(pr) |
|
51 | 52 | data = event.as_dict() |
|
52 | 53 | assert data['name'] == EventClass.name |
|
53 | 54 | assert data['repo']['repo_name'] == pr.target_repo.repo_name |
|
54 | 55 | assert data['pullrequest']['pull_request_id'] == pr.pull_request_id |
|
55 | 56 | assert data['pullrequest']['url'] |
|
56 | 57 | assert data['pullrequest']['permalink_url'] |
|
57 | 58 | |
|
58 | 59 | |
|
59 | 60 | @pytest.mark.backends("git", "hg") |
|
60 | 61 | def test_create_pull_request_events(pr_util, config_stub): |
|
61 | 62 | with EventCatcher() as event_catcher: |
|
62 | 63 | pr_util.create_pull_request() |
|
63 | 64 | |
|
64 | 65 | assert PullRequestCreateEvent in event_catcher.events_types |
|
65 | 66 | |
|
66 | 67 | |
|
67 | 68 | @pytest.mark.backends("git", "hg") |
|
68 | 69 | def test_pullrequest_comment_events_serialized(pr_util, config_stub): |
|
69 | 70 | pr = pr_util.create_pull_request() |
|
70 | 71 | comment = CommentsModel().get_comments( |
|
71 | 72 | pr.target_repo.repo_id, pull_request=pr)[0] |
|
72 | 73 | event = PullRequestCommentEvent(pr, comment) |
|
73 | 74 | data = event.as_dict() |
|
74 | 75 | assert data['name'] == PullRequestCommentEvent.name |
|
75 | 76 | assert data['repo']['repo_name'] == pr.target_repo.repo_name |
|
76 | 77 | assert data['pullrequest']['pull_request_id'] == pr.pull_request_id |
|
77 | 78 | assert data['pullrequest']['url'] |
|
78 | 79 | assert data['pullrequest']['permalink_url'] |
|
79 | 80 | assert data['comment']['text'] == comment.text |
|
80 | 81 | |
|
81 | 82 | |
|
82 | 83 | @pytest.mark.backends("git", "hg") |
|
84 | def test_pullrequest_comment_edit_events_serialized(pr_util, config_stub): | |
|
85 | pr = pr_util.create_pull_request() | |
|
86 | comment = CommentsModel().get_comments( | |
|
87 | pr.target_repo.repo_id, pull_request=pr)[0] | |
|
88 | event = PullRequestCommentEditEvent(pr, comment) | |
|
89 | data = event.as_dict() | |
|
90 | assert data['name'] == PullRequestCommentEditEvent.name | |
|
91 | assert data['repo']['repo_name'] == pr.target_repo.repo_name | |
|
92 | assert data['pullrequest']['pull_request_id'] == pr.pull_request_id | |
|
93 | assert data['pullrequest']['url'] | |
|
94 | assert data['pullrequest']['permalink_url'] | |
|
95 | assert data['comment']['text'] == comment.text | |
|
96 | ||
|
97 | ||
|
98 | @pytest.mark.backends("git", "hg") | |
|
83 | 99 | def test_close_pull_request_events(pr_util, user_admin, config_stub): |
|
84 | 100 | pr = pr_util.create_pull_request() |
|
85 | 101 | |
|
86 | 102 | with EventCatcher() as event_catcher: |
|
87 | 103 | PullRequestModel().close_pull_request(pr, user_admin) |
|
88 | 104 | |
|
89 | 105 | assert PullRequestCloseEvent in event_catcher.events_types |
|
90 | 106 | |
|
91 | 107 | |
|
92 | 108 | @pytest.mark.backends("git", "hg") |
|
93 | 109 | def test_close_pull_request_with_comment_events(pr_util, user_admin, config_stub): |
|
94 | 110 | pr = pr_util.create_pull_request() |
|
95 | 111 | |
|
96 | 112 | with EventCatcher() as event_catcher: |
|
97 | 113 | PullRequestModel().close_pull_request_with_comment( |
|
98 | 114 | pr, user_admin, pr.target_repo) |
|
99 | 115 | |
|
100 | 116 | assert PullRequestCloseEvent in event_catcher.events_types |
@@ -1,145 +1,170 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import pytest |
|
22 | 22 | |
|
23 | 23 | from rhodecode.lib.utils2 import StrictAttributeDict |
|
24 | 24 | from rhodecode.tests.events.conftest import EventCatcher |
|
25 | 25 | |
|
26 | 26 | from rhodecode.lib import hooks_base, utils2 |
|
27 | 27 | from rhodecode.model.repo import RepoModel |
|
28 | 28 | from rhodecode.events.repo import ( |
|
29 | 29 | RepoPrePullEvent, RepoPullEvent, |
|
30 | 30 | RepoPrePushEvent, RepoPushEvent, |
|
31 | 31 | RepoPreCreateEvent, RepoCreateEvent, |
|
32 |
RepoPreDeleteEvent, RepoDeleteEvent, |
|
|
32 | RepoPreDeleteEvent, RepoDeleteEvent, | |
|
33 | RepoCommitCommentEvent, RepoCommitCommentEditEvent | |
|
33 | 34 | ) |
|
34 | 35 | |
|
35 | 36 | |
|
36 | 37 | @pytest.fixture() |
|
37 | 38 | def scm_extras(user_regular, repo_stub): |
|
38 | 39 | extras = utils2.AttributeDict({ |
|
39 | 40 | 'ip': '127.0.0.1', |
|
40 | 41 | 'username': user_regular.username, |
|
41 | 42 | 'user_id': user_regular.user_id, |
|
42 | 43 | 'action': '', |
|
43 | 44 | 'repository': repo_stub.repo_name, |
|
44 | 45 | 'scm': repo_stub.scm_instance().alias, |
|
45 | 46 | 'config': '', |
|
46 | 47 | 'repo_store': '', |
|
47 | 48 | 'server_url': 'http://example.com', |
|
48 | 49 | 'make_lock': None, |
|
49 | 50 | 'user_agent': 'some-client', |
|
50 | 51 | 'locked_by': [None], |
|
51 | 52 | 'commit_ids': ['a' * 40] * 3, |
|
52 | 53 | 'hook_type': 'scm_extras_test', |
|
53 | 54 | 'is_shadow_repo': False, |
|
54 | 55 | }) |
|
55 | 56 | return extras |
|
56 | 57 | |
|
57 | 58 | |
|
58 | 59 | # TODO: dan: make the serialization tests complete json comparisons |
|
59 | 60 | @pytest.mark.parametrize('EventClass', [ |
|
60 | 61 | RepoPreCreateEvent, RepoCreateEvent, |
|
61 | 62 | RepoPreDeleteEvent, RepoDeleteEvent, |
|
62 | 63 | ]) |
|
63 | 64 | def test_repo_events_serialized(config_stub, repo_stub, EventClass): |
|
64 | 65 | event = EventClass(repo_stub) |
|
65 | 66 | data = event.as_dict() |
|
66 | 67 | assert data['name'] == EventClass.name |
|
67 | 68 | assert data['repo']['repo_name'] == repo_stub.repo_name |
|
68 | 69 | assert data['repo']['url'] |
|
69 | 70 | assert data['repo']['permalink_url'] |
|
70 | 71 | |
|
71 | 72 | |
|
72 | 73 | @pytest.mark.parametrize('EventClass', [ |
|
73 | 74 | RepoPrePullEvent, RepoPullEvent, RepoPrePushEvent |
|
74 | 75 | ]) |
|
75 | 76 | def test_vcs_repo_events_serialize(config_stub, repo_stub, scm_extras, EventClass): |
|
76 | 77 | event = EventClass(repo_name=repo_stub.repo_name, extras=scm_extras) |
|
77 | 78 | data = event.as_dict() |
|
78 | 79 | assert data['name'] == EventClass.name |
|
79 | 80 | assert data['repo']['repo_name'] == repo_stub.repo_name |
|
80 | 81 | assert data['repo']['url'] |
|
81 | 82 | assert data['repo']['permalink_url'] |
|
82 | 83 | |
|
83 | 84 | |
|
84 | 85 | @pytest.mark.parametrize('EventClass', [RepoPushEvent]) |
|
85 | 86 | def test_vcs_repo_push_event_serialize(config_stub, repo_stub, scm_extras, EventClass): |
|
86 | 87 | event = EventClass(repo_name=repo_stub.repo_name, |
|
87 | 88 | pushed_commit_ids=scm_extras['commit_ids'], |
|
88 | 89 | extras=scm_extras) |
|
89 | 90 | data = event.as_dict() |
|
90 | 91 | assert data['name'] == EventClass.name |
|
91 | 92 | assert data['repo']['repo_name'] == repo_stub.repo_name |
|
92 | 93 | assert data['repo']['url'] |
|
93 | 94 | assert data['repo']['permalink_url'] |
|
94 | 95 | |
|
95 | 96 | |
|
96 | 97 | def test_create_delete_repo_fires_events(backend): |
|
97 | 98 | with EventCatcher() as event_catcher: |
|
98 | 99 | repo = backend.create_repo() |
|
99 | 100 | assert event_catcher.events_types == [RepoPreCreateEvent, RepoCreateEvent] |
|
100 | 101 | |
|
101 | 102 | with EventCatcher() as event_catcher: |
|
102 | 103 | RepoModel().delete(repo) |
|
103 | 104 | assert event_catcher.events_types == [RepoPreDeleteEvent, RepoDeleteEvent] |
|
104 | 105 | |
|
105 | 106 | |
|
106 | 107 | def test_pull_fires_events(scm_extras): |
|
107 | 108 | with EventCatcher() as event_catcher: |
|
108 | 109 | hooks_base.pre_push(scm_extras) |
|
109 | 110 | assert event_catcher.events_types == [RepoPrePushEvent] |
|
110 | 111 | |
|
111 | 112 | with EventCatcher() as event_catcher: |
|
112 | 113 | hooks_base.post_push(scm_extras) |
|
113 | 114 | assert event_catcher.events_types == [RepoPushEvent] |
|
114 | 115 | |
|
115 | 116 | |
|
116 | 117 | def test_push_fires_events(scm_extras): |
|
117 | 118 | with EventCatcher() as event_catcher: |
|
118 | 119 | hooks_base.pre_pull(scm_extras) |
|
119 | 120 | assert event_catcher.events_types == [RepoPrePullEvent] |
|
120 | 121 | |
|
121 | 122 | with EventCatcher() as event_catcher: |
|
122 | 123 | hooks_base.post_pull(scm_extras) |
|
123 | 124 | assert event_catcher.events_types == [RepoPullEvent] |
|
124 | 125 | |
|
125 | 126 | |
|
126 | 127 | @pytest.mark.parametrize('EventClass', [RepoCommitCommentEvent]) |
|
127 | 128 | def test_repo_commit_event(config_stub, repo_stub, EventClass): |
|
128 | 129 | |
|
129 | 130 | commit = StrictAttributeDict({ |
|
130 | 131 | 'raw_id': 'raw_id', |
|
131 | 132 | 'message': 'message', |
|
132 | 133 | 'branch': 'branch', |
|
133 | 134 | }) |
|
134 | 135 | |
|
135 | 136 | comment = StrictAttributeDict({ |
|
136 | 137 | 'comment_id': 'comment_id', |
|
137 | 138 | 'text': 'text', |
|
138 | 139 | 'comment_type': 'comment_type', |
|
139 | 140 | 'f_path': 'f_path', |
|
140 | 141 | 'line_no': 'line_no', |
|
142 | 'last_version': 0, | |
|
141 | 143 | }) |
|
142 | 144 | event = EventClass(repo=repo_stub, commit=commit, comment=comment) |
|
143 | 145 | data = event.as_dict() |
|
144 | 146 | assert data['commit']['commit_id'] |
|
145 | 147 | assert data['comment']['comment_id'] |
|
148 | ||
|
149 | ||
|
150 | @pytest.mark.parametrize('EventClass', [RepoCommitCommentEditEvent]) | |
|
151 | def test_repo_commit_edit_event(config_stub, repo_stub, EventClass): | |
|
152 | ||
|
153 | commit = StrictAttributeDict({ | |
|
154 | 'raw_id': 'raw_id', | |
|
155 | 'message': 'message', | |
|
156 | 'branch': 'branch', | |
|
157 | }) | |
|
158 | ||
|
159 | comment = StrictAttributeDict({ | |
|
160 | 'comment_id': 'comment_id', | |
|
161 | 'text': 'text', | |
|
162 | 'comment_type': 'comment_type', | |
|
163 | 'f_path': 'f_path', | |
|
164 | 'line_no': 'line_no', | |
|
165 | 'last_version': 0, | |
|
166 | }) | |
|
167 | event = EventClass(repo=repo_stub, commit=commit, comment=comment) | |
|
168 | data = event.as_dict() | |
|
169 | assert data['commit']['commit_id'] | |
|
170 | assert data['comment']['comment_id'] |
General Comments 0
You need to be logged in to leave comments.
Login now