The requested changes are too big and content was truncated.
@@ -1,937 +1,937 b''
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | import logging |
|
23 | 23 | |
|
24 | 24 | from rhodecode import events |
|
25 | 25 | from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCValidationError |
|
26 | 26 | from rhodecode.api.utils import ( |
|
27 | 27 | has_superadmin_permission, Optional, OAttr, get_repo_or_error, |
|
28 | 28 | get_pull_request_or_error, get_commit_or_error, get_user_or_error, |
|
29 | 29 | validate_repo_permissions, resolve_ref_or_error) |
|
30 | 30 | from rhodecode.lib.auth import (HasRepoPermissionAnyApi) |
|
31 | 31 | from rhodecode.lib.base import vcs_operation_context |
|
32 | 32 | from rhodecode.lib.utils2 import str2bool |
|
33 | 33 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
34 | 34 | from rhodecode.model.comment import CommentsModel |
|
35 | 35 | from rhodecode.model.db import Session, ChangesetStatus, ChangesetComment |
|
36 | 36 | from rhodecode.model.pull_request import PullRequestModel, MergeCheck |
|
37 | 37 | from rhodecode.model.settings import SettingsModel |
|
38 | 38 | from rhodecode.model.validation_schema import Invalid |
|
39 | 39 | from rhodecode.model.validation_schema.schemas.reviewer_schema import ( |
|
40 | 40 | ReviewerListSchema) |
|
41 | 41 | |
|
42 | 42 | log = logging.getLogger(__name__) |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | @jsonrpc_method() |
|
46 | 46 | def get_pull_request(request, apiuser, pullrequestid, repoid=Optional(None)): |
|
47 | 47 | """ |
|
48 | 48 | Get a pull request based on the given ID. |
|
49 | 49 | |
|
50 | 50 | :param apiuser: This is filled automatically from the |authtoken|. |
|
51 | 51 | :type apiuser: AuthUser |
|
52 | 52 | :param repoid: Optional, repository name or repository ID from where |
|
53 | 53 | the pull request was opened. |
|
54 | 54 | :type repoid: str or int |
|
55 | 55 | :param pullrequestid: ID of the requested pull request. |
|
56 | 56 | :type pullrequestid: int |
|
57 | 57 | |
|
58 | 58 | Example output: |
|
59 | 59 | |
|
60 | 60 | .. code-block:: bash |
|
61 | 61 | |
|
62 | 62 | "id": <id_given_in_input>, |
|
63 | 63 | "result": |
|
64 | 64 | { |
|
65 | 65 | "pull_request_id": "<pull_request_id>", |
|
66 | 66 | "url": "<url>", |
|
67 | 67 | "title": "<title>", |
|
68 | 68 | "description": "<description>", |
|
69 | 69 | "status" : "<status>", |
|
70 | 70 | "created_on": "<date_time_created>", |
|
71 | 71 | "updated_on": "<date_time_updated>", |
|
72 | 72 | "commit_ids": [ |
|
73 | 73 | ... |
|
74 | 74 | "<commit_id>", |
|
75 | 75 | "<commit_id>", |
|
76 | 76 | ... |
|
77 | 77 | ], |
|
78 | 78 | "review_status": "<review_status>", |
|
79 | 79 | "mergeable": { |
|
80 | 80 | "status": "<bool>", |
|
81 | 81 | "message": "<message>", |
|
82 | 82 | }, |
|
83 | 83 | "source": { |
|
84 | 84 | "clone_url": "<clone_url>", |
|
85 | 85 | "repository": "<repository_name>", |
|
86 | 86 | "reference": |
|
87 | 87 | { |
|
88 | 88 | "name": "<name>", |
|
89 | 89 | "type": "<type>", |
|
90 | 90 | "commit_id": "<commit_id>", |
|
91 | 91 | } |
|
92 | 92 | }, |
|
93 | 93 | "target": { |
|
94 | 94 | "clone_url": "<clone_url>", |
|
95 | 95 | "repository": "<repository_name>", |
|
96 | 96 | "reference": |
|
97 | 97 | { |
|
98 | 98 | "name": "<name>", |
|
99 | 99 | "type": "<type>", |
|
100 | 100 | "commit_id": "<commit_id>", |
|
101 | 101 | } |
|
102 | 102 | }, |
|
103 | 103 | "merge": { |
|
104 | 104 | "clone_url": "<clone_url>", |
|
105 | 105 | "reference": |
|
106 | 106 | { |
|
107 | 107 | "name": "<name>", |
|
108 | 108 | "type": "<type>", |
|
109 | 109 | "commit_id": "<commit_id>", |
|
110 | 110 | } |
|
111 | 111 | }, |
|
112 | 112 | "author": <user_obj>, |
|
113 | 113 | "reviewers": [ |
|
114 | 114 | ... |
|
115 | 115 | { |
|
116 | 116 | "user": "<user_obj>", |
|
117 | 117 | "review_status": "<review_status>", |
|
118 | 118 | } |
|
119 | 119 | ... |
|
120 | 120 | ] |
|
121 | 121 | }, |
|
122 | 122 | "error": null |
|
123 | 123 | """ |
|
124 | 124 | |
|
125 | 125 | pull_request = get_pull_request_or_error(pullrequestid) |
|
126 | 126 | if Optional.extract(repoid): |
|
127 | 127 | repo = get_repo_or_error(repoid) |
|
128 | 128 | else: |
|
129 | 129 | repo = pull_request.target_repo |
|
130 | 130 | |
|
131 | 131 | if not PullRequestModel().check_user_read( |
|
132 | 132 | pull_request, apiuser, api=True): |
|
133 | 133 | raise JSONRPCError('repository `%s` or pull request `%s` ' |
|
134 | 134 | 'does not exist' % (repoid, pullrequestid)) |
|
135 | 135 | data = pull_request.get_api_data() |
|
136 | 136 | return data |
|
137 | 137 | |
|
138 | 138 | |
|
139 | 139 | @jsonrpc_method() |
|
140 | 140 | def get_pull_requests(request, apiuser, repoid, status=Optional('new')): |
|
141 | 141 | """ |
|
142 | 142 | Get all pull requests from the repository specified in `repoid`. |
|
143 | 143 | |
|
144 | 144 | :param apiuser: This is filled automatically from the |authtoken|. |
|
145 | 145 | :type apiuser: AuthUser |
|
146 | 146 | :param repoid: Optional repository name or repository ID. |
|
147 | 147 | :type repoid: str or int |
|
148 | 148 | :param status: Only return pull requests with the specified status. |
|
149 | 149 | Valid options are: |
|
150 | 150 | * ``new`` (default) |
|
151 | 151 | * ``open`` |
|
152 | 152 | * ``closed`` |
|
153 | 153 | :type status: str |
|
154 | 154 | |
|
155 | 155 | Example output: |
|
156 | 156 | |
|
157 | 157 | .. code-block:: bash |
|
158 | 158 | |
|
159 | 159 | "id": <id_given_in_input>, |
|
160 | 160 | "result": |
|
161 | 161 | [ |
|
162 | 162 | ... |
|
163 | 163 | { |
|
164 | 164 | "pull_request_id": "<pull_request_id>", |
|
165 | 165 | "url": "<url>", |
|
166 | 166 | "title" : "<title>", |
|
167 | 167 | "description": "<description>", |
|
168 | 168 | "status": "<status>", |
|
169 | 169 | "created_on": "<date_time_created>", |
|
170 | 170 | "updated_on": "<date_time_updated>", |
|
171 | 171 | "commit_ids": [ |
|
172 | 172 | ... |
|
173 | 173 | "<commit_id>", |
|
174 | 174 | "<commit_id>", |
|
175 | 175 | ... |
|
176 | 176 | ], |
|
177 | 177 | "review_status": "<review_status>", |
|
178 | 178 | "mergeable": { |
|
179 | 179 | "status": "<bool>", |
|
180 | 180 | "message: "<message>", |
|
181 | 181 | }, |
|
182 | 182 | "source": { |
|
183 | 183 | "clone_url": "<clone_url>", |
|
184 | 184 | "reference": |
|
185 | 185 | { |
|
186 | 186 | "name": "<name>", |
|
187 | 187 | "type": "<type>", |
|
188 | 188 | "commit_id": "<commit_id>", |
|
189 | 189 | } |
|
190 | 190 | }, |
|
191 | 191 | "target": { |
|
192 | 192 | "clone_url": "<clone_url>", |
|
193 | 193 | "reference": |
|
194 | 194 | { |
|
195 | 195 | "name": "<name>", |
|
196 | 196 | "type": "<type>", |
|
197 | 197 | "commit_id": "<commit_id>", |
|
198 | 198 | } |
|
199 | 199 | }, |
|
200 | 200 | "merge": { |
|
201 | 201 | "clone_url": "<clone_url>", |
|
202 | 202 | "reference": |
|
203 | 203 | { |
|
204 | 204 | "name": "<name>", |
|
205 | 205 | "type": "<type>", |
|
206 | 206 | "commit_id": "<commit_id>", |
|
207 | 207 | } |
|
208 | 208 | }, |
|
209 | 209 | "author": <user_obj>, |
|
210 | 210 | "reviewers": [ |
|
211 | 211 | ... |
|
212 | 212 | { |
|
213 | 213 | "user": "<user_obj>", |
|
214 | 214 | "review_status": "<review_status>", |
|
215 | 215 | } |
|
216 | 216 | ... |
|
217 | 217 | ] |
|
218 | 218 | } |
|
219 | 219 | ... |
|
220 | 220 | ], |
|
221 | 221 | "error": null |
|
222 | 222 | |
|
223 | 223 | """ |
|
224 | 224 | repo = get_repo_or_error(repoid) |
|
225 | 225 | if not has_superadmin_permission(apiuser): |
|
226 | 226 | _perms = ( |
|
227 | 227 | 'repository.admin', 'repository.write', 'repository.read',) |
|
228 | 228 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
229 | 229 | |
|
230 | 230 | status = Optional.extract(status) |
|
231 | 231 | pull_requests = PullRequestModel().get_all(repo, statuses=[status]) |
|
232 | 232 | data = [pr.get_api_data() for pr in pull_requests] |
|
233 | 233 | return data |
|
234 | 234 | |
|
235 | 235 | |
|
236 | 236 | @jsonrpc_method() |
|
237 | 237 | def merge_pull_request( |
|
238 | 238 | request, apiuser, pullrequestid, repoid=Optional(None), |
|
239 | 239 | userid=Optional(OAttr('apiuser'))): |
|
240 | 240 | """ |
|
241 | 241 | Merge the pull request specified by `pullrequestid` into its target |
|
242 | 242 | repository. |
|
243 | 243 | |
|
244 | 244 | :param apiuser: This is filled automatically from the |authtoken|. |
|
245 | 245 | :type apiuser: AuthUser |
|
246 | 246 | :param repoid: Optional, repository name or repository ID of the |
|
247 | 247 | target repository to which the |pr| is to be merged. |
|
248 | 248 | :type repoid: str or int |
|
249 | 249 | :param pullrequestid: ID of the pull request which shall be merged. |
|
250 | 250 | :type pullrequestid: int |
|
251 | 251 | :param userid: Merge the pull request as this user. |
|
252 | 252 | :type userid: Optional(str or int) |
|
253 | 253 | |
|
254 | 254 | Example output: |
|
255 | 255 | |
|
256 | 256 | .. code-block:: bash |
|
257 | 257 | |
|
258 | 258 | "id": <id_given_in_input>, |
|
259 | 259 | "result": { |
|
260 | 260 | "executed": "<bool>", |
|
261 | 261 | "failure_reason": "<int>", |
|
262 | 262 | "merge_commit_id": "<merge_commit_id>", |
|
263 | 263 | "possible": "<bool>", |
|
264 | 264 | "merge_ref": { |
|
265 | 265 | "commit_id": "<commit_id>", |
|
266 | 266 | "type": "<type>", |
|
267 | 267 | "name": "<name>" |
|
268 | 268 | } |
|
269 | 269 | }, |
|
270 | 270 | "error": null |
|
271 | 271 | """ |
|
272 | 272 | pull_request = get_pull_request_or_error(pullrequestid) |
|
273 | 273 | if Optional.extract(repoid): |
|
274 | 274 | repo = get_repo_or_error(repoid) |
|
275 | 275 | else: |
|
276 | 276 | repo = pull_request.target_repo |
|
277 | 277 | |
|
278 | 278 | if not isinstance(userid, Optional): |
|
279 | 279 | if (has_superadmin_permission(apiuser) or |
|
280 | 280 | HasRepoPermissionAnyApi('repository.admin')( |
|
281 | 281 | user=apiuser, repo_name=repo.repo_name)): |
|
282 | 282 | apiuser = get_user_or_error(userid) |
|
283 | 283 | else: |
|
284 | 284 | raise JSONRPCError('userid is not the same as your user') |
|
285 | 285 | |
|
286 | 286 | check = MergeCheck.validate( |
|
287 | 287 | pull_request, auth_user=apiuser, translator=request.translate) |
|
288 | 288 | merge_possible = not check.failed |
|
289 | 289 | |
|
290 | 290 | if not merge_possible: |
|
291 | 291 | error_messages = [] |
|
292 | 292 | for err_type, error_msg in check.errors: |
|
293 | 293 | error_msg = request.translate(error_msg) |
|
294 | 294 | error_messages.append(error_msg) |
|
295 | 295 | |
|
296 | 296 | reasons = ','.join(error_messages) |
|
297 | 297 | raise JSONRPCError( |
|
298 | 298 | 'merge not possible for following reasons: {}'.format(reasons)) |
|
299 | 299 | |
|
300 | 300 | target_repo = pull_request.target_repo |
|
301 | 301 | extras = vcs_operation_context( |
|
302 | 302 | request.environ, repo_name=target_repo.repo_name, |
|
303 | 303 | username=apiuser.username, action='push', |
|
304 | 304 | scm=target_repo.repo_type) |
|
305 | 305 | merge_response = PullRequestModel().merge_repo( |
|
306 | 306 | pull_request, apiuser, extras=extras) |
|
307 | 307 | if merge_response.executed: |
|
308 | 308 | PullRequestModel().close_pull_request( |
|
309 | 309 | pull_request.pull_request_id, apiuser) |
|
310 | 310 | |
|
311 | 311 | Session().commit() |
|
312 | 312 | |
|
313 | 313 | # In previous versions the merge response directly contained the merge |
|
314 | 314 | # commit id. It is now contained in the merge reference object. To be |
|
315 | 315 | # backwards compatible we have to extract it again. |
|
316 | merge_response = merge_response. |
|
316 | merge_response = merge_response.asdict() |
|
317 | 317 | merge_response['merge_commit_id'] = merge_response['merge_ref'].commit_id |
|
318 | 318 | |
|
319 | 319 | return merge_response |
|
320 | 320 | |
|
321 | 321 | |
|
322 | 322 | @jsonrpc_method() |
|
323 | 323 | def get_pull_request_comments( |
|
324 | 324 | request, apiuser, pullrequestid, repoid=Optional(None)): |
|
325 | 325 | """ |
|
326 | 326 | Get all comments of pull request specified with the `pullrequestid` |
|
327 | 327 | |
|
328 | 328 | :param apiuser: This is filled automatically from the |authtoken|. |
|
329 | 329 | :type apiuser: AuthUser |
|
330 | 330 | :param repoid: Optional repository name or repository ID. |
|
331 | 331 | :type repoid: str or int |
|
332 | 332 | :param pullrequestid: The pull request ID. |
|
333 | 333 | :type pullrequestid: int |
|
334 | 334 | |
|
335 | 335 | Example output: |
|
336 | 336 | |
|
337 | 337 | .. code-block:: bash |
|
338 | 338 | |
|
339 | 339 | id : <id_given_in_input> |
|
340 | 340 | result : [ |
|
341 | 341 | { |
|
342 | 342 | "comment_author": { |
|
343 | 343 | "active": true, |
|
344 | 344 | "full_name_or_username": "Tom Gore", |
|
345 | 345 | "username": "admin" |
|
346 | 346 | }, |
|
347 | 347 | "comment_created_on": "2017-01-02T18:43:45.533", |
|
348 | 348 | "comment_f_path": null, |
|
349 | 349 | "comment_id": 25, |
|
350 | 350 | "comment_lineno": null, |
|
351 | 351 | "comment_status": { |
|
352 | 352 | "status": "under_review", |
|
353 | 353 | "status_lbl": "Under Review" |
|
354 | 354 | }, |
|
355 | 355 | "comment_text": "Example text", |
|
356 | 356 | "comment_type": null, |
|
357 | 357 | "pull_request_version": null |
|
358 | 358 | } |
|
359 | 359 | ], |
|
360 | 360 | error : null |
|
361 | 361 | """ |
|
362 | 362 | |
|
363 | 363 | pull_request = get_pull_request_or_error(pullrequestid) |
|
364 | 364 | if Optional.extract(repoid): |
|
365 | 365 | repo = get_repo_or_error(repoid) |
|
366 | 366 | else: |
|
367 | 367 | repo = pull_request.target_repo |
|
368 | 368 | |
|
369 | 369 | if not PullRequestModel().check_user_read( |
|
370 | 370 | pull_request, apiuser, api=True): |
|
371 | 371 | raise JSONRPCError('repository `%s` or pull request `%s` ' |
|
372 | 372 | 'does not exist' % (repoid, pullrequestid)) |
|
373 | 373 | |
|
374 | 374 | (pull_request_latest, |
|
375 | 375 | pull_request_at_ver, |
|
376 | 376 | pull_request_display_obj, |
|
377 | 377 | at_version) = PullRequestModel().get_pr_version( |
|
378 | 378 | pull_request.pull_request_id, version=None) |
|
379 | 379 | |
|
380 | 380 | versions = pull_request_display_obj.versions() |
|
381 | 381 | ver_map = { |
|
382 | 382 | ver.pull_request_version_id: cnt |
|
383 | 383 | for cnt, ver in enumerate(versions, 1) |
|
384 | 384 | } |
|
385 | 385 | |
|
386 | 386 | # GENERAL COMMENTS with versions # |
|
387 | 387 | q = CommentsModel()._all_general_comments_of_pull_request(pull_request) |
|
388 | 388 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
389 | 389 | general_comments = q.all() |
|
390 | 390 | |
|
391 | 391 | # INLINE COMMENTS with versions # |
|
392 | 392 | q = CommentsModel()._all_inline_comments_of_pull_request(pull_request) |
|
393 | 393 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
394 | 394 | inline_comments = q.all() |
|
395 | 395 | |
|
396 | 396 | data = [] |
|
397 | 397 | for comment in inline_comments + general_comments: |
|
398 | 398 | full_data = comment.get_api_data() |
|
399 | 399 | pr_version_id = None |
|
400 | 400 | if comment.pull_request_version_id: |
|
401 | 401 | pr_version_id = 'v{}'.format( |
|
402 | 402 | ver_map[comment.pull_request_version_id]) |
|
403 | 403 | |
|
404 | 404 | # sanitize some entries |
|
405 | 405 | |
|
406 | 406 | full_data['pull_request_version'] = pr_version_id |
|
407 | 407 | full_data['comment_author'] = { |
|
408 | 408 | 'username': full_data['comment_author'].username, |
|
409 | 409 | 'full_name_or_username': full_data['comment_author'].full_name_or_username, |
|
410 | 410 | 'active': full_data['comment_author'].active, |
|
411 | 411 | } |
|
412 | 412 | |
|
413 | 413 | if full_data['comment_status']: |
|
414 | 414 | full_data['comment_status'] = { |
|
415 | 415 | 'status': full_data['comment_status'][0].status, |
|
416 | 416 | 'status_lbl': full_data['comment_status'][0].status_lbl, |
|
417 | 417 | } |
|
418 | 418 | else: |
|
419 | 419 | full_data['comment_status'] = {} |
|
420 | 420 | |
|
421 | 421 | data.append(full_data) |
|
422 | 422 | return data |
|
423 | 423 | |
|
424 | 424 | |
|
425 | 425 | @jsonrpc_method() |
|
426 | 426 | def comment_pull_request( |
|
427 | 427 | request, apiuser, pullrequestid, repoid=Optional(None), |
|
428 | 428 | message=Optional(None), commit_id=Optional(None), status=Optional(None), |
|
429 | 429 | comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE), |
|
430 | 430 | resolves_comment_id=Optional(None), |
|
431 | 431 | userid=Optional(OAttr('apiuser'))): |
|
432 | 432 | """ |
|
433 | 433 | Comment on the pull request specified with the `pullrequestid`, |
|
434 | 434 | in the |repo| specified by the `repoid`, and optionally change the |
|
435 | 435 | review status. |
|
436 | 436 | |
|
437 | 437 | :param apiuser: This is filled automatically from the |authtoken|. |
|
438 | 438 | :type apiuser: AuthUser |
|
439 | 439 | :param repoid: Optional repository name or repository ID. |
|
440 | 440 | :type repoid: str or int |
|
441 | 441 | :param pullrequestid: The pull request ID. |
|
442 | 442 | :type pullrequestid: int |
|
443 | 443 | :param commit_id: Specify the commit_id for which to set a comment. If the |
|
444 | 444 | given commit_id is different from the latest commit in the PR, the status |
|
445 | 445 | change won't be performed. |
|
446 | 446 | :type commit_id: str |
|
447 | 447 | :param message: The text content of the comment. |
|
448 | 448 | :type message: str |
|
449 | 449 | :param status: (**Optional**) Set the approval status of the pull |
|
450 | 450 | request. One of: 'not_reviewed', 'approved', 'rejected', |
|
451 | 451 | 'under_review' |
|
452 | 452 | :type status: str |
|
453 | 453 | :param comment_type: Comment type, one of: 'note', 'todo' |
|
454 | 454 | :type comment_type: Optional(str), default: 'note' |
|
455 | 455 | :param userid: Comment on the pull request as this user |
|
456 | 456 | :type userid: Optional(str or int) |
|
457 | 457 | |
|
458 | 458 | Example output: |
|
459 | 459 | |
|
460 | 460 | .. code-block:: bash |
|
461 | 461 | |
|
462 | 462 | id : <id_given_in_input> |
|
463 | 463 | result : { |
|
464 | 464 | "pull_request_id": "<Integer>", |
|
465 | 465 | "comment_id": "<Integer>", |
|
466 | 466 | "status": {"given": <given_status>, |
|
467 | 467 | "was_changed": <bool status_was_actually_changed> }, |
|
468 | 468 | }, |
|
469 | 469 | error : null |
|
470 | 470 | """ |
|
471 | 471 | pull_request = get_pull_request_or_error(pullrequestid) |
|
472 | 472 | if Optional.extract(repoid): |
|
473 | 473 | repo = get_repo_or_error(repoid) |
|
474 | 474 | else: |
|
475 | 475 | repo = pull_request.target_repo |
|
476 | 476 | |
|
477 | 477 | if not isinstance(userid, Optional): |
|
478 | 478 | if (has_superadmin_permission(apiuser) or |
|
479 | 479 | HasRepoPermissionAnyApi('repository.admin')( |
|
480 | 480 | user=apiuser, repo_name=repo.repo_name)): |
|
481 | 481 | apiuser = get_user_or_error(userid) |
|
482 | 482 | else: |
|
483 | 483 | raise JSONRPCError('userid is not the same as your user') |
|
484 | 484 | |
|
485 | 485 | if not PullRequestModel().check_user_read( |
|
486 | 486 | pull_request, apiuser, api=True): |
|
487 | 487 | raise JSONRPCError('repository `%s` does not exist' % (repoid,)) |
|
488 | 488 | message = Optional.extract(message) |
|
489 | 489 | status = Optional.extract(status) |
|
490 | 490 | commit_id = Optional.extract(commit_id) |
|
491 | 491 | comment_type = Optional.extract(comment_type) |
|
492 | 492 | resolves_comment_id = Optional.extract(resolves_comment_id) |
|
493 | 493 | |
|
494 | 494 | if not message and not status: |
|
495 | 495 | raise JSONRPCError( |
|
496 | 496 | 'Both message and status parameters are missing. ' |
|
497 | 497 | 'At least one is required.') |
|
498 | 498 | |
|
499 | 499 | if (status not in (st[0] for st in ChangesetStatus.STATUSES) and |
|
500 | 500 | status is not None): |
|
501 | 501 | raise JSONRPCError('Unknown comment status: `%s`' % status) |
|
502 | 502 | |
|
503 | 503 | if commit_id and commit_id not in pull_request.revisions: |
|
504 | 504 | raise JSONRPCError( |
|
505 | 505 | 'Invalid commit_id `%s` for this pull request.' % commit_id) |
|
506 | 506 | |
|
507 | 507 | allowed_to_change_status = PullRequestModel().check_user_change_status( |
|
508 | 508 | pull_request, apiuser) |
|
509 | 509 | |
|
510 | 510 | # if commit_id is passed, re-validate if user is allowed to change status |
|
511 | 511 | # based on the latest commit_id from the PR |
|
512 | 512 | if commit_id: |
|
513 | 513 | commit_idx = pull_request.revisions.index(commit_id) |
|
514 | 514 | if commit_idx != 0: |
|
515 | 515 | allowed_to_change_status = False |
|
516 | 516 | |
|
517 | 517 | if resolves_comment_id: |
|
518 | 518 | comment = ChangesetComment.get(resolves_comment_id) |
|
519 | 519 | if not comment: |
|
520 | 520 | raise JSONRPCError( |
|
521 | 521 | 'Invalid resolves_comment_id `%s` for this pull request.' |
|
522 | 522 | % resolves_comment_id) |
|
523 | 523 | if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO: |
|
524 | 524 | raise JSONRPCError( |
|
525 | 525 | 'Comment `%s` is wrong type for setting status to resolved.' |
|
526 | 526 | % resolves_comment_id) |
|
527 | 527 | |
|
528 | 528 | text = message |
|
529 | 529 | status_label = ChangesetStatus.get_status_lbl(status) |
|
530 | 530 | if status and allowed_to_change_status: |
|
531 | 531 | st_message = ('Status change %(transition_icon)s %(status)s' |
|
532 | 532 | % {'transition_icon': '>', 'status': status_label}) |
|
533 | 533 | text = message or st_message |
|
534 | 534 | |
|
535 | 535 | rc_config = SettingsModel().get_all_settings() |
|
536 | 536 | renderer = rc_config.get('rhodecode_markup_renderer', 'rst') |
|
537 | 537 | |
|
538 | 538 | status_change = status and allowed_to_change_status |
|
539 | 539 | comment = CommentsModel().create( |
|
540 | 540 | text=text, |
|
541 | 541 | repo=pull_request.target_repo.repo_id, |
|
542 | 542 | user=apiuser.user_id, |
|
543 | 543 | pull_request=pull_request.pull_request_id, |
|
544 | 544 | f_path=None, |
|
545 | 545 | line_no=None, |
|
546 | 546 | status_change=(status_label if status_change else None), |
|
547 | 547 | status_change_type=(status if status_change else None), |
|
548 | 548 | closing_pr=False, |
|
549 | 549 | renderer=renderer, |
|
550 | 550 | comment_type=comment_type, |
|
551 | 551 | resolves_comment_id=resolves_comment_id, |
|
552 | 552 | auth_user=apiuser |
|
553 | 553 | ) |
|
554 | 554 | |
|
555 | 555 | if allowed_to_change_status and status: |
|
556 | 556 | ChangesetStatusModel().set_status( |
|
557 | 557 | pull_request.target_repo.repo_id, |
|
558 | 558 | status, |
|
559 | 559 | apiuser.user_id, |
|
560 | 560 | comment, |
|
561 | 561 | pull_request=pull_request.pull_request_id |
|
562 | 562 | ) |
|
563 | 563 | Session().flush() |
|
564 | 564 | |
|
565 | 565 | Session().commit() |
|
566 | 566 | data = { |
|
567 | 567 | 'pull_request_id': pull_request.pull_request_id, |
|
568 | 568 | 'comment_id': comment.comment_id if comment else None, |
|
569 | 569 | 'status': {'given': status, 'was_changed': status_change}, |
|
570 | 570 | } |
|
571 | 571 | return data |
|
572 | 572 | |
|
573 | 573 | |
|
574 | 574 | @jsonrpc_method() |
|
575 | 575 | def create_pull_request( |
|
576 | 576 | request, apiuser, source_repo, target_repo, source_ref, target_ref, |
|
577 | 577 | title=Optional(''), description=Optional(''), description_renderer=Optional(''), |
|
578 | 578 | reviewers=Optional(None)): |
|
579 | 579 | """ |
|
580 | 580 | Creates a new pull request. |
|
581 | 581 | |
|
582 | 582 | Accepts refs in the following formats: |
|
583 | 583 | |
|
584 | 584 | * branch:<branch_name>:<sha> |
|
585 | 585 | * branch:<branch_name> |
|
586 | 586 | * bookmark:<bookmark_name>:<sha> (Mercurial only) |
|
587 | 587 | * bookmark:<bookmark_name> (Mercurial only) |
|
588 | 588 | |
|
589 | 589 | :param apiuser: This is filled automatically from the |authtoken|. |
|
590 | 590 | :type apiuser: AuthUser |
|
591 | 591 | :param source_repo: Set the source repository name. |
|
592 | 592 | :type source_repo: str |
|
593 | 593 | :param target_repo: Set the target repository name. |
|
594 | 594 | :type target_repo: str |
|
595 | 595 | :param source_ref: Set the source ref name. |
|
596 | 596 | :type source_ref: str |
|
597 | 597 | :param target_ref: Set the target ref name. |
|
598 | 598 | :type target_ref: str |
|
599 | 599 | :param title: Optionally set the pull request title; it is generated otherwise. |
|
600 | 600 | :type title: str |
|
601 | 601 | :param description: Set the pull request description. |
|
602 | 602 | :type description: Optional(str) |
|
603 | 603 | :type description_renderer: Optional(str) |
|
604 | 604 | :param description_renderer: Set pull request renderer for the description. |
|
605 | 605 | It should be 'rst', 'markdown' or 'plain'. If not given, the default |
|
606 | 606 | system renderer will be used. |
|
607 | 607 | :param reviewers: Set the new pull request reviewers list. |
|
608 | 608 | Reviewers defined by review rules will be added automatically to the |
|
609 | 609 | given list. |
|
610 | 610 | :type reviewers: Optional(list) |
|
611 | 611 | Accepts username strings or objects of the format: |
|
612 | 612 | |
|
613 | 613 | [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}] |
|
614 | 614 | """ |
|
615 | 615 | |
|
616 | 616 | source_db_repo = get_repo_or_error(source_repo) |
|
617 | 617 | target_db_repo = get_repo_or_error(target_repo) |
|
618 | 618 | if not has_superadmin_permission(apiuser): |
|
619 | 619 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
620 | 620 | validate_repo_permissions(apiuser, source_repo, source_db_repo, _perms) |
|
621 | 621 | |
|
622 | 622 | full_source_ref = resolve_ref_or_error(source_ref, source_db_repo) |
|
623 | 623 | full_target_ref = resolve_ref_or_error(target_ref, target_db_repo) |
|
624 | 624 | |
|
625 | 625 | source_scm = source_db_repo.scm_instance() |
|
626 | 626 | target_scm = target_db_repo.scm_instance() |
|
627 | 627 | |
|
628 | 628 | source_commit = get_commit_or_error(full_source_ref, source_db_repo) |
|
629 | 629 | target_commit = get_commit_or_error(full_target_ref, target_db_repo) |
|
630 | 630 | |
|
631 | 631 | ancestor = source_scm.get_common_ancestor( |
|
632 | 632 | source_commit.raw_id, target_commit.raw_id, target_scm) |
|
633 | 633 | if not ancestor: |
|
634 | 634 | raise JSONRPCError('no common ancestor found') |
|
635 | 635 | |
|
636 | 636 | # recalculate target ref based on ancestor |
|
637 | 637 | target_ref_type, target_ref_name, __ = full_target_ref.split(':') |
|
638 | 638 | full_target_ref = ':'.join((target_ref_type, target_ref_name, ancestor)) |
|
639 | 639 | |
|
640 | 640 | commit_ranges = target_scm.compare( |
|
641 | 641 | target_commit.raw_id, source_commit.raw_id, source_scm, |
|
642 | 642 | merge=True, pre_load=[]) |
|
643 | 643 | |
|
644 | 644 | if not commit_ranges: |
|
645 | 645 | raise JSONRPCError('no commits found') |
|
646 | 646 | |
|
647 | 647 | reviewer_objects = Optional.extract(reviewers) or [] |
|
648 | 648 | |
|
649 | 649 | # serialize and validate the passed-in reviewers |
|
650 | 650 | if reviewer_objects: |
|
651 | 651 | schema = ReviewerListSchema() |
|
652 | 652 | try: |
|
653 | 653 | reviewer_objects = schema.deserialize(reviewer_objects) |
|
654 | 654 | except Invalid as err: |
|
655 | 655 | raise JSONRPCValidationError(colander_exc=err) |
|
656 | 656 | |
|
657 | 657 | # validate users |
|
658 | 658 | for reviewer_object in reviewer_objects: |
|
659 | 659 | user = get_user_or_error(reviewer_object['username']) |
|
660 | 660 | reviewer_object['user_id'] = user.user_id |
|
661 | 661 | |
|
662 | 662 | get_default_reviewers_data, validate_default_reviewers = \ |
|
663 | 663 | PullRequestModel().get_reviewer_functions() |
|
664 | 664 | |
|
665 | 665 | # recalculate reviewers logic, to make sure we can validate this |
|
666 | 666 | reviewer_rules = get_default_reviewers_data( |
|
667 | 667 | apiuser.get_instance(), source_db_repo, |
|
668 | 668 | source_commit, target_db_repo, target_commit) |
|
669 | 669 | |
|
670 | 670 | # now MERGE our given reviewers with the calculated ones |
|
671 | 671 | reviewer_objects = reviewer_rules['reviewers'] + reviewer_objects |
|
672 | 672 | |
|
673 | 673 | try: |
|
674 | 674 | reviewers = validate_default_reviewers( |
|
675 | 675 | reviewer_objects, reviewer_rules) |
|
676 | 676 | except ValueError as e: |
|
677 | 677 | raise JSONRPCError('Reviewers Validation: {}'.format(e)) |
|
678 | 678 | |
|
679 | 679 | title = Optional.extract(title) |
|
680 | 680 | if not title: |
|
681 | 681 | title_source_ref = source_ref.split(':', 2)[1] |
|
682 | 682 | title = PullRequestModel().generate_pullrequest_title( |
|
683 | 683 | source=source_repo, |
|
684 | 684 | source_ref=title_source_ref, |
|
685 | 685 | target=target_repo |
|
686 | 686 | ) |
|
687 | 687 | # fetch renderer; if not set, fall back to plain in case of PR |
|
688 | 688 | rc_config = SettingsModel().get_all_settings() |
|
689 | 689 | default_system_renderer = rc_config.get('rhodecode_markup_renderer', 'plain') |
|
690 | 690 | description = Optional.extract(description) |
|
691 | 691 | description_renderer = Optional.extract(description_renderer) or default_system_renderer |
|
692 | 692 | |
|
693 | 693 | pull_request = PullRequestModel().create( |
|
694 | 694 | created_by=apiuser.user_id, |
|
695 | 695 | source_repo=source_repo, |
|
696 | 696 | source_ref=full_source_ref, |
|
697 | 697 | target_repo=target_repo, |
|
698 | 698 | target_ref=full_target_ref, |
|
699 | 699 | revisions=[commit.raw_id for commit in reversed(commit_ranges)], |
|
700 | 700 | reviewers=reviewers, |
|
701 | 701 | title=title, |
|
702 | 702 | description=description, |
|
703 | 703 | description_renderer=description_renderer, |
|
704 | 704 | reviewer_data=reviewer_rules, |
|
705 | 705 | auth_user=apiuser |
|
706 | 706 | ) |
|
707 | 707 | |
|
708 | 708 | Session().commit() |
|
709 | 709 | data = { |
|
710 | 710 | 'msg': 'Created new pull request `{}`'.format(title), |
|
711 | 711 | 'pull_request_id': pull_request.pull_request_id, |
|
712 | 712 | } |
|
713 | 713 | return data |
|
714 | 714 | |
|
715 | 715 | |
|
716 | 716 | @jsonrpc_method() |
|
717 | 717 | def update_pull_request( |
|
718 | 718 | request, apiuser, pullrequestid, repoid=Optional(None), |
|
719 | 719 | title=Optional(''), description=Optional(''), description_renderer=Optional(''), |
|
720 | 720 | reviewers=Optional(None), update_commits=Optional(None)): |
|
721 | 721 | """ |
|
722 | 722 | Updates a pull request. |
|
723 | 723 | |
|
724 | 724 | :param apiuser: This is filled automatically from the |authtoken|. |
|
725 | 725 | :type apiuser: AuthUser |
|
726 | 726 | :param repoid: Optional repository name or repository ID. |
|
727 | 727 | :type repoid: str or int |
|
728 | 728 | :param pullrequestid: The pull request ID. |
|
729 | 729 | :type pullrequestid: int |
|
730 | 730 | :param title: Set the pull request title. |
|
731 | 731 | :type title: str |
|
732 | 732 | :param description: Update pull request description. |
|
733 | 733 | :type description: Optional(str) |
|
734 | 734 | :type description_renderer: Optional(str) |
|
735 | 735 | :param description_renderer: Update pull request renderer for the description. |
|
736 | 736 | It should be 'rst', 'markdown' or 'plain' |
|
737 | 737 | :param reviewers: Update pull request reviewers list with new value. |
|
738 | 738 | :type reviewers: Optional(list) |
|
739 | 739 | Accepts username strings or objects of the format: |
|
740 | 740 | |
|
741 | 741 | [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}] |
|
742 | 742 | |
|
743 | 743 | :param update_commits: Trigger update of commits for this pull request |
|
744 | 744 | :type update_commits: Optional(bool) |
|
745 | 745 | |
|
746 | 746 | Example output: |
|
747 | 747 | |
|
748 | 748 | .. code-block:: bash |
|
749 | 749 | |
|
750 | 750 | id : <id_given_in_input> |
|
751 | 751 | result : { |
|
752 | 752 | "msg": "Updated pull request `63`", |
|
753 | 753 | "pull_request": <pull_request_object>, |
|
754 | 754 | "updated_reviewers": { |
|
755 | 755 | "added": [ |
|
756 | 756 | "username" |
|
757 | 757 | ], |
|
758 | 758 | "removed": [] |
|
759 | 759 | }, |
|
760 | 760 | "updated_commits": { |
|
761 | 761 | "added": [ |
|
762 | 762 | "<sha1_hash>" |
|
763 | 763 | ], |
|
764 | 764 | "common": [ |
|
765 | 765 | "<sha1_hash>", |
|
766 | 766 | "<sha1_hash>", |
|
767 | 767 | ], |
|
768 | 768 | "removed": [] |
|
769 | 769 | } |
|
770 | 770 | } |
|
771 | 771 | error : null |
|
772 | 772 | """ |
|
773 | 773 | |
|
774 | 774 | pull_request = get_pull_request_or_error(pullrequestid) |
|
775 | 775 | if Optional.extract(repoid): |
|
776 | 776 | repo = get_repo_or_error(repoid) |
|
777 | 777 | else: |
|
778 | 778 | repo = pull_request.target_repo |
|
779 | 779 | |
|
780 | 780 | if not PullRequestModel().check_user_update( |
|
781 | 781 | pull_request, apiuser, api=True): |
|
782 | 782 | raise JSONRPCError( |
|
783 | 783 | 'pull request `%s` update failed, no permission to update.' % ( |
|
784 | 784 | pullrequestid,)) |
|
785 | 785 | if pull_request.is_closed(): |
|
786 | 786 | raise JSONRPCError( |
|
787 | 787 | 'pull request `%s` update failed, pull request is closed' % ( |
|
788 | 788 | pullrequestid,)) |
|
789 | 789 | |
|
790 | 790 | reviewer_objects = Optional.extract(reviewers) or [] |
|
791 | 791 | |
|
792 | 792 | if reviewer_objects: |
|
793 | 793 | schema = ReviewerListSchema() |
|
794 | 794 | try: |
|
795 | 795 | reviewer_objects = schema.deserialize(reviewer_objects) |
|
796 | 796 | except Invalid as err: |
|
797 | 797 | raise JSONRPCValidationError(colander_exc=err) |
|
798 | 798 | |
|
799 | 799 | # validate users |
|
800 | 800 | for reviewer_object in reviewer_objects: |
|
801 | 801 | user = get_user_or_error(reviewer_object['username']) |
|
802 | 802 | reviewer_object['user_id'] = user.user_id |
|
803 | 803 | |
|
804 | 804 | get_default_reviewers_data, get_validated_reviewers = \ |
|
805 | 805 | PullRequestModel().get_reviewer_functions() |
|
806 | 806 | |
|
807 | 807 | # re-use stored rules |
|
808 | 808 | reviewer_rules = pull_request.reviewer_data |
|
809 | 809 | try: |
|
810 | 810 | reviewers = get_validated_reviewers( |
|
811 | 811 | reviewer_objects, reviewer_rules) |
|
812 | 812 | except ValueError as e: |
|
813 | 813 | raise JSONRPCError('Reviewers Validation: {}'.format(e)) |
|
814 | 814 | else: |
|
815 | 815 | reviewers = [] |
|
816 | 816 | |
|
817 | 817 | title = Optional.extract(title) |
|
818 | 818 | description = Optional.extract(description) |
|
819 | 819 | description_renderer = Optional.extract(description_renderer) |
|
820 | 820 | |
|
821 | 821 | if title or description: |
|
822 | 822 | PullRequestModel().edit( |
|
823 | 823 | pull_request, |
|
824 | 824 | title or pull_request.title, |
|
825 | 825 | description or pull_request.description, |
|
826 | 826 | description_renderer or pull_request.description_renderer, |
|
827 | 827 | apiuser) |
|
828 | 828 | Session().commit() |
|
829 | 829 | |
|
830 | 830 | commit_changes = {"added": [], "common": [], "removed": []} |
|
831 | 831 | if str2bool(Optional.extract(update_commits)): |
|
832 | 832 | if PullRequestModel().has_valid_update_type(pull_request): |
|
833 | 833 | update_response = PullRequestModel().update_commits( |
|
834 | 834 | pull_request) |
|
835 | 835 | commit_changes = update_response.changes or commit_changes |
|
836 | 836 | Session().commit() |
|
837 | 837 | |
|
838 | 838 | reviewers_changes = {"added": [], "removed": []} |
|
839 | 839 | if reviewers: |
|
840 | 840 | added_reviewers, removed_reviewers = \ |
|
841 | 841 | PullRequestModel().update_reviewers(pull_request, reviewers, apiuser) |
|
842 | 842 | |
|
843 | 843 | reviewers_changes['added'] = sorted( |
|
844 | 844 | [get_user_or_error(n).username for n in added_reviewers]) |
|
845 | 845 | reviewers_changes['removed'] = sorted( |
|
846 | 846 | [get_user_or_error(n).username for n in removed_reviewers]) |
|
847 | 847 | Session().commit() |
|
848 | 848 | |
|
849 | 849 | data = { |
|
850 | 850 | 'msg': 'Updated pull request `{}`'.format( |
|
851 | 851 | pull_request.pull_request_id), |
|
852 | 852 | 'pull_request': pull_request.get_api_data(), |
|
853 | 853 | 'updated_commits': commit_changes, |
|
854 | 854 | 'updated_reviewers': reviewers_changes |
|
855 | 855 | } |
|
856 | 856 | |
|
857 | 857 | return data |
|
858 | 858 | |
|
859 | 859 | |
|
860 | 860 | @jsonrpc_method() |
|
861 | 861 | def close_pull_request( |
|
862 | 862 | request, apiuser, pullrequestid, repoid=Optional(None), |
|
863 | 863 | userid=Optional(OAttr('apiuser')), message=Optional('')): |
|
864 | 864 | """ |
|
865 | 865 | Close the pull request specified by `pullrequestid`. |
|
866 | 866 | |
|
867 | 867 | :param apiuser: This is filled automatically from the |authtoken|. |
|
868 | 868 | :type apiuser: AuthUser |
|
869 | 869 | :param repoid: Repository name or repository ID to which the pull |
|
870 | 870 | request belongs. |
|
871 | 871 | :type repoid: str or int |
|
872 | 872 | :param pullrequestid: ID of the pull request to be closed. |
|
873 | 873 | :type pullrequestid: int |
|
874 | 874 | :param userid: Close the pull request as this user. |
|
875 | 875 | :type userid: Optional(str or int) |
|
876 | 876 | :param message: Optional message to close the Pull Request with. If not |
|
877 | 877 | specified it will be generated automatically. |
|
878 | 878 | :type message: Optional(str) |
|
879 | 879 | |
|
880 | 880 | Example output: |
|
881 | 881 | |
|
882 | 882 | .. code-block:: bash |
|
883 | 883 | |
|
884 | 884 | "id": <id_given_in_input>, |
|
885 | 885 | "result": { |
|
886 | 886 | "pull_request_id": "<int>", |
|
887 | 887 | "close_status": "<str:status_lbl>, |
|
888 | 888 | "closed": "<bool>" |
|
889 | 889 | }, |
|
890 | 890 | "error": null |
|
891 | 891 | |
|
892 | 892 | """ |
|
893 | 893 | _ = request.translate |
|
894 | 894 | |
|
895 | 895 | pull_request = get_pull_request_or_error(pullrequestid) |
|
896 | 896 | if Optional.extract(repoid): |
|
897 | 897 | repo = get_repo_or_error(repoid) |
|
898 | 898 | else: |
|
899 | 899 | repo = pull_request.target_repo |
|
900 | 900 | |
|
901 | 901 | if not isinstance(userid, Optional): |
|
902 | 902 | if (has_superadmin_permission(apiuser) or |
|
903 | 903 | HasRepoPermissionAnyApi('repository.admin')( |
|
904 | 904 | user=apiuser, repo_name=repo.repo_name)): |
|
905 | 905 | apiuser = get_user_or_error(userid) |
|
906 | 906 | else: |
|
907 | 907 | raise JSONRPCError('userid is not the same as your user') |
|
908 | 908 | |
|
909 | 909 | if pull_request.is_closed(): |
|
910 | 910 | raise JSONRPCError( |
|
911 | 911 | 'pull request `%s` is already closed' % (pullrequestid,)) |
|
912 | 912 | |
|
913 | 913 | # only owner or admin or person with write permissions |
|
914 | 914 | allowed_to_close = PullRequestModel().check_user_update( |
|
915 | 915 | pull_request, apiuser, api=True) |
|
916 | 916 | |
|
917 | 917 | if not allowed_to_close: |
|
918 | 918 | raise JSONRPCError( |
|
919 | 919 | 'pull request `%s` close failed, no permission to close.' % ( |
|
920 | 920 | pullrequestid,)) |
|
921 | 921 | |
|
922 | 922 | # message we're using to close the PR; if empty it's generated automatically |
|
923 | 923 | message = Optional.extract(message) |
|
924 | 924 | |
|
925 | 925 | # finally close the PR, with proper message comment |
|
926 | 926 | comment, status = PullRequestModel().close_pull_request_with_comment( |
|
927 | 927 | pull_request, apiuser, repo, message=message, auth_user=apiuser) |
|
928 | 928 | status_lbl = ChangesetStatus.get_status_lbl(status) |
|
929 | 929 | |
|
930 | 930 | Session().commit() |
|
931 | 931 | |
|
932 | 932 | data = { |
|
933 | 933 | 'pull_request_id': pull_request.pull_request_id, |
|
934 | 934 | 'close_status': status_lbl, |
|
935 | 935 | 'closed': True, |
|
936 | 936 | } |
|
937 | 937 | return data |
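
Note: the file above defines the server-side JSON-RPC methods. As a rough illustration of how a client would invoke one of them, here is a minimal sketch in the codebase's Python 2 style; the instance URL and auth token are placeholders, and the ``_admin/api`` endpoint path is an assumption based on RhodeCode's standard API setup.

.. code-block:: python

    # Minimal client-side sketch: calling the comment_pull_request method
    # defined above over JSON-RPC. URL and auth token are placeholders.
    import json
    import urllib2

    payload = {
        'id': 1,  # echoed back as <id_given_in_input>
        'auth_token': 'SECRET_AUTH_TOKEN',  # placeholder
        'method': 'comment_pull_request',
        'args': {
            'pullrequestid': 63,
            'message': 'Looks good to me',
            'status': 'approved',  # one of the statuses listed in the docstring
        },
    }
    req = urllib2.Request(
        'https://rhodecode.example.com/_admin/api',  # placeholder instance URL
        data=json.dumps(payload),
        headers={'Content-Type': 'application/json'})
    response = json.loads(urllib2.urlopen(req).read())
    # expected shape, per the docstring above:
    # {"id": 1, "result": {"pull_request_id": 63, "comment_id": ...}, "error": null}
    print(response['result'])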
@@ -1,1233 +1,1233 b''
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | import mock |
|
21 | 21 | import pytest |
|
22 | 22 | |
|
23 | 23 | import rhodecode |
|
24 | 24 | from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason |
|
25 | 25 | from rhodecode.lib.vcs.nodes import FileNode |
|
26 | 26 | from rhodecode.lib import helpers as h |
|
27 | 27 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
28 | 28 | from rhodecode.model.db import ( |
|
29 | 29 | PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository) |
|
30 | 30 | from rhodecode.model.meta import Session |
|
31 | 31 | from rhodecode.model.pull_request import PullRequestModel |
|
32 | 32 | from rhodecode.model.user import UserModel |
|
33 | 33 | from rhodecode.tests import ( |
|
34 | 34 | assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN) |
|
35 | from rhodecode.tests.utils import AssertResponse | |
|
36 | 35 | |
|
37 | 36 | |
|
38 | 37 | def route_path(name, params=None, **kwargs): |
|
39 | 38 | import urllib |
|
40 | 39 | |
|
41 | 40 | base_url = { |
|
42 | 41 | 'repo_changelog': '/{repo_name}/changelog', |
|
43 | 42 | 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}', |
|
44 | 43 | 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}', |
|
45 | 44 | 'pullrequest_show_all': '/{repo_name}/pull-request', |
|
46 | 45 | 'pullrequest_show_all_data': '/{repo_name}/pull-request-data', |
|
47 | 46 | 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}', |
|
48 | 47 | 'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations', |
|
49 | 48 | 'pullrequest_new': '/{repo_name}/pull-request/new', |
|
50 | 49 | 'pullrequest_create': '/{repo_name}/pull-request/create', |
|
51 | 50 | 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update', |
|
52 | 51 | 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge', |
|
53 | 52 | 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete', |
|
54 | 53 | 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment', |
|
55 | 54 | 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete', |
|
56 | 55 | }[name].format(**kwargs) |
|
57 | 56 | |
|
58 | 57 | if params: |
|
59 | 58 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) |
|
60 | 59 | return base_url |
|
61 | 60 | |
|
62 | 61 | |
|
63 | 62 | @pytest.mark.usefixtures('app', 'autologin_user') |
|
64 | 63 | @pytest.mark.backends("git", "hg") |
|
65 | 64 | class TestPullrequestsView(object): |
|
66 | 65 | |
|
67 | 66 | def test_index(self, backend): |
|
68 | 67 | self.app.get(route_path( |
|
69 | 68 | 'pullrequest_new', |
|
70 | 69 | repo_name=backend.repo_name)) |
|
71 | 70 | |
|
72 | 71 | def test_option_menu_create_pull_request_exists(self, backend): |
|
73 | 72 | repo_name = backend.repo_name |
|
74 | 73 | response = self.app.get(h.route_path('repo_summary', repo_name=repo_name)) |
|
75 | 74 | |
|
76 | 75 | create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path( |
|
77 | 76 | 'pullrequest_new', repo_name=repo_name) |
|
78 | 77 | response.mustcontain(create_pr_link) |
|
79 | 78 | |
|
80 | 79 | def test_create_pr_form_with_raw_commit_id(self, backend): |
|
81 | 80 | repo = backend.repo |
|
82 | 81 | |
|
83 | 82 | self.app.get( |
|
84 | 83 | route_path('pullrequest_new', repo_name=repo.repo_name, |
|
85 | 84 | commit=repo.get_commit().raw_id), |
|
86 | 85 | status=200) |
|
87 | 86 | |
|
88 | 87 | @pytest.mark.parametrize('pr_merge_enabled', [True, False]) |
|
89 | 88 | @pytest.mark.parametrize('range_diff', ["0", "1"]) |
|
90 | 89 | def test_show(self, pr_util, pr_merge_enabled, range_diff): |
|
91 | 90 | pull_request = pr_util.create_pull_request( |
|
92 | 91 | mergeable=pr_merge_enabled, enable_notifications=False) |
|
93 | 92 | |
|
94 | 93 | response = self.app.get(route_path( |
|
95 | 94 | 'pullrequest_show', |
|
96 | 95 | repo_name=pull_request.target_repo.scm_instance().name, |
|
97 | 96 | pull_request_id=pull_request.pull_request_id, |
|
98 | 97 | params={'range-diff': range_diff})) |
|
99 | 98 | |
|
100 | 99 | for commit_id in pull_request.revisions: |
|
101 | 100 | response.mustcontain(commit_id) |
|
102 | 101 | |
|
103 | 102 | assert pull_request.target_ref_parts.type in response |
|
104 | 103 | assert pull_request.target_ref_parts.name in response |
|
105 | 104 | target_clone_url = pull_request.target_repo.clone_url() |
|
106 | 105 | assert target_clone_url in response |
|
107 | 106 | |
|
108 | 107 | assert 'class="pull-request-merge"' in response |
|
109 | 108 | if pr_merge_enabled: |
|
110 | 109 | response.mustcontain('Pull request reviewer approval is pending') |
|
111 | 110 | else: |
|
112 | 111 | response.mustcontain('Server-side pull request merging is disabled.') |
|
113 | 112 | |
|
114 | 113 | if range_diff == "1": |
|
115 | 114 | response.mustcontain('Turn off: Show the diff as commit range') |
|
116 | 115 | |
|
117 | 116 | def test_close_status_visibility(self, pr_util, user_util, csrf_token): |
|
118 | 117 | # Logout |
|
119 | 118 | response = self.app.post( |
|
120 | 119 | h.route_path('logout'), |
|
121 | 120 | params={'csrf_token': csrf_token}) |
|
122 | 121 | # Login as regular user |
|
123 | 122 | response = self.app.post(h.route_path('login'), |
|
124 | 123 | {'username': TEST_USER_REGULAR_LOGIN, |
|
125 | 124 | 'password': 'test12'}) |
|
126 | 125 | |
|
127 | 126 | pull_request = pr_util.create_pull_request( |
|
128 | 127 | author=TEST_USER_REGULAR_LOGIN) |
|
129 | 128 | |
|
130 | 129 | response = self.app.get(route_path( |
|
131 | 130 | 'pullrequest_show', |
|
132 | 131 | repo_name=pull_request.target_repo.scm_instance().name, |
|
133 | 132 | pull_request_id=pull_request.pull_request_id)) |
|
134 | 133 | |
|
135 | 134 | response.mustcontain('Server-side pull request merging is disabled.') |
|
136 | 135 | |
|
137 | 136 | assert_response = response.assert_response() |
|
138 | 137 | # for a regular user without merge permissions, we don't see it |
|
139 | 138 | assert_response.no_element_exists('#close-pull-request-action') |
|
140 | 139 | |
|
141 | 140 | user_util.grant_user_permission_to_repo( |
|
142 | 141 | pull_request.target_repo, |
|
143 | 142 | UserModel().get_by_username(TEST_USER_REGULAR_LOGIN), |
|
144 | 143 | 'repository.write') |
|
145 | 144 | response = self.app.get(route_path( |
|
146 | 145 | 'pullrequest_show', |
|
147 | 146 | repo_name=pull_request.target_repo.scm_instance().name, |
|
148 | 147 | pull_request_id=pull_request.pull_request_id)) |
|
149 | 148 | |
|
150 | 149 | response.mustcontain('Server-side pull request merging is disabled.') |
|
151 | 150 | |
|
152 | 151 | assert_response = response.assert_response() |
|
153 | 152 | # now the regular user has merge permissions, we have the CLOSE button |
|
154 | 153 | assert_response.one_element_exists('#close-pull-request-action') |
|
155 | 154 | |
|
156 | 155 | def test_show_invalid_commit_id(self, pr_util): |
|
157 | 156 | # Simulating invalid revisions which will cause a lookup error |
|
158 | 157 | pull_request = pr_util.create_pull_request() |
|
159 | 158 | pull_request.revisions = ['invalid'] |
|
160 | 159 | Session().add(pull_request) |
|
161 | 160 | Session().commit() |
|
162 | 161 | |
|
163 | 162 | response = self.app.get(route_path( |
|
164 | 163 | 'pullrequest_show', |
|
165 | 164 | repo_name=pull_request.target_repo.scm_instance().name, |
|
166 | 165 | pull_request_id=pull_request.pull_request_id)) |
|
167 | 166 | |
|
168 | 167 | for commit_id in pull_request.revisions: |
|
169 | 168 | response.mustcontain(commit_id) |
|
170 | 169 | |
|
171 | 170 | def test_show_invalid_source_reference(self, pr_util): |
|
172 | 171 | pull_request = pr_util.create_pull_request() |
|
173 | 172 | pull_request.source_ref = 'branch:b:invalid' |
|
174 | 173 | Session().add(pull_request) |
|
175 | 174 | Session().commit() |
|
176 | 175 | |
|
177 | 176 | self.app.get(route_path( |
|
178 | 177 | 'pullrequest_show', |
|
179 | 178 | repo_name=pull_request.target_repo.scm_instance().name, |
|
180 | 179 | pull_request_id=pull_request.pull_request_id)) |
|
181 | 180 | |
|
182 | 181 | def test_edit_title_description(self, pr_util, csrf_token): |
|
183 | 182 | pull_request = pr_util.create_pull_request() |
|
184 | 183 | pull_request_id = pull_request.pull_request_id |
|
185 | 184 | |
|
186 | 185 | response = self.app.post( |
|
187 | 186 | route_path('pullrequest_update', |
|
188 | 187 | repo_name=pull_request.target_repo.repo_name, |
|
189 | 188 | pull_request_id=pull_request_id), |
|
190 | 189 | params={ |
|
191 | 190 | 'edit_pull_request': 'true', |
|
192 | 191 | 'title': 'New title', |
|
193 | 192 | 'description': 'New description', |
|
194 | 193 | 'csrf_token': csrf_token}) |
|
195 | 194 | |
|
196 | 195 | assert_session_flash( |
|
197 | 196 | response, u'Pull request title & description updated.', |
|
198 | 197 | category='success') |
|
199 | 198 | |
|
200 | 199 | pull_request = PullRequest.get(pull_request_id) |
|
201 | 200 | assert pull_request.title == 'New title' |
|
202 | 201 | assert pull_request.description == 'New description' |
|
203 | 202 | |
|
204 | 203 | def test_edit_title_description_closed(self, pr_util, csrf_token): |
|
205 | 204 | pull_request = pr_util.create_pull_request() |
|
206 | 205 | pull_request_id = pull_request.pull_request_id |
|
207 | 206 | repo_name = pull_request.target_repo.repo_name |
|
208 | 207 | pr_util.close() |
|
209 | 208 | |
|
210 | 209 | response = self.app.post( |
|
211 | 210 | route_path('pullrequest_update', |
|
212 | 211 | repo_name=repo_name, pull_request_id=pull_request_id), |
|
213 | 212 | params={ |
|
214 | 213 | 'edit_pull_request': 'true', |
|
215 | 214 | 'title': 'New title', |
|
216 | 215 | 'description': 'New description', |
|
217 | 216 | 'csrf_token': csrf_token}, status=200) |
|
218 | 217 | assert_session_flash( |
|
219 | 218 | response, u'Cannot update closed pull requests.', |
|
220 | 219 | category='error') |
|
221 | 220 | |
|
222 | 221 | def test_update_invalid_source_reference(self, pr_util, csrf_token): |
|
223 | 222 | from rhodecode.lib.vcs.backends.base import UpdateFailureReason |
|
224 | 223 | |
|
225 | 224 | pull_request = pr_util.create_pull_request() |
|
226 | 225 | pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id' |
|
227 | 226 | Session().add(pull_request) |
|
228 | 227 | Session().commit() |
|
229 | 228 | |
|
230 | 229 | pull_request_id = pull_request.pull_request_id |
|
231 | 230 | |
|
232 | 231 | response = self.app.post( |
|
233 | 232 | route_path('pullrequest_update', |
|
234 | 233 | repo_name=pull_request.target_repo.repo_name, |
|
235 | 234 | pull_request_id=pull_request_id), |
|
236 | params={'update_commits': 'true', | |
|
237 | 'csrf_token': csrf_token}) | |
|
235 | params={'update_commits': 'true', 'csrf_token': csrf_token}) | |
|
238 | 236 | |
|
239 | 237 | expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[ |
|
240 | 238 | UpdateFailureReason.MISSING_SOURCE_REF]) |
|
241 | 239 | assert_session_flash(response, expected_msg, category='error') |
|
242 | 240 | |
|
243 | 241 | def test_missing_target_reference(self, pr_util, csrf_token): |
|
244 | 242 | from rhodecode.lib.vcs.backends.base import MergeFailureReason |
|
245 | 243 | pull_request = pr_util.create_pull_request( |
|
246 | 244 | approved=True, mergeable=True) |
|
247 |
|
245 | unicode_reference = u'branch:invalid-branch:invalid-commit-id' | |
|
246 | pull_request.target_ref = unicode_reference | |
|
248 | 247 | Session().add(pull_request) |
|
249 | 248 | Session().commit() |
|
250 | 249 | |
|
251 | 250 | pull_request_id = pull_request.pull_request_id |
|
252 | 251 | pull_request_url = route_path( |
|
253 | 252 | 'pullrequest_show', |
|
254 | 253 | repo_name=pull_request.target_repo.repo_name, |
|
255 | 254 | pull_request_id=pull_request_id) |
|
256 | 255 | |
|
257 | 256 | response = self.app.get(pull_request_url) |
|
258 | |
|
259 |
|
260 | expected_msg = PullRequestModel.MERGE_STATUS_MESSAGES[ |
|
261 | MergeFailureReason.MISSING_TARGET_REF] |
|
262 | assertr.element_contains( |
|
263 | 'span[data-role="merge-message"]', |
|
|
257 | target_ref_id = 'invalid-branch' | |
|
258 | merge_resp = MergeResponse( | |
|
259 | True, True, '', MergeFailureReason.MISSING_TARGET_REF, | |
|
260 | metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)}) | |
|
261 | response.assert_response().element_contains( | |
|
262 | 'span[data-role="merge-message"]', merge_resp.merge_status_message) | |
|
264 | 263 | |
|
265 | 264 | def test_comment_and_close_pull_request_custom_message_approved( |
|
266 | 265 | self, pr_util, csrf_token, xhr_header): |
|
267 | 266 | |
|
268 | 267 | pull_request = pr_util.create_pull_request(approved=True) |
|
269 | 268 | pull_request_id = pull_request.pull_request_id |
|
270 | 269 | author = pull_request.user_id |
|
271 | 270 | repo = pull_request.target_repo.repo_id |
|
272 | 271 | |
|
273 | 272 | self.app.post( |
|
274 | 273 | route_path('pullrequest_comment_create', |
|
275 | repo_name=pull_request.target_repo.scm_instance().name, | |
|
276 | pull_request_id=pull_request_id), | |
|
274 | repo_name=pull_request.target_repo.scm_instance().name, | |
|
275 | pull_request_id=pull_request_id), | |
|
277 | 276 | params={ |
|
278 | 277 | 'close_pull_request': '1', |
|
279 | 278 | 'text': 'Closing a PR', |
|
280 | 279 | 'csrf_token': csrf_token}, |
|
281 | 280 | extra_environ=xhr_header,) |
|
282 | 281 | |
|
283 | 282 | journal = UserLog.query()\ |
|
284 | 283 | .filter(UserLog.user_id == author)\ |
|
285 | 284 | .filter(UserLog.repository_id == repo) \ |
|
286 | 285 | .order_by('user_log_id') \ |
|
287 | 286 | .all() |
|
288 | 287 | assert journal[-1].action == 'repo.pull_request.close' |
|
289 | 288 | |
|
290 | 289 | pull_request = PullRequest.get(pull_request_id) |
|
291 | 290 | assert pull_request.is_closed() |
|
292 | 291 | |
|
293 | 292 | status = ChangesetStatusModel().get_status( |
|
294 | 293 | pull_request.source_repo, pull_request=pull_request) |
|
295 | 294 | assert status == ChangesetStatus.STATUS_APPROVED |
|
296 | 295 | comments = ChangesetComment().query() \ |
|
297 | 296 | .filter(ChangesetComment.pull_request == pull_request) \ |
|
298 | 297 | .order_by(ChangesetComment.comment_id.asc())\ |
|
299 | 298 | .all() |
|
300 | 299 | assert comments[-1].text == 'Closing a PR' |
|
301 | 300 | |
|
302 | 301 | def test_comment_force_close_pull_request_rejected( |
|
303 | 302 | self, pr_util, csrf_token, xhr_header): |
|
304 | 303 | pull_request = pr_util.create_pull_request() |
|
305 | 304 | pull_request_id = pull_request.pull_request_id |
|
306 | 305 | PullRequestModel().update_reviewers( |
|
307 | 306 | pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])], |
|
308 | 307 | pull_request.author) |
|
309 | 308 | author = pull_request.user_id |
|
310 | 309 | repo = pull_request.target_repo.repo_id |
|
311 | 310 | |
|
312 | 311 | self.app.post( |
|
313 | 312 | route_path('pullrequest_comment_create', |
|
314 | 313 | repo_name=pull_request.target_repo.scm_instance().name, |
|
315 | 314 | pull_request_id=pull_request_id), |
|
316 | 315 | params={ |
|
317 | 316 | 'close_pull_request': '1', |
|
318 | 317 | 'csrf_token': csrf_token}, |
|
319 | 318 | extra_environ=xhr_header) |
|
320 | 319 | |
|
321 | 320 | pull_request = PullRequest.get(pull_request_id) |
|
322 | 321 | |
|
323 | 322 | journal = UserLog.query()\ |
|
324 | 323 | .filter(UserLog.user_id == author, UserLog.repository_id == repo) \ |
|
325 | 324 | .order_by('user_log_id') \ |
|
326 | 325 | .all() |
|
327 | 326 | assert journal[-1].action == 'repo.pull_request.close' |
|
328 | 327 | |
|
329 | 328 | # check only the latest status, not the review status |
|
330 | 329 | status = ChangesetStatusModel().get_status( |
|
331 | 330 | pull_request.source_repo, pull_request=pull_request) |
|
332 | 331 | assert status == ChangesetStatus.STATUS_REJECTED |
|
333 | 332 | |
|
334 | 333 | def test_comment_and_close_pull_request( |
|
335 | 334 | self, pr_util, csrf_token, xhr_header): |
|
336 | 335 | pull_request = pr_util.create_pull_request() |
|
337 | 336 | pull_request_id = pull_request.pull_request_id |
|
338 | 337 | |
|
339 | 338 | response = self.app.post( |
|
340 | 339 | route_path('pullrequest_comment_create', |
|
341 | 340 | repo_name=pull_request.target_repo.scm_instance().name, |
|
342 | 341 | pull_request_id=pull_request.pull_request_id), |
|
343 | 342 | params={ |
|
344 | 343 | 'close_pull_request': 'true', |
|
345 | 344 | 'csrf_token': csrf_token}, |
|
346 | 345 | extra_environ=xhr_header) |
|
347 | 346 | |
|
348 | 347 | assert response.json |
|
349 | 348 | |
|
350 | 349 | pull_request = PullRequest.get(pull_request_id) |
|
351 | 350 | assert pull_request.is_closed() |
|
352 | 351 | |
|
353 | 352 | # check only the latest status, not the review status |
|
354 | 353 | status = ChangesetStatusModel().get_status( |
|
355 | 354 | pull_request.source_repo, pull_request=pull_request) |
|
356 | 355 | assert status == ChangesetStatus.STATUS_REJECTED |
|
357 | 356 | |
|
358 | 357 | def test_create_pull_request(self, backend, csrf_token): |
|
359 | 358 | commits = [ |
|
360 | 359 | {'message': 'ancestor'}, |
|
361 | 360 | {'message': 'change'}, |
|
362 | 361 | {'message': 'change2'}, |
|
363 | 362 | ] |
|
364 | 363 | commit_ids = backend.create_master_repo(commits) |
|
365 | 364 | target = backend.create_repo(heads=['ancestor']) |
|
366 | 365 | source = backend.create_repo(heads=['change2']) |
|
367 | 366 | |
|
368 | 367 | response = self.app.post( |
|
369 | 368 | route_path('pullrequest_create', repo_name=source.repo_name), |
|
370 | 369 | [ |
|
371 | 370 | ('source_repo', source.repo_name), |
|
372 | 371 | ('source_ref', 'branch:default:' + commit_ids['change2']), |
|
373 | 372 | ('target_repo', target.repo_name), |
|
374 | 373 | ('target_ref', 'branch:default:' + commit_ids['ancestor']), |
|
375 | 374 | ('common_ancestor', commit_ids['ancestor']), |
|
376 | 375 | ('pullrequest_title', 'Title'), |
|
377 | 376 | ('pullrequest_desc', 'Description'), |
|
378 | 377 | ('description_renderer', 'markdown'), |
|
379 | 378 | ('__start__', 'review_members:sequence'), |
|
380 | 379 | ('__start__', 'reviewer:mapping'), |
|
381 | 380 | ('user_id', '1'), |
|
382 | 381 | ('__start__', 'reasons:sequence'), |
|
383 | 382 | ('reason', 'Some reason'), |
|
384 | 383 | ('__end__', 'reasons:sequence'), |
|
385 | 384 | ('__start__', 'rules:sequence'), |
|
386 | 385 | ('__end__', 'rules:sequence'), |
|
387 | 386 | ('mandatory', 'False'), |
|
388 | 387 | ('__end__', 'reviewer:mapping'), |
|
389 | 388 | ('__end__', 'review_members:sequence'), |
|
390 | 389 | ('__start__', 'revisions:sequence'), |
|
391 | 390 | ('revisions', commit_ids['change']), |
|
392 | 391 | ('revisions', commit_ids['change2']), |
|
393 | 392 | ('__end__', 'revisions:sequence'), |
|
394 | 393 | ('user', ''), |
|
395 | 394 | ('csrf_token', csrf_token), |
|
396 | 395 | ], |
|
397 | 396 | status=302) |
|
398 | 397 | |
|
399 | 398 | location = response.headers['Location'] |
|
400 | 399 | pull_request_id = location.rsplit('/', 1)[1] |
|
401 | 400 | assert pull_request_id != 'new' |
|
402 | 401 | pull_request = PullRequest.get(int(pull_request_id)) |
|
403 | 402 | |
|
404 | 403 | # check that we have now both revisions |
|
405 | 404 | assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']] |
|
406 | 405 | assert pull_request.source_ref == 'branch:default:' + commit_ids['change2'] |
|
407 | 406 | expected_target_ref = 'branch:default:' + commit_ids['ancestor'] |
|
408 | 407 | assert pull_request.target_ref == expected_target_ref |
|
409 | 408 | |
|
410 | 409 | def test_reviewer_notifications(self, backend, csrf_token): |
|
411 | 410 | # We have to use the app.post for this test so it will create the |
|
412 | 411 | # notifications properly with the new PR |
|
413 | 412 | commits = [ |
|
414 | 413 | {'message': 'ancestor', |
|
415 | 414 | 'added': [FileNode('file_A', content='content_of_ancestor')]}, |
|
416 | 415 | {'message': 'change', |
|
417 | 416 | 'added': [FileNode('file_a', content='content_of_change')]}, |
|
418 | 417 | {'message': 'change-child'}, |
|
419 | 418 | {'message': 'ancestor-child', 'parents': ['ancestor'], |
|
420 | 419 | 'added': [ |
|
421 | 420 | FileNode('file_B', content='content_of_ancestor_child')]}, |
|
422 | 421 | {'message': 'ancestor-child-2'}, |
|
423 | 422 | ] |
|
424 | 423 | commit_ids = backend.create_master_repo(commits) |
|
425 | 424 | target = backend.create_repo(heads=['ancestor-child']) |
|
426 | 425 | source = backend.create_repo(heads=['change']) |
|
427 | 426 | |
|
428 | 427 | response = self.app.post( |
|
429 | 428 | route_path('pullrequest_create', repo_name=source.repo_name), |
|
430 | 429 | [ |
|
431 | 430 | ('source_repo', source.repo_name), |
|
432 | 431 | ('source_ref', 'branch:default:' + commit_ids['change']), |
|
433 | 432 | ('target_repo', target.repo_name), |
|
434 | 433 | ('target_ref', 'branch:default:' + commit_ids['ancestor-child']), |
|
435 | 434 | ('common_ancestor', commit_ids['ancestor']), |
|
436 | 435 | ('pullrequest_title', 'Title'), |
|
437 | 436 | ('pullrequest_desc', 'Description'), |
|
438 | 437 | ('description_renderer', 'markdown'), |
|
439 | 438 | ('__start__', 'review_members:sequence'), |
|
440 | 439 | ('__start__', 'reviewer:mapping'), |
|
441 | 440 | ('user_id', '2'), |
|
442 | 441 | ('__start__', 'reasons:sequence'), |
|
443 | 442 | ('reason', 'Some reason'), |
|
444 | 443 | ('__end__', 'reasons:sequence'), |
|
445 | 444 | ('__start__', 'rules:sequence'), |
|
446 | 445 | ('__end__', 'rules:sequence'), |
|
447 | 446 | ('mandatory', 'False'), |
|
448 | 447 | ('__end__', 'reviewer:mapping'), |
|
449 | 448 | ('__end__', 'review_members:sequence'), |
|
450 | 449 | ('__start__', 'revisions:sequence'), |
|
451 | 450 | ('revisions', commit_ids['change']), |
|
452 | 451 | ('__end__', 'revisions:sequence'), |
|
453 | 452 | ('user', ''), |
|
454 | 453 | ('csrf_token', csrf_token), |
|
455 | 454 | ], |
|
456 | 455 | status=302) |
|
457 | 456 | |
|
458 | 457 | location = response.headers['Location'] |
|
459 | 458 | |
|
460 | 459 | pull_request_id = location.rsplit('/', 1)[1] |
|
461 | 460 | assert pull_request_id != 'new' |
|
462 | 461 | pull_request = PullRequest.get(int(pull_request_id)) |
|
463 | 462 | |
|
464 | 463 | # Check that a notification was made |
|
465 | 464 | notifications = Notification.query()\ |
|
466 | 465 | .filter(Notification.created_by == pull_request.author.user_id, |
|
467 | 466 | Notification.type_ == Notification.TYPE_PULL_REQUEST, |
|
468 | 467 | Notification.subject.contains( |
|
469 | 468 | "wants you to review pull request #%s" % pull_request_id)) |
|
470 | 469 | assert len(notifications.all()) == 1 |
|
471 | 470 | |
|
472 | 471 | # Change reviewers and check that a notification was made |
|
473 | 472 | PullRequestModel().update_reviewers( |
|
474 | 473 | pull_request.pull_request_id, [(1, [], False, [])], |
|
475 | 474 | pull_request.author) |
|
476 | 475 | assert len(notifications.all()) == 2 |
|
477 | 476 | |
|
478 | 477 | def test_create_pull_request_stores_ancestor_commit_id(self, backend, |
|
479 | 478 | csrf_token): |
|
480 | 479 | commits = [ |
|
481 | 480 | {'message': 'ancestor', |
|
482 | 481 | 'added': [FileNode('file_A', content='content_of_ancestor')]}, |
|
483 | 482 | {'message': 'change', |
|
484 | 483 | 'added': [FileNode('file_a', content='content_of_change')]}, |
|
485 | 484 | {'message': 'change-child'}, |
|
486 | 485 | {'message': 'ancestor-child', 'parents': ['ancestor'], |
|
487 | 486 | 'added': [ |
|
488 | 487 | FileNode('file_B', content='content_of_ancestor_child')]}, |
|
489 | 488 | {'message': 'ancestor-child-2'}, |
|
490 | 489 | ] |
|
491 | 490 | commit_ids = backend.create_master_repo(commits) |
|
492 | 491 | target = backend.create_repo(heads=['ancestor-child']) |
|
493 | 492 | source = backend.create_repo(heads=['change']) |
|
494 | 493 | |
|
495 | 494 | response = self.app.post( |
|
496 | 495 | route_path('pullrequest_create', repo_name=source.repo_name), |
|
497 | 496 | [ |
|
498 | 497 | ('source_repo', source.repo_name), |
|
499 | 498 | ('source_ref', 'branch:default:' + commit_ids['change']), |
|
500 | 499 | ('target_repo', target.repo_name), |
|
501 | 500 | ('target_ref', 'branch:default:' + commit_ids['ancestor-child']), |
|
502 | 501 | ('common_ancestor', commit_ids['ancestor']), |
|
503 | 502 | ('pullrequest_title', 'Title'), |
|
504 | 503 | ('pullrequest_desc', 'Description'), |
|
505 | 504 | ('description_renderer', 'markdown'), |
|
506 | 505 | ('__start__', 'review_members:sequence'), |
|
507 | 506 | ('__start__', 'reviewer:mapping'), |
|
508 | 507 | ('user_id', '1'), |
|
509 | 508 | ('__start__', 'reasons:sequence'), |
|
510 | 509 | ('reason', 'Some reason'), |
|
511 | 510 | ('__end__', 'reasons:sequence'), |
|
512 | 511 | ('__start__', 'rules:sequence'), |
|
513 | 512 | ('__end__', 'rules:sequence'), |
|
514 | 513 | ('mandatory', 'False'), |
|
515 | 514 | ('__end__', 'reviewer:mapping'), |
|
516 | 515 | ('__end__', 'review_members:sequence'), |
|
517 | 516 | ('__start__', 'revisions:sequence'), |
|
518 | 517 | ('revisions', commit_ids['change']), |
|
519 | 518 | ('__end__', 'revisions:sequence'), |
|
520 | 519 | ('user', ''), |
|
521 | 520 | ('csrf_token', csrf_token), |
|
522 | 521 | ], |
|
523 | 522 | status=302) |
|
524 | 523 | |
|
525 | 524 | location = response.headers['Location'] |
|
526 | 525 | |
|
527 | 526 | pull_request_id = location.rsplit('/', 1)[1] |
|
528 | 527 | assert pull_request_id != 'new' |
|
529 | 528 | pull_request = PullRequest.get(int(pull_request_id)) |
|
530 | 529 | |
|
531 | 530 | # target_ref has to point to the ancestor's commit_id in order to |
|
532 | 531 | # show the correct diff |
|
533 | 532 | expected_target_ref = 'branch:default:' + commit_ids['ancestor'] |
|
534 | 533 | assert pull_request.target_ref == expected_target_ref |
|
535 | 534 | |
|
536 | 535 | # Check generated diff contents |
|
537 | 536 | response = response.follow() |
|
538 | 537 | assert 'content_of_ancestor' not in response.body |
|
539 | 538 | assert 'content_of_ancestor-child' not in response.body |
|
540 | 539 | assert 'content_of_change' in response.body |
|
541 | 540 | |
|
542 | 541 | def test_merge_pull_request_enabled(self, pr_util, csrf_token): |
|
543 | 542 | # Clear any previous calls to rcextensions |
|
544 | 543 | rhodecode.EXTENSIONS.calls.clear() |
|
545 | 544 | |
|
546 | 545 | pull_request = pr_util.create_pull_request( |
|
547 | 546 | approved=True, mergeable=True) |
|
548 | 547 | pull_request_id = pull_request.pull_request_id |
|
549 | 548 | repo_name = pull_request.target_repo.scm_instance().name, |
|
550 | 549 | |
|
551 | 550 | response = self.app.post( |
|
552 | 551 | route_path('pullrequest_merge', |
|
553 | 552 | repo_name=str(repo_name[0]), |
|
554 | 553 | pull_request_id=pull_request_id), |
|
555 | 554 | params={'csrf_token': csrf_token}).follow() |
|
556 | 555 | |
|
557 | 556 | pull_request = PullRequest.get(pull_request_id) |
|
558 | 557 | |
|
559 | 558 | assert response.status_int == 200 |
|
560 | 559 | assert pull_request.is_closed() |
|
561 | 560 | assert_pull_request_status( |
|
562 | 561 | pull_request, ChangesetStatus.STATUS_APPROVED) |
|
563 | 562 | |
|
564 | 563 | # Check the relevant log entries were added |
|
565 | 564 | user_logs = UserLog.query().order_by('-user_log_id').limit(3) |
|
566 | 565 | actions = [log.action for log in user_logs] |
|
567 | 566 | pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request) |
|
568 | 567 | expected_actions = [ |
|
569 | 568 | u'repo.pull_request.close', |
|
570 | 569 | u'repo.pull_request.merge', |
|
571 | 570 | u'repo.pull_request.comment.create' |
|
572 | 571 | ] |
|
573 | 572 | assert actions == expected_actions |
|
574 | 573 | |
|
575 | 574 | user_logs = UserLog.query().order_by('-user_log_id').limit(4) |
|
576 | 575 | actions = [log for log in user_logs] |
|
577 | 576 | assert actions[-1].action == 'user.push' |
|
578 | 577 | assert actions[-1].action_data['commit_ids'] == pr_commit_ids |
|
579 | 578 | |
|
580 | 579 | # Check post_push rcextension was really executed |
|
581 | 580 | push_calls = rhodecode.EXTENSIONS.calls['_push_hook'] |
|
582 | 581 | assert len(push_calls) == 1 |
|
583 | 582 | unused_last_call_args, last_call_kwargs = push_calls[0] |
|
584 | 583 | assert last_call_kwargs['action'] == 'push' |
|
585 | 584 | assert last_call_kwargs['commit_ids'] == pr_commit_ids |
|
586 | 585 | |
|
587 | 586 | def test_merge_pull_request_disabled(self, pr_util, csrf_token): |
|
588 | 587 | pull_request = pr_util.create_pull_request(mergeable=False) |
|
589 | 588 | pull_request_id = pull_request.pull_request_id |
|
590 | 589 | pull_request = PullRequest.get(pull_request_id) |
|
591 | 590 | |
|
592 | 591 | response = self.app.post( |
|
593 | 592 | route_path('pullrequest_merge', |
|
594 | 593 | repo_name=pull_request.target_repo.scm_instance().name, |
|
595 | 594 | pull_request_id=pull_request.pull_request_id), |
|
596 | 595 | params={'csrf_token': csrf_token}).follow() |
|
597 | 596 | |
|
598 | 597 | assert response.status_int == 200 |
|
599 | 598 | response.mustcontain( |
|
600 | 599 | 'Merge is not currently possible because of below failed checks.') |
|
601 | 600 | response.mustcontain('Server-side pull request merging is disabled.') |
|
602 | 601 | |
|
603 | 602 | @pytest.mark.skip_backends('svn') |
|
604 | 603 | def test_merge_pull_request_not_approved(self, pr_util, csrf_token): |
|
605 | 604 | pull_request = pr_util.create_pull_request(mergeable=True) |
|
606 | 605 | pull_request_id = pull_request.pull_request_id |
|
607 | 606 | repo_name = pull_request.target_repo.scm_instance().name |
|
608 | 607 | |
|
609 | 608 | response = self.app.post( |
|
610 | 609 | route_path('pullrequest_merge', |
|
611 | repo_name=repo_name, | |
|
612 | pull_request_id=pull_request_id), | |
|
610 | repo_name=repo_name, pull_request_id=pull_request_id), | |
|
613 | 611 | params={'csrf_token': csrf_token}).follow() |
|
614 | 612 | |
|
615 | 613 | assert response.status_int == 200 |
|
616 | 614 | |
|
617 | 615 | response.mustcontain( |
|
618 | 616 | 'Merge is not currently possible because of below failed checks.') |
|
619 | 617 | response.mustcontain('Pull request reviewer approval is pending.') |
|
620 | 618 | |
|
621 | 619 | def test_merge_pull_request_renders_failure_reason( |
|
622 | 620 | self, user_regular, csrf_token, pr_util): |
|
623 | 621 | pull_request = pr_util.create_pull_request(mergeable=True, approved=True) |
|
624 | 622 | pull_request_id = pull_request.pull_request_id |
|
625 | 623 | repo_name = pull_request.target_repo.scm_instance().name |
|
626 | 624 | |
|
625 | merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID', | |
|
626 | MergeFailureReason.PUSH_FAILED, | |
|
627 | metadata={'target': 'shadow repo', | |
|
628 | 'merge_commit': 'xxx'}) | |
|
627 | 629 | model_patcher = mock.patch.multiple( |
|
628 | 630 | PullRequestModel, |
|
629 | merge_repo=mock.Mock(return_value=MergeResponse( | 

630 | True, False, 'STUB_COMMIT_ID', MergeFailureReason.PUSH_FAILED)), | |
|
631 | merge_repo=mock.Mock(return_value=merge_resp), | |
|
631 | 632 | merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE'))) |
|
632 | 633 | |
|
633 | 634 | with model_patcher: |
|
634 | 635 | response = self.app.post( |
|
635 | 636 | route_path('pullrequest_merge', |
|
636 | 637 | repo_name=repo_name, |
|
637 | 638 | pull_request_id=pull_request_id), |
|
638 | 639 | params={'csrf_token': csrf_token}, status=302) |
|
639 | 640 | |
|
640 | assert_session_flash(response, PullRequestModel.MERGE_STATUS_MESSAGES[ | |
|
641 | MergeFailureReason.PUSH_FAILED]) | |
|
641 | merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED, | |
|
642 | metadata={'target': 'shadow repo', | |
|
643 | 'merge_commit': 'xxx'}) | |
|
644 | assert_session_flash(response, merge_resp.merge_status_message) | |
|
642 | 645 | |
|
643 | 646 | def test_update_source_revision(self, backend, csrf_token): |
|
644 | 647 | commits = [ |
|
645 | 648 | {'message': 'ancestor'}, |
|
646 | 649 | {'message': 'change'}, |
|
647 | 650 | {'message': 'change-2'}, |
|
648 | 651 | ] |
|
649 | 652 | commit_ids = backend.create_master_repo(commits) |
|
650 | 653 | target = backend.create_repo(heads=['ancestor']) |
|
651 | 654 | source = backend.create_repo(heads=['change']) |
|
652 | 655 | |
|
653 | 656 | # create pr from a in source to A in target |
|
654 | 657 | pull_request = PullRequest() |
|
655 | 658 | pull_request.source_repo = source |
|
656 | 659 | # TODO: johbo: Make sure that we write the source ref this way! |
|
657 | 660 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
658 | 661 | branch=backend.default_branch_name, commit_id=commit_ids['change']) |
|
659 | 662 | pull_request.target_repo = target |
|
660 | 663 | |
|
661 | 664 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
662 | 665 | branch=backend.default_branch_name, |
|
663 | 666 | commit_id=commit_ids['ancestor']) |
|
664 | 667 | pull_request.revisions = [commit_ids['change']] |
|
665 | 668 | pull_request.title = u"Test" |
|
666 | 669 | pull_request.description = u"Description" |
|
667 | 670 | pull_request.author = UserModel().get_by_username( |
|
668 | 671 | TEST_USER_ADMIN_LOGIN) |
|
669 | 672 | Session().add(pull_request) |
|
670 | 673 | Session().commit() |
|
671 | 674 | pull_request_id = pull_request.pull_request_id |
|
672 | 675 | |
|
673 | 676 | # source has ancestor - change - change-2 |
|
674 | 677 | backend.pull_heads(source, heads=['change-2']) |
|
675 | 678 | |
|
676 | 679 | # update PR |
|
677 | 680 | self.app.post( |
|
678 | 681 | route_path('pullrequest_update', |
|
679 | 682 | repo_name=target.repo_name, |
|
680 | 683 | pull_request_id=pull_request_id), |
|
681 | 684 | params={'update_commits': 'true', |
|
682 | 685 | 'csrf_token': csrf_token}) |
|
683 | 686 | |
|
684 | 687 | # check that we have now both revisions |
|
685 | 688 | pull_request = PullRequest.get(pull_request_id) |
|
686 | 689 | assert pull_request.revisions == [ |
|
687 | 690 | commit_ids['change-2'], commit_ids['change']] |
|
688 | 691 | |
|
689 | 692 | # TODO: johbo: this should be a test on its own |
|
690 | 693 | response = self.app.get(route_path( |
|
691 | 694 | 'pullrequest_new', |
|
692 | 695 | repo_name=target.repo_name)) |
|
693 | 696 | assert response.status_int == 200 |
|
694 | 697 | assert 'Pull request updated to' in response.body |
|
695 | 698 | assert 'with 1 added, 0 removed commits.' in response.body |
|
696 | 699 | |
|
697 | 700 | def test_update_target_revision(self, backend, csrf_token): |
|
698 | 701 | commits = [ |
|
699 | 702 | {'message': 'ancestor'}, |
|
700 | 703 | {'message': 'change'}, |
|
701 | 704 | {'message': 'ancestor-new', 'parents': ['ancestor']}, |
|
702 | 705 | {'message': 'change-rebased'}, |
|
703 | 706 | ] |
|
704 | 707 | commit_ids = backend.create_master_repo(commits) |
|
705 | 708 | target = backend.create_repo(heads=['ancestor']) |
|
706 | 709 | source = backend.create_repo(heads=['change']) |
|
707 | 710 | |
|
708 | 711 | # create pr from a in source to A in target |
|
709 | 712 | pull_request = PullRequest() |
|
710 | 713 | pull_request.source_repo = source |
|
711 | 714 | # TODO: johbo: Make sure that we write the source ref this way! |
|
712 | 715 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
713 | 716 | branch=backend.default_branch_name, commit_id=commit_ids['change']) |
|
714 | 717 | pull_request.target_repo = target |
|
715 | 718 | # TODO: johbo: Target ref should be branch based, since tip can jump |
|
716 | 719 | # from branch to branch |
|
717 | 720 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
718 | 721 | branch=backend.default_branch_name, |
|
719 | 722 | commit_id=commit_ids['ancestor']) |
|
720 | 723 | pull_request.revisions = [commit_ids['change']] |
|
721 | 724 | pull_request.title = u"Test" |
|
722 | 725 | pull_request.description = u"Description" |
|
723 | 726 | pull_request.author = UserModel().get_by_username( |
|
724 | 727 | TEST_USER_ADMIN_LOGIN) |
|
725 | 728 | Session().add(pull_request) |
|
726 | 729 | Session().commit() |
|
727 | 730 | pull_request_id = pull_request.pull_request_id |
|
728 | 731 | |
|
729 | 732 | # target has ancestor - ancestor-new |
|
730 | 733 | # source has ancestor - ancestor-new - change-rebased |
|
731 | 734 | backend.pull_heads(target, heads=['ancestor-new']) |
|
732 | 735 | backend.pull_heads(source, heads=['change-rebased']) |
|
733 | 736 | |
|
734 | 737 | # update PR |
|
735 | 738 | self.app.post( |
|
736 | 739 | route_path('pullrequest_update', |
|
737 | 740 | repo_name=target.repo_name, |
|
738 | 741 | pull_request_id=pull_request_id), |
|
739 | 742 | params={'update_commits': 'true', |
|
740 | 743 | 'csrf_token': csrf_token}, |
|
741 | 744 | status=200) |
|
742 | 745 | |
|
743 | 746 | # check that we have now both revisions |
|
744 | 747 | pull_request = PullRequest.get(pull_request_id) |
|
745 | 748 | assert pull_request.revisions == [commit_ids['change-rebased']] |
|
746 | 749 | assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format( |
|
747 | 750 | branch=backend.default_branch_name, |
|
748 | 751 | commit_id=commit_ids['ancestor-new']) |
|
749 | 752 | |
|
750 | 753 | # TODO: johbo: This should be a test on its own |
|
751 | 754 | response = self.app.get(route_path( |
|
752 | 755 | 'pullrequest_new', |
|
753 | 756 | repo_name=target.repo_name)) |
|
754 | 757 | assert response.status_int == 200 |
|
755 | 758 | assert 'Pull request updated to' in response.body |
|
756 | 759 | assert 'with 1 added, 1 removed commits.' in response.body |
|
757 | 760 | |
|
758 | 761 | def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token): |
|
759 | 762 | backend = backend_git |
|
760 | 763 | commits = [ |
|
761 | 764 | {'message': 'master-commit-1'}, |
|
762 | 765 | {'message': 'master-commit-2-change-1'}, |
|
763 | 766 | {'message': 'master-commit-3-change-2'}, |
|
764 | 767 | |
|
765 | 768 | {'message': 'feat-commit-1', 'parents': ['master-commit-1']}, |
|
766 | 769 | {'message': 'feat-commit-2'}, |
|
767 | 770 | ] |
|
768 | 771 | commit_ids = backend.create_master_repo(commits) |
|
769 | 772 | target = backend.create_repo(heads=['master-commit-3-change-2']) |
|
770 | 773 | source = backend.create_repo(heads=['feat-commit-2']) |
|
771 | 774 | |
|
772 | 775 | # create pr from a in source to A in target |
|
773 | 776 | pull_request = PullRequest() |
|
774 | 777 | pull_request.source_repo = source |
|
775 | 778 | # TODO: johbo: Make sure that we write the source ref this way! |
|
776 | 779 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
777 | 780 | branch=backend.default_branch_name, |
|
778 | 781 | commit_id=commit_ids['master-commit-3-change-2']) |
|
779 | 782 | |
|
780 | 783 | pull_request.target_repo = target |
|
781 | 784 | # TODO: johbo: Target ref should be branch based, since tip can jump |
|
782 | 785 | # from branch to branch |
|
783 | 786 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
784 | 787 | branch=backend.default_branch_name, |
|
785 | 788 | commit_id=commit_ids['feat-commit-2']) |
|
786 | 789 | |
|
787 | 790 | pull_request.revisions = [ |
|
788 | 791 | commit_ids['feat-commit-1'], |
|
789 | 792 | commit_ids['feat-commit-2'] |
|
790 | 793 | ] |
|
791 | 794 | pull_request.title = u"Test" |
|
792 | 795 | pull_request.description = u"Description" |
|
793 | 796 | pull_request.author = UserModel().get_by_username( |
|
794 | 797 | TEST_USER_ADMIN_LOGIN) |
|
795 | 798 | Session().add(pull_request) |
|
796 | 799 | Session().commit() |
|
797 | 800 | pull_request_id = pull_request.pull_request_id |
|
798 | 801 | |
|
799 | 802 | # PR is created, now we simulate a force-push into target, |
|
800 | 803 | # that drops a 2 last commits |
|
801 | 804 | vcsrepo = target.scm_instance() |
|
802 | 805 | vcsrepo.config.clear_section('hooks') |
|
803 | 806 | vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2']) |
|
804 | 807 | |
|
805 | 808 | # update PR |
|
806 | 809 | self.app.post( |
|
807 | 810 | route_path('pullrequest_update', |
|
808 | 811 | repo_name=target.repo_name, |
|
809 | 812 | pull_request_id=pull_request_id), |
|
810 | 813 | params={'update_commits': 'true', |
|
811 | 814 | 'csrf_token': csrf_token}, |
|
812 | 815 | status=200) |
|
813 | 816 | |
|
814 | 817 | response = self.app.get(route_path( |
|
815 | 818 | 'pullrequest_new', |
|
816 | 819 | repo_name=target.repo_name)) |
|
817 | 820 | assert response.status_int == 200 |
|
818 | 821 | response.mustcontain('Pull request updated to') |
|
819 | 822 | response.mustcontain('with 0 added, 0 removed commits.') |
|
820 | 823 | |
|
821 | 824 | def test_update_of_ancestor_reference(self, backend, csrf_token): |
|
822 | 825 | commits = [ |
|
823 | 826 | {'message': 'ancestor'}, |
|
824 | 827 | {'message': 'change'}, |
|
825 | 828 | {'message': 'change-2'}, |
|
826 | 829 | {'message': 'ancestor-new', 'parents': ['ancestor']}, |
|
827 | 830 | {'message': 'change-rebased'}, |
|
828 | 831 | ] |
|
829 | 832 | commit_ids = backend.create_master_repo(commits) |
|
830 | 833 | target = backend.create_repo(heads=['ancestor']) |
|
831 | 834 | source = backend.create_repo(heads=['change']) |
|
832 | 835 | |
|
833 | 836 | # create pr from a in source to A in target |
|
834 | 837 | pull_request = PullRequest() |
|
835 | 838 | pull_request.source_repo = source |
|
836 | 839 | # TODO: johbo: Make sure that we write the source ref this way! |
|
837 | 840 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
838 | 841 | branch=backend.default_branch_name, |
|
839 | 842 | commit_id=commit_ids['change']) |
|
840 | 843 | pull_request.target_repo = target |
|
841 | 844 | # TODO: johbo: Target ref should be branch based, since tip can jump |
|
842 | 845 | # from branch to branch |
|
843 | 846 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
844 | 847 | branch=backend.default_branch_name, |
|
845 | 848 | commit_id=commit_ids['ancestor']) |
|
846 | 849 | pull_request.revisions = [commit_ids['change']] |
|
847 | 850 | pull_request.title = u"Test" |
|
848 | 851 | pull_request.description = u"Description" |
|
849 | 852 | pull_request.author = UserModel().get_by_username( |
|
850 | 853 | TEST_USER_ADMIN_LOGIN) |
|
851 | 854 | Session().add(pull_request) |
|
852 | 855 | Session().commit() |
|
853 | 856 | pull_request_id = pull_request.pull_request_id |
|
854 | 857 | |
|
855 | 858 | # target has ancestor - ancestor-new |
|
856 | 859 | # source has ancestor - ancestor-new - change-rebased |
|
857 | 860 | backend.pull_heads(target, heads=['ancestor-new']) |
|
858 | 861 | backend.pull_heads(source, heads=['change-rebased']) |
|
859 | 862 | |
|
860 | 863 | # update PR |
|
861 | 864 | self.app.post( |
|
862 | 865 | route_path('pullrequest_update', |
|
863 | 866 | repo_name=target.repo_name, |
|
864 | 867 | pull_request_id=pull_request_id), |
|
865 | 868 | params={'update_commits': 'true', |
|
866 | 869 | 'csrf_token': csrf_token}, |
|
867 | 870 | status=200) |
|
868 | 871 | |
|
869 | 872 | # Expect the target reference to be updated correctly |
|
870 | 873 | pull_request = PullRequest.get(pull_request_id) |
|
871 | 874 | assert pull_request.revisions == [commit_ids['change-rebased']] |
|
872 | 875 | expected_target_ref = 'branch:{branch}:{commit_id}'.format( |
|
873 | 876 | branch=backend.default_branch_name, |
|
874 | 877 | commit_id=commit_ids['ancestor-new']) |
|
875 | 878 | assert pull_request.target_ref == expected_target_ref |
|
876 | 879 | |
|
877 | 880 | def test_remove_pull_request_branch(self, backend_git, csrf_token): |
|
878 | 881 | branch_name = 'development' |
|
879 | 882 | commits = [ |
|
880 | 883 | {'message': 'initial-commit'}, |
|
881 | 884 | {'message': 'old-feature'}, |
|
882 | 885 | {'message': 'new-feature', 'branch': branch_name}, |
|
883 | 886 | ] |
|
884 | 887 | repo = backend_git.create_repo(commits) |
|
885 | 888 | commit_ids = backend_git.commit_ids |
|
886 | 889 | |
|
887 | 890 | pull_request = PullRequest() |
|
888 | 891 | pull_request.source_repo = repo |
|
889 | 892 | pull_request.target_repo = repo |
|
890 | 893 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
891 | 894 | branch=branch_name, commit_id=commit_ids['new-feature']) |
|
892 | 895 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
893 | 896 | branch=backend_git.default_branch_name, |
|
894 | 897 | commit_id=commit_ids['old-feature']) |
|
895 | 898 | pull_request.revisions = [commit_ids['new-feature']] |
|
896 | 899 | pull_request.title = u"Test" |
|
897 | 900 | pull_request.description = u"Description" |
|
898 | 901 | pull_request.author = UserModel().get_by_username( |
|
899 | 902 | TEST_USER_ADMIN_LOGIN) |
|
900 | 903 | Session().add(pull_request) |
|
901 | 904 | Session().commit() |
|
902 | 905 | |
|
903 | 906 | vcs = repo.scm_instance() |
|
904 | 907 | vcs.remove_ref('refs/heads/{}'.format(branch_name)) |
|
905 | 908 | |
|
906 | 909 | response = self.app.get(route_path( |
|
907 | 910 | 'pullrequest_show', |
|
908 | 911 | repo_name=repo.repo_name, |
|
909 | 912 | pull_request_id=pull_request.pull_request_id)) |
|
910 | 913 | |
|
911 | 914 | assert response.status_int == 200 |
|
912 | assert_response = AssertResponse(response) | |
|
913 | assert_response.element_contains( | |
|
915 | ||
|
916 | response.assert_response().element_contains( | |
|
914 | 917 | '#changeset_compare_view_content .alert strong', |
|
915 | 918 | 'Missing commits') |
|
916 | assert_response.element_contains( | |
|
919 | response.assert_response().element_contains( | |
|
917 | 920 | '#changeset_compare_view_content .alert', |
|
918 | 921 | 'This pull request cannot be displayed, because one or more' |
|
919 | 922 | ' commits no longer exist in the source repository.') |
|
920 | 923 | |
|
921 | 924 | def test_strip_commits_from_pull_request( |
|
922 | 925 | self, backend, pr_util, csrf_token): |
|
923 | 926 | commits = [ |
|
924 | 927 | {'message': 'initial-commit'}, |
|
925 | 928 | {'message': 'old-feature'}, |
|
926 | 929 | {'message': 'new-feature', 'parents': ['initial-commit']}, |
|
927 | 930 | ] |
|
928 | 931 | pull_request = pr_util.create_pull_request( |
|
929 | 932 | commits, target_head='initial-commit', source_head='new-feature', |
|
930 | 933 | revisions=['new-feature']) |
|
931 | 934 | |
|
932 | 935 | vcs = pr_util.source_repository.scm_instance() |
|
933 | 936 | if backend.alias == 'git': |
|
934 | 937 | vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master') |
|
935 | 938 | else: |
|
936 | 939 | vcs.strip(pr_util.commit_ids['new-feature']) |
|
937 | 940 | |
|
938 | 941 | response = self.app.get(route_path( |
|
939 | 942 | 'pullrequest_show', |
|
940 | 943 | repo_name=pr_util.target_repository.repo_name, |
|
941 | 944 | pull_request_id=pull_request.pull_request_id)) |
|
942 | 945 | |
|
943 | 946 | assert response.status_int == 200 |
|
944 | assert_response = AssertResponse(response) | |
|
945 | assert_response.element_contains( | |
|
947 | ||
|
948 | response.assert_response().element_contains( | |
|
946 | 949 | '#changeset_compare_view_content .alert strong', |
|
947 | 950 | 'Missing commits') |
|
948 | assert_response.element_contains( | |
|
951 | response.assert_response().element_contains( | |
|
949 | 952 | '#changeset_compare_view_content .alert', |
|
950 | 953 | 'This pull request cannot be displayed, because one or more' |
|
951 | 954 | ' commits no longer exist in the source repository.') |
|
952 | assert_response.element_contains( | |
|
955 | response.assert_response().element_contains( | |
|
953 | 956 | '#update_commits', |
|
954 | 957 | 'Update commits') |
|
955 | 958 | |
|
956 | 959 | def test_strip_commits_and_update( |
|
957 | 960 | self, backend, pr_util, csrf_token): |
|
958 | 961 | commits = [ |
|
959 | 962 | {'message': 'initial-commit'}, |
|
960 | 963 | {'message': 'old-feature'}, |
|
961 | 964 | {'message': 'new-feature', 'parents': ['old-feature']}, |
|
962 | 965 | ] |
|
963 | 966 | pull_request = pr_util.create_pull_request( |
|
964 | 967 | commits, target_head='old-feature', source_head='new-feature', |
|
965 | 968 | revisions=['new-feature'], mergeable=True) |
|
966 | 969 | |
|
967 | 970 | vcs = pr_util.source_repository.scm_instance() |
|
968 | 971 | if backend.alias == 'git': |
|
969 | 972 | vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master') |
|
970 | 973 | else: |
|
971 | 974 | vcs.strip(pr_util.commit_ids['new-feature']) |
|
972 | 975 | |
|
973 | 976 | response = self.app.post( |
|
974 | 977 | route_path('pullrequest_update', |
|
975 | 978 | repo_name=pull_request.target_repo.repo_name, |
|
976 | 979 | pull_request_id=pull_request.pull_request_id), |
|
977 | 980 | params={'update_commits': 'true', |
|
978 | 981 | 'csrf_token': csrf_token}) |
|
979 | 982 | |
|
980 | 983 | assert response.status_int == 200 |
|
981 | 984 | assert response.body == 'true' |
|
982 | 985 | |
|
983 | 986 | # Make sure that after update, it won't raise 500 errors |
|
984 | 987 | response = self.app.get(route_path( |
|
985 | 988 | 'pullrequest_show', |
|
986 | 989 | repo_name=pr_util.target_repository.repo_name, |
|
987 | 990 | pull_request_id=pull_request.pull_request_id)) |
|
988 | 991 | |
|
989 | 992 | assert response.status_int == 200 |
|
990 | assert_response = AssertResponse(response) | |
|
991 | assert_response.element_contains( | |
|
993 | response.assert_response().element_contains( | |
|
992 | 994 | '#changeset_compare_view_content .alert strong', |
|
993 | 995 | 'Missing commits') |
|
994 | 996 | |
|
995 | 997 | def test_branch_is_a_link(self, pr_util): |
|
996 | 998 | pull_request = pr_util.create_pull_request() |
|
997 | 999 | pull_request.source_ref = 'branch:origin:1234567890abcdef' |
|
998 | 1000 | pull_request.target_ref = 'branch:target:abcdef1234567890' |
|
999 | 1001 | Session().add(pull_request) |
|
1000 | 1002 | Session().commit() |
|
1001 | 1003 | |
|
1002 | 1004 | response = self.app.get(route_path( |
|
1003 | 1005 | 'pullrequest_show', |
|
1004 | 1006 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1005 | 1007 | pull_request_id=pull_request.pull_request_id)) |
|
1006 | 1008 | assert response.status_int == 200 |
|
1007 | assert_response = AssertResponse(response) | |
|
1008 | 1009 | |
|
1009 | origin = assert_response.get_element('.pr-origininfo .tag') | |
|
1010 | origin = response.assert_response().get_element('.pr-origininfo .tag') | |
|
1010 | 1011 | origin_children = origin.getchildren() |
|
1011 | 1012 | assert len(origin_children) == 1 |
|
1012 | target = assert_response.get_element('.pr-targetinfo .tag') | |
|
1013 | target = response.assert_response().get_element('.pr-targetinfo .tag') | |
|
1013 | 1014 | target_children = target.getchildren() |
|
1014 | 1015 | assert len(target_children) == 1 |
|
1015 | 1016 | |
|
1016 | 1017 | expected_origin_link = route_path( |
|
1017 | 1018 | 'repo_changelog', |
|
1018 | 1019 | repo_name=pull_request.source_repo.scm_instance().name, |
|
1019 | 1020 | params=dict(branch='origin')) |
|
1020 | 1021 | expected_target_link = route_path( |
|
1021 | 1022 | 'repo_changelog', |
|
1022 | 1023 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1023 | 1024 | params=dict(branch='target')) |
|
1024 | 1025 | assert origin_children[0].attrib['href'] == expected_origin_link |
|
1025 | 1026 | assert origin_children[0].text == 'branch: origin' |
|
1026 | 1027 | assert target_children[0].attrib['href'] == expected_target_link |
|
1027 | 1028 | assert target_children[0].text == 'branch: target' |
|
1028 | 1029 | |
|
1029 | 1030 | def test_bookmark_is_not_a_link(self, pr_util): |
|
1030 | 1031 | pull_request = pr_util.create_pull_request() |
|
1031 | 1032 | pull_request.source_ref = 'bookmark:origin:1234567890abcdef' |
|
1032 | 1033 | pull_request.target_ref = 'bookmark:target:abcdef1234567890' |
|
1033 | 1034 | Session().add(pull_request) |
|
1034 | 1035 | Session().commit() |
|
1035 | 1036 | |
|
1036 | 1037 | response = self.app.get(route_path( |
|
1037 | 1038 | 'pullrequest_show', |
|
1038 | 1039 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1039 | 1040 | pull_request_id=pull_request.pull_request_id)) |
|
1040 | 1041 | assert response.status_int == 200 |
|
1041 | assert_response = AssertResponse(response) | |
|
1042 | 1042 | |
|
1043 | origin = assert_response.get_element('.pr-origininfo .tag') | |
|
1043 | origin = response.assert_response().get_element('.pr-origininfo .tag') | |
|
1044 | 1044 | assert origin.text.strip() == 'bookmark: origin' |
|
1045 | 1045 | assert origin.getchildren() == [] |
|
1046 | 1046 | |
|
1047 | target = assert_response.get_element('.pr-targetinfo .tag') | |
|
1047 | target = response.assert_response().get_element('.pr-targetinfo .tag') | |
|
1048 | 1048 | assert target.text.strip() == 'bookmark: target' |
|
1049 | 1049 | assert target.getchildren() == [] |
|
1050 | 1050 | |
|
1051 | 1051 | def test_tag_is_not_a_link(self, pr_util): |
|
1052 | 1052 | pull_request = pr_util.create_pull_request() |
|
1053 | 1053 | pull_request.source_ref = 'tag:origin:1234567890abcdef' |
|
1054 | 1054 | pull_request.target_ref = 'tag:target:abcdef1234567890' |
|
1055 | 1055 | Session().add(pull_request) |
|
1056 | 1056 | Session().commit() |
|
1057 | 1057 | |
|
1058 | 1058 | response = self.app.get(route_path( |
|
1059 | 1059 | 'pullrequest_show', |
|
1060 | 1060 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1061 | 1061 | pull_request_id=pull_request.pull_request_id)) |
|
1062 | 1062 | assert response.status_int == 200 |
|
1063 | assert_response = AssertResponse(response) | |
|
1064 | 1063 | |
|
1065 | origin = assert_response.get_element('.pr-origininfo .tag') | |
|
1064 | origin = response.assert_response().get_element('.pr-origininfo .tag') | |
|
1066 | 1065 | assert origin.text.strip() == 'tag: origin' |
|
1067 | 1066 | assert origin.getchildren() == [] |
|
1068 | 1067 | |
|
1069 | target = assert_response.get_element('.pr-targetinfo .tag') | |
|
1068 | target = response.assert_response().get_element('.pr-targetinfo .tag') | |
|
1070 | 1069 | assert target.text.strip() == 'tag: target' |
|
1071 | 1070 | assert target.getchildren() == [] |
|
1072 | 1071 | |
|
1073 | 1072 | @pytest.mark.parametrize('mergeable', [True, False]) |
|
1074 | 1073 | def test_shadow_repository_link( |
|
1075 | 1074 | self, mergeable, pr_util, http_host_only_stub): |
|
1076 | 1075 | """ |
|
1077 | 1076 | Check that the pull request summary page displays a link to the shadow |
|
1078 | 1077 | repository if the pull request is mergeable. If it is not mergeable |
|
1079 | 1078 | the link should not be displayed. |
|
1080 | 1079 | """ |
|
1081 | 1080 | pull_request = pr_util.create_pull_request( |
|
1082 | 1081 | mergeable=mergeable, enable_notifications=False) |
|
1083 | 1082 | target_repo = pull_request.target_repo.scm_instance() |
|
1084 | 1083 | pr_id = pull_request.pull_request_id |
|
1085 | 1084 | shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format( |
|
1086 | 1085 | host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id) |
|
1087 | 1086 | |
|
1088 | 1087 | response = self.app.get(route_path( |
|
1089 | 1088 | 'pullrequest_show', |
|
1090 | 1089 | repo_name=target_repo.name, |
|
1091 | 1090 | pull_request_id=pr_id)) |
|
1092 | 1091 | |
|
1093 | assertr = AssertResponse(response) | |
|
1094 | 1092 | if mergeable: |
|
1095 | assertr.element_value_contains('input.pr-mergeinfo', shadow_url) | 

1096 | assertr.element_value_contains('input.pr-mergeinfo ', 'pr-merge') | |
|
1093 | response.assert_response().element_value_contains( | |
|
1094 | 'input.pr-mergeinfo', shadow_url) | |
|
1095 | response.assert_response().element_value_contains( | |
|
1096 | 'input.pr-mergeinfo ', 'pr-merge') | |
|
1097 | 1097 | else: |
|
1098 | assertr.no_element_exists('.pr-mergeinfo') | |
|
1098 | response.assert_response().no_element_exists('.pr-mergeinfo') | |
|
1099 | 1099 | |
|
1100 | 1100 | |
|
1101 | 1101 | @pytest.mark.usefixtures('app') |
|
1102 | 1102 | @pytest.mark.backends("git", "hg") |
|
1103 | 1103 | class TestPullrequestsControllerDelete(object): |
|
1104 | 1104 | def test_pull_request_delete_button_permissions_admin( |
|
1105 | 1105 | self, autologin_user, user_admin, pr_util): |
|
1106 | 1106 | pull_request = pr_util.create_pull_request( |
|
1107 | 1107 | author=user_admin.username, enable_notifications=False) |
|
1108 | 1108 | |
|
1109 | 1109 | response = self.app.get(route_path( |
|
1110 | 1110 | 'pullrequest_show', |
|
1111 | 1111 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1112 | 1112 | pull_request_id=pull_request.pull_request_id)) |
|
1113 | 1113 | |
|
1114 | 1114 | response.mustcontain('id="delete_pullrequest"') |
|
1115 | 1115 | response.mustcontain('Confirm to delete this pull request') |
|
1116 | 1116 | |
|
1117 | 1117 | def test_pull_request_delete_button_permissions_owner( |
|
1118 | 1118 | self, autologin_regular_user, user_regular, pr_util): |
|
1119 | 1119 | pull_request = pr_util.create_pull_request( |
|
1120 | 1120 | author=user_regular.username, enable_notifications=False) |
|
1121 | 1121 | |
|
1122 | 1122 | response = self.app.get(route_path( |
|
1123 | 1123 | 'pullrequest_show', |
|
1124 | 1124 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1125 | 1125 | pull_request_id=pull_request.pull_request_id)) |
|
1126 | 1126 | |
|
1127 | 1127 | response.mustcontain('id="delete_pullrequest"') |
|
1128 | 1128 | response.mustcontain('Confirm to delete this pull request') |
|
1129 | 1129 | |
|
1130 | 1130 | def test_pull_request_delete_button_permissions_forbidden( |
|
1131 | 1131 | self, autologin_regular_user, user_regular, user_admin, pr_util): |
|
1132 | 1132 | pull_request = pr_util.create_pull_request( |
|
1133 | 1133 | author=user_admin.username, enable_notifications=False) |
|
1134 | 1134 | |
|
1135 | 1135 | response = self.app.get(route_path( |
|
1136 | 1136 | 'pullrequest_show', |
|
1137 | 1137 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1138 | 1138 | pull_request_id=pull_request.pull_request_id)) |
|
1139 | 1139 | response.mustcontain(no=['id="delete_pullrequest"']) |
|
1140 | 1140 | response.mustcontain(no=['Confirm to delete this pull request']) |
|
1141 | 1141 | |
|
1142 | 1142 | def test_pull_request_delete_button_permissions_can_update_cannot_delete( |
|
1143 | 1143 | self, autologin_regular_user, user_regular, user_admin, pr_util, |
|
1144 | 1144 | user_util): |
|
1145 | 1145 | |
|
1146 | 1146 | pull_request = pr_util.create_pull_request( |
|
1147 | 1147 | author=user_admin.username, enable_notifications=False) |
|
1148 | 1148 | |
|
1149 | 1149 | user_util.grant_user_permission_to_repo( |
|
1150 | 1150 | pull_request.target_repo, user_regular, |
|
1151 | 1151 | 'repository.write') |
|
1152 | 1152 | |
|
1153 | 1153 | response = self.app.get(route_path( |
|
1154 | 1154 | 'pullrequest_show', |
|
1155 | 1155 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1156 | 1156 | pull_request_id=pull_request.pull_request_id)) |
|
1157 | 1157 | |
|
1158 | 1158 | response.mustcontain('id="open_edit_pullrequest"') |
|
1159 | 1159 | response.mustcontain('id="delete_pullrequest"') |
|
1160 | 1160 | response.mustcontain(no=['Confirm to delete this pull request']) |
|
1161 | 1161 | |
|
1162 | 1162 | def test_delete_comment_returns_404_if_comment_does_not_exist( |
|
1163 | 1163 | self, autologin_user, pr_util, user_admin, csrf_token, xhr_header): |
|
1164 | 1164 | |
|
1165 | 1165 | pull_request = pr_util.create_pull_request( |
|
1166 | 1166 | author=user_admin.username, enable_notifications=False) |
|
1167 | 1167 | |
|
1168 | 1168 | self.app.post( |
|
1169 | 1169 | route_path( |
|
1170 | 1170 | 'pullrequest_comment_delete', |
|
1171 | 1171 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1172 | 1172 | pull_request_id=pull_request.pull_request_id, |
|
1173 | 1173 | comment_id=1024404), |
|
1174 | 1174 | extra_environ=xhr_header, |
|
1175 | 1175 | params={'csrf_token': csrf_token}, |
|
1176 | 1176 | status=404 |
|
1177 | 1177 | ) |
|
1178 | 1178 | |
|
1179 | 1179 | def test_delete_comment( |
|
1180 | 1180 | self, autologin_user, pr_util, user_admin, csrf_token, xhr_header): |
|
1181 | 1181 | |
|
1182 | 1182 | pull_request = pr_util.create_pull_request( |
|
1183 | 1183 | author=user_admin.username, enable_notifications=False) |
|
1184 | 1184 | comment = pr_util.create_comment() |
|
1185 | 1185 | comment_id = comment.comment_id |
|
1186 | 1186 | |
|
1187 | 1187 | response = self.app.post( |
|
1188 | 1188 | route_path( |
|
1189 | 1189 | 'pullrequest_comment_delete', |
|
1190 | 1190 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1191 | 1191 | pull_request_id=pull_request.pull_request_id, |
|
1192 | 1192 | comment_id=comment_id), |
|
1193 | 1193 | extra_environ=xhr_header, |
|
1194 | 1194 | params={'csrf_token': csrf_token}, |
|
1195 | 1195 | status=200 |
|
1196 | 1196 | ) |
|
1197 | 1197 | assert response.body == 'true' |
|
1198 | 1198 | |
|
1199 | 1199 | @pytest.mark.parametrize('url_type', [ |
|
1200 | 1200 | 'pullrequest_new', |
|
1201 | 1201 | 'pullrequest_create', |
|
1202 | 1202 | 'pullrequest_update', |
|
1203 | 1203 | 'pullrequest_merge', |
|
1204 | 1204 | ]) |
|
1205 | 1205 | def test_pull_request_is_forbidden_on_archived_repo( |
|
1206 | 1206 | self, autologin_user, backend, xhr_header, user_util, url_type): |
|
1207 | 1207 | |
|
1208 | 1208 | # create a temporary repo |
|
1209 | 1209 | source = user_util.create_repo(repo_type=backend.alias) |
|
1210 | 1210 | repo_name = source.repo_name |
|
1211 | 1211 | repo = Repository.get_by_repo_name(repo_name) |
|
1212 | 1212 | repo.archived = True |
|
1213 | 1213 | Session().commit() |
|
1214 | 1214 | |
|
1215 | 1215 | response = self.app.get( |
|
1216 | 1216 | route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302) |
|
1217 | 1217 | |
|
1218 | 1218 | msg = 'Action not supported for archived repository.' |
|
1219 | 1219 | assert_session_flash(response, msg) |
|
1220 | 1220 | |
|
1221 | 1221 | |
|
1222 | 1222 | def assert_pull_request_status(pull_request, expected_status): |
|
1223 | 1223 | status = ChangesetStatusModel().calculated_review_status( |
|
1224 | 1224 | pull_request=pull_request) |
|
1225 | 1225 | assert status == expected_status |
|
1226 | 1226 | |
|
1227 | 1227 | |
|
1228 | 1228 | @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create']) |
|
1229 | 1229 | @pytest.mark.usefixtures("autologin_user") |
|
1230 | 1230 | def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route): |
|
1231 | 1231 | response = app.get( |
|
1232 | 1232 | route_path(route, repo_name=backend_svn.repo_name), status=404) |
|
1233 | 1233 |
@@ -1,1414 +1,1412 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | import collections |
|
23 | 23 | |
|
24 | 24 | import formencode |
|
25 | 25 | import formencode.htmlfill |
|
26 | 26 | import peppercorn |
|
27 | 27 | from pyramid.httpexceptions import ( |
|
28 | 28 | HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest) |
|
29 | 29 | from pyramid.view import view_config |
|
30 | 30 | from pyramid.renderers import render |
|
31 | 31 | |
|
32 | 32 | from rhodecode import events |
|
33 | 33 | from rhodecode.apps._base import RepoAppView, DataGridAppView |
|
34 | 34 | |
|
35 | 35 | from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream |
|
36 | 36 | from rhodecode.lib.base import vcs_operation_context |
|
37 | 37 | from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist |
|
38 | 38 | from rhodecode.lib.ext_json import json |
|
39 | 39 | from rhodecode.lib.auth import ( |
|
40 | 40 | LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator, |
|
41 | 41 | NotAnonymous, CSRFRequired) |
|
42 | 42 | from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode |
|
43 | 43 | from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason |
|
44 | 44 | from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError, |
|
45 | 45 | RepositoryRequirementError, EmptyRepositoryError) |
|
46 | 46 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
47 | 47 | from rhodecode.model.comment import CommentsModel |
|
48 | 48 | from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion, |
|
49 | 49 | ChangesetComment, ChangesetStatus, Repository) |
|
50 | 50 | from rhodecode.model.forms import PullRequestForm |
|
51 | 51 | from rhodecode.model.meta import Session |
|
52 | 52 | from rhodecode.model.pull_request import PullRequestModel, MergeCheck |
|
53 | 53 | from rhodecode.model.scm import ScmModel |
|
54 | 54 | |
|
55 | 55 | log = logging.getLogger(__name__) |
|
56 | 56 | |
|
57 | 57 | |
|
58 | 58 | class RepoPullRequestsView(RepoAppView, DataGridAppView): |
|
59 | 59 | |
|
60 | 60 | def load_default_context(self): |
|
61 | 61 | c = self._get_local_tmpl_context(include_app_defaults=True) |
|
62 | 62 | c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED |
|
63 | 63 | c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED |
|
64 | 64 | # backward compat., we use for OLD PRs a plain renderer |
|
65 | 65 | c.renderer = 'plain' |
|
66 | 66 | return c |
|
67 | 67 | |
|
68 | 68 | def _get_pull_requests_list( |
|
69 | 69 | self, repo_name, source, filter_type, opened_by, statuses): |
|
70 | 70 | |
|
71 | 71 | draw, start, limit = self._extract_chunk(self.request) |
|
72 | 72 | search_q, order_by, order_dir = self._extract_ordering(self.request) |
|
73 | 73 | _render = self.request.get_partial_renderer( |
|
74 | 74 | 'rhodecode:templates/data_table/_dt_elements.mako') |
|
75 | 75 | |
|
76 | 76 | # pagination |
|
77 | 77 | |
|
78 | 78 | if filter_type == 'awaiting_review': |
|
79 | 79 | pull_requests = PullRequestModel().get_awaiting_review( |
|
80 | 80 | repo_name, source=source, opened_by=opened_by, |
|
81 | 81 | statuses=statuses, offset=start, length=limit, |
|
82 | 82 | order_by=order_by, order_dir=order_dir) |
|
83 | 83 | pull_requests_total_count = PullRequestModel().count_awaiting_review( |
|
84 | 84 | repo_name, source=source, statuses=statuses, |
|
85 | 85 | opened_by=opened_by) |
|
86 | 86 | elif filter_type == 'awaiting_my_review': |
|
87 | 87 | pull_requests = PullRequestModel().get_awaiting_my_review( |
|
88 | 88 | repo_name, source=source, opened_by=opened_by, |
|
89 | 89 | user_id=self._rhodecode_user.user_id, statuses=statuses, |
|
90 | 90 | offset=start, length=limit, order_by=order_by, |
|
91 | 91 | order_dir=order_dir) |
|
92 | 92 | pull_requests_total_count = PullRequestModel().count_awaiting_my_review( |
|
93 | 93 | repo_name, source=source, user_id=self._rhodecode_user.user_id, |
|
94 | 94 | statuses=statuses, opened_by=opened_by) |
|
95 | 95 | else: |
|
96 | 96 | pull_requests = PullRequestModel().get_all( |
|
97 | 97 | repo_name, source=source, opened_by=opened_by, |
|
98 | 98 | statuses=statuses, offset=start, length=limit, |
|
99 | 99 | order_by=order_by, order_dir=order_dir) |
|
100 | 100 | pull_requests_total_count = PullRequestModel().count_all( |
|
101 | 101 | repo_name, source=source, statuses=statuses, |
|
102 | 102 | opened_by=opened_by) |
|
103 | 103 | |
|
104 | 104 | data = [] |
|
105 | 105 | comments_model = CommentsModel() |
|
106 | 106 | for pr in pull_requests: |
|
107 | 107 | comments = comments_model.get_all_comments( |
|
108 | 108 | self.db_repo.repo_id, pull_request=pr) |
|
109 | 109 | |
|
110 | 110 | data.append({ |
|
111 | 111 | 'name': _render('pullrequest_name', |
|
112 | 112 | pr.pull_request_id, pr.target_repo.repo_name), |
|
113 | 113 | 'name_raw': pr.pull_request_id, |
|
114 | 114 | 'status': _render('pullrequest_status', |
|
115 | 115 | pr.calculated_review_status()), |
|
116 | 116 | 'title': _render( |
|
117 | 117 | 'pullrequest_title', pr.title, pr.description), |
|
118 | 118 | 'description': h.escape(pr.description), |
|
119 | 119 | 'updated_on': _render('pullrequest_updated_on', |
|
120 | 120 | h.datetime_to_time(pr.updated_on)), |
|
121 | 121 | 'updated_on_raw': h.datetime_to_time(pr.updated_on), |
|
122 | 122 | 'created_on': _render('pullrequest_updated_on', |
|
123 | 123 | h.datetime_to_time(pr.created_on)), |
|
124 | 124 | 'created_on_raw': h.datetime_to_time(pr.created_on), |
|
125 | 125 | 'author': _render('pullrequest_author', |
|
126 | 126 | pr.author.full_contact, ), |
|
127 | 127 | 'author_raw': pr.author.full_name, |
|
128 | 128 | 'comments': _render('pullrequest_comments', len(comments)), |
|
129 | 129 | 'comments_raw': len(comments), |
|
130 | 130 | 'closed': pr.is_closed(), |
|
131 | 131 | }) |
|
132 | 132 | |
|
133 | 133 | data = ({ |
|
134 | 134 | 'draw': draw, |
|
135 | 135 | 'data': data, |
|
136 | 136 | 'recordsTotal': pull_requests_total_count, |
|
137 | 137 | 'recordsFiltered': pull_requests_total_count, |
|
138 | 138 | }) |
|
139 | 139 | return data |
|
140 | 140 | |
|
141 | 141 | def get_recache_flag(self): |
|
142 | 142 | for flag_name in ['force_recache', 'force-recache', 'no-cache']: |
|
143 | 143 | flag_val = self.request.GET.get(flag_name) |
|
144 | 144 | if str2bool(flag_val): |
|
145 | 145 | return True |
|
146 | 146 | return False |
|
147 | 147 | |
|
148 | 148 | @LoginRequired() |
|
149 | 149 | @HasRepoPermissionAnyDecorator( |
|
150 | 150 | 'repository.read', 'repository.write', 'repository.admin') |
|
151 | 151 | @view_config( |
|
152 | 152 | route_name='pullrequest_show_all', request_method='GET', |
|
153 | 153 | renderer='rhodecode:templates/pullrequests/pullrequests.mako') |
|
154 | 154 | def pull_request_list(self): |
|
155 | 155 | c = self.load_default_context() |
|
156 | 156 | |
|
157 | 157 | req_get = self.request.GET |
|
158 | 158 | c.source = str2bool(req_get.get('source')) |
|
159 | 159 | c.closed = str2bool(req_get.get('closed')) |
|
160 | 160 | c.my = str2bool(req_get.get('my')) |
|
161 | 161 | c.awaiting_review = str2bool(req_get.get('awaiting_review')) |
|
162 | 162 | c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review')) |
|
163 | 163 | |
|
164 | 164 | c.active = 'open' |
|
165 | 165 | if c.my: |
|
166 | 166 | c.active = 'my' |
|
167 | 167 | if c.closed: |
|
168 | 168 | c.active = 'closed' |
|
169 | 169 | if c.awaiting_review and not c.source: |
|
170 | 170 | c.active = 'awaiting' |
|
171 | 171 | if c.source and not c.awaiting_review: |
|
172 | 172 | c.active = 'source' |
|
173 | 173 | if c.awaiting_my_review: |
|
174 | 174 | c.active = 'awaiting_my' |
|
175 | 175 | |
|
176 | 176 | return self._get_template_context(c) |
|
177 | 177 | |
|
178 | 178 | @LoginRequired() |
|
179 | 179 | @HasRepoPermissionAnyDecorator( |
|
180 | 180 | 'repository.read', 'repository.write', 'repository.admin') |
|
181 | 181 | @view_config( |
|
182 | 182 | route_name='pullrequest_show_all_data', request_method='GET', |
|
183 | 183 | renderer='json_ext', xhr=True) |
|
184 | 184 | def pull_request_list_data(self): |
|
185 | 185 | self.load_default_context() |
|
186 | 186 | |
|
187 | 187 | # additional filters |
|
188 | 188 | req_get = self.request.GET |
|
189 | 189 | source = str2bool(req_get.get('source')) |
|
190 | 190 | closed = str2bool(req_get.get('closed')) |
|
191 | 191 | my = str2bool(req_get.get('my')) |
|
192 | 192 | awaiting_review = str2bool(req_get.get('awaiting_review')) |
|
193 | 193 | awaiting_my_review = str2bool(req_get.get('awaiting_my_review')) |
|
194 | 194 | |
|
195 | 195 | filter_type = 'awaiting_review' if awaiting_review \ |
|
196 | 196 | else 'awaiting_my_review' if awaiting_my_review \ |
|
197 | 197 | else None |
|
198 | 198 | |
|
199 | 199 | opened_by = None |
|
200 | 200 | if my: |
|
201 | 201 | opened_by = [self._rhodecode_user.user_id] |
|
202 | 202 | |
|
203 | 203 | statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN] |
|
204 | 204 | if closed: |
|
205 | 205 | statuses = [PullRequest.STATUS_CLOSED] |
|
206 | 206 | |
|
207 | 207 | data = self._get_pull_requests_list( |
|
208 | 208 | repo_name=self.db_repo_name, source=source, |
|
209 | 209 | filter_type=filter_type, opened_by=opened_by, statuses=statuses) |
|
210 | 210 | |
|
211 | 211 | return data |
|
212 | 212 | |
|
213 | 213 | def _is_diff_cache_enabled(self, target_repo): |
|
214 | 214 | caching_enabled = self._get_general_setting( |
|
215 | 215 | target_repo, 'rhodecode_diff_cache') |
|
216 | 216 | log.debug('Diff caching enabled: %s', caching_enabled) |
|
217 | 217 | return caching_enabled |
|
218 | 218 | |
|
219 | 219 | def _get_diffset(self, source_repo_name, source_repo, |
|
220 | 220 | source_ref_id, target_ref_id, |
|
221 | 221 | target_commit, source_commit, diff_limit, file_limit, |
|
222 | 222 | fulldiff, hide_whitespace_changes, diff_context): |
|
223 | 223 | |
|
224 | 224 | vcs_diff = PullRequestModel().get_diff( |
|
225 | 225 | source_repo, source_ref_id, target_ref_id, |
|
226 | 226 | hide_whitespace_changes, diff_context) |
|
227 | 227 | |
|
228 | 228 | diff_processor = diffs.DiffProcessor( |
|
229 | 229 | vcs_diff, format='newdiff', diff_limit=diff_limit, |
|
230 | 230 | file_limit=file_limit, show_full_diff=fulldiff) |
|
231 | 231 | |
|
232 | 232 | _parsed = diff_processor.prepare() |
|
233 | 233 | |
|
234 | 234 | diffset = codeblocks.DiffSet( |
|
235 | 235 | repo_name=self.db_repo_name, |
|
236 | 236 | source_repo_name=source_repo_name, |
|
237 | 237 | source_node_getter=codeblocks.diffset_node_getter(target_commit), |
|
238 | 238 | target_node_getter=codeblocks.diffset_node_getter(source_commit), |
|
239 | 239 | ) |
|
240 | 240 | diffset = self.path_filter.render_patchset_filtered( |
|
241 | 241 | diffset, _parsed, target_commit.raw_id, source_commit.raw_id) |
|
242 | 242 | |
|
243 | 243 | return diffset |
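
# Editor's summary of the pipeline above: raw vcs diff -> DiffProcessor
# (diff_limit cuts off the whole diff, file_limit hides oversized files)
# -> prepare() parses it -> DiffSet binds node getters so file contents
# resolve against the target/source commits -> the per-user path filter
# strips paths the viewer may not see before rendering.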
|
244 | 244 | |
|
245 | 245 | def _get_range_diffset(self, source_scm, source_repo, |
|
246 | 246 | commit1, commit2, diff_limit, file_limit, |
|
247 | 247 | fulldiff, hide_whitespace_changes, diff_context): |
|
248 | 248 | vcs_diff = source_scm.get_diff( |
|
249 | 249 | commit1, commit2, |
|
250 | 250 | ignore_whitespace=hide_whitespace_changes, |
|
251 | 251 | context=diff_context) |
|
252 | 252 | |
|
253 | 253 | diff_processor = diffs.DiffProcessor( |
|
254 | 254 | vcs_diff, format='newdiff', diff_limit=diff_limit, |
|
255 | 255 | file_limit=file_limit, show_full_diff=fulldiff) |
|
256 | 256 | |
|
257 | 257 | _parsed = diff_processor.prepare() |
|
258 | 258 | |
|
259 | 259 | diffset = codeblocks.DiffSet( |
|
260 | 260 | repo_name=source_repo.repo_name, |
|
261 | 261 | source_node_getter=codeblocks.diffset_node_getter(commit1), |
|
262 | 262 | target_node_getter=codeblocks.diffset_node_getter(commit2)) |
|
263 | 263 | |
|
264 | 264 | diffset = self.path_filter.render_patchset_filtered( |
|
265 | 265 | diffset, _parsed, commit1.raw_id, commit2.raw_id) |
|
266 | 266 | |
|
267 | 267 | return diffset |
|
268 | 268 | |
|
269 | 269 | @LoginRequired() |
|
270 | 270 | @HasRepoPermissionAnyDecorator( |
|
271 | 271 | 'repository.read', 'repository.write', 'repository.admin') |
|
272 | 272 | @view_config( |
|
273 | 273 | route_name='pullrequest_show', request_method='GET', |
|
274 | 274 | renderer='rhodecode:templates/pullrequests/pullrequest_show.mako') |
|
275 | 275 | def pull_request_show(self): |
|
276 | 276 | pull_request_id = self.request.matchdict['pull_request_id'] |
|
277 | 277 | |
|
278 | 278 | c = self.load_default_context() |
|
279 | 279 | |
|
280 | 280 | version = self.request.GET.get('version') |
|
281 | 281 | from_version = self.request.GET.get('from_version') or version |
|
282 | 282 | merge_checks = self.request.GET.get('merge_checks') |
|
283 | 283 | c.fulldiff = str2bool(self.request.GET.get('fulldiff')) |
|
284 | 284 | |
|
285 | 285 | # fetch global flags of ignore ws or context lines |
|
286 | 286 | diff_context = diffs.get_diff_context(self.request) |
|
287 | 287 | hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request) |
|
288 | 288 | |
|
289 | 289 | force_refresh = str2bool(self.request.GET.get('force_refresh')) |
|
290 | 290 | |
|
291 | 291 | (pull_request_latest, |
|
292 | 292 | pull_request_at_ver, |
|
293 | 293 | pull_request_display_obj, |
|
294 | 294 | at_version) = PullRequestModel().get_pr_version( |
|
295 | 295 | pull_request_id, version=version) |
|
296 | 296 | pr_closed = pull_request_latest.is_closed() |
|
297 | 297 | |
|
298 | 298 | if pr_closed and (version or from_version): |
|
299 | 299 | # do not allow browsing versions
|
300 | 300 | raise HTTPFound(h.route_path( |
|
301 | 301 | 'pullrequest_show', repo_name=self.db_repo_name, |
|
302 | 302 | pull_request_id=pull_request_id)) |
|
303 | 303 | |
|
304 | 304 | versions = pull_request_display_obj.versions() |
|
305 | 305 | # used to store per-commit range diffs |
|
306 | 306 | c.changes = collections.OrderedDict() |
|
307 | 307 | c.range_diff_on = self.request.GET.get('range-diff') == "1" |
|
308 | 308 | |
|
309 | 309 | c.at_version = at_version |
|
310 | 310 | c.at_version_num = (at_version |
|
311 | 311 | if at_version and at_version != 'latest' |
|
312 | 312 | else None) |
|
313 | 313 | c.at_version_pos = ChangesetComment.get_index_from_version( |
|
314 | 314 | c.at_version_num, versions) |
|
315 | 315 | |
|
316 | 316 | (prev_pull_request_latest, |
|
317 | 317 | prev_pull_request_at_ver, |
|
318 | 318 | prev_pull_request_display_obj, |
|
319 | 319 | prev_at_version) = PullRequestModel().get_pr_version( |
|
320 | 320 | pull_request_id, version=from_version) |
|
321 | 321 | |
|
322 | 322 | c.from_version = prev_at_version |
|
323 | 323 | c.from_version_num = (prev_at_version |
|
324 | 324 | if prev_at_version and prev_at_version != 'latest' |
|
325 | 325 | else None) |
|
326 | 326 | c.from_version_pos = ChangesetComment.get_index_from_version( |
|
327 | 327 | c.from_version_num, versions) |
|
328 | 328 | |
|
329 | 329 | # define if we're in COMPARE mode or VIEW at version mode |
|
330 | 330 | compare = at_version != prev_at_version |
|
331 | 331 | |
|
332 | 332 | # the repo_name this pull request was opened against,

333 | 333 | # i.e. target_repo must match
|
334 | 334 | if self.db_repo_name != pull_request_at_ver.target_repo.repo_name: |
|
335 | 335 | raise HTTPNotFound() |
|
336 | 336 | |
|
337 | 337 | c.shadow_clone_url = PullRequestModel().get_shadow_clone_url( |
|
338 | 338 | pull_request_at_ver) |
|
339 | 339 | |
|
340 | 340 | c.pull_request = pull_request_display_obj |
|
341 | 341 | c.renderer = pull_request_at_ver.description_renderer or c.renderer |
|
342 | 342 | c.pull_request_latest = pull_request_latest |
|
343 | 343 | |
|
344 | 344 | if compare or (at_version and at_version != 'latest'):
|
345 | 345 | c.allowed_to_change_status = False |
|
346 | 346 | c.allowed_to_update = False |
|
347 | 347 | c.allowed_to_merge = False |
|
348 | 348 | c.allowed_to_delete = False |
|
349 | 349 | c.allowed_to_comment = False |
|
350 | 350 | c.allowed_to_close = False |
|
351 | 351 | else: |
|
352 | 352 | can_change_status = PullRequestModel().check_user_change_status( |
|
353 | 353 | pull_request_at_ver, self._rhodecode_user) |
|
354 | 354 | c.allowed_to_change_status = can_change_status and not pr_closed |
|
355 | 355 | |
|
356 | 356 | c.allowed_to_update = PullRequestModel().check_user_update( |
|
357 | 357 | pull_request_latest, self._rhodecode_user) and not pr_closed |
|
358 | 358 | c.allowed_to_merge = PullRequestModel().check_user_merge( |
|
359 | 359 | pull_request_latest, self._rhodecode_user) and not pr_closed |
|
360 | 360 | c.allowed_to_delete = PullRequestModel().check_user_delete( |
|
361 | 361 | pull_request_latest, self._rhodecode_user) and not pr_closed |
|
362 | 362 | c.allowed_to_comment = not pr_closed |
|
363 | 363 | c.allowed_to_close = c.allowed_to_merge and not pr_closed |
|
364 | 364 | |
|
365 | 365 | c.forbid_adding_reviewers = False |
|
366 | 366 | c.forbid_author_to_review = False |
|
367 | 367 | c.forbid_commit_author_to_review = False |
|
368 | 368 | |
|
369 | 369 | if pull_request_latest.reviewer_data and \ |
|
370 | 370 | 'rules' in pull_request_latest.reviewer_data: |
|
371 | 371 | rules = pull_request_latest.reviewer_data['rules'] or {} |
|
372 | 372 | try: |
|
373 | 373 | c.forbid_adding_reviewers = rules.get( |
|
374 | 374 | 'forbid_adding_reviewers') |
|
375 | 375 | c.forbid_author_to_review = rules.get( |
|
376 | 376 | 'forbid_author_to_review') |
|
377 | 377 | c.forbid_commit_author_to_review = rules.get( |
|
378 | 378 | 'forbid_commit_author_to_review') |
|
379 | 379 | except Exception: |
|
380 | 380 | pass |
|
381 | 381 | |
|
382 | 382 | # check merge capabilities |
|
383 | 383 | _merge_check = MergeCheck.validate( |
|
384 | 384 | pull_request_latest, auth_user=self._rhodecode_user, |
|
385 | 385 | translator=self.request.translate, |
|
386 | 386 | force_shadow_repo_refresh=force_refresh) |
|
387 | 387 | c.pr_merge_errors = _merge_check.error_details |
|
388 | 388 | c.pr_merge_possible = not _merge_check.failed |
|
389 | 389 | c.pr_merge_message = _merge_check.merge_msg |
|
390 | 390 | |
|
391 | 391 | c.pr_merge_info = MergeCheck.get_merge_conditions( |
|
392 | 392 | pull_request_latest, translator=self.request.translate) |
|
393 | 393 | |
|
394 | 394 | c.pull_request_review_status = _merge_check.review_status |
|
395 | 395 | if merge_checks: |
|
396 | 396 | self.request.override_renderer = \ |
|
397 | 397 | 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako' |
|
398 | 398 | return self._get_template_context(c) |
|
399 | 399 | |
|
400 | 400 | comments_model = CommentsModel() |
|
401 | 401 | |
|
402 | 402 | # reviewers and statuses |
|
403 | 403 | c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses() |
|
404 | 404 | allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers] |
|
405 | 405 | |
|
406 | 406 | # GENERAL COMMENTS with versions # |
|
407 | 407 | q = comments_model._all_general_comments_of_pull_request(pull_request_latest) |
|
408 | 408 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
409 | 409 | general_comments = q |
|
410 | 410 | |
|
411 | 411 | # pick comments we want to render at current version |
|
412 | 412 | c.comment_versions = comments_model.aggregate_comments( |
|
413 | 413 | general_comments, versions, c.at_version_num) |
|
414 | 414 | c.comments = c.comment_versions[c.at_version_num]['until'] |
|
415 | 415 | |
|
416 | 416 | # INLINE COMMENTS with versions # |
|
417 | 417 | q = comments_model._all_inline_comments_of_pull_request(pull_request_latest) |
|
418 | 418 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
419 | 419 | inline_comments = q |
|
420 | 420 | |
|
421 | 421 | c.inline_versions = comments_model.aggregate_comments( |
|
422 | 422 | inline_comments, versions, c.at_version_num, inline=True) |
|
423 | 423 | |
|
424 | 424 | # inject latest version |
|
425 | 425 | latest_ver = PullRequest.get_pr_display_object( |
|
426 | 426 | pull_request_latest, pull_request_latest) |
|
427 | 427 | |
|
428 | 428 | c.versions = versions + [latest_ver] |
|
429 | 429 | |
|
430 | 430 | # if we view a specific version, do not show comments

431 | 431 | # made after that version
|
432 | 432 | display_inline_comments = collections.defaultdict( |
|
433 | 433 | lambda: collections.defaultdict(list)) |
|
434 | 434 | for co in inline_comments: |
|
435 | 435 | if c.at_version_num: |
|
436 | 436 | # pick comments up to the given version, so we

437 | 437 | # don't render comments belonging to a higher version
|
438 | 438 | should_render = co.pull_request_version_id and \ |
|
439 | 439 | co.pull_request_version_id <= c.at_version_num |
|
440 | 440 | else: |
|
441 | 441 | # showing all, for 'latest' |
|
442 | 442 | should_render = True |
|
443 | 443 | |
|
444 | 444 | if should_render: |
|
445 | 445 | display_inline_comments[co.f_path][co.line_no].append(co) |
|
446 | 446 | |
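# Editor's note: the accumulator built above maps file path -> line key ->
# list of comments, for example (keys illustrative; the 'o'/'n' prefixes
# marking old/new diff side are an assumption):
#
#   {'docs/index.rst': {'n10': [<ChangesetComment>],
#                       'o3':  [<ChangesetComment>, <ChangesetComment>]}}
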
|
447 | 447 | # load diff data into the template context; in compare mode the

448 | 448 | # diff is calculated from the changes between PR versions
|
449 | 449 | |
|
450 | 450 | source_repo = pull_request_at_ver.source_repo |
|
451 | 451 | source_ref_id = pull_request_at_ver.source_ref_parts.commit_id |
|
452 | 452 | |
|
453 | 453 | target_repo = pull_request_at_ver.target_repo |
|
454 | 454 | target_ref_id = pull_request_at_ver.target_ref_parts.commit_id |
|
455 | 455 | |
|
456 | 456 | if compare: |
|
457 | 457 | # in compare mode, switch the diff base to the latest commit from the previous version
|
458 | 458 | target_ref_id = prev_pull_request_display_obj.revisions[0] |
|
459 | 459 | |
|
460 | 460 | # even when the PR was opened against bookmarks/branches/tags, we always

461 | 461 | # convert the refs to plain revs so a later bookmark or branch move cannot change the diff
|
462 | 462 | c.source_ref_type = 'rev' |
|
463 | 463 | c.source_ref = source_ref_id |
|
464 | 464 | |
|
465 | 465 | c.target_ref_type = 'rev' |
|
466 | 466 | c.target_ref = target_ref_id |
|
467 | 467 | |
|
468 | 468 | c.source_repo = source_repo |
|
469 | 469 | c.target_repo = target_repo |
|
470 | 470 | |
|
471 | 471 | c.commit_ranges = [] |
|
472 | 472 | source_commit = EmptyCommit() |
|
473 | 473 | target_commit = EmptyCommit() |
|
474 | 474 | c.missing_requirements = False |
|
475 | 475 | |
|
476 | 476 | source_scm = source_repo.scm_instance() |
|
477 | 477 | target_scm = target_repo.scm_instance() |
|
478 | 478 | |
|
479 | 479 | shadow_scm = None |
|
480 | 480 | try: |
|
481 | 481 | shadow_scm = pull_request_latest.get_shadow_repo() |
|
482 | 482 | except Exception: |
|
483 | 483 | log.debug('Failed to get shadow repo', exc_info=True) |
|
484 | 484 | # try the existing source_repo first, falling back to the

485 | 485 | # shadow repo if we could obtain one
|
486 | 486 | commits_source_repo = source_scm or shadow_scm |
|
487 | 487 | |
|
488 | 488 | c.commits_source_repo = commits_source_repo |
|
489 | 489 | c.ancestor = None # set it to None, to hide it from PR view |
|
490 | 490 | |
|
491 | 491 | # empty version means latest, so we keep this to prevent |
|
492 | 492 | # double caching |
|
493 | 493 | version_normalized = version or 'latest' |
|
494 | 494 | from_version_normalized = from_version or 'latest' |
|
495 | 495 | |
|
496 | 496 | cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo) |
|
497 | 497 | cache_file_path = diff_cache_exist( |
|
498 | 498 | cache_path, 'pull_request', pull_request_id, version_normalized, |
|
499 | 499 | from_version_normalized, source_ref_id, target_ref_id, |
|
500 | 500 | hide_whitespace_changes, diff_context, c.fulldiff) |
|
501 | 501 | |
|
502 | 502 | caching_enabled = self._is_diff_cache_enabled(c.target_repo) |
|
503 | 503 | force_recache = self.get_recache_flag() |
|
504 | 504 | |
|
505 | 505 | cached_diff = None |
|
506 | 506 | if caching_enabled: |
|
507 | 507 | cached_diff = load_cached_diff(cache_file_path) |
|
508 | 508 | |
|
509 | 509 | has_proper_commit_cache = ( |
|
510 | 510 | cached_diff and cached_diff.get('commits') |
|
511 | 511 | and len(cached_diff.get('commits', [])) == 5 |
|
512 | 512 | and cached_diff.get('commits')[0] |
|
513 | 513 | and cached_diff.get('commits')[3]) |
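
# Editor's note: the cached 'commits' entry is the 5-tuple returned by
# get_commits(): (ancestor_commit, commit_cache, missing_requirements,
# source_commit, target_commit). The checks above require exactly five
# items with the ancestor (index 0) and source commit (index 3) present
# before the cache is trusted.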
|
514 | 514 | |
|
515 | 515 | if not force_recache and not c.range_diff_on and has_proper_commit_cache: |
|
516 | 516 | diff_commit_cache = \ |
|
517 | 517 | (ancestor_commit, commit_cache, missing_requirements, |
|
518 | 518 | source_commit, target_commit) = cached_diff['commits'] |
|
519 | 519 | else: |
|
520 | 520 | diff_commit_cache = \ |
|
521 | 521 | (ancestor_commit, commit_cache, missing_requirements, |
|
522 | 522 | source_commit, target_commit) = self.get_commits( |
|
523 | 523 | commits_source_repo, |
|
524 | 524 | pull_request_at_ver, |
|
525 | 525 | source_commit, |
|
526 | 526 | source_ref_id, |
|
527 | 527 | source_scm, |
|
528 | 528 | target_commit, |
|
529 | 529 | target_ref_id, |
|
530 | 530 | target_scm) |
|
531 | 531 | |
|
532 | 532 | # register our commit range |
|
533 | 533 | for comm in commit_cache.values(): |
|
534 | 534 | c.commit_ranges.append(comm) |
|
535 | 535 | |
|
536 | 536 | c.missing_requirements = missing_requirements |
|
537 | 537 | c.ancestor_commit = ancestor_commit |
|
538 | 538 | c.statuses = source_repo.statuses( |
|
539 | 539 | [x.raw_id for x in c.commit_ranges]) |
|
540 | 540 | |
|
541 | 541 | # auto-collapse if we have more commits than the limit
|
542 | 542 | collapse_limit = diffs.DiffProcessor._collapse_commits_over |
|
543 | 543 | c.collapse_all_commits = len(c.commit_ranges) > collapse_limit |
|
544 | 544 | c.compare_mode = compare |
|
545 | 545 | |
|
546 | 546 | # diff_limit is the old behavior: it cuts off the whole diff

547 | 547 | # when the limit is hit, whereas file_limit will just hide the

548 | 548 | # big files from the front-end
|
549 | 549 | diff_limit = c.visual.cut_off_limit_diff |
|
550 | 550 | file_limit = c.visual.cut_off_limit_file |
|
551 | 551 | |
|
552 | 552 | c.missing_commits = False |
|
553 | 553 | if (c.missing_requirements |
|
554 | 554 | or isinstance(source_commit, EmptyCommit) |
|
555 | 555 | or source_commit == target_commit): |
|
556 | 556 | |
|
557 | 557 | c.missing_commits = True |
|
558 | 558 | else: |
|
559 | 559 | c.inline_comments = display_inline_comments |
|
560 | 560 | |
|
561 | 561 | has_proper_diff_cache = cached_diff and cached_diff.get('commits') |
|
562 | 562 | if not force_recache and has_proper_diff_cache: |
|
563 | 563 | c.diffset = cached_diff['diff'] |
|
564 | 564 | (ancestor_commit, commit_cache, missing_requirements, |
|
565 | 565 | source_commit, target_commit) = cached_diff['commits'] |
|
566 | 566 | else: |
|
567 | 567 | c.diffset = self._get_diffset( |
|
568 | 568 | c.source_repo.repo_name, commits_source_repo, |
|
569 | 569 | source_ref_id, target_ref_id, |
|
570 | 570 | target_commit, source_commit, |
|
571 | 571 | diff_limit, file_limit, c.fulldiff, |
|
572 | 572 | hide_whitespace_changes, diff_context) |
|
573 | 573 | |
|
574 | 574 | # save cached diff |
|
575 | 575 | if caching_enabled: |
|
576 | 576 | cache_diff(cache_file_path, c.diffset, diff_commit_cache) |
|
577 | 577 | |
|
578 | 578 | c.limited_diff = c.diffset.limited_diff |
|
579 | 579 | |
|
580 | 580 | # calculate files removed from the diff that still have comments bound to them
|
581 | 581 | comment_deleted_files = [ |
|
582 | 582 | fname for fname in display_inline_comments |
|
583 | 583 | if fname not in c.diffset.file_stats] |
|
584 | 584 | |
|
585 | 585 | c.deleted_files_comments = collections.defaultdict(dict) |
|
586 | 586 | for fname, per_line_comments in display_inline_comments.items(): |
|
587 | 587 | if fname in comment_deleted_files: |
|
588 | 588 | c.deleted_files_comments[fname]['stats'] = 0 |
|
589 | 589 | c.deleted_files_comments[fname]['comments'] = list() |
|
590 | 590 | for lno, comments in per_line_comments.items(): |
|
591 | 591 | c.deleted_files_comments[fname]['comments'].extend(comments) |
|
592 | 592 | |
|
593 | 593 | # maybe calculate the range diff |
|
594 | 594 | if c.range_diff_on: |
|
595 | 595 | # TODO(marcink): set whitespace/context |
|
596 | 596 | context_lcl = 3 |
|
597 | 597 | ign_whitespace_lcl = False |
|
598 | 598 | |
|
599 | 599 | for commit in c.commit_ranges: |
|
600 | 600 | commit2 = commit |
|
601 | 601 | commit1 = commit.first_parent |
|
602 | 602 | |
|
603 | 603 | range_diff_cache_file_path = diff_cache_exist( |
|
604 | 604 | cache_path, 'diff', commit.raw_id, |
|
605 | 605 | ign_whitespace_lcl, context_lcl, c.fulldiff) |
|
606 | 606 | |
|
607 | 607 | cached_diff = None |
|
608 | 608 | if caching_enabled: |
|
609 | 609 | cached_diff = load_cached_diff(range_diff_cache_file_path) |
|
610 | 610 | |
|
611 | 611 | has_proper_diff_cache = cached_diff and cached_diff.get('diff') |
|
612 | 612 | if not force_recache and has_proper_diff_cache: |
|
613 | 613 | diffset = cached_diff['diff'] |
|
614 | 614 | else: |
|
615 | 615 | diffset = self._get_range_diffset( |
|
616 | 616 | source_scm, source_repo, |
|
617 | 617 | commit1, commit2, diff_limit, file_limit, |
|
618 | 618 | c.fulldiff, ign_whitespace_lcl, context_lcl |
|
619 | 619 | ) |
|
620 | 620 | |
|
621 | 621 | # save cached diff |
|
622 | 622 | if caching_enabled: |
|
623 | 623 | cache_diff(range_diff_cache_file_path, diffset, None) |
|
624 | 624 | |
|
625 | 625 | c.changes[commit.raw_id] = diffset |
|
626 | 626 | |
|
627 | 627 | # this is a hack to display links properly: when creating a PR, the

628 | 628 | # compare view and others use a different notation, and

629 | 629 | # compare_commits.mako renders links based on the target_repo.

630 | 630 | # We need to swap that here to generate them properly on the HTML side
|
631 | 631 | c.target_repo = c.source_repo |
|
632 | 632 | |
|
633 | 633 | c.commit_statuses = ChangesetStatus.STATUSES |
|
634 | 634 | |
|
635 | 635 | c.show_version_changes = not pr_closed |
|
636 | 636 | if c.show_version_changes: |
|
637 | 637 | cur_obj = pull_request_at_ver |
|
638 | 638 | prev_obj = prev_pull_request_at_ver |
|
639 | 639 | |
|
640 | 640 | old_commit_ids = prev_obj.revisions |
|
641 | 641 | new_commit_ids = cur_obj.revisions |
|
642 | 642 | commit_changes = PullRequestModel()._calculate_commit_id_changes( |
|
643 | 643 | old_commit_ids, new_commit_ids) |
|
644 | 644 | c.commit_changes_summary = commit_changes |
|
645 | 645 | |
|
646 | 646 | # calculate the diff for commits between versions |
|
647 | 647 | c.commit_changes = [] |
|
648 | 648 | mark = lambda cs, fw: list( |
|
649 | 649 | h.itertools.izip_longest([], cs, fillvalue=fw)) |
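
# Editor's note on the lambda above: izip_longest with an empty first
# iterable pairs every commit id with the fill value, e.g.
#
#   >>> mark(['abc1', 'abc2'], 'a')
#   [('a', 'abc1'), ('a', 'abc2')]
#
# so c_type below is 'a'/'r'/'c' for added/removed/common commits.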
|
650 | 650 | for c_type, raw_id in mark(commit_changes.added, 'a') \ |
|
651 | 651 | + mark(commit_changes.removed, 'r') \ |
|
652 | 652 | + mark(commit_changes.common, 'c'): |
|
653 | 653 | |
|
654 | 654 | if raw_id in commit_cache: |
|
655 | 655 | commit = commit_cache[raw_id] |
|
656 | 656 | else: |
|
657 | 657 | try: |
|
658 | 658 | commit = commits_source_repo.get_commit(raw_id) |
|
659 | 659 | except CommitDoesNotExistError: |
|
660 | 660 | # in case extraction fails, still use a "dummy" commit

661 | 661 | # for display in the commit diff
|
662 | 662 | commit = h.AttributeDict( |
|
663 | 663 | {'raw_id': raw_id, |
|
664 | 664 | 'message': 'EMPTY or MISSING COMMIT'}) |
|
665 | 665 | c.commit_changes.append([c_type, commit]) |
|
666 | 666 | |
|
667 | 667 | # current user review statuses for each version |
|
668 | 668 | c.review_versions = {} |
|
669 | 669 | if self._rhodecode_user.user_id in allowed_reviewers: |
|
670 | 670 | for co in general_comments: |
|
671 | 671 | if co.author.user_id == self._rhodecode_user.user_id: |
|
672 | 672 | status = co.status_change |
|
673 | 673 | if status: |
|
674 | 674 | _ver_pr = status[0].comment.pull_request_version_id |
|
675 | 675 | c.review_versions[_ver_pr] = status[0] |
|
676 | 676 | |
|
677 | 677 | return self._get_template_context(c) |
|
678 | 678 | |
|
679 | 679 | def get_commits( |
|
680 | 680 | self, commits_source_repo, pull_request_at_ver, source_commit, |
|
681 | 681 | source_ref_id, source_scm, target_commit, target_ref_id, target_scm): |
|
682 | 682 | commit_cache = collections.OrderedDict() |
|
683 | 683 | missing_requirements = False |
|
684 | 684 | try: |
|
685 | 685 | pre_load = ["author", "branch", "date", "message", "parents"] |
|
686 | 686 | show_revs = pull_request_at_ver.revisions |
|
687 | 687 | for rev in show_revs: |
|
688 | 688 | comm = commits_source_repo.get_commit( |
|
689 | 689 | commit_id=rev, pre_load=pre_load) |
|
690 | 690 | commit_cache[comm.raw_id] = comm |
|
691 | 691 | |
|
692 | 692 | # Order here matters, we first need to get target, and then |
|
693 | 693 | # the source |
|
694 | 694 | target_commit = commits_source_repo.get_commit( |
|
695 | 695 | commit_id=safe_str(target_ref_id)) |
|
696 | 696 | |
|
697 | 697 | source_commit = commits_source_repo.get_commit( |
|
698 | 698 | commit_id=safe_str(source_ref_id)) |
|
699 | 699 | except CommitDoesNotExistError: |
|
700 | 700 | log.warning( |
|
701 | 701 | 'Failed to get commit from `{}` repo'.format( |
|
702 | 702 | commits_source_repo), exc_info=True) |
|
703 | 703 | except RepositoryRequirementError: |
|
704 | 704 | log.warning( |
|
705 | 705 | 'Failed to get all required data from repo', exc_info=True) |
|
706 | 706 | missing_requirements = True |
|
707 | 707 | ancestor_commit = None |
|
708 | 708 | try: |
|
709 | 709 | ancestor_id = source_scm.get_common_ancestor( |
|
710 | 710 | source_commit.raw_id, target_commit.raw_id, target_scm) |
|
711 | 711 | ancestor_commit = source_scm.get_commit(ancestor_id) |
|
712 | 712 | except Exception: |
|
713 | 713 | ancestor_commit = None |
|
714 | 714 | return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit |
|
715 | 715 | |
|
716 | 716 | def assure_not_empty_repo(self): |
|
717 | 717 | _ = self.request.translate |
|
718 | 718 | |
|
719 | 719 | try: |
|
720 | 720 | self.db_repo.scm_instance().get_commit() |
|
721 | 721 | except EmptyRepositoryError: |
|
722 | 722 | h.flash(h.literal(_('There are no commits yet')), |
|
723 | 723 | category='warning') |
|
724 | 724 | raise HTTPFound( |
|
725 | 725 | h.route_path('repo_summary', repo_name=self.db_repo.repo_name)) |
|
726 | 726 | |
|
727 | 727 | @LoginRequired() |
|
728 | 728 | @NotAnonymous() |
|
729 | 729 | @HasRepoPermissionAnyDecorator( |
|
730 | 730 | 'repository.read', 'repository.write', 'repository.admin') |
|
731 | 731 | @view_config( |
|
732 | 732 | route_name='pullrequest_new', request_method='GET', |
|
733 | 733 | renderer='rhodecode:templates/pullrequests/pullrequest.mako') |
|
734 | 734 | def pull_request_new(self): |
|
735 | 735 | _ = self.request.translate |
|
736 | 736 | c = self.load_default_context() |
|
737 | 737 | |
|
738 | 738 | self.assure_not_empty_repo() |
|
739 | 739 | source_repo = self.db_repo |
|
740 | 740 | |
|
741 | 741 | commit_id = self.request.GET.get('commit') |
|
742 | 742 | branch_ref = self.request.GET.get('branch') |
|
743 | 743 | bookmark_ref = self.request.GET.get('bookmark') |
|
744 | 744 | |
|
745 | 745 | try: |
|
746 | 746 | source_repo_data = PullRequestModel().generate_repo_data( |
|
747 | 747 | source_repo, commit_id=commit_id, |
|
748 | 748 | branch=branch_ref, bookmark=bookmark_ref, |
|
749 | 749 | translator=self.request.translate) |
|
750 | 750 | except CommitDoesNotExistError as e: |
|
751 | 751 | log.exception(e) |
|
752 | 752 | h.flash(_('Commit does not exist'), 'error') |
|
753 | 753 | raise HTTPFound( |
|
754 | 754 | h.route_path('pullrequest_new', repo_name=source_repo.repo_name)) |
|
755 | 755 | |
|
756 | 756 | default_target_repo = source_repo |
|
757 | 757 | |
|
758 | 758 | if source_repo.parent: |
|
759 | 759 | parent_vcs_obj = source_repo.parent.scm_instance() |
|
760 | 760 | if parent_vcs_obj and not parent_vcs_obj.is_empty(): |
|
761 | 761 | # change default if we have a parent repo |
|
762 | 762 | default_target_repo = source_repo.parent |
|
763 | 763 | |
|
764 | 764 | target_repo_data = PullRequestModel().generate_repo_data( |
|
765 | 765 | default_target_repo, translator=self.request.translate) |
|
766 | 766 | |
|
767 | 767 | selected_source_ref = source_repo_data['refs']['selected_ref'] |
|
768 | 768 | title_source_ref = '' |
|
769 | 769 | if selected_source_ref: |
|
770 | 770 | title_source_ref = selected_source_ref.split(':', 2)[1] |
|
771 | 771 | c.default_title = PullRequestModel().generate_pullrequest_title( |
|
772 | 772 | source=source_repo.repo_name, |
|
773 | 773 | source_ref=title_source_ref, |
|
774 | 774 | target=default_target_repo.repo_name |
|
775 | 775 | ) |
|
776 | 776 | |
|
777 | 777 | c.default_repo_data = { |
|
778 | 778 | 'source_repo_name': source_repo.repo_name, |
|
779 | 779 | 'source_refs_json': json.dumps(source_repo_data), |
|
780 | 780 | 'target_repo_name': default_target_repo.repo_name, |
|
781 | 781 | 'target_refs_json': json.dumps(target_repo_data), |
|
782 | 782 | } |
|
783 | 783 | c.default_source_ref = selected_source_ref |
|
784 | 784 | |
|
785 | 785 | return self._get_template_context(c) |
|
786 | 786 | |
|
787 | 787 | @LoginRequired() |
|
788 | 788 | @NotAnonymous() |
|
789 | 789 | @HasRepoPermissionAnyDecorator( |
|
790 | 790 | 'repository.read', 'repository.write', 'repository.admin') |
|
791 | 791 | @view_config( |
|
792 | 792 | route_name='pullrequest_repo_refs', request_method='GET', |
|
793 | 793 | renderer='json_ext', xhr=True) |
|
794 | 794 | def pull_request_repo_refs(self): |
|
795 | 795 | self.load_default_context() |
|
796 | 796 | target_repo_name = self.request.matchdict['target_repo_name'] |
|
797 | 797 | repo = Repository.get_by_repo_name(target_repo_name) |
|
798 | 798 | if not repo: |
|
799 | 799 | raise HTTPNotFound() |
|
800 | 800 | |
|
801 | 801 | target_perm = HasRepoPermissionAny( |
|
802 | 802 | 'repository.read', 'repository.write', 'repository.admin')( |
|
803 | 803 | target_repo_name) |
|
804 | 804 | if not target_perm: |
|
805 | 805 | raise HTTPNotFound() |
|
806 | 806 | |
|
807 | 807 | return PullRequestModel().generate_repo_data( |
|
808 | 808 | repo, translator=self.request.translate) |
|
809 | 809 | |
|
810 | 810 | @LoginRequired() |
|
811 | 811 | @NotAnonymous() |
|
812 | 812 | @HasRepoPermissionAnyDecorator( |
|
813 | 813 | 'repository.read', 'repository.write', 'repository.admin') |
|
814 | 814 | @view_config( |
|
815 | 815 | route_name='pullrequest_repo_targets', request_method='GET', |
|
816 | 816 | renderer='json_ext', xhr=True) |
|
817 | 817 | def pullrequest_repo_targets(self): |
|
818 | 818 | _ = self.request.translate |
|
819 | 819 | filter_query = self.request.GET.get('query') |
|
820 | 820 | |
|
821 | 821 | # get the parents |
|
822 | 822 | parent_target_repos = [] |
|
823 | 823 | if self.db_repo.parent: |
|
824 | 824 | parents_query = Repository.query() \ |
|
825 | 825 | .order_by(func.length(Repository.repo_name)) \ |
|
826 | 826 | .filter(Repository.fork_id == self.db_repo.parent.repo_id) |
|
827 | 827 | |
|
828 | 828 | if filter_query: |
|
829 | 829 | ilike_expression = u'%{}%'.format(safe_unicode(filter_query)) |
|
830 | 830 | parents_query = parents_query.filter( |
|
831 | 831 | Repository.repo_name.ilike(ilike_expression)) |
|
832 | 832 | parents = parents_query.limit(20).all() |
|
833 | 833 | |
|
834 | 834 | for parent in parents: |
|
835 | 835 | parent_vcs_obj = parent.scm_instance() |
|
836 | 836 | if parent_vcs_obj and not parent_vcs_obj.is_empty(): |
|
837 | 837 | parent_target_repos.append(parent) |
|
838 | 838 | |
|
839 | 839 | # get other forks, and repo itself |
|
840 | 840 | query = Repository.query() \ |
|
841 | 841 | .order_by(func.length(Repository.repo_name)) \ |
|
842 | 842 | .filter( |
|
843 | 843 | or_(Repository.repo_id == self.db_repo.repo_id, # repo itself |
|
844 | 844 | Repository.fork_id == self.db_repo.repo_id) # forks of this repo |
|
845 | 845 | ) \ |
|
846 | 846 | .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos])) |
|
847 | 847 | |
|
848 | 848 | if filter_query: |
|
849 | 849 | ilike_expression = u'%{}%'.format(safe_unicode(filter_query)) |
|
850 | 850 | query = query.filter(Repository.repo_name.ilike(ilike_expression)) |
|
851 | 851 | |
|
852 | 852 | limit = max(20 - len(parent_target_repos), 5) # not fewer than 5
|
853 | 853 | target_repos = query.limit(limit).all() |
|
854 | 854 | |
|
855 | 855 | all_target_repos = target_repos + parent_target_repos |
|
856 | 856 | |
|
857 | 857 | repos = [] |
|
858 | 858 | for obj in ScmModel().get_repos(all_target_repos): |
|
859 | 859 | repos.append({ |
|
860 | 860 | 'id': obj['name'], |
|
861 | 861 | 'text': obj['name'], |
|
862 | 862 | 'type': 'repo', |
|
863 | 863 | 'repo_id': obj['dbrepo']['repo_id'], |
|
864 | 864 | 'repo_type': obj['dbrepo']['repo_type'], |
|
865 | 865 | 'private': obj['dbrepo']['private'], |
|
866 | 866 | |
|
867 | 867 | }) |
|
868 | 868 | |
|
869 | 869 | data = { |
|
870 | 870 | 'more': False, |
|
871 | 871 | 'results': [{ |
|
872 | 872 | 'text': _('Repositories'), |
|
873 | 873 | 'children': repos |
|
874 | 874 | }] if repos else [] |
|
875 | 875 | } |
|
876 | 876 | return data |
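
# Editor's sketch of the payload returned above (values illustrative),
# presumably consumed by a select2-style picker on the new-PR form:
#
#   {"more": false,
#    "results": [{"text": "Repositories",
#                 "children": [{"id": "acme/app", "text": "acme/app",
#                               "type": "repo", "repo_id": 7,
#                               "repo_type": "git", "private": false}]}]}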
|
877 | 877 | |
|
878 | 878 | @LoginRequired() |
|
879 | 879 | @NotAnonymous() |
|
880 | 880 | @HasRepoPermissionAnyDecorator( |
|
881 | 881 | 'repository.read', 'repository.write', 'repository.admin') |
|
882 | 882 | @CSRFRequired() |
|
883 | 883 | @view_config( |
|
884 | 884 | route_name='pullrequest_create', request_method='POST', |
|
885 | 885 | renderer=None) |
|
886 | 886 | def pull_request_create(self): |
|
887 | 887 | _ = self.request.translate |
|
888 | 888 | self.assure_not_empty_repo() |
|
889 | 889 | self.load_default_context() |
|
890 | 890 | |
|
891 | 891 | controls = peppercorn.parse(self.request.POST.items()) |
|
892 | 892 | |
|
893 | 893 | try: |
|
894 | 894 | form = PullRequestForm( |
|
895 | 895 | self.request.translate, self.db_repo.repo_id)() |
|
896 | 896 | _form = form.to_python(controls) |
|
897 | 897 | except formencode.Invalid as errors: |
|
898 | 898 | if errors.error_dict.get('revisions'): |
|
899 | 899 | msg = 'Revisions: %s' % errors.error_dict['revisions'] |
|
900 | 900 | elif errors.error_dict.get('pullrequest_title'): |
|
901 | 901 | msg = errors.error_dict.get('pullrequest_title') |
|
902 | 902 | else: |
|
903 | 903 | msg = _('Error creating pull request: {}').format(errors) |
|
904 | 904 | log.exception(msg) |
|
905 | 905 | h.flash(msg, 'error') |
|
906 | 906 | |
|
907 | 907 | # ideally we would just go back to the form ...
|
908 | 908 | raise HTTPFound( |
|
909 | 909 | h.route_path('pullrequest_new', repo_name=self.db_repo_name)) |
|
910 | 910 | |
|
911 | 911 | source_repo = _form['source_repo'] |
|
912 | 912 | source_ref = _form['source_ref'] |
|
913 | 913 | target_repo = _form['target_repo'] |
|
914 | 914 | target_ref = _form['target_ref'] |
|
915 | 915 | commit_ids = _form['revisions'][::-1] |
|
916 | 916 | |
|
917 | 917 | # find the ancestor for this pr |
|
918 | 918 | source_db_repo = Repository.get_by_repo_name(_form['source_repo']) |
|
919 | 919 | target_db_repo = Repository.get_by_repo_name(_form['target_repo']) |
|
920 | 920 | |
|
921 | 921 | # re-check permissions here:

922 | 922 | # we must have read permissions on source_repo
|
923 | 923 | |
|
924 | 924 | source_perm = HasRepoPermissionAny( |
|
925 | 925 | 'repository.read', |
|
926 | 926 | 'repository.write', 'repository.admin')(source_db_repo.repo_name) |
|
927 | 927 | if not source_perm: |
|
928 | 928 | msg = _('Not enough permissions to source repo `{}`.'.format(
|
929 | 929 | source_db_repo.repo_name)) |
|
930 | 930 | h.flash(msg, category='error') |
|
931 | 931 | # copy the args back to redirect |
|
932 | 932 | org_query = self.request.GET.mixed() |
|
933 | 933 | raise HTTPFound( |
|
934 | 934 | h.route_path('pullrequest_new', repo_name=self.db_repo_name, |
|
935 | 935 | _query=org_query)) |
|
936 | 936 | |
|
937 | 937 | # we must have read permissions on the target repo as well; later on

938 | 938 | # we also want to check branch permissions here
|
939 | 939 | target_perm = HasRepoPermissionAny( |
|
940 | 940 | 'repository.read', |
|
941 | 941 | 'repository.write', 'repository.admin')(target_db_repo.repo_name) |
|
942 | 942 | if not target_perm: |
|
943 | 943 | msg = _('Not enough permissions to target repo `{}`.'.format(
|
944 | 944 | target_db_repo.repo_name)) |
|
945 | 945 | h.flash(msg, category='error') |
|
946 | 946 | # copy the args back to redirect |
|
947 | 947 | org_query = self.request.GET.mixed() |
|
948 | 948 | raise HTTPFound( |
|
949 | 949 | h.route_path('pullrequest_new', repo_name=self.db_repo_name, |
|
950 | 950 | _query=org_query)) |
|
951 | 951 | |
|
952 | 952 | source_scm = source_db_repo.scm_instance() |
|
953 | 953 | target_scm = target_db_repo.scm_instance() |
|
954 | 954 | |
|
955 | 955 | source_commit = source_scm.get_commit(source_ref.split(':')[-1]) |
|
956 | 956 | target_commit = target_scm.get_commit(target_ref.split(':')[-1]) |
|
957 | 957 | |
|
958 | 958 | ancestor = source_scm.get_common_ancestor( |
|
959 | 959 | source_commit.raw_id, target_commit.raw_id, target_scm) |
|
960 | 960 | |
|
961 | 961 | # recalculate target ref based on ancestor |
|
962 | 962 | target_ref_type, target_ref_name, __ = _form['target_ref'].split(':') |
|
963 | 963 | target_ref = ':'.join((target_ref_type, target_ref_name, ancestor)) |
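
# Editor's example (ref name and ids illustrative): an incoming target_ref
# of 'branch:master:1a2b3c4d...' is rebuilt as
# 'branch:master:<common-ancestor-id>', anchoring the new PR at the merge
# base instead of the moving branch head.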
|
964 | 964 | |
|
965 | 965 | get_default_reviewers_data, validate_default_reviewers = \ |
|
966 | 966 | PullRequestModel().get_reviewer_functions() |
|
967 | 967 | |
|
968 | 968 | # recalculate the reviewer rules, to make sure we can validate them
|
969 | 969 | reviewer_rules = get_default_reviewers_data( |
|
970 | 970 | self._rhodecode_db_user, source_db_repo, |
|
971 | 971 | source_commit, target_db_repo, target_commit) |
|
972 | 972 | |
|
973 | 973 | given_reviewers = _form['review_members'] |
|
974 | 974 | reviewers = validate_default_reviewers( |
|
975 | 975 | given_reviewers, reviewer_rules) |
|
976 | 976 | |
|
977 | 977 | pullrequest_title = _form['pullrequest_title'] |
|
978 | 978 | title_source_ref = source_ref.split(':', 2)[1] |
|
979 | 979 | if not pullrequest_title: |
|
980 | 980 | pullrequest_title = PullRequestModel().generate_pullrequest_title( |
|
981 | 981 | source=source_repo, |
|
982 | 982 | source_ref=title_source_ref, |
|
983 | 983 | target=target_repo |
|
984 | 984 | ) |
|
985 | 985 | |
|
986 | 986 | description = _form['pullrequest_desc'] |
|
987 | 987 | description_renderer = _form['description_renderer'] |
|
988 | 988 | |
|
989 | 989 | try: |
|
990 | 990 | pull_request = PullRequestModel().create( |
|
991 | 991 | created_by=self._rhodecode_user.user_id, |
|
992 | 992 | source_repo=source_repo, |
|
993 | 993 | source_ref=source_ref, |
|
994 | 994 | target_repo=target_repo, |
|
995 | 995 | target_ref=target_ref, |
|
996 | 996 | revisions=commit_ids, |
|
997 | 997 | reviewers=reviewers, |
|
998 | 998 | title=pullrequest_title, |
|
999 | 999 | description=description, |
|
1000 | 1000 | description_renderer=description_renderer, |
|
1001 | 1001 | reviewer_data=reviewer_rules, |
|
1002 | 1002 | auth_user=self._rhodecode_user |
|
1003 | 1003 | ) |
|
1004 | 1004 | Session().commit() |
|
1005 | 1005 | |
|
1006 | 1006 | h.flash(_('Successfully opened new pull request'), |
|
1007 | 1007 | category='success') |
|
1008 | 1008 | except Exception: |
|
1009 | 1009 | msg = _('Error occurred during creation of this pull request.') |
|
1010 | 1010 | log.exception(msg) |
|
1011 | 1011 | h.flash(msg, category='error') |
|
1012 | 1012 | |
|
1013 | 1013 | # copy the args back to redirect |
|
1014 | 1014 | org_query = self.request.GET.mixed() |
|
1015 | 1015 | raise HTTPFound( |
|
1016 | 1016 | h.route_path('pullrequest_new', repo_name=self.db_repo_name, |
|
1017 | 1017 | _query=org_query)) |
|
1018 | 1018 | |
|
1019 | 1019 | raise HTTPFound( |
|
1020 | 1020 | h.route_path('pullrequest_show', repo_name=target_repo, |
|
1021 | 1021 | pull_request_id=pull_request.pull_request_id)) |
|
1022 | 1022 | |
|
1023 | 1023 | @LoginRequired() |
|
1024 | 1024 | @NotAnonymous() |
|
1025 | 1025 | @HasRepoPermissionAnyDecorator( |
|
1026 | 1026 | 'repository.read', 'repository.write', 'repository.admin') |
|
1027 | 1027 | @CSRFRequired() |
|
1028 | 1028 | @view_config( |
|
1029 | 1029 | route_name='pullrequest_update', request_method='POST', |
|
1030 | 1030 | renderer='json_ext') |
|
1031 | 1031 | def pull_request_update(self): |
|
1032 | 1032 | pull_request = PullRequest.get_or_404( |
|
1033 | 1033 | self.request.matchdict['pull_request_id']) |
|
1034 | 1034 | _ = self.request.translate |
|
1035 | 1035 | |
|
1036 | 1036 | self.load_default_context() |
|
1037 | 1037 | |
|
1038 | 1038 | if pull_request.is_closed(): |
|
1039 | 1039 | log.debug('update: forbidden because pull request is closed') |
|
1040 | 1040 | msg = _(u'Cannot update closed pull requests.') |
|
1041 | 1041 | h.flash(msg, category='error') |
|
1042 | 1042 | return True |
|
1043 | 1043 | |
|
1044 | 1044 | # only owner or admin can update it |
|
1045 | 1045 | allowed_to_update = PullRequestModel().check_user_update( |
|
1046 | 1046 | pull_request, self._rhodecode_user) |
|
1047 | 1047 | if allowed_to_update: |
|
1048 | 1048 | controls = peppercorn.parse(self.request.POST.items()) |
|
1049 | 1049 | |
|
1050 | 1050 | if 'review_members' in controls: |
|
1051 | 1051 | self._update_reviewers( |
|
1052 | 1052 | pull_request, controls['review_members'], |
|
1053 | 1053 | pull_request.reviewer_data) |
|
1054 | 1054 | elif str2bool(self.request.POST.get('update_commits', 'false')): |
|
1055 | 1055 | self._update_commits(pull_request) |
|
1056 | 1056 | elif str2bool(self.request.POST.get('edit_pull_request', 'false')): |
|
1057 | 1057 | self._edit_pull_request(pull_request) |
|
1058 | 1058 | else: |
|
1059 | 1059 | raise HTTPBadRequest() |
|
1060 | 1060 | return True |
|
1061 | 1061 | raise HTTPForbidden() |
|
1062 | 1062 | |
|
1063 | 1063 | def _edit_pull_request(self, pull_request): |
|
1064 | 1064 | _ = self.request.translate |
|
1065 | 1065 | |
|
1066 | 1066 | try: |
|
1067 | 1067 | PullRequestModel().edit( |
|
1068 | 1068 | pull_request, |
|
1069 | 1069 | self.request.POST.get('title'), |
|
1070 | 1070 | self.request.POST.get('description'), |
|
1071 | 1071 | self.request.POST.get('description_renderer'), |
|
1072 | 1072 | self._rhodecode_user) |
|
1073 | 1073 | except ValueError: |
|
1074 | 1074 | msg = _(u'Cannot update closed pull requests.') |
|
1075 | 1075 | h.flash(msg, category='error') |
|
1076 | 1076 | return |
|
1077 | 1077 | else: |
|
1078 | 1078 | Session().commit() |
|
1079 | 1079 | |
|
1080 | 1080 | msg = _(u'Pull request title & description updated.') |
|
1081 | 1081 | h.flash(msg, category='success') |
|
1082 | 1082 | return |
|
1083 | 1083 | |
|
1084 | 1084 | def _update_commits(self, pull_request): |
|
1085 | 1085 | _ = self.request.translate |
|
1086 | 1086 | resp = PullRequestModel().update_commits(pull_request) |
|
1087 | 1087 | |
|
1088 | 1088 | if resp.executed: |
|
1089 | 1089 | |
|
1090 | 1090 | if resp.target_changed and resp.source_changed: |
|
1091 | 1091 | changed = 'target and source repositories' |
|
1092 | 1092 | elif resp.target_changed and not resp.source_changed: |
|
1093 | 1093 | changed = 'target repository' |
|
1094 | 1094 | elif not resp.target_changed and resp.source_changed: |
|
1095 | 1095 | changed = 'source repository' |
|
1096 | 1096 | else: |
|
1097 | 1097 | changed = 'nothing' |
|
1098 | 1098 | |
|
1099 | 1099 | msg = _( |
|
1100 | 1100 | u'Pull request updated to "{source_commit_id}" with ' |
|
1101 | 1101 | u'{count_added} added, {count_removed} removed commits. ' |
|
1102 | 1102 | u'Source of changes: {change_source}') |
|
1103 | 1103 | msg = msg.format( |
|
1104 | 1104 | source_commit_id=pull_request.source_ref_parts.commit_id, |
|
1105 | 1105 | count_added=len(resp.changes.added), |
|
1106 | 1106 | count_removed=len(resp.changes.removed), |
|
1107 | 1107 | change_source=changed) |
|
1108 | 1108 | h.flash(msg, category='success') |
|
1109 | 1109 | |
|
1110 | 1110 | channel = '/repo${}$/pr/{}'.format( |
|
1111 | 1111 | pull_request.target_repo.repo_name, |
|
1112 | 1112 | pull_request.pull_request_id) |
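
# Editor's example (values illustrative): for target repo 'acme/app' and
# pull request 42 the resulting channelstream channel is
# '/repo$acme/app$/pr/42'.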
|
1113 | 1113 | message = msg + ( |
|
1114 | 1114 | ' - <a onclick="window.location.reload()">' |
|
1115 | 1115 | '<strong>{}</strong></a>'.format(_('Reload page'))) |
|
1116 | 1116 | channelstream.post_message( |
|
1117 | 1117 | channel, message, self._rhodecode_user.username, |
|
1118 | 1118 | registry=self.request.registry) |
|
1119 | 1119 | else: |
|
1120 | 1120 | msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason] |
|
1121 | 1121 | warning_reasons = [ |
|
1122 | 1122 | UpdateFailureReason.NO_CHANGE, |
|
1123 | 1123 | UpdateFailureReason.WRONG_REF_TYPE, |
|
1124 | 1124 | ] |
|
1125 | 1125 | category = 'warning' if resp.reason in warning_reasons else 'error' |
|
1126 | 1126 | h.flash(msg, category=category) |
|
1127 | 1127 | |
|
1128 | 1128 | @LoginRequired() |
|
1129 | 1129 | @NotAnonymous() |
|
1130 | 1130 | @HasRepoPermissionAnyDecorator( |
|
1131 | 1131 | 'repository.read', 'repository.write', 'repository.admin') |
|
1132 | 1132 | @CSRFRequired() |
|
1133 | 1133 | @view_config( |
|
1134 | 1134 | route_name='pullrequest_merge', request_method='POST', |
|
1135 | 1135 | renderer='json_ext') |
|
1136 | 1136 | def pull_request_merge(self): |
|
1137 | 1137 | """ |
|
1138 | 1138 | Merge will perform a server-side merge of the specified |
|
1139 | 1139 | pull request, if the pull request is approved and mergeable. |
|
1140 | 1140 | After successful merging, the pull request is automatically |
|
1141 | 1141 | closed, with a relevant comment. |
|
1142 | 1142 | """ |
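
# Editor's summary of the flow below: MergeCheck.validate() gates the
# operation; on success vcs_operation_context() builds the hook extras for
# the server-side merge performed in _merge_pull_request(), and the view
# always finishes by redirecting back to the pull request page.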
|
1143 | 1143 | pull_request = PullRequest.get_or_404( |
|
1144 | 1144 | self.request.matchdict['pull_request_id']) |
|
1145 | 1145 | |
|
1146 | 1146 | self.load_default_context() |
|
1147 | 1147 | check = MergeCheck.validate( |
|
1148 | 1148 | pull_request, auth_user=self._rhodecode_user, |
|
1149 | 1149 | translator=self.request.translate) |
|
1150 | 1150 | merge_possible = not check.failed |
|
1151 | 1151 | |
|
1152 | 1152 | for err_type, error_msg in check.errors: |
|
1153 | 1153 | h.flash(error_msg, category=err_type) |
|
1154 | 1154 | |
|
1155 | 1155 | if merge_possible: |
|
1156 | 1156 | log.debug("Pre-conditions checked, trying to merge.") |
|
1157 | 1157 | extras = vcs_operation_context( |
|
1158 | 1158 | self.request.environ, repo_name=pull_request.target_repo.repo_name, |
|
1159 | 1159 | username=self._rhodecode_db_user.username, action='push', |
|
1160 | 1160 | scm=pull_request.target_repo.repo_type) |
|
1161 | 1161 | self._merge_pull_request( |
|
1162 | 1162 | pull_request, self._rhodecode_db_user, extras) |
|
1163 | 1163 | else: |
|
1164 | 1164 | log.debug("Pre-conditions failed, NOT merging.") |
|
1165 | 1165 | |
|
1166 | 1166 | raise HTTPFound( |
|
1167 | 1167 | h.route_path('pullrequest_show', |
|
1168 | 1168 | repo_name=pull_request.target_repo.repo_name, |
|
1169 | 1169 | pull_request_id=pull_request.pull_request_id)) |
|
1170 | 1170 | |
|
1171 | 1171 | def _merge_pull_request(self, pull_request, user, extras): |
|
1172 | 1172 | _ = self.request.translate |
|
1173 | 1173 | merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras) |
|
1174 | 1174 | |
|
1175 | 1175 | if merge_resp.executed: |
|
1176 | 1176 | log.debug("The merge was successful, closing the pull request.") |
|
1177 | 1177 | PullRequestModel().close_pull_request( |
|
1178 | 1178 | pull_request.pull_request_id, user) |
|
1179 | 1179 | Session().commit() |
|
1180 | 1180 | msg = _('Pull request was successfully merged and closed.') |
|
1181 | 1181 | h.flash(msg, category='success') |
|
1182 | 1182 | else: |
|
1183 | 1183 | log.debug( |
|
1184 | "The merge was not successful. Merge response: %s", | |
|
1185 | merge_resp) | |
|
1186 | msg = PullRequestModel().merge_status_message( | |
|
1187 | merge_resp.failure_reason) | |
|
1184 | "The merge was not successful. Merge response: %s", merge_resp) | |
|
1185 | msg = merge_resp.merge_status_message | |
|
1188 | 1186 | h.flash(msg, category='error') |
|
1189 | 1187 | |
|
1190 | 1188 | def _update_reviewers(self, pull_request, review_members, reviewer_rules): |
|
1191 | 1189 | _ = self.request.translate |
|
1192 | 1190 | get_default_reviewers_data, validate_default_reviewers = \ |
|
1193 | 1191 | PullRequestModel().get_reviewer_functions() |
|
1194 | 1192 | |
|
1195 | 1193 | try: |
|
1196 | 1194 | reviewers = validate_default_reviewers(review_members, reviewer_rules) |
|
1197 | 1195 | except ValueError as e: |
|
1198 | 1196 | log.error('Reviewers Validation: {}'.format(e)) |
|
1199 | 1197 | h.flash(e, category='error') |
|
1200 | 1198 | return |
|
1201 | 1199 | |
|
1202 | 1200 | PullRequestModel().update_reviewers( |
|
1203 | 1201 | pull_request, reviewers, self._rhodecode_user) |
|
1204 | 1202 | h.flash(_('Pull request reviewers updated.'), category='success') |
|
1205 | 1203 | Session().commit() |
|
1206 | 1204 | |
|
1207 | 1205 | @LoginRequired() |
|
1208 | 1206 | @NotAnonymous() |
|
1209 | 1207 | @HasRepoPermissionAnyDecorator( |
|
1210 | 1208 | 'repository.read', 'repository.write', 'repository.admin') |
|
1211 | 1209 | @CSRFRequired() |
|
1212 | 1210 | @view_config( |
|
1213 | 1211 | route_name='pullrequest_delete', request_method='POST', |
|
1214 | 1212 | renderer='json_ext') |
|
1215 | 1213 | def pull_request_delete(self): |
|
1216 | 1214 | _ = self.request.translate |
|
1217 | 1215 | |
|
1218 | 1216 | pull_request = PullRequest.get_or_404( |
|
1219 | 1217 | self.request.matchdict['pull_request_id']) |
|
1220 | 1218 | self.load_default_context() |
|
1221 | 1219 | |
|
1222 | 1220 | pr_closed = pull_request.is_closed() |
|
1223 | 1221 | allowed_to_delete = PullRequestModel().check_user_delete( |
|
1224 | 1222 | pull_request, self._rhodecode_user) and not pr_closed |
|
1225 | 1223 | |
|
1226 | 1224 | # only the owner can delete it!
|
1227 | 1225 | if allowed_to_delete: |
|
1228 | 1226 | PullRequestModel().delete(pull_request, self._rhodecode_user) |
|
1229 | 1227 | Session().commit() |
|
1230 | 1228 | h.flash(_('Successfully deleted pull request'), |
|
1231 | 1229 | category='success') |
|
1232 | 1230 | raise HTTPFound(h.route_path('pullrequest_show_all', |
|
1233 | 1231 | repo_name=self.db_repo_name)) |
|
1234 | 1232 | |
|
1235 | 1233 | log.warning('user %s tried to delete pull request without access', |
|
1236 | 1234 | self._rhodecode_user) |
|
1237 | 1235 | raise HTTPNotFound() |
|
1238 | 1236 | |
|
1239 | 1237 | @LoginRequired() |
|
1240 | 1238 | @NotAnonymous() |
|
1241 | 1239 | @HasRepoPermissionAnyDecorator( |
|
1242 | 1240 | 'repository.read', 'repository.write', 'repository.admin') |
|
1243 | 1241 | @CSRFRequired() |
|
1244 | 1242 | @view_config( |
|
1245 | 1243 | route_name='pullrequest_comment_create', request_method='POST', |
|
1246 | 1244 | renderer='json_ext') |
|
1247 | 1245 | def pull_request_comment_create(self): |
|
1248 | 1246 | _ = self.request.translate |
|
1249 | 1247 | |
|
1250 | 1248 | pull_request = PullRequest.get_or_404( |
|
1251 | 1249 | self.request.matchdict['pull_request_id']) |
|
1252 | 1250 | pull_request_id = pull_request.pull_request_id |
|
1253 | 1251 | |
|
1254 | 1252 | if pull_request.is_closed(): |
|
1255 | 1253 | log.debug('comment: forbidden because pull request is closed') |
|
1256 | 1254 | raise HTTPForbidden() |
|
1257 | 1255 | |
|
1258 | 1256 | allowed_to_comment = PullRequestModel().check_user_comment( |
|
1259 | 1257 | pull_request, self._rhodecode_user) |
|
1260 | 1258 | if not allowed_to_comment: |
|
1261 | 1259 | log.debug( |
|
1262 | 1260 | 'comment: forbidden because pull request is from forbidden repo') |
|
1263 | 1261 | raise HTTPForbidden() |
|
1264 | 1262 | |
|
1265 | 1263 | c = self.load_default_context() |
|
1266 | 1264 | |
|
1267 | 1265 | status = self.request.POST.get('changeset_status', None) |
|
1268 | 1266 | text = self.request.POST.get('text') |
|
1269 | 1267 | comment_type = self.request.POST.get('comment_type') |
|
1270 | 1268 | resolves_comment_id = self.request.POST.get('resolves_comment_id', None) |
|
1271 | 1269 | close_pull_request = self.request.POST.get('close_pull_request') |
|
1272 | 1270 | |
|
1273 | 1271 | # the logic here works as follows: if we submit a close-PR

1274 | 1272 | # comment, use the `close_pull_request_with_comment` function,

1275 | 1273 | # else handle the regular comment logic
|
1276 | 1274 | |
|
1277 | 1275 | if close_pull_request: |
|
1278 | 1276 | # only owner or admin or person with write permissions |
|
1279 | 1277 | allowed_to_close = PullRequestModel().check_user_update( |
|
1280 | 1278 | pull_request, self._rhodecode_user) |
|
1281 | 1279 | if not allowed_to_close: |
|
1282 | 1280 | log.debug('comment: forbidden because not allowed to close ' |
|
1283 | 1281 | 'pull request %s', pull_request_id) |
|
1284 | 1282 | raise HTTPForbidden() |
|
1285 | 1283 | comment, status = PullRequestModel().close_pull_request_with_comment( |
|
1286 | 1284 | pull_request, self._rhodecode_user, self.db_repo, message=text, |
|
1287 | 1285 | auth_user=self._rhodecode_user) |
|
1288 | 1286 | Session().flush() |
|
1289 | 1287 | events.trigger( |
|
1290 | 1288 | events.PullRequestCommentEvent(pull_request, comment)) |
|
1291 | 1289 | |
|
1292 | 1290 | else: |
|
1293 | 1291 | # regular comment case; it could be inline, or one with a status.

1294 | 1292 | # for the latter we also check permissions
|
1295 | 1293 | |
|
1296 | 1294 | allowed_to_change_status = PullRequestModel().check_user_change_status( |
|
1297 | 1295 | pull_request, self._rhodecode_user) |
|
1298 | 1296 | |
|
1299 | 1297 | if status and allowed_to_change_status: |
|
1300 | 1298 | message = (_('Status change %(transition_icon)s %(status)s') |
|
1301 | 1299 | % {'transition_icon': '>', |
|
1302 | 1300 | 'status': ChangesetStatus.get_status_lbl(status)}) |
|
1303 | 1301 | text = text or message |
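
# Editor's example (label illustrative): a review set to 'approved' with no
# comment text falls back to the message 'Status change > Approved'.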
|
1304 | 1302 | |
|
1305 | 1303 | comment = CommentsModel().create( |
|
1306 | 1304 | text=text, |
|
1307 | 1305 | repo=self.db_repo.repo_id, |
|
1308 | 1306 | user=self._rhodecode_user.user_id, |
|
1309 | 1307 | pull_request=pull_request, |
|
1310 | 1308 | f_path=self.request.POST.get('f_path'), |
|
1311 | 1309 | line_no=self.request.POST.get('line'), |
|
1312 | 1310 | status_change=(ChangesetStatus.get_status_lbl(status) |
|
1313 | 1311 | if status and allowed_to_change_status else None), |
|
1314 | 1312 | status_change_type=(status |
|
1315 | 1313 | if status and allowed_to_change_status else None), |
|
1316 | 1314 | comment_type=comment_type, |
|
1317 | 1315 | resolves_comment_id=resolves_comment_id, |
|
1318 | 1316 | auth_user=self._rhodecode_user |
|
1319 | 1317 | ) |
|
1320 | 1318 | |
|
1321 | 1319 | if allowed_to_change_status: |
|
1322 | 1320 | # calculate old status before we change it |
|
1323 | 1321 | old_calculated_status = pull_request.calculated_review_status() |
|
1324 | 1322 | |
|
1325 | 1323 | # apply the status, if one was set
|
1326 | 1324 | if status: |
|
1327 | 1325 | ChangesetStatusModel().set_status( |
|
1328 | 1326 | self.db_repo.repo_id, |
|
1329 | 1327 | status, |
|
1330 | 1328 | self._rhodecode_user.user_id, |
|
1331 | 1329 | comment, |
|
1332 | 1330 | pull_request=pull_request |
|
1333 | 1331 | ) |
|
1334 | 1332 | |
|
1335 | 1333 | Session().flush() |
|
1336 | 1334 | # this is somehow required to get access to some relationships

1337 | 1335 | # loaded on the comment
|
1338 | 1336 | Session().refresh(comment) |
|
1339 | 1337 | |
|
1340 | 1338 | events.trigger( |
|
1341 | 1339 | events.PullRequestCommentEvent(pull_request, comment)) |
|
1342 | 1340 | |
|
1343 | 1341 | # we now calculate the status of the pull request, and based on

1344 | 1342 | # that calculation we set the commit statuses
|
1345 | 1343 | calculated_status = pull_request.calculated_review_status() |
|
1346 | 1344 | if old_calculated_status != calculated_status: |
|
1347 | 1345 | PullRequestModel()._trigger_pull_request_hook( |
|
1348 | 1346 | pull_request, self._rhodecode_user, 'review_status_change') |
|
1349 | 1347 | |
|
1350 | 1348 | Session().commit() |
|
1351 | 1349 | |
|
1352 | 1350 | data = { |
|
1353 | 1351 | 'target_id': h.safeid(h.safe_unicode( |
|
1354 | 1352 | self.request.POST.get('f_path'))), |
|
1355 | 1353 | } |
|
1356 | 1354 | if comment: |
|
1357 | 1355 | c.co = comment |
|
1358 | 1356 | rendered_comment = render( |
|
1359 | 1357 | 'rhodecode:templates/changeset/changeset_comment_block.mako', |
|
1360 | 1358 | self._get_template_context(c), self.request) |
|
1361 | 1359 | |
|
1362 | 1360 | data.update(comment.get_dict()) |
|
1363 | 1361 | data.update({'rendered_text': rendered_comment}) |
|
1364 | 1362 | |
|
1365 | 1363 | return data |
|
1366 | 1364 | |
|
1367 | 1365 | @LoginRequired() |
|
1368 | 1366 | @NotAnonymous() |
|
1369 | 1367 | @HasRepoPermissionAnyDecorator( |
|
1370 | 1368 | 'repository.read', 'repository.write', 'repository.admin') |
|
1371 | 1369 | @CSRFRequired() |
|
1372 | 1370 | @view_config( |
|
1373 | 1371 | route_name='pullrequest_comment_delete', request_method='POST', |
|
1374 | 1372 | renderer='json_ext') |
|
1375 | 1373 | def pull_request_comment_delete(self): |
|
1376 | 1374 | pull_request = PullRequest.get_or_404( |
|
1377 | 1375 | self.request.matchdict['pull_request_id']) |
|
1378 | 1376 | |
|
1379 | 1377 | comment = ChangesetComment.get_or_404( |
|
1380 | 1378 | self.request.matchdict['comment_id']) |
|
1381 | 1379 | comment_id = comment.comment_id |
|
1382 | 1380 | |
|
1383 | 1381 | if pull_request.is_closed(): |
|
1384 | 1382 | log.debug('comment: forbidden because pull request is closed') |
|
1385 | 1383 | raise HTTPForbidden() |
|
1386 | 1384 | |
|
1387 | 1385 | if not comment: |
|
1388 | 1386 | log.debug('Comment with id:%s not found, skipping', comment_id) |
|
1389 | 1387 | # the comment was probably already deleted in another call
|
1390 | 1388 | return True |
|
1391 | 1389 | |
|
1392 | 1390 | if comment.pull_request.is_closed(): |
|
1393 | 1391 | # don't allow deleting comments on closed pull request |
|
1394 | 1392 | raise HTTPForbidden() |
|
1395 | 1393 | |
|
1396 | 1394 | is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name) |
|
1397 | 1395 | super_admin = h.HasPermissionAny('hg.admin')() |
|
1398 | 1396 | comment_owner = comment.author.user_id == self._rhodecode_user.user_id |
|
1399 | 1397 | is_repo_comment = comment.repo.repo_name == self.db_repo_name |
|
1400 | 1398 | comment_repo_admin = is_repo_admin and is_repo_comment |
|
1401 | 1399 | |
|
1402 | 1400 | if super_admin or comment_owner or comment_repo_admin: |
|
1403 | 1401 | old_calculated_status = comment.pull_request.calculated_review_status() |
|
1404 | 1402 | CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user) |
|
1405 | 1403 | Session().commit() |
|
1406 | 1404 | calculated_status = comment.pull_request.calculated_review_status() |
|
1407 | 1405 | if old_calculated_status != calculated_status: |
|
1408 | 1406 | PullRequestModel()._trigger_pull_request_hook( |
|
1409 | 1407 | comment.pull_request, self._rhodecode_user, 'review_status_change') |
|
1410 | 1408 | return True |
|
1411 | 1409 | else: |
|
1412 | 1410 | log.warning('No permissions for user %s to delete comment_id: %s', |
|
1413 | 1411 | self._rhodecode_db_user, comment_id) |
|
1414 | 1412 | raise HTTPNotFound() |
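
The permission gate above is worth restating as a standalone predicate: deletion is allowed for a super admin, for the comment's author, or for a repository admin acting within the repository the comment belongs to. A minimal sketch of that rule (a hypothetical helper, not part of the view):

    def can_delete_comment(super_admin, comment_owner,
                           is_repo_admin, is_repo_comment):
        # repo admins may only delete comments made in their own repository
        comment_repo_admin = is_repo_admin and is_repo_comment
        return super_admin or comment_owner or comment_repo_admin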
@@ -1,1755 +1,1837 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2014-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Base module for all VCS systems |
|
23 | 23 | """ |
|
24 | ||
|
25 | import collections | |
|
24 | import os | |
|
25 | import re | |
|
26 | import time | |
|
27 | import shutil | |
|
26 | 28 | import datetime |
|
27 | 29 | import fnmatch |
|
28 | 30 | import itertools |
|
29 | 31 | import logging |
|
30 | import os | |
|
31 | import re | |
|
32 | import time | |
|
32 | import collections | |
|
33 | 33 | import warnings |
|
34 | import shutil | |
|
35 | 34 | |
|
36 | 35 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
37 | 36 | |
|
37 | from rhodecode.translation import lazy_ugettext | |
|
38 | 38 | from rhodecode.lib.utils2 import safe_str, safe_unicode |
|
39 | 39 | from rhodecode.lib.vcs import connection |
|
40 | 40 | from rhodecode.lib.vcs.utils import author_name, author_email |
|
41 | 41 | from rhodecode.lib.vcs.conf import settings |
|
42 | 42 | from rhodecode.lib.vcs.exceptions import ( |
|
43 | 43 | CommitError, EmptyRepositoryError, NodeAlreadyAddedError, |
|
44 | 44 | NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError, |
|
45 | 45 | NodeDoesNotExistError, NodeNotChangedError, VCSError, |
|
46 | 46 | ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError, |
|
47 | 47 | RepositoryError) |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | log = logging.getLogger(__name__) |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | FILEMODE_DEFAULT = 0o100644 |
|
54 | 54 | FILEMODE_EXECUTABLE = 0o100755 |
|
55 | 55 | |
|
56 | 56 | Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id')) |
|
57 | MergeResponse = collections.namedtuple( | |
|
58 | 'MergeResponse', | |
|
59 | ('possible', 'executed', 'merge_ref', 'failure_reason')) | |
|
60 | 57 | |
|
61 | 58 | |
|
62 | 59 | class MergeFailureReason(object): |
|
63 | 60 | """ |
|
64 | 61 | Enumeration with all the reasons why the server side merge could fail. |
|
65 | 62 | |
|
66 | 63 | DO NOT change the number of the reasons, as they may be stored in the |
|
67 | 64 | database. |
|
68 | 65 | |
|
69 | 66 | Renaming a reason is acceptable and is the encouraged way to deprecate old |

70 | 67 | reasons. |
|
71 | 68 | """ |
|
72 | 69 | |
|
73 | 70 | # Everything went well. |
|
74 | 71 | NONE = 0 |
|
75 | 72 | |
|
76 | 73 | # An unexpected exception was raised. Check the logs for more details. |
|
77 | 74 | UNKNOWN = 1 |
|
78 | 75 | |
|
79 | 76 | # The merge was not successful, there are conflicts. |
|
80 | 77 | MERGE_FAILED = 2 |
|
81 | 78 | |
|
82 | 79 | # The merge succeeded but we could not push it to the target repository. |
|
83 | 80 | PUSH_FAILED = 3 |
|
84 | 81 | |
|
85 | 82 | # The specified target is not a head in the target repository. |
|
86 | 83 | TARGET_IS_NOT_HEAD = 4 |
|
87 | 84 | |
|
88 | 85 | # The source repository contains more branches than the target. Pushing |
|
89 | 86 | # the merge will create additional branches in the target. |
|
90 | 87 | HG_SOURCE_HAS_MORE_BRANCHES = 5 |
|
91 | 88 | |
|
92 | 89 | # The target reference has multiple heads, so the target location cannot |

93 | 90 | # be identified unambiguously. This can only happen for mercurial |

94 | 91 | # branches. |
|
95 | 92 | HG_TARGET_HAS_MULTIPLE_HEADS = 6 |
|
96 | 93 | |
|
97 | 94 | # The target repository is locked |
|
98 | 95 | TARGET_IS_LOCKED = 7 |
|
99 | 96 | |
|
100 | 97 | # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead. |
|
101 | 98 | # An involved commit could not be found. |
|
102 | 99 | _DEPRECATED_MISSING_COMMIT = 8 |
|
103 | 100 | |
|
104 | 101 | # The target repo reference is missing. |
|
105 | 102 | MISSING_TARGET_REF = 9 |
|
106 | 103 | |
|
107 | 104 | # The source repo reference is missing. |
|
108 | 105 | MISSING_SOURCE_REF = 10 |
|
109 | 106 | |
|
110 | 107 | # The merge was not successful, there are conflicts related to sub |
|
111 | 108 | # repositories. |
|
112 | 109 | SUBREPO_MERGE_FAILED = 11 |
|
113 | 110 | |
|
114 | 111 | |
|
115 | 112 | class UpdateFailureReason(object): |
|
116 | 113 | """ |
|
117 | 114 | Enumeration with all the reasons why the pull request update could fail. |
|
118 | 115 | |
|
119 | 116 | DO NOT change the number of the reasons, as they may be stored in the |
|
120 | 117 | database. |
|
121 | 118 | |
|
122 | 119 | Renaming a reason is acceptable and is the encouraged way to deprecate old |

123 | 120 | reasons. |
|
124 | 121 | """ |
|
125 | 122 | |
|
126 | 123 | # Everything went well. |
|
127 | 124 | NONE = 0 |
|
128 | 125 | |
|
129 | 126 | # An unexpected exception was raised. Check the logs for more details. |
|
130 | 127 | UNKNOWN = 1 |
|
131 | 128 | |
|
132 | 129 | # The pull request is up to date. |
|
133 | 130 | NO_CHANGE = 2 |
|
134 | 131 | |
|
135 | 132 | # The pull request has a reference type that is not supported for update. |
|
136 | 133 | WRONG_REF_TYPE = 3 |
|
137 | 134 | |
|
138 | 135 | # Update failed because the target reference is missing. |
|
139 | 136 | MISSING_TARGET_REF = 4 |
|
140 | 137 | |
|
141 | 138 | # Update failed because the source reference is missing. |
|
142 | 139 | MISSING_SOURCE_REF = 5 |
|
143 | 140 | |
|
144 | 141 | |
|
142 | class MergeResponse(object): | |
|
143 | ||
|
144 | # uses .format(**metadata) for variables | |
|
145 | MERGE_STATUS_MESSAGES = { | |
|
146 | MergeFailureReason.NONE: lazy_ugettext( | |
|
147 | u'This pull request can be automatically merged.'), | |
|
148 | MergeFailureReason.UNKNOWN: lazy_ugettext( | |
|
149 | u'This pull request cannot be merged because of an unhandled exception. ' | |
|
150 | u'{exception}'), | |
|
151 | MergeFailureReason.MERGE_FAILED: lazy_ugettext( | |
|
152 | u'This pull request cannot be merged because of merge conflicts.'), | |
|
153 | MergeFailureReason.PUSH_FAILED: lazy_ugettext( | |
|
154 | u'This pull request could not be merged because push to ' | |
|
155 | u'target:`{target}@{merge_commit}` failed.'), | |
|
156 | MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext( | |
|
157 | u'This pull request cannot be merged because the target ' | |
|
158 | u'`{target_ref.name}` is not a head.'), | |
|
159 | MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext( | |
|
160 | u'This pull request cannot be merged because the source contains ' | |
|
161 | u'more branches than the target.'), | |
|
162 | MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext( | |
|
163 | u'This pull request cannot be merged because the target ' | |
|
164 | u'has multiple heads: `{heads}`.'), | |
|
165 | MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext( | |
|
166 | u'This pull request cannot be merged because the target repository is ' | |
|
167 | u'locked by {locked_by}.'), | |
|
168 | ||
|
169 | MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext( | |
|
170 | u'This pull request cannot be merged because the target ' | |
|
171 | u'reference `{target_ref.name}` is missing.'), | |
|
172 | MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext( | |
|
173 | u'This pull request cannot be merged because the source ' | |
|
174 | u'reference `{source_ref.name}` is missing.'), | |
|
175 | MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext( | |
|
176 | u'This pull request cannot be merged because of conflicts related ' | |
|
177 | u'to sub repositories.'), | |
|
178 | ||
|
179 | # Deprecations | |
|
180 | MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext( | |
|
181 | u'This pull request cannot be merged because the target or the ' | |
|
182 | u'source reference is missing.'), | |
|
183 | ||
|
184 | } | |
|
185 | ||
|
186 | def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None): | |
|
187 | self.possible = possible | |
|
188 | self.executed = executed | |
|
189 | self.merge_ref = merge_ref | |
|
190 | self.failure_reason = failure_reason | |
|
191 | self.metadata = metadata or {} | |
|
192 | ||
|
193 | def __repr__(self): | |
|
194 | return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason) | |
|
195 | ||
|
196 | def __eq__(self, other): | |
|
197 | same_instance = isinstance(other, self.__class__) | |
|
198 | return same_instance \ | |
|
199 | and self.possible == other.possible \ | |
|
200 | and self.executed == other.executed \ | |
|
201 | and self.failure_reason == other.failure_reason | |
|
202 | ||
|
203 | @property | |
|
204 | def label(self): | |
|
205 | label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if | |
|
206 | not k.startswith('_')) | |
|
207 | return label_dict.get(self.failure_reason) | |
|
208 | ||
|
209 | @property | |
|
210 | def merge_status_message(self): | |
|
211 | """ | |
|
212 | Return a human friendly error message for the given merge status code. | |
|
213 | """ | |
|
214 | msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason]) | |
|
215 | try: | |
|
216 | return msg.format(**self.metadata) | |
|
217 | except Exception: | |
|
218 | log.exception('Failed to format %s message', self) | |
|
219 | return msg | |
|
220 | ||
|
221 | def asdict(self): | |
|
222 | data = {} | |
|
223 | for k in ['possible', 'executed', 'merge_ref', 'failure_reason']: | |
|
224 | data[k] = getattr(self, k) | |
|
225 | return data | |
|
226 | ||
|
227 | ||
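
Because `merge_status_message` interpolates with `str.format(**metadata)`, the metadata dict must supply every placeholder used by the selected template. A hedged usage sketch of the class defined above (the values are illustrative only):

    response = MergeResponse(
        False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
        metadata={'locked_by': 'admin'})

    assert not response.possible
    assert response.label == 'TARGET_IS_LOCKED'
    # renders: u'This pull request cannot be merged because the target
    # repository is locked by admin.'
    print(response.merge_status_message)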
|
145 | 228 | class BaseRepository(object): |
|
146 | 229 | """ |
|
147 | 230 | Base Repository for final backends |
|
148 | 231 | |
|
149 | 232 | .. attribute:: DEFAULT_BRANCH_NAME |
|
150 | 233 | |
|
151 | 234 | name of default branch (e.g. "trunk" for svn, "master" for git, etc.) |
|
152 | 235 | |
|
153 | 236 | .. attribute:: commit_ids |
|
154 | 237 | |
|
155 | 238 | list of all available commit ids, in ascending order |
|
156 | 239 | |
|
157 | 240 | .. attribute:: path |
|
158 | 241 | |
|
159 | 242 | absolute path to the repository |
|
160 | 243 | |
|
161 | 244 | .. attribute:: bookmarks |
|
162 | 245 | |
|
163 | 246 | Mapping from name to :term:`Commit ID` of the bookmark. Empty in case |
|
164 | 247 | there are no bookmarks or the backend implementation does not support |
|
165 | 248 | bookmarks. |
|
166 | 249 | |
|
167 | 250 | .. attribute:: tags |
|
168 | 251 | |
|
169 | 252 | Mapping from name to :term:`Commit ID` of the tag. |
|
170 | 253 | |
|
171 | 254 | """ |
|
172 | 255 | |
|
173 | 256 | DEFAULT_BRANCH_NAME = None |
|
174 | 257 | DEFAULT_CONTACT = u"Unknown" |
|
175 | 258 | DEFAULT_DESCRIPTION = u"unknown" |
|
176 | 259 | EMPTY_COMMIT_ID = '0' * 40 |
|
177 | 260 | |
|
178 | 261 | path = None |
|
179 | 262 | |
|
180 | 263 | def __init__(self, repo_path, config=None, create=False, **kwargs): |
|
181 | 264 | """ |
|
182 | 265 | Initializes repository. Raises RepositoryError if the repository cannot |

183 | 266 | be found at the given ``repo_path``, or if a directory at ``repo_path`` |

184 | 267 | already exists and ``create`` is set to True. |
|
185 | 268 | |
|
186 | 269 | :param repo_path: local path of the repository |
|
187 | 270 | :param config: repository configuration |
|
188 | 271 | :param create=False: if set to True, would try to create repository. |
|
189 | 272 | :param src_url=None: if set, should be proper url from which repository |
|
190 | 273 | would be cloned; requires ``create`` parameter to be set to True - |
|
191 | 274 | raises RepositoryError if src_url is set and create evaluates to |
|
192 | 275 | False |
|
193 | 276 | """ |
|
194 | 277 | raise NotImplementedError |
|
195 | 278 | |
|
196 | 279 | def __repr__(self): |
|
197 | 280 | return '<%s at %s>' % (self.__class__.__name__, self.path) |
|
198 | 281 | |
|
199 | 282 | def __len__(self): |
|
200 | 283 | return self.count() |
|
201 | 284 | |
|
202 | 285 | def __eq__(self, other): |
|
203 | 286 | same_instance = isinstance(other, self.__class__) |
|
204 | 287 | return same_instance and other.path == self.path |
|
205 | 288 | |
|
206 | 289 | def __ne__(self, other): |
|
207 | 290 | return not self.__eq__(other) |
|
208 | 291 | |
|
209 | 292 | def get_create_shadow_cache_pr_path(self, db_repo): |
|
210 | 293 | path = db_repo.cached_diffs_dir |
|
211 | 294 | if not os.path.exists(path): |
|
212 | 295 | os.makedirs(path, 0o755) |
|
213 | 296 | return path |
|
214 | 297 | |
|
215 | 298 | @classmethod |
|
216 | 299 | def get_default_config(cls, default=None): |
|
217 | 300 | config = Config() |
|
218 | 301 | if default and isinstance(default, list): |
|
219 | 302 | for section, key, val in default: |
|
220 | 303 | config.set(section, key, val) |
|
221 | 304 | return config |
|
222 | 305 | |
|
223 | 306 | @LazyProperty |
|
224 | 307 | def _remote(self): |
|
225 | 308 | raise NotImplementedError |
|
226 | 309 | |
|
227 | 310 | @LazyProperty |
|
228 | 311 | def EMPTY_COMMIT(self): |
|
229 | 312 | return EmptyCommit(self.EMPTY_COMMIT_ID) |
|
230 | 313 | |
|
231 | 314 | @LazyProperty |
|
232 | 315 | def alias(self): |
|
233 | 316 | for k, v in settings.BACKENDS.items(): |
|
234 | 317 | if v.split('.')[-1] == str(self.__class__.__name__): |
|
235 | 318 | return k |
|
236 | 319 | |
|
237 | 320 | @LazyProperty |
|
238 | 321 | def name(self): |
|
239 | 322 | return safe_unicode(os.path.basename(self.path)) |
|
240 | 323 | |
|
241 | 324 | @LazyProperty |
|
242 | 325 | def description(self): |
|
243 | 326 | raise NotImplementedError |
|
244 | 327 | |
|
245 | 328 | def refs(self): |
|
246 | 329 | """ |
|
247 | 330 | returns a `dict` with branches, bookmarks, tags, and closed_branches |
|
248 | 331 | for this repository |
|
249 | 332 | """ |
|
250 | 333 | return dict( |
|
251 | 334 | branches=self.branches, |
|
252 | 335 | branches_closed=self.branches_closed, |
|
253 | 336 | tags=self.tags, |
|
254 | 337 | bookmarks=self.bookmarks |
|
255 | 338 | ) |
|
256 | 339 | |
|
257 | 340 | @LazyProperty |
|
258 | 341 | def branches(self): |
|
259 | 342 | """ |
|
260 | 343 | A `dict` which maps branch names to commit ids. |
|
261 | 344 | """ |
|
262 | 345 | raise NotImplementedError |
|
263 | 346 | |
|
264 | 347 | @LazyProperty |
|
265 | 348 | def branches_closed(self): |
|
266 | 349 | """ |
|
267 | 350 | A `dict` which maps closed branch names to commit ids. |
|
268 | 351 | """ |
|
269 | 352 | raise NotImplementedError |
|
270 | 353 | |
|
271 | 354 | @LazyProperty |
|
272 | 355 | def bookmarks(self): |
|
273 | 356 | """ |
|
274 | 357 | A `dict` which maps bookmark names to commit ids. |
|
275 | 358 | """ |
|
276 | 359 | raise NotImplementedError |
|
277 | 360 | |
|
278 | 361 | @LazyProperty |
|
279 | 362 | def tags(self): |
|
280 | 363 | """ |
|
281 | 364 | A `dict` which maps tag names to commit ids. |
|
282 | 365 | """ |
|
283 | 366 | raise NotImplementedError |
|
284 | 367 | |
|
285 | 368 | @LazyProperty |
|
286 | 369 | def size(self): |
|
287 | 370 | """ |
|
288 | 371 | Returns combined size in bytes for all repository files |
|
289 | 372 | """ |
|
290 | 373 | tip = self.get_commit() |
|
291 | 374 | return tip.size |
|
292 | 375 | |
|
293 | 376 | def size_at_commit(self, commit_id): |
|
294 | 377 | commit = self.get_commit(commit_id) |
|
295 | 378 | return commit.size |
|
296 | 379 | |
|
297 | 380 | def is_empty(self): |
|
298 | 381 | return not bool(self.commit_ids) |
|
299 | 382 | |
|
300 | 383 | @staticmethod |
|
301 | 384 | def check_url(url, config): |
|
302 | 385 | """ |
|
303 | 386 | Function will check given url and try to verify if it's a valid |
|
304 | 387 | link. |
|
305 | 388 | """ |
|
306 | 389 | raise NotImplementedError |
|
307 | 390 | |
|
308 | 391 | @staticmethod |
|
309 | 392 | def is_valid_repository(path): |
|
310 | 393 | """ |
|
311 | 394 | Check if given `path` contains a valid repository of this backend |
|
312 | 395 | """ |
|
313 | 396 | raise NotImplementedError |
|
314 | 397 | |
|
315 | 398 | # ========================================================================== |
|
316 | 399 | # COMMITS |
|
317 | 400 | # ========================================================================== |
|
318 | 401 | |
|
319 | 402 | def get_commit(self, commit_id=None, commit_idx=None, pre_load=None): |
|
320 | 403 | """ |
|
321 | 404 | Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx` |
|
322 | 405 | are both None, most recent commit is returned. |
|
323 | 406 | |
|
324 | 407 | :param pre_load: Optional. List of commit attributes to load. |
|
325 | 408 | |
|
326 | 409 | :raises ``EmptyRepositoryError``: if there are no commits |
|
327 | 410 | """ |
|
328 | 411 | raise NotImplementedError |
|
329 | 412 | |
|
330 | 413 | def __iter__(self): |
|
331 | 414 | for commit_id in self.commit_ids: |
|
332 | 415 | yield self.get_commit(commit_id=commit_id) |
|
333 | 416 | |
|
334 | 417 | def get_commits( |
|
335 | 418 | self, start_id=None, end_id=None, start_date=None, end_date=None, |
|
336 | 419 | branch_name=None, show_hidden=False, pre_load=None): |
|
337 | 420 | """ |
|
338 | 421 | Returns an iterator of `BaseCommit` objects from start to end. |

339 | 422 | This behaves just like a list slice, i.e. the end is not |

340 | 423 | inclusive. |
|
341 | 424 | |
|
342 | 425 | :param start_id: None or str, must be a valid commit id |
|
343 | 426 | :param end_id: None or str, must be a valid commit id |
|
344 | 427 | :param start_date: |
|
345 | 428 | :param end_date: |
|
346 | 429 | :param branch_name: |
|
347 | 430 | :param show_hidden: |
|
348 | 431 | :param pre_load: |
|
349 | 432 | """ |
|
350 | 433 | raise NotImplementedError |
|
351 | 434 | |
|
352 | 435 | def __getitem__(self, key): |
|
353 | 436 | """ |
|
354 | 437 | Allows index based access to the commit objects of this repository. |
|
355 | 438 | """ |
|
356 | 439 | pre_load = ["author", "branch", "date", "message", "parents"] |
|
357 | 440 | if isinstance(key, slice): |
|
358 | 441 | return self._get_range(key, pre_load) |
|
359 | 442 | return self.get_commit(commit_idx=key, pre_load=pre_load) |
|
360 | 443 | |
|
361 | 444 | def _get_range(self, slice_obj, pre_load): |
|
362 | 445 | for commit_id in self.commit_ids.__getitem__(slice_obj): |
|
363 | 446 | yield self.get_commit(commit_id=commit_id, pre_load=pre_load) |
|
364 | 447 | |
|
365 | 448 | def count(self): |
|
366 | 449 | return len(self.commit_ids) |
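
Taken together, `__len__`, `__iter__` and `__getitem__` make a repository behave like a sequence of commits: integer indexing returns a single `BaseCommit`, while slices are served lazily through `_get_range`. A short sketch against a hypothetical concrete backend instance `repo`:

    first = repo[0]              # a single BaseCommit, via get_commit(commit_idx=0)
    for commit in repo[:5]:      # slices are served lazily by _get_range()
        print(commit.short_id)
    print(len(repo))             # delegates to count()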
|
367 | 450 | |
|
368 | 451 | def tag(self, name, user, commit_id=None, message=None, date=None, **opts): |
|
369 | 452 | """ |
|
370 | 453 | Creates and returns a tag for the given ``commit_id``. |
|
371 | 454 | |
|
372 | 455 | :param name: name for new tag |
|
373 | 456 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
374 | 457 | :param commit_id: commit id for which new tag would be created |
|
375 | 458 | :param message: message of the tag's commit |
|
376 | 459 | :param date: date of tag's commit |
|
377 | 460 | |
|
378 | 461 | :raises TagAlreadyExistError: if tag with same name already exists |
|
379 | 462 | """ |
|
380 | 463 | raise NotImplementedError |
|
381 | 464 | |
|
382 | 465 | def remove_tag(self, name, user, message=None, date=None): |
|
383 | 466 | """ |
|
384 | 467 | Removes tag with the given ``name``. |
|
385 | 468 | |
|
386 | 469 | :param name: name of the tag to be removed |
|
387 | 470 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
388 | 471 | :param message: message of the tag's removal commit |
|
389 | 472 | :param date: date of tag's removal commit |
|
390 | 473 | |
|
391 | 474 | :raises TagDoesNotExistError: if tag with given name does not exist |
|
392 | 475 | """ |
|
393 | 476 | raise NotImplementedError |
|
394 | 477 | |
|
395 | 478 | def get_diff( |
|
396 | 479 | self, commit1, commit2, path=None, ignore_whitespace=False, |
|
397 | 480 | context=3, path1=None): |
|
398 | 481 | """ |
|
399 | 482 | Returns (git-like) *diff*, as plain text. Shows changes introduced by |
|
400 | 483 | `commit2` since `commit1`. |
|
401 | 484 | |
|
402 | 485 | :param commit1: Entry point from which diff is shown. Can be |
|
403 | 486 | ``self.EMPTY_COMMIT`` - in this case, patch showing all |
|
404 | 487 | the changes since empty state of the repository until `commit2` |
|
405 | 488 | :param commit2: Until which commit changes should be shown. |
|
406 | 489 | :param path: Can be set to a path of a file to create a diff of that |
|
407 | 490 | file. If `path1` is also set, this value is only associated to |
|
408 | 491 | `commit2`. |
|
409 | 492 | :param ignore_whitespace: If set to ``True``, would not show whitespace |
|
410 | 493 | changes. Defaults to ``False``. |
|
411 | 494 | :param context: How many lines before/after changed lines should be |
|
412 | 495 | shown. Defaults to ``3``. |
|
413 | 496 | :param path1: Can be set to a path to associate with `commit1`. This |
|
414 | 497 | parameter works only for backends which support diff generation for |
|
415 | 498 | different paths. Other backends will raise a `ValueError` if `path1` |
|
416 | 499 | is set and has a different value than `path`. |
|
417 | 500 | :param file_path: filter this diff by given path pattern |
|
418 | 501 | """ |
|
419 | 502 | raise NotImplementedError |
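
As a usage sketch (assuming `repo` is a concrete backend instance), a whole-history diff can be produced by passing ``EMPTY_COMMIT`` as the starting point; the file path below is hypothetical:

    tip = repo.get_commit()                    # most recent commit
    diff = repo.get_diff(
        repo.EMPTY_COMMIT, tip,                # everything since the empty state
        path='README.rst',                     # hypothetical file path
        ignore_whitespace=True, context=5)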
|
420 | 503 | |
|
421 | 504 | def strip(self, commit_id, branch=None): |
|
422 | 505 | """ |
|
423 | 506 | Strip given commit_id from the repository |
|
424 | 507 | """ |
|
425 | 508 | raise NotImplementedError |
|
426 | 509 | |
|
427 | 510 | def get_common_ancestor(self, commit_id1, commit_id2, repo2): |
|
428 | 511 | """ |
|
429 | 512 | Return the latest common ancestor commit, if one exists, for this repo's |
|
430 | 513 | `commit_id1` vs `commit_id2` from `repo2`. |
|
431 | 514 | |
|
432 | 515 | :param commit_id1: Commit id from this repository to use as a |
|
433 | 516 | target for the comparison. |
|
434 | 517 | :param commit_id2: Source commit id to use for comparison. |
|
435 | 518 | :param repo2: Source repository to use for comparison. |
|
436 | 519 | """ |
|
437 | 520 | raise NotImplementedError |
|
438 | 521 | |
|
439 | 522 | def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None): |
|
440 | 523 | """ |
|
441 | 524 | Compare this repository's revision `commit_id1` with `commit_id2`. |
|
442 | 525 | |
|
443 | 526 | Returns a tuple(commits, ancestor) that would be merged from |
|
444 | 527 | `commit_id2`. Doing a normal compare (``merge=False``), ``None`` |
|
445 | 528 | will be returned as ancestor. |
|
446 | 529 | |
|
447 | 530 | :param commit_id1: Commit id from this repository to use as a |
|
448 | 531 | target for the comparison. |
|
449 | 532 | :param commit_id2: Source commit id to use for comparison. |
|
450 | 533 | :param repo2: Source repository to use for comparison. |
|
451 | 534 | :param merge: If set to ``True`` will do a merge compare which also |
|
452 | 535 | returns the common ancestor. |
|
453 | 536 | :param pre_load: Optional. List of commit attributes to load. |
|
454 | 537 | """ |
|
455 | 538 | raise NotImplementedError |
|
456 | 539 | |
|
457 | 540 | def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref, |
|
458 | 541 | user_name='', user_email='', message='', dry_run=False, |
|
459 | 542 | use_rebase=False, close_branch=False): |
|
460 | 543 | """ |
|
461 | 544 | Merge the revisions specified in `source_ref` from `source_repo` |
|
462 | 545 | onto the `target_ref` of this repository. |
|
463 | 546 | |
|
464 | 547 | `source_ref` and `target_ref` are named tuples with the following |
|
465 | 548 | fields `type`, `name` and `commit_id`. |
|
466 | 549 | |
|
467 | 550 | Returns a MergeResponse object with the following fields: |

468 | 551 | 'possible', 'executed', 'merge_ref' and |

469 | 552 | 'failure_reason'. |
|
470 | 553 | |
|
471 | 554 | :param repo_id: `repo_id` target repo id. |
|
472 | 555 | :param workspace_id: `workspace_id` unique identifier. |
|
473 | 556 | :param target_ref: `target_ref` points to the commit on top of which |
|
474 | 557 | the `source_ref` should be merged. |
|
475 | 558 | :param source_repo: The repository that contains the commits to be |
|
476 | 559 | merged. |
|
477 | 560 | :param source_ref: `source_ref` points to the topmost commit from |
|
478 | 561 | the `source_repo` which should be merged. |
|
479 | 562 | :param user_name: Merge commit `user_name`. |
|
480 | 563 | :param user_email: Merge commit `user_email`. |
|
481 | 564 | :param message: Merge commit `message`. |
|
482 | 565 | :param dry_run: If `True` the merge will not take place. |
|
483 | 566 | :param use_rebase: If `True` commits from the source will be rebased |
|
484 | 567 | on top of the target instead of being merged. |
|
485 | 568 | :param close_branch: If `True` the branch will be closed before merging it |
|
486 | 569 | """ |
|
487 | 570 | if dry_run: |
|
488 | 571 | message = message or settings.MERGE_DRY_RUN_MESSAGE |
|
489 | 572 | user_email = user_email or settings.MERGE_DRY_RUN_EMAIL |
|
490 | 573 | user_name = user_name or settings.MERGE_DRY_RUN_USER |
|
491 | 574 | else: |
|
492 | 575 | if not user_name: |
|
493 | 576 | raise ValueError('user_name cannot be empty') |
|
494 | 577 | if not user_email: |
|
495 | 578 | raise ValueError('user_email cannot be empty') |
|
496 | 579 | if not message: |
|
497 | 580 | raise ValueError('message cannot be empty') |
|
498 | 581 | |
|
499 | 582 | try: |
|
500 | 583 | return self._merge_repo( |
|
501 | 584 | repo_id, workspace_id, target_ref, source_repo, |
|
502 | 585 | source_ref, message, user_name, user_email, dry_run=dry_run, |
|
503 | 586 | use_rebase=use_rebase, close_branch=close_branch) |
|
504 | except RepositoryError: | |
|
505 | log.exception( | |
|
506 | 'Unexpected failure when running merge, dry-run=%s', | |
|
507 | dry_run) | |
|
587 | except RepositoryError as exc: | |
|
588 | log.exception('Unexpected failure when running merge, dry-run=%s', dry_run) | |
|
508 | 589 | return MergeResponse( |

509 | False, False, None, MergeFailureReason.UNKNOWN) | 

|
591 | metadata={'exception': str(exc)}) | |
|
510 | 592 | |
|
511 | 593 | def _merge_repo(self, repo_id, workspace_id, target_ref, |
|
512 | 594 | source_repo, source_ref, merge_message, |
|
513 | 595 | merger_name, merger_email, dry_run=False, |
|
514 | 596 | use_rebase=False, close_branch=False): |
|
515 | 597 | """Internal implementation of merge.""" |
|
516 | 598 | raise NotImplementedError |
|
517 | 599 | |
|
518 | 600 | def _maybe_prepare_merge_workspace( |
|
519 | 601 | self, repo_id, workspace_id, target_ref, source_ref): |
|
520 | 602 | """ |
|
521 | 603 | Create the merge workspace. |
|
522 | 604 | |
|
523 | 605 | :param workspace_id: `workspace_id` unique identifier. |
|
524 | 606 | """ |
|
525 | 607 | raise NotImplementedError |
|
526 | 608 | |
|
527 | 609 | def _get_legacy_shadow_repository_path(self, workspace_id): |
|
528 | 610 | """ |
|
529 | 611 | Legacy version that was used before. We still need it for |
|
530 | 612 | backward compat |
|
531 | 613 | """ |
|
532 | 614 | return os.path.join( |
|
533 | 615 | os.path.dirname(self.path), |
|
534 | 616 | '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id)) |
|
535 | 617 | |
|
536 | 618 | def _get_shadow_repository_path(self, repo_id, workspace_id): |
|
537 | 619 | # The name of the shadow repository must start with '.', so it is |
|
538 | 620 | # skipped by 'rhodecode.lib.utils.get_filesystem_repos'. |
|
539 | 621 | legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id) |
|
540 | 622 | if os.path.exists(legacy_repository_path): |
|
541 | 623 | return legacy_repository_path |
|
542 | 624 | else: |
|
543 | 625 | return os.path.join( |
|
544 | 626 | os.path.dirname(self.path), |
|
545 | 627 | '.__shadow_repo_%s_%s' % (repo_id, workspace_id)) |
|
546 | 628 | |
|
547 | 629 | def cleanup_merge_workspace(self, repo_id, workspace_id): |
|
548 | 630 | """ |
|
549 | 631 | Remove merge workspace. |
|
550 | 632 | |
|
551 | 633 | This function MUST not fail in case there is no workspace associated to |
|
552 | 634 | the given `workspace_id`. |
|
553 | 635 | |
|
554 | 636 | :param workspace_id: `workspace_id` unique identifier. |
|
555 | 637 | """ |
|
556 | 638 | shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id) |
|
557 | 639 | shadow_repository_path_del = '{}.{}.delete'.format( |
|
558 | 640 | shadow_repository_path, time.time()) |
|
559 | 641 | |
|
560 | 642 | # move the shadow repo, so it never conflicts with the one used. |
|
561 | 643 | # we use this method because shutil.rmtree had some edge case problems |
|
562 | 644 | # removing symlinked repositories |
|
563 | 645 | if not os.path.isdir(shadow_repository_path): |
|
564 | 646 | return |
|
565 | 647 | |
|
566 | 648 | shutil.move(shadow_repository_path, shadow_repository_path_del) |
|
567 | 649 | try: |
|
568 | 650 | shutil.rmtree(shadow_repository_path_del, ignore_errors=False) |
|
569 | 651 | except Exception: |
|
570 | 652 | log.exception('Failed to gracefully remove shadow repo under %s', |
|
571 | 653 | shadow_repository_path_del) |
|
572 | 654 | shutil.rmtree(shadow_repository_path_del, ignore_errors=True) |
|
573 | 655 | |
|
574 | 656 | # ========== # |
|
575 | 657 | # COMMIT API # |
|
576 | 658 | # ========== # |
|
577 | 659 | |
|
578 | 660 | @LazyProperty |
|
579 | 661 | def in_memory_commit(self): |
|
580 | 662 | """ |
|
581 | 663 | Returns :class:`InMemoryCommit` object for this repository. |
|
582 | 664 | """ |
|
583 | 665 | raise NotImplementedError |
|
584 | 666 | |
|
585 | 667 | # ======================== # |
|
586 | 668 | # UTILITIES FOR SUBCLASSES # |
|
587 | 669 | # ======================== # |
|
588 | 670 | |
|
589 | 671 | def _validate_diff_commits(self, commit1, commit2): |
|
590 | 672 | """ |
|
591 | 673 | Validates that the given commits are related to this repository. |
|
592 | 674 | |
|
593 | 675 | Intended as a utility for sub classes to have a consistent validation |
|
594 | 676 | of input parameters in methods like :meth:`get_diff`. |
|
595 | 677 | """ |
|
596 | 678 | self._validate_commit(commit1) |
|
597 | 679 | self._validate_commit(commit2) |
|
598 | 680 | if (isinstance(commit1, EmptyCommit) and |
|
599 | 681 | isinstance(commit2, EmptyCommit)): |
|
600 | 682 | raise ValueError("Cannot compare two empty commits") |
|
601 | 683 | |
|
602 | 684 | def _validate_commit(self, commit): |
|
603 | 685 | if not isinstance(commit, BaseCommit): |
|
604 | 686 | raise TypeError( |
|
605 | 687 | "%s is not of type BaseCommit" % repr(commit)) |
|
606 | 688 | if commit.repository != self and not isinstance(commit, EmptyCommit): |
|
607 | 689 | raise ValueError( |
|
608 | 690 | "Commit %s must be a valid commit from this repository %s, " |
|
609 | 691 | "related to this repository instead %s." % |
|
610 | 692 | (commit, self, commit.repository)) |
|
611 | 693 | |
|
612 | 694 | def _validate_commit_id(self, commit_id): |
|
613 | 695 | if not isinstance(commit_id, basestring): |
|
614 | 696 | raise TypeError("commit_id must be a string value") |
|
615 | 697 | |
|
616 | 698 | def _validate_commit_idx(self, commit_idx): |
|
617 | 699 | if not isinstance(commit_idx, (int, long)): |
|
618 | 700 | raise TypeError("commit_idx must be a numeric value") |
|
619 | 701 | |
|
620 | 702 | def _validate_branch_name(self, branch_name): |
|
621 | 703 | if branch_name and branch_name not in self.branches_all: |
|
622 | 704 | msg = ("Branch %s not found in %s" % (branch_name, self)) |
|
623 | 705 | raise BranchDoesNotExistError(msg) |
|
624 | 706 | |
|
625 | 707 | # |
|
626 | 708 | # Supporting deprecated API parts |
|
627 | 709 | # TODO: johbo: consider to move this into a mixin |
|
628 | 710 | # |
|
629 | 711 | |
|
630 | 712 | @property |
|
631 | 713 | def EMPTY_CHANGESET(self): |
|
632 | 714 | warnings.warn( |
|
633 | 715 | "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning) |
|
634 | 716 | return self.EMPTY_COMMIT_ID |
|
635 | 717 | |
|
636 | 718 | @property |
|
637 | 719 | def revisions(self): |
|
638 | 720 | warnings.warn("Use commits attribute instead", DeprecationWarning) |
|
639 | 721 | return self.commit_ids |
|
640 | 722 | |
|
641 | 723 | @revisions.setter |
|
642 | 724 | def revisions(self, value): |
|
643 | 725 | warnings.warn("Use commits attribute instead", DeprecationWarning) |
|
644 | 726 | self.commit_ids = value |
|
645 | 727 | |
|
646 | 728 | def get_changeset(self, revision=None, pre_load=None): |
|
647 | 729 | warnings.warn("Use get_commit instead", DeprecationWarning) |
|
648 | 730 | commit_id = None |
|
649 | 731 | commit_idx = None |
|
650 | 732 | if isinstance(revision, basestring): |
|
651 | 733 | commit_id = revision |
|
652 | 734 | else: |
|
653 | 735 | commit_idx = revision |
|
654 | 736 | return self.get_commit( |
|
655 | 737 | commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load) |
|
656 | 738 | |
|
657 | 739 | def get_changesets( |
|
658 | 740 | self, start=None, end=None, start_date=None, end_date=None, |
|
659 | 741 | branch_name=None, pre_load=None): |
|
660 | 742 | warnings.warn("Use get_commits instead", DeprecationWarning) |
|
661 | 743 | start_id = self._revision_to_commit(start) |
|
662 | 744 | end_id = self._revision_to_commit(end) |
|
663 | 745 | return self.get_commits( |
|
664 | 746 | start_id=start_id, end_id=end_id, start_date=start_date, |
|
665 | 747 | end_date=end_date, branch_name=branch_name, pre_load=pre_load) |
|
666 | 748 | |
|
667 | 749 | def _revision_to_commit(self, revision): |
|
668 | 750 | """ |
|
669 | 751 | Translates a revision to a commit_id |
|
670 | 752 | |
|
671 | 753 | Helps to support the old changeset based API which allows to use |
|
672 | 754 | commit ids and commit indices interchangeable. |
|
673 | 755 | """ |
|
674 | 756 | if revision is None: |
|
675 | 757 | return revision |
|
676 | 758 | |
|
677 | 759 | if isinstance(revision, basestring): |
|
678 | 760 | commit_id = revision |
|
679 | 761 | else: |
|
680 | 762 | commit_id = self.commit_ids[revision] |
|
681 | 763 | return commit_id |
|
682 | 764 | |
|
683 | 765 | @property |
|
684 | 766 | def in_memory_changeset(self): |
|
685 | 767 | warnings.warn("Use in_memory_commit instead", DeprecationWarning) |
|
686 | 768 | return self.in_memory_commit |
|
687 | 769 | |
|
688 | 770 | def get_path_permissions(self, username): |
|
689 | 771 | """ |
|
690 | 772 | Returns a path permission checker or None if not supported |
|
691 | 773 | |
|
692 | 774 | :param username: session user name |
|
693 | 775 | :return: an instance of BasePathPermissionChecker or None |
|
694 | 776 | """ |
|
695 | 777 | return None |
|
696 | 778 | |
|
697 | 779 | def install_hooks(self, force=False): |
|
698 | 780 | return self._remote.install_hooks(force) |
|
699 | 781 | |
|
700 | 782 | |
|
701 | 783 | class BaseCommit(object): |
|
702 | 784 | """ |
|
703 | 785 | Each backend should implement it's commit representation. |
|
704 | 786 | |
|
705 | 787 | **Attributes** |
|
706 | 788 | |
|
707 | 789 | ``repository`` |
|
708 | 790 | repository object within which commit exists |
|
709 | 791 | |
|
710 | 792 | ``id`` |
|
711 | 793 | The commit id, may be ``raw_id`` or i.e. for mercurial's tip |
|
712 | 794 | just ``tip``. |
|
713 | 795 | |
|
714 | 796 | ``raw_id`` |
|
715 | 797 | raw commit representation (i.e. full 40 length sha for git |
|
716 | 798 | backend) |
|
717 | 799 | |
|
718 | 800 | ``short_id`` |
|
719 | 801 | shortened (if apply) version of ``raw_id``; it would be simple |
|
720 | 802 | shortcut for ``raw_id[:12]`` for git/mercurial backends or same |
|
721 | 803 | as ``raw_id`` for subversion |
|
722 | 804 | |
|
723 | 805 | ``idx`` |
|
724 | 806 | commit index |
|
725 | 807 | |
|
726 | 808 | ``files`` |
|
727 | 809 | list of ``FileNode`` (``Node`` with NodeKind.FILE) objects |
|
728 | 810 | |
|
729 | 811 | ``dirs`` |
|
730 | 812 | list of ``DirNode`` (``Node`` with NodeKind.DIR) objects |
|
731 | 813 | |
|
732 | 814 | ``nodes`` |
|
733 | 815 | combined list of ``Node`` objects |
|
734 | 816 | |
|
735 | 817 | ``author`` |
|
736 | 818 | author of the commit, as unicode |
|
737 | 819 | |
|
738 | 820 | ``message`` |
|
739 | 821 | message of the commit, as unicode |
|
740 | 822 | |
|
741 | 823 | ``parents`` |
|
742 | 824 | list of parent commits |
|
743 | 825 | |
|
744 | 826 | """ |
|
745 | 827 | |
|
746 | 828 | branch = None |
|
747 | 829 | """ |
|
748 | 830 | Depending on the backend this should be set to the branch name of the |
|
749 | 831 | commit. Backends not supporting branches on commits should leave this |
|
750 | 832 | value as ``None``. |
|
751 | 833 | """ |
|
752 | 834 | |
|
753 | 835 | _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}' |
|
754 | 836 | """ |
|
755 | 837 | This template is used to generate a default prefix for repository archives |
|
756 | 838 | if no prefix has been specified. |
|
757 | 839 | """ |
|
758 | 840 | |
|
759 | 841 | def __str__(self): |
|
760 | 842 | return '<%s at %s:%s>' % ( |
|
761 | 843 | self.__class__.__name__, self.idx, self.short_id) |
|
762 | 844 | |
|
763 | 845 | def __repr__(self): |
|
764 | 846 | return self.__str__() |
|
765 | 847 | |
|
766 | 848 | def __unicode__(self): |
|
767 | 849 | return u'%s:%s' % (self.idx, self.short_id) |
|
768 | 850 | |
|
769 | 851 | def __eq__(self, other): |
|
770 | 852 | same_instance = isinstance(other, self.__class__) |
|
771 | 853 | return same_instance and self.raw_id == other.raw_id |
|
772 | 854 | |
|
773 | 855 | def __json__(self): |
|
774 | 856 | parents = [] |
|
775 | 857 | try: |
|
776 | 858 | for parent in self.parents: |
|
777 | 859 | parents.append({'raw_id': parent.raw_id}) |
|
778 | 860 | except NotImplementedError: |
|
779 | 861 | # empty commit doesn't have parents implemented |
|
780 | 862 | pass |
|
781 | 863 | |
|
782 | 864 | return { |
|
783 | 865 | 'short_id': self.short_id, |
|
784 | 866 | 'raw_id': self.raw_id, |
|
785 | 867 | 'revision': self.idx, |
|
786 | 868 | 'message': self.message, |
|
787 | 869 | 'date': self.date, |
|
788 | 870 | 'author': self.author, |
|
789 | 871 | 'parents': parents, |
|
790 | 872 | 'branch': self.branch |
|
791 | 873 | } |
|
792 | 874 | |
|
793 | 875 | def __getstate__(self): |
|
794 | 876 | d = self.__dict__.copy() |
|
795 | 877 | d.pop('_remote', None) |
|
796 | 878 | d.pop('repository', None) |
|
797 | 879 | return d |
|
798 | 880 | |
|
799 | 881 | def _get_refs(self): |
|
800 | 882 | return { |
|
801 | 883 | 'branches': [self.branch] if self.branch else [], |
|
802 | 884 | 'bookmarks': getattr(self, 'bookmarks', []), |
|
803 | 885 | 'tags': self.tags |
|
804 | 886 | } |
|
805 | 887 | |
|
806 | 888 | @LazyProperty |
|
807 | 889 | def last(self): |
|
808 | 890 | """ |
|
809 | 891 | ``True`` if this is last commit in repository, ``False`` |
|
810 | 892 | otherwise; trying to access this attribute while there is no |
|
811 | 893 | commits would raise `EmptyRepositoryError` |
|
812 | 894 | """ |
|
813 | 895 | if self.repository is None: |
|
814 | 896 | raise CommitError("Cannot check if it's most recent commit") |
|
815 | 897 | return self.raw_id == self.repository.commit_ids[-1] |
|
816 | 898 | |
|
817 | 899 | @LazyProperty |
|
818 | 900 | def parents(self): |
|
819 | 901 | """ |
|
820 | 902 | Returns list of parent commits. |
|
821 | 903 | """ |
|
822 | 904 | raise NotImplementedError |
|
823 | 905 | |
|
824 | 906 | @LazyProperty |
|
825 | 907 | def first_parent(self): |
|
826 | 908 | """ |
|
827 | 909 | Returns list of parent commits. |
|
828 | 910 | """ |
|
829 | 911 | return self.parents[0] if self.parents else EmptyCommit() |
|
830 | 912 | |
|
831 | 913 | @property |
|
832 | 914 | def merge(self): |
|
833 | 915 | """ |
|
834 | 916 | Returns boolean if commit is a merge. |
|
835 | 917 | """ |
|
836 | 918 | return len(self.parents) > 1 |
|
837 | 919 | |
|
838 | 920 | @LazyProperty |
|
839 | 921 | def children(self): |
|
840 | 922 | """ |
|
841 | 923 | Returns list of child commits. |
|
842 | 924 | """ |
|
843 | 925 | raise NotImplementedError |
|
844 | 926 | |
|
845 | 927 | @LazyProperty |
|
846 | 928 | def id(self): |
|
847 | 929 | """ |
|
848 | 930 | Returns string identifying this commit. |
|
849 | 931 | """ |
|
850 | 932 | raise NotImplementedError |
|
851 | 933 | |
|
852 | 934 | @LazyProperty |
|
853 | 935 | def raw_id(self): |
|
854 | 936 | """ |
|
855 | 937 | Returns raw string identifying this commit. |
|
856 | 938 | """ |
|
857 | 939 | raise NotImplementedError |
|
858 | 940 | |
|
859 | 941 | @LazyProperty |
|
860 | 942 | def short_id(self): |
|
861 | 943 | """ |
|
862 | 944 | Returns shortened version of ``raw_id`` attribute, as string, |
|
863 | 945 | identifying this commit, useful for presentation to users. |
|
864 | 946 | """ |
|
865 | 947 | raise NotImplementedError |
|
866 | 948 | |
|
867 | 949 | @LazyProperty |
|
868 | 950 | def idx(self): |
|
869 | 951 | """ |
|
870 | 952 | Returns integer identifying this commit. |
|
871 | 953 | """ |
|
872 | 954 | raise NotImplementedError |
|
873 | 955 | |
|
874 | 956 | @LazyProperty |
|
875 | 957 | def committer(self): |
|
876 | 958 | """ |
|
877 | 959 | Returns committer for this commit |
|
878 | 960 | """ |
|
879 | 961 | raise NotImplementedError |
|
880 | 962 | |
|
881 | 963 | @LazyProperty |
|
882 | 964 | def committer_name(self): |
|
883 | 965 | """ |
|
884 | 966 | Returns committer name for this commit |
|
885 | 967 | """ |
|
886 | 968 | |
|
887 | 969 | return author_name(self.committer) |
|
888 | 970 | |
|
889 | 971 | @LazyProperty |
|
890 | 972 | def committer_email(self): |
|
891 | 973 | """ |
|
892 | 974 | Returns committer email address for this commit |
|
893 | 975 | """ |
|
894 | 976 | |
|
895 | 977 | return author_email(self.committer) |
|
896 | 978 | |
|
897 | 979 | @LazyProperty |
|
898 | 980 | def author(self): |
|
899 | 981 | """ |
|
900 | 982 | Returns author for this commit |
|
901 | 983 | """ |
|
902 | 984 | |
|
903 | 985 | raise NotImplementedError |
|
904 | 986 | |
|
905 | 987 | @LazyProperty |
|
906 | 988 | def author_name(self): |
|
907 | 989 | """ |
|
908 | 990 | Returns author name for this commit |
|
909 | 991 | """ |
|
910 | 992 | |
|
911 | 993 | return author_name(self.author) |
|
912 | 994 | |
|
913 | 995 | @LazyProperty |
|
914 | 996 | def author_email(self): |
|
915 | 997 | """ |
|
916 | 998 | Returns author email address for this commit |
|
917 | 999 | """ |
|
918 | 1000 | |
|
919 | 1001 | return author_email(self.author) |
|
920 | 1002 | |
|
921 | 1003 | def get_file_mode(self, path): |
|
922 | 1004 | """ |
|
923 | 1005 | Returns stat mode of the file at `path`. |
|
924 | 1006 | """ |
|
925 | 1007 | raise NotImplementedError |
|
926 | 1008 | |
|
927 | 1009 | def is_link(self, path): |
|
928 | 1010 | """ |
|
929 | 1011 | Returns ``True`` if given `path` is a symlink |
|
930 | 1012 | """ |
|
931 | 1013 | raise NotImplementedError |
|
932 | 1014 | |
|
933 | 1015 | def get_file_content(self, path): |
|
934 | 1016 | """ |
|
935 | 1017 | Returns content of the file at the given `path`. |
|
936 | 1018 | """ |
|
937 | 1019 | raise NotImplementedError |
|
938 | 1020 | |
|
939 | 1021 | def get_file_size(self, path): |
|
940 | 1022 | """ |
|
941 | 1023 | Returns size of the file at the given `path`. |
|
942 | 1024 | """ |
|
943 | 1025 | raise NotImplementedError |
|
944 | 1026 | |
|
945 | 1027 | def get_path_commit(self, path, pre_load=None): |
|
946 | 1028 | """ |
|
947 | 1029 | Returns last commit of the file at the given `path`. |
|
948 | 1030 | |
|
949 | 1031 | :param pre_load: Optional. List of commit attributes to load. |
|
950 | 1032 | """ |
|
951 | 1033 | commits = self.get_path_history(path, limit=1, pre_load=pre_load) |
|
952 | 1034 | if not commits: |
|
953 | 1035 | raise RepositoryError( |
|
954 | 1036 | 'Failed to fetch history for path {}. ' |
|
955 | 1037 | 'Please check if such path exists in your repository'.format( |
|
956 | 1038 | path)) |
|
957 | 1039 | return commits[0] |
|
958 | 1040 | |
|
959 | 1041 | def get_path_history(self, path, limit=None, pre_load=None): |
|
960 | 1042 | """ |
|
961 | 1043 | Returns history of file as reversed list of :class:`BaseCommit` |
|
962 | 1044 | objects for which file at given `path` has been modified. |
|
963 | 1045 | |
|
964 | 1046 | :param limit: Optional. Allows to limit the size of the returned |
|
965 | 1047 | history. This is intended as a hint to the underlying backend, so |
|
966 | 1048 | that it can apply optimizations depending on the limit. |
|
967 | 1049 | :param pre_load: Optional. List of commit attributes to load. |
|
968 | 1050 | """ |
|
969 | 1051 | raise NotImplementedError |
|
970 | 1052 | |
|
971 | 1053 | def get_file_annotate(self, path, pre_load=None): |
|
972 | 1054 | """ |
|
973 | 1055 | Returns a generator of four element tuples with |
|
974 | 1056 | lineno, sha, commit lazy loader and line |
|
975 | 1057 | |
|
976 | 1058 | :param pre_load: Optional. List of commit attributes to load. |
|
977 | 1059 | """ |
|
978 | 1060 | raise NotImplementedError |
|
979 | 1061 | |
|
980 | 1062 | def get_nodes(self, path): |
|
981 | 1063 | """ |
|
982 | 1064 | Returns combined ``DirNode`` and ``FileNode`` objects list representing |
|
983 | 1065 | state of commit at the given ``path``. |
|
984 | 1066 | |
|
985 | 1067 | :raises ``CommitError``: if node at the given ``path`` is not |
|
986 | 1068 | instance of ``DirNode`` |
|
987 | 1069 | """ |
|
988 | 1070 | raise NotImplementedError |
|
989 | 1071 | |
|
990 | 1072 | def get_node(self, path): |
|
991 | 1073 | """ |
|
992 | 1074 | Returns ``Node`` object from the given ``path``. |
|
993 | 1075 | |
|
994 | 1076 | :raises ``NodeDoesNotExistError``: if there is no node at the given |
|
995 | 1077 | ``path`` |
|
996 | 1078 | """ |
|
997 | 1079 | raise NotImplementedError |
|
998 | 1080 | |
|
999 | 1081 | def get_largefile_node(self, path): |
|
1000 | 1082 | """ |
|
1001 | 1083 | Returns the path to largefile from Mercurial/Git-lfs storage. |
|
1002 | 1084 | or None if it's not a largefile node |
|
1003 | 1085 | """ |
|
1004 | 1086 | return None |
|
1005 | 1087 | |
|
1006 | 1088 | def archive_repo(self, file_path, kind='tgz', subrepos=None, |
|
1007 | 1089 | prefix=None, write_metadata=False, mtime=None): |
|
1008 | 1090 | """ |
|
1009 | 1091 | Creates an archive containing the contents of the repository. |
|
1010 | 1092 | |
|
1011 | 1093 | :param file_path: path to the file which to create the archive. |
|
1012 | 1094 | :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``. |
|
1013 | 1095 | :param prefix: name of root directory in archive. |
|
1014 | 1096 | Default is repository name and commit's short_id joined with dash: |
|
1015 | 1097 | ``"{repo_name}-{short_id}"``. |
|
1016 | 1098 | :param write_metadata: write a metadata file into archive. |
|
1017 | 1099 | :param mtime: custom modification time for archive creation, defaults |
|
1018 | 1100 | to time.time() if not given. |
|
1019 | 1101 | |
|
1020 | 1102 | :raise VCSError: If prefix has a problem. |
|
1021 | 1103 | """ |
|
1022 | 1104 | allowed_kinds = settings.ARCHIVE_SPECS.keys() |
|
1023 | 1105 | if kind not in allowed_kinds: |
|
1024 | 1106 | raise ImproperArchiveTypeError( |
|
1025 | 1107 | 'Archive kind (%s) not supported use one of %s' % |
|
1026 | 1108 | (kind, allowed_kinds)) |
|
1027 | 1109 | |
|
1028 | 1110 | prefix = self._validate_archive_prefix(prefix) |
|
1029 | 1111 | |
|
1030 | 1112 | mtime = mtime or time.mktime(self.date.timetuple()) |
|
1031 | 1113 | |
|
1032 | 1114 | file_info = [] |
|
1033 | 1115 | cur_rev = self.repository.get_commit(commit_id=self.raw_id) |
|
1034 | 1116 | for _r, _d, files in cur_rev.walk('/'): |
|
1035 | 1117 | for f in files: |
|
1036 | 1118 | f_path = os.path.join(prefix, f.path) |
|
1037 | 1119 | file_info.append( |
|
1038 | 1120 | (f_path, f.mode, f.is_link(), f.raw_bytes)) |
|
1039 | 1121 | |
|
1040 | 1122 | if write_metadata: |
|
1041 | 1123 | metadata = [ |
|
1042 | 1124 | ('repo_name', self.repository.name), |
|
1043 | 1125 | ('rev', self.raw_id), |
|
1044 | 1126 | ('create_time', mtime), |
|
1045 | 1127 | ('branch', self.branch), |
|
1046 | 1128 | ('tags', ','.join(self.tags)), |
|
1047 | 1129 | ] |
|
1048 | 1130 | meta = ["%s:%s" % (f_name, value) for f_name, value in metadata] |
|
1049 | 1131 | file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta))) |
|
1050 | 1132 | |
|
1051 | 1133 | connection.Hg.archive_repo(file_path, mtime, file_info, kind) |
|
1052 | 1134 | |
|
1053 | 1135 | def _validate_archive_prefix(self, prefix): |
|
1054 | 1136 | if prefix is None: |
|
1055 | 1137 | prefix = self._ARCHIVE_PREFIX_TEMPLATE.format( |
|
1056 | 1138 | repo_name=safe_str(self.repository.name), |
|
1057 | 1139 | short_id=self.short_id) |
|
1058 | 1140 | elif not isinstance(prefix, str): |
|
1059 | 1141 | raise ValueError("prefix not a bytes object: %s" % repr(prefix)) |
|
1060 | 1142 | elif prefix.startswith('/'): |
|
1061 | 1143 | raise VCSError("Prefix cannot start with leading slash") |
|
1062 | 1144 | elif prefix.strip() == '': |
|
1063 | 1145 | raise VCSError("Prefix cannot be empty") |
|
1064 | 1146 | return prefix |
|
1065 | 1147 | |
|
1066 | 1148 | @LazyProperty |
|
1067 | 1149 | def root(self): |
|
1068 | 1150 | """ |
|
1069 | 1151 | Returns ``RootNode`` object for this commit. |
|
1070 | 1152 | """ |
|
1071 | 1153 | return self.get_node('') |
|
1072 | 1154 | |
|
1073 | 1155 | def next(self, branch=None): |
|
1074 | 1156 | """ |
|
1075 | 1157 | Returns next commit from current, if branch is gives it will return |
|
1076 | 1158 | next commit belonging to this branch |
|
1077 | 1159 | |
|
1078 | 1160 | :param branch: show commits within the given named branch |
|
1079 | 1161 | """ |
|
1080 | 1162 | indexes = xrange(self.idx + 1, self.repository.count()) |
|
1081 | 1163 | return self._find_next(indexes, branch) |
|
1082 | 1164 | |
|
1083 | 1165 | def prev(self, branch=None): |
|
1084 | 1166 | """ |
|
1085 | 1167 | Returns previous commit from current, if branch is gives it will |
|
1086 | 1168 | return previous commit belonging to this branch |
|
1087 | 1169 | |
|
1088 | 1170 | :param branch: show commit within the given named branch |
|
1089 | 1171 | """ |
|
1090 | 1172 | indexes = xrange(self.idx - 1, -1, -1) |
|
1091 | 1173 | return self._find_next(indexes, branch) |
|
1092 | 1174 | |
|
1093 | 1175 | def _find_next(self, indexes, branch=None): |
|
1094 | 1176 | if branch and self.branch != branch: |
|
1095 | 1177 | raise VCSError('Branch option used on commit not belonging ' |
|
1096 | 1178 | 'to that branch') |
|
1097 | 1179 | |
|
1098 | 1180 | for next_idx in indexes: |
|
1099 | 1181 | commit = self.repository.get_commit(commit_idx=next_idx) |
|
1100 | 1182 | if branch and branch != commit.branch: |
|
1101 | 1183 | continue |
|
1102 | 1184 | return commit |
|
1103 | 1185 | raise CommitDoesNotExistError |
|
1104 | 1186 | |
|
1105 | 1187 | def diff(self, ignore_whitespace=True, context=3): |
|
1106 | 1188 | """ |
|
1107 | 1189 | Returns a `Diff` object representing the change made by this commit. |
|
1108 | 1190 | """ |
|
1109 | 1191 | parent = self.first_parent |
|
1110 | 1192 | diff = self.repository.get_diff( |
|
1111 | 1193 | parent, self, |
|
1112 | 1194 | ignore_whitespace=ignore_whitespace, |
|
1113 | 1195 | context=context) |
|
1114 | 1196 | return diff |
|
1115 | 1197 | |
|
1116 | 1198 | @LazyProperty |
|
1117 | 1199 | def added(self): |
|
1118 | 1200 | """ |
|
1119 | 1201 | Returns list of added ``FileNode`` objects. |
|
1120 | 1202 | """ |
|
1121 | 1203 | raise NotImplementedError |
|
1122 | 1204 | |
|
1123 | 1205 | @LazyProperty |
|
1124 | 1206 | def changed(self): |
|
1125 | 1207 | """ |
|
1126 | 1208 | Returns list of modified ``FileNode`` objects. |
|
1127 | 1209 | """ |
|
1128 | 1210 | raise NotImplementedError |
|
1129 | 1211 | |
|
1130 | 1212 | @LazyProperty |
|
1131 | 1213 | def removed(self): |
|
1132 | 1214 | """ |
|
1133 | 1215 | Returns list of removed ``FileNode`` objects. |
|
1134 | 1216 | """ |
|
1135 | 1217 | raise NotImplementedError |
|
1136 | 1218 | |
|
1137 | 1219 | @LazyProperty |
|
1138 | 1220 | def size(self): |
|
1139 | 1221 | """ |
|
1140 | 1222 | Returns total number of bytes from contents of all filenodes. |
|
1141 | 1223 | """ |
|
1142 | 1224 | return sum((node.size for node in self.get_filenodes_generator())) |
|
1143 | 1225 | |
|
1144 | 1226 | def walk(self, topurl=''): |
|
1145 | 1227 | """ |
|
1146 | 1228 | Similar to os.walk method. Insted of filesystem it walks through |
|
1147 | 1229 | commit starting at given ``topurl``. Returns generator of tuples |
|
1148 | 1230 | (topnode, dirnodes, filenodes). |
|
1149 | 1231 | """ |
|
1150 | 1232 | topnode = self.get_node(topurl) |
|
1151 | 1233 | if not topnode.is_dir(): |
|
1152 | 1234 | return |
|
1153 | 1235 | yield (topnode, topnode.dirs, topnode.files) |
|
1154 | 1236 | for dirnode in topnode.dirs: |
|
1155 | 1237 | for tup in self.walk(dirnode.path): |
|
1156 | 1238 | yield tup |
|
1157 | 1239 | |
|
1158 | 1240 | def get_filenodes_generator(self): |
|
1159 | 1241 | """ |
|
1160 | 1242 | Returns generator that yields *all* file nodes. |
|
1161 | 1243 | """ |
|
1162 | 1244 | for topnode, dirs, files in self.walk(): |
|
1163 | 1245 | for node in files: |
|
1164 | 1246 | yield node |
|
1165 | 1247 | |
|
1166 | 1248 | # |
|
1167 | 1249 | # Utilities for sub classes to support consistent behavior |
|
1168 | 1250 | # |
|
1169 | 1251 | |
|
1170 | 1252 | def no_node_at_path(self, path): |
|
1171 | 1253 | return NodeDoesNotExistError( |
|
1172 | 1254 | u"There is no file nor directory at the given path: " |
|
1173 | 1255 | u"`%s` at commit %s" % (safe_unicode(path), self.short_id)) |
|
1174 | 1256 | |
|
1175 | 1257 | def _fix_path(self, path): |
|
1176 | 1258 | """ |
|
1177 | 1259 | Paths are stored without a trailing slash so we need to get rid of it if
|
1178 | 1260 | needed. |
|
1179 | 1261 | """ |
|
1180 | 1262 | return path.rstrip('/') |
|
1181 | 1263 | |
|
1182 | 1264 | # |
|
1183 | 1265 | # Deprecated API based on changesets |
|
1184 | 1266 | # |
|
1185 | 1267 | |
|
1186 | 1268 | @property |
|
1187 | 1269 | def revision(self): |
|
1188 | 1270 | warnings.warn("Use idx instead", DeprecationWarning) |
|
1189 | 1271 | return self.idx |
|
1190 | 1272 | |
|
1191 | 1273 | @revision.setter |
|
1192 | 1274 | def revision(self, value): |
|
1193 | 1275 | warnings.warn("Use idx instead", DeprecationWarning) |
|
1194 | 1276 | self.idx = value |
|
1195 | 1277 | |
|
1196 | 1278 | def get_file_changeset(self, path): |
|
1197 | 1279 | warnings.warn("Use get_path_commit instead", DeprecationWarning) |
|
1198 | 1280 | return self.get_path_commit(path) |
|
1199 | 1281 | |
|
1200 | 1282 | |
|
1201 | 1283 | class BaseChangesetClass(type): |
|
1202 | 1284 | |
|
1203 | 1285 | def __instancecheck__(self, instance): |
|
1204 | 1286 | return isinstance(instance, BaseCommit) |
|
1205 | 1287 | |
|
1206 | 1288 | |
|
1207 | 1289 | class BaseChangeset(BaseCommit): |
|
1208 | 1290 | |
|
1209 | 1291 | __metaclass__ = BaseChangesetClass |
|
1210 | 1292 | |
|
1211 | 1293 | def __new__(cls, *args, **kwargs): |
|
1212 | 1294 | warnings.warn( |
|
1213 | 1295 | "Use BaseCommit instead of BaseChangeset", DeprecationWarning) |
|
1214 | 1296 | return super(BaseChangeset, cls).__new__(cls, *args, **kwargs) |
|
1215 | 1297 | |
|
1216 | 1298 | |
|
1217 | 1299 | class BaseInMemoryCommit(object): |
|
1218 | 1300 | """ |
|
1219 | 1301 | Represents differences between the repository's state (most recent head) and
|
1220 | 1302 | changes made *in place*. |
|
1221 | 1303 | |
|
1222 | 1304 | **Attributes** |
|
1223 | 1305 | |
|
1224 | 1306 | ``repository`` |
|
1225 | 1307 | repository object for this in-memory-commit |
|
1226 | 1308 | |
|
1227 | 1309 | ``added`` |
|
1228 | 1310 | list of ``FileNode`` objects marked as *added* |
|
1229 | 1311 | |
|
1230 | 1312 | ``changed`` |
|
1231 | 1313 | list of ``FileNode`` objects marked as *changed* |
|
1232 | 1314 | |
|
1233 | 1315 | ``removed`` |
|
1234 | 1316 | list of ``FileNode`` or ``RemovedFileNode`` objects marked to be |
|
1235 | 1317 | *removed* |
|
1236 | 1318 | |
|
1237 | 1319 | ``parents`` |
|
1238 | 1320 | list of :class:`BaseCommit` instances representing parents of the

1239 | 1321 | in-memory commit. Should always be a 2-element sequence.
|
1240 | 1322 | |
|
1241 | 1323 | """ |
|
1242 | 1324 | |
|
1243 | 1325 | def __init__(self, repository): |
|
1244 | 1326 | self.repository = repository |
|
1245 | 1327 | self.added = [] |
|
1246 | 1328 | self.changed = [] |
|
1247 | 1329 | self.removed = [] |
|
1248 | 1330 | self.parents = [] |
|
1249 | 1331 | |
|
1250 | 1332 | def add(self, *filenodes): |
|
1251 | 1333 | """ |
|
1252 | 1334 | Marks given ``FileNode`` objects as *to be committed*. |
|
1253 | 1335 | |
|
1254 | 1336 | :raises ``NodeAlreadyExistsError``: if node with same path exists at |
|
1255 | 1337 | latest commit |
|
1256 | 1338 | :raises ``NodeAlreadyAddedError``: if node with same path is already |
|
1257 | 1339 | marked as *added* |
|
1258 | 1340 | """ |
|
1259 | 1341 | # Check if not already marked as *added* first |
|
1260 | 1342 | for node in filenodes: |
|
1261 | 1343 | if node.path in (n.path for n in self.added): |
|
1262 | 1344 | raise NodeAlreadyAddedError( |
|
1263 | 1345 | "Such FileNode %s is already marked for addition" |
|
1264 | 1346 | % node.path) |
|
1265 | 1347 | for node in filenodes: |
|
1266 | 1348 | self.added.append(node) |
|
1267 | 1349 | |
|
1268 | 1350 | def change(self, *filenodes): |
|
1269 | 1351 | """ |
|
1270 | 1352 | Marks given ``FileNode`` objects to be *changed* in next commit. |
|
1271 | 1353 | |
|
1272 | 1354 | :raises ``EmptyRepositoryError``: if there are no commits yet |
|
1273 | 1355 | :raises ``NodeAlreadyExistsError``: if node with same path is already |
|
1274 | 1356 | marked to be *changed* |
|
1275 | 1357 | :raises ``NodeAlreadyRemovedError``: if node with same path is already |
|
1276 | 1358 | marked to be *removed* |
|
1277 | 1359 | :raises ``NodeDoesNotExistError``: if node doesn't exist in latest |
|
1278 | 1360 | commit |
|
1279 | 1361 | :raises ``NodeNotChangedError``: if node hasn't really been changed
|
1280 | 1362 | """ |
|
1281 | 1363 | for node in filenodes: |
|
1282 | 1364 | if node.path in (n.path for n in self.removed): |
|
1283 | 1365 | raise NodeAlreadyRemovedError( |
|
1284 | 1366 | "Node at %s is already marked as removed" % node.path) |
|
1285 | 1367 | try: |
|
1286 | 1368 | self.repository.get_commit() |
|
1287 | 1369 | except EmptyRepositoryError: |
|
1288 | 1370 | raise EmptyRepositoryError( |
|
1289 | 1371 | "Nothing to change - try to *add* new nodes rather than " |
|
1290 | 1372 | "changing them") |
|
1291 | 1373 | for node in filenodes: |
|
1292 | 1374 | if node.path in (n.path for n in self.changed): |
|
1293 | 1375 | raise NodeAlreadyChangedError( |
|
1294 | 1376 | "Node at '%s' is already marked as changed" % node.path) |
|
1295 | 1377 | self.changed.append(node) |
|
1296 | 1378 | |
|
1297 | 1379 | def remove(self, *filenodes): |
|
1298 | 1380 | """ |
|
1299 | 1381 | Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be |
|
1300 | 1382 | *removed* in next commit. |
|
1301 | 1383 | |
|
1302 | 1384 | :raises ``NodeAlreadyRemovedError``: if node has been already marked to |
|
1303 | 1385 | be *removed* |
|
1304 | 1386 | :raises ``NodeAlreadyChangedError``: if node has been already marked to |
|
1305 | 1387 | be *changed* |
|
1306 | 1388 | """ |
|
1307 | 1389 | for node in filenodes: |
|
1308 | 1390 | if node.path in (n.path for n in self.removed): |
|
1309 | 1391 | raise NodeAlreadyRemovedError( |
|
1310 | 1392 | "Node is already marked to for removal at %s" % node.path) |
|
1311 | 1393 | if node.path in (n.path for n in self.changed): |
|
1312 | 1394 | raise NodeAlreadyChangedError( |
|
1313 | 1395 | "Node is already marked to be changed at %s" % node.path) |
|
1314 | 1396 | # We only mark node as *removed* - real removal is done by |
|
1315 | 1397 | # commit method |
|
1316 | 1398 | self.removed.append(node) |
|
1317 | 1399 | |
|
1318 | 1400 | def reset(self): |
|
1319 | 1401 | """ |
|
1320 | 1402 | Resets this instance to initial state (cleans ``added``, ``changed`` |
|
1321 | 1403 | and ``removed`` lists). |
|
1322 | 1404 | """ |
|
1323 | 1405 | self.added = [] |
|
1324 | 1406 | self.changed = [] |
|
1325 | 1407 | self.removed = [] |
|
1326 | 1408 | self.parents = [] |
|
1327 | 1409 | |
|
1328 | 1410 | def get_ipaths(self): |
|
1329 | 1411 | """ |
|
1330 | 1412 | Returns generator of paths from nodes marked as added, changed or |
|
1331 | 1413 | removed. |
|
1332 | 1414 | """ |
|
1333 | 1415 | for node in itertools.chain(self.added, self.changed, self.removed): |
|
1334 | 1416 | yield node.path |
|
1335 | 1417 | |
|
1336 | 1418 | def get_paths(self): |
|
1337 | 1419 | """ |
|
1338 | 1420 | Returns list of paths from nodes marked as added, changed or removed. |
|
1339 | 1421 | """ |
|
1340 | 1422 | return list(self.get_ipaths()) |
|
1341 | 1423 | |
|
1342 | 1424 | def check_integrity(self, parents=None): |
|
1343 | 1425 | """ |
|
1344 | 1426 | Checks in-memory commit's integrity. Also, sets parents if not |
|
1345 | 1427 | already set. |
|
1346 | 1428 | |
|
1347 | 1429 | :raises CommitError: if any error occurs (e.g.
|
1348 | 1430 | ``NodeDoesNotExistError``). |
|
1349 | 1431 | """ |
|
1350 | 1432 | if not self.parents: |
|
1351 | 1433 | parents = parents or [] |
|
1352 | 1434 | if len(parents) == 0: |
|
1353 | 1435 | try: |
|
1354 | 1436 | parents = [self.repository.get_commit(), None] |
|
1355 | 1437 | except EmptyRepositoryError: |
|
1356 | 1438 | parents = [None, None] |
|
1357 | 1439 | elif len(parents) == 1: |
|
1358 | 1440 | parents += [None] |
|
1359 | 1441 | self.parents = parents |
|
1360 | 1442 | |
|
1361 | 1443 | # Local parents, only if not None |
|
1362 | 1444 | parents = [p for p in self.parents if p] |
|
1363 | 1445 | |
|
1364 | 1446 | # Check nodes marked as added |
|
1365 | 1447 | for p in parents: |
|
1366 | 1448 | for node in self.added: |
|
1367 | 1449 | try: |
|
1368 | 1450 | p.get_node(node.path) |
|
1369 | 1451 | except NodeDoesNotExistError: |
|
1370 | 1452 | pass |
|
1371 | 1453 | else: |
|
1372 | 1454 | raise NodeAlreadyExistsError( |
|
1373 | 1455 | "Node `%s` already exists at %s" % (node.path, p)) |
|
1374 | 1456 | |
|
1375 | 1457 | # Check nodes marked as changed |
|
1376 | 1458 | missing = set(self.changed) |
|
1377 | 1459 | not_changed = set(self.changed) |
|
1378 | 1460 | if self.changed and not parents: |
|
1379 | 1461 | raise NodeDoesNotExistError(str(self.changed[0].path)) |
|
1380 | 1462 | for p in parents: |
|
1381 | 1463 | for node in self.changed: |
|
1382 | 1464 | try: |
|
1383 | 1465 | old = p.get_node(node.path) |
|
1384 | 1466 | missing.remove(node) |
|
1385 | 1467 | # if content actually changed, remove node from not_changed |
|
1386 | 1468 | if old.content != node.content: |
|
1387 | 1469 | not_changed.remove(node) |
|
1388 | 1470 | except NodeDoesNotExistError: |
|
1389 | 1471 | pass |
|
1390 | 1472 | if self.changed and missing: |
|
1391 | 1473 | raise NodeDoesNotExistError( |
|
1392 | 1474 | "Node `%s` marked as modified but missing in parents: %s" |
|
1393 | 1475 | % (missing.pop().path, parents))
|
1394 | 1476 | |
|
1395 | 1477 | if self.changed and not_changed: |
|
1396 | 1478 | raise NodeNotChangedError( |
|
1397 | 1479 | "Node `%s` wasn't actually changed (parents: %s)" |
|
1398 | 1480 | % (not_changed.pop().path, parents)) |
|
1399 | 1481 | |
|
1400 | 1482 | # Check nodes marked as removed |
|
1401 | 1483 | if self.removed and not parents: |
|
1402 | 1484 | raise NodeDoesNotExistError( |
|
1403 | 1485 | "Cannot remove node at %s as there " |
|
1404 | 1486 | "were no parents specified" % self.removed[0].path) |
|
1405 | 1487 | really_removed = set() |
|
1406 | 1488 | for p in parents: |
|
1407 | 1489 | for node in self.removed: |
|
1408 | 1490 | try: |
|
1409 | 1491 | p.get_node(node.path) |
|
1410 | 1492 | really_removed.add(node) |
|
1411 | 1493 | except CommitError: |
|
1412 | 1494 | pass |
|
1413 | 1495 | not_removed = set(self.removed) - really_removed |
|
1414 | 1496 | if not_removed: |
|
1415 | 1497 | # TODO: johbo: This code branch does not seem to be covered |
|
1416 | 1498 | raise NodeDoesNotExistError( |
|
1417 | 1499 | "Cannot remove node at %s from " |
|
1418 | 1500 | "following parents: %s" % (not_removed, parents)) |
|
1419 | 1501 | |
|
1420 | 1502 | def commit( |
|
1421 | 1503 | self, message, author, parents=None, branch=None, date=None, |
|
1422 | 1504 | **kwargs): |
|
1423 | 1505 | """ |
|
1424 | 1506 | Performs an in-memory commit (doesn't check the workdir in any way) and
|
1425 | 1507 | returns newly created :class:`BaseCommit`. Updates repository's |
|
1426 | 1508 | attribute `commits`. |
|
1427 | 1509 | |
|
1428 | 1510 | .. note:: |
|
1429 | 1511 | |
|
1430 | 1512 | While overriding this method each backend should call
|
1431 | 1513 | ``self.check_integrity(parents)`` in the first place. |
|
1432 | 1514 | |
|
1433 | 1515 | :param message: message of the commit |
|
1434 | 1516 | :param author: full username, i.e. "Joe Doe <joe.doe@example.com>" |
|
1435 | 1517 | :param parents: single parent or sequence of parents from which commit |
|
1436 | 1518 | would be derived |
|
1437 | 1519 | :param date: ``datetime.datetime`` instance. Defaults to |
|
1438 | 1520 | ``datetime.datetime.now()``. |
|
1439 | 1521 | :param branch: branch name, as string. If none given, default backend's |
|
1440 | 1522 | branch would be used. |
|
1441 | 1523 | |
|
1442 | 1524 | :raises ``CommitError``: if any error occurs while committing |
|
1443 | 1525 | """ |
|
1444 | 1526 | raise NotImplementedError |
|
1445 | 1527 | |
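Taken together, the in-memory commit API supports a mark-then-commit workflow. A minimal sketch, assuming ``repo`` is an initialized backend repository and that ``FileNode`` is importable from ``rhodecode.lib.vcs.nodes`` (an assumption about the module layout; path and content are illustrative):

.. code-block:: python

    from rhodecode.lib.vcs.nodes import FileNode

    imc = repo.in_memory_commit  # backend-specific BaseInMemoryCommit
    imc.add(FileNode('docs/index.rst', content='index'))
    # each backend's commit() calls check_integrity(parents) first
    new_commit = imc.commit(
        message=u'Add documentation index',
        author=u'Joe Doe <joe.doe@example.com>')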
|
1446 | 1528 | |
|
1447 | 1529 | class BaseInMemoryChangesetClass(type): |
|
1448 | 1530 | |
|
1449 | 1531 | def __instancecheck__(self, instance): |
|
1450 | 1532 | return isinstance(instance, BaseInMemoryCommit) |
|
1451 | 1533 | |
|
1452 | 1534 | |
|
1453 | 1535 | class BaseInMemoryChangeset(BaseInMemoryCommit): |
|
1454 | 1536 | |
|
1455 | 1537 | __metaclass__ = BaseInMemoryChangesetClass |
|
1456 | 1538 | |
|
1457 | 1539 | def __new__(cls, *args, **kwargs): |
|
1458 | 1540 | warnings.warn( |
|
1459 | 1541 | "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning) |
|
1460 | 1542 | return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs) |
|
1461 | 1543 | |
|
1462 | 1544 | |
|
1463 | 1545 | class EmptyCommit(BaseCommit): |
|
1464 | 1546 | """ |
|
1465 | 1547 | A dummy empty commit. It's possible to pass a hash when creating

1466 | 1548 | an EmptyCommit.
|
1467 | 1549 | """ |
|
1468 | 1550 | |
|
1469 | 1551 | def __init__( |
|
1470 | 1552 | self, commit_id='0' * 40, repo=None, alias=None, idx=-1, |
|
1471 | 1553 | message='', author='', date=None): |
|
1472 | 1554 | self._empty_commit_id = commit_id |
|
1473 | 1555 | # TODO: johbo: Solve idx parameter, default value does not make |
|
1474 | 1556 | # too much sense |
|
1475 | 1557 | self.idx = idx |
|
1476 | 1558 | self.message = message |
|
1477 | 1559 | self.author = author |
|
1478 | 1560 | self.date = date or datetime.datetime.fromtimestamp(0) |
|
1479 | 1561 | self.repository = repo |
|
1480 | 1562 | self.alias = alias |
|
1481 | 1563 | |
|
1482 | 1564 | @LazyProperty |
|
1483 | 1565 | def raw_id(self): |
|
1484 | 1566 | """ |
|
1485 | 1567 | Returns raw string identifying this commit, useful for web |
|
1486 | 1568 | representation. |
|
1487 | 1569 | """ |
|
1488 | 1570 | |
|
1489 | 1571 | return self._empty_commit_id |
|
1490 | 1572 | |
|
1491 | 1573 | @LazyProperty |
|
1492 | 1574 | def branch(self): |
|
1493 | 1575 | if self.alias: |
|
1494 | 1576 | from rhodecode.lib.vcs.backends import get_backend |
|
1495 | 1577 | return get_backend(self.alias).DEFAULT_BRANCH_NAME |
|
1496 | 1578 | |
|
1497 | 1579 | @LazyProperty |
|
1498 | 1580 | def short_id(self): |
|
1499 | 1581 | return self.raw_id[:12] |
|
1500 | 1582 | |
|
1501 | 1583 | @LazyProperty |
|
1502 | 1584 | def id(self): |
|
1503 | 1585 | return self.raw_id |
|
1504 | 1586 | |
|
1505 | 1587 | def get_path_commit(self, path): |
|
1506 | 1588 | return self |
|
1507 | 1589 | |
|
1508 | 1590 | def get_file_content(self, path): |
|
1509 | 1591 | return u'' |
|
1510 | 1592 | |
|
1511 | 1593 | def get_file_size(self, path): |
|
1512 | 1594 | return 0 |
|
1513 | 1595 | |
|
1514 | 1596 | |
|
1515 | 1597 | class EmptyChangesetClass(type): |
|
1516 | 1598 | |
|
1517 | 1599 | def __instancecheck__(self, instance): |
|
1518 | 1600 | return isinstance(instance, EmptyCommit) |
|
1519 | 1601 | |
|
1520 | 1602 | |
|
1521 | 1603 | class EmptyChangeset(EmptyCommit): |
|
1522 | 1604 | |
|
1523 | 1605 | __metaclass__ = EmptyChangesetClass |
|
1524 | 1606 | |
|
1525 | 1607 | def __new__(cls, *args, **kwargs): |
|
1526 | 1608 | warnings.warn( |
|
1527 | 1609 | "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning) |
|
1528 | 1610 | return super(EmptyChangeset, cls).__new__(cls, *args, **kwargs)
|
1529 | 1611 | |
|
1530 | 1612 | def __init__(self, cs='0' * 40, repo=None, requested_revision=None, |
|
1531 | 1613 | alias=None, revision=-1, message='', author='', date=None): |
|
1532 | 1614 | if requested_revision is not None: |
|
1533 | 1615 | warnings.warn( |
|
1534 | 1616 | "Parameter requested_revision not supported anymore", |
|
1535 | 1617 | DeprecationWarning) |
|
1536 | 1618 | super(EmptyChangeset, self).__init__( |
|
1537 | 1619 | commit_id=cs, repo=repo, alias=alias, idx=revision, |
|
1538 | 1620 | message=message, author=author, date=date) |
|
1539 | 1621 | |
|
1540 | 1622 | @property |
|
1541 | 1623 | def revision(self): |
|
1542 | 1624 | warnings.warn("Use idx instead", DeprecationWarning) |
|
1543 | 1625 | return self.idx |
|
1544 | 1626 | |
|
1545 | 1627 | @revision.setter |
|
1546 | 1628 | def revision(self, value): |
|
1547 | 1629 | warnings.warn("Use idx instead", DeprecationWarning) |
|
1548 | 1630 | self.idx = value |
|
1549 | 1631 | |
|
1550 | 1632 | |
|
1551 | 1633 | class EmptyRepository(BaseRepository): |
|
1552 | 1634 | def __init__(self, repo_path=None, config=None, create=False, **kwargs): |
|
1553 | 1635 | pass |
|
1554 | 1636 | |
|
1555 | 1637 | def get_diff(self, *args, **kwargs): |
|
1556 | 1638 | from rhodecode.lib.vcs.backends.git.diff import GitDiff |
|
1557 | 1639 | return GitDiff('') |
|
1558 | 1640 | |
|
1559 | 1641 | |
|
1560 | 1642 | class CollectionGenerator(object): |
|
1561 | 1643 | |
|
1562 | 1644 | def __init__(self, repo, commit_ids, collection_size=None, pre_load=None): |
|
1563 | 1645 | self.repo = repo |
|
1564 | 1646 | self.commit_ids = commit_ids |
|
1565 | 1647 | # TODO: (oliver) this isn't currently hooked up |
|
1566 | 1648 | self.collection_size = None |
|
1567 | 1649 | self.pre_load = pre_load |
|
1568 | 1650 | |
|
1569 | 1651 | def __len__(self): |
|
1570 | 1652 | if self.collection_size is not None: |
|
1571 | 1653 | return self.collection_size |
|
1572 | 1654 | return self.commit_ids.__len__() |
|
1573 | 1655 | |
|
1574 | 1656 | def __iter__(self): |
|
1575 | 1657 | for commit_id in self.commit_ids: |
|
1576 | 1658 | # TODO: johbo: Mercurial passes in commit indices or commit ids |
|
1577 | 1659 | yield self._commit_factory(commit_id) |
|
1578 | 1660 | |
|
1579 | 1661 | def _commit_factory(self, commit_id): |
|
1580 | 1662 | """ |
|
1581 | 1663 | Allows backends to override the way commits are generated. |
|
1582 | 1664 | """ |
|
1583 | 1665 | return self.repo.get_commit(commit_id=commit_id, |
|
1584 | 1666 | pre_load=self.pre_load) |
|
1585 | 1667 | |
|
1586 | 1668 | def __getslice__(self, i, j): |
|
1587 | 1669 | """ |
|
1588 | 1670 | Returns an iterator over a slice of the repository
|
1589 | 1671 | """ |
|
1590 | 1672 | commit_ids = self.commit_ids[i:j] |
|
1591 | 1673 | return self.__class__( |
|
1592 | 1674 | self.repo, commit_ids, pre_load=self.pre_load) |
|
1593 | 1675 | |
|
1594 | 1676 | def __repr__(self): |
|
1595 | 1677 | return '<CollectionGenerator[len:%s]>' % (self.__len__()) |
|
1596 | 1678 | |
|
1597 | 1679 | |
|
1598 | 1680 | class Config(object): |
|
1599 | 1681 | """ |
|
1600 | 1682 | Represents the configuration for a repository. |
|
1601 | 1683 | |
|
1602 | 1684 | The API is inspired by :class:`ConfigParser.ConfigParser` from the |
|
1603 | 1685 | standard library. It implements only the needed subset. |
|
1604 | 1686 | """ |
|
1605 | 1687 | |
|
1606 | 1688 | def __init__(self): |
|
1607 | 1689 | self._values = {} |
|
1608 | 1690 | |
|
1609 | 1691 | def copy(self): |
|
1610 | 1692 | clone = Config() |
|
1611 | 1693 | for section, values in self._values.items(): |
|
1612 | 1694 | clone._values[section] = values.copy() |
|
1613 | 1695 | return clone |
|
1614 | 1696 | |
|
1615 | 1697 | def __repr__(self): |
|
1616 | 1698 | return '<Config(%s sections) at %s>' % ( |
|
1617 | 1699 | len(self._values), hex(id(self))) |
|
1618 | 1700 | |
|
1619 | 1701 | def items(self, section): |
|
1620 | 1702 | return self._values.get(section, {}).iteritems() |
|
1621 | 1703 | |
|
1622 | 1704 | def get(self, section, option): |
|
1623 | 1705 | return self._values.get(section, {}).get(option) |
|
1624 | 1706 | |
|
1625 | 1707 | def set(self, section, option, value): |
|
1626 | 1708 | section_values = self._values.setdefault(section, {}) |
|
1627 | 1709 | section_values[option] = value |
|
1628 | 1710 | |
|
1629 | 1711 | def clear_section(self, section): |
|
1630 | 1712 | self._values[section] = {} |
|
1631 | 1713 | |
|
1632 | 1714 | def serialize(self): |
|
1633 | 1715 | """ |
|
1634 | 1716 | Creates a list of three-element tuples (section, key, value) representing
|
1635 | 1717 | this config object. |
|
1636 | 1718 | """ |
|
1637 | 1719 | items = [] |
|
1638 | 1720 | for section in self._values: |
|
1639 | 1721 | for option, value in self._values[section].items(): |
|
1640 | 1722 | items.append( |
|
1641 | 1723 | (safe_str(section), safe_str(option), safe_str(value))) |
|
1642 | 1724 | return items |
|
1643 | 1725 | |
|
1644 | 1726 | |
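Because ``Config`` implements only a small ``ConfigParser``-like subset, its whole round-trip fits in a few lines; this sketch uses only the methods defined above:

.. code-block:: python

    config = Config()
    config.set('ui', 'username', 'Joe Doe <joe.doe@example.com>')

    clone = config.copy()            # per-section copies, so they diverge
    clone.set('ui', 'quiet', 'true')

    config.get('ui', 'quiet')        # None - the original is untouched
    config.serialize()               # list of (section, option, value) tuples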
|
1645 | 1727 | class Diff(object): |
|
1646 | 1728 | """ |
|
1647 | 1729 | Represents a diff result from a repository backend. |
|
1648 | 1730 | |
|
1649 | 1731 | Subclasses have to provide a backend specific value for |
|
1650 | 1732 | :attr:`_header_re` and :attr:`_meta_re`. |
|
1651 | 1733 | """ |
|
1652 | 1734 | _meta_re = None |
|
1653 | 1735 | _header_re = None |
|
1654 | 1736 | |
|
1655 | 1737 | def __init__(self, raw_diff): |
|
1656 | 1738 | self.raw = raw_diff |
|
1657 | 1739 | |
|
1658 | 1740 | def chunks(self): |
|
1659 | 1741 | """ |
|
1660 | 1742 | split the diff into chunks of separate ``diff --git a/file b/file`` parts;

1661 | 1743 | to make diffs consistent we must prepend them with \n, and make sure

1662 | 1744 | we can detect the last chunk, as it also has a special rule
|
1663 | 1745 | """ |
|
1664 | 1746 | |
|
1665 | 1747 | diff_parts = ('\n' + self.raw).split('\ndiff --git') |
|
1666 | 1748 | header = diff_parts[0] |
|
1667 | 1749 | |
|
1668 | 1750 | if self._meta_re: |
|
1669 | 1751 | match = self._meta_re.match(header) |
|
1670 | 1752 | |
|
1671 | 1753 | chunks = diff_parts[1:] |
|
1672 | 1754 | total_chunks = len(chunks) |
|
1673 | 1755 | |
|
1674 | 1756 | return ( |
|
1675 | 1757 | DiffChunk(chunk, self, cur_chunk == total_chunks) |
|
1676 | 1758 | for cur_chunk, chunk in enumerate(chunks, start=1)) |
|
1677 | 1759 | |
|
1678 | 1760 | |
|
1679 | 1761 | class DiffChunk(object): |
|
1680 | 1762 | |
|
1681 | 1763 | def __init__(self, chunk, diff, last_chunk): |
|
1682 | 1764 | self._diff = diff |
|
1683 | 1765 | |
|
1684 | 1766 | # since we split by \ndiff --git, that part is lost from the original diff

1685 | 1767 | # and we need to re-apply it at the end, EXCEPT if it's the last chunk
|
1686 | 1768 | if not last_chunk: |
|
1687 | 1769 | chunk += '\n' |
|
1688 | 1770 | |
|
1689 | 1771 | match = self._diff._header_re.match(chunk) |
|
1690 | 1772 | self.header = match.groupdict() |
|
1691 | 1773 | self.diff = chunk[match.end():] |
|
1692 | 1774 | self.raw = chunk |
|
1693 | 1775 | |
|
1694 | 1776 | |
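The split rule in ``Diff.chunks`` can be shown standalone. A runnable sketch over a made-up two-file diff; it reproduces only the split step, since the header regexes are backend-specific:

.. code-block:: python

    raw = (
        'diff --git a/a.txt b/a.txt\n'
        '--- a/a.txt\n+++ b/a.txt\n@@ -1 +1 @@\n-old\n+new\n'
        'diff --git a/b.txt b/b.txt\n'
        '--- a/b.txt\n+++ b/b.txt\n@@ -1 +1 @@\n-foo\n+bar\n')

    # prepending '\n' makes the first 'diff --git' split like all others
    parts = ('\n' + raw).split('\ndiff --git')
    header, chunks = parts[0], parts[1:]
    assert len(chunks) == 2  # one chunk per changed file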
|
1695 | 1777 | class BasePathPermissionChecker(object): |
|
1696 | 1778 | |
|
1697 | 1779 | @staticmethod |
|
1698 | 1780 | def create_from_patterns(includes, excludes): |
|
1699 | 1781 | if includes and '*' in includes and not excludes: |
|
1700 | 1782 | return AllPathPermissionChecker() |
|
1701 | 1783 | elif excludes and '*' in excludes: |
|
1702 | 1784 | return NonePathPermissionChecker() |
|
1703 | 1785 | else: |
|
1704 | 1786 | return PatternPathPermissionChecker(includes, excludes) |
|
1705 | 1787 | |
|
1706 | 1788 | @property |
|
1707 | 1789 | def has_full_access(self): |
|
1708 | 1790 | raise NotImplementedError()
|
1709 | 1791 | |
|
1710 | 1792 | def has_access(self, path): |
|
1711 | 1793 | raise NotImplementedError()
|
1712 | 1794 | |
|
1713 | 1795 | |
|
1714 | 1796 | class AllPathPermissionChecker(BasePathPermissionChecker): |
|
1715 | 1797 | |
|
1716 | 1798 | @property |
|
1717 | 1799 | def has_full_access(self): |
|
1718 | 1800 | return True |
|
1719 | 1801 | |
|
1720 | 1802 | def has_access(self, path): |
|
1721 | 1803 | return True |
|
1722 | 1804 | |
|
1723 | 1805 | |
|
1724 | 1806 | class NonePathPermissionChecker(BasePathPermissionChecker): |
|
1725 | 1807 | |
|
1726 | 1808 | @property |
|
1727 | 1809 | def has_full_access(self): |
|
1728 | 1810 | return False |
|
1729 | 1811 | |
|
1730 | 1812 | def has_access(self, path): |
|
1731 | 1813 | return False |
|
1732 | 1814 | |
|
1733 | 1815 | |
|
1734 | 1816 | class PatternPathPermissionChecker(BasePathPermissionChecker): |
|
1735 | 1817 | |
|
1736 | 1818 | def __init__(self, includes, excludes): |
|
1737 | 1819 | self.includes = includes |
|
1738 | 1820 | self.excludes = excludes |
|
1739 | 1821 | self.includes_re = [] if not includes else [ |
|
1740 | 1822 | re.compile(fnmatch.translate(pattern)) for pattern in includes] |
|
1741 | 1823 | self.excludes_re = [] if not excludes else [ |
|
1742 | 1824 | re.compile(fnmatch.translate(pattern)) for pattern in excludes] |
|
1743 | 1825 | |
|
1744 | 1826 | @property |
|
1745 | 1827 | def has_full_access(self): |
|
1746 | 1828 | return '*' in self.includes and not self.excludes |
|
1747 | 1829 | |
|
1748 | 1830 | def has_access(self, path): |
|
1749 | 1831 | for regex in self.excludes_re: |
|
1750 | 1832 | if regex.match(path): |
|
1751 | 1833 | return False |
|
1752 | 1834 | for regex in self.includes_re: |
|
1753 | 1835 | if regex.match(path): |
|
1754 | 1836 | return True |
|
1755 | 1837 | return False |
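A short sketch of the factory above; the include/exclude patterns are fnmatch globs, and excludes win because ``has_access`` checks them first (paths are illustrative):

.. code-block:: python

    from rhodecode.lib.vcs.backends.base import BasePathPermissionChecker

    checker = BasePathPermissionChecker.create_from_patterns(
        includes=['docs/*', 'README.rst'],
        excludes=['docs/internal/*'])

    checker.has_full_access                       # False
    checker.has_access('README.rst')              # True - include matches
    checker.has_access('docs/internal/key.pem')   # False - exclude wins
    checker.has_access('setup.py')                # False - nothing matches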
@@ -1,999 +1,1010 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2014-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | GIT repository module |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | import os |
|
27 | 27 | import re |
|
28 | 28 | |
|
29 | 29 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
30 | 30 | |
|
31 | 31 | from rhodecode.lib.compat import OrderedDict |
|
32 | 32 | from rhodecode.lib.datelib import ( |
|
33 | 33 | utcdate_fromtimestamp, makedate, date_astimestamp) |
|
34 | 34 | from rhodecode.lib.utils import safe_unicode, safe_str |
|
35 | 35 | from rhodecode.lib.vcs import connection, path as vcspath |
|
36 | 36 | from rhodecode.lib.vcs.backends.base import ( |
|
37 | 37 | BaseRepository, CollectionGenerator, Config, MergeResponse, |
|
38 | 38 | MergeFailureReason, Reference) |
|
39 | 39 | from rhodecode.lib.vcs.backends.git.commit import GitCommit |
|
40 | 40 | from rhodecode.lib.vcs.backends.git.diff import GitDiff |
|
41 | 41 | from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit |
|
42 | 42 | from rhodecode.lib.vcs.exceptions import ( |
|
43 | 43 | CommitDoesNotExistError, EmptyRepositoryError, |
|
44 | 44 | RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError) |
|
45 | 45 | |
|
46 | 46 | |
|
47 | 47 | SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
|
48 | 48 | |
|
49 | 49 | log = logging.getLogger(__name__) |
|
50 | 50 | |
|
51 | 51 | |
|
52 | 52 | class GitRepository(BaseRepository): |
|
53 | 53 | """ |
|
54 | 54 | Git repository backend. |
|
55 | 55 | """ |
|
56 | 56 | DEFAULT_BRANCH_NAME = 'master' |
|
57 | 57 | |
|
58 | 58 | contact = BaseRepository.DEFAULT_CONTACT |
|
59 | 59 | |
|
60 | 60 | def __init__(self, repo_path, config=None, create=False, src_url=None, |
|
61 | 61 | do_workspace_checkout=False, with_wire=None, bare=False): |
|
62 | 62 | |
|
63 | 63 | self.path = safe_str(os.path.abspath(repo_path)) |
|
64 | 64 | self.config = config if config else self.get_default_config() |
|
65 | 65 | self.with_wire = with_wire |
|
66 | 66 | |
|
67 | 67 | self._init_repo(create, src_url, do_workspace_checkout, bare) |
|
68 | 68 | |
|
69 | 69 | # caches |
|
70 | 70 | self._commit_ids = {} |
|
71 | 71 | |
|
72 | 72 | @LazyProperty |
|
73 | 73 | def _remote(self): |
|
74 | 74 | return connection.Git(self.path, self.config, with_wire=self.with_wire) |
|
75 | 75 | |
|
76 | 76 | @LazyProperty |
|
77 | 77 | def bare(self): |
|
78 | 78 | return self._remote.bare() |
|
79 | 79 | |
|
80 | 80 | @LazyProperty |
|
81 | 81 | def head(self): |
|
82 | 82 | return self._remote.head() |
|
83 | 83 | |
|
84 | 84 | @LazyProperty |
|
85 | 85 | def commit_ids(self): |
|
86 | 86 | """ |
|
87 | 87 | Returns a list of commit ids, in ascending order. Being a lazy

88 | 88 | attribute allows external tools to inject commit ids from a cache.
|
89 | 89 | """ |
|
90 | 90 | commit_ids = self._get_all_commit_ids() |
|
91 | 91 | self._rebuild_cache(commit_ids) |
|
92 | 92 | return commit_ids |
|
93 | 93 | |
|
94 | 94 | def _rebuild_cache(self, commit_ids): |
|
95 | 95 | self._commit_ids = dict((commit_id, index) |
|
96 | 96 | for index, commit_id in enumerate(commit_ids)) |
|
97 | 97 | |
|
98 | 98 | def run_git_command(self, cmd, **opts): |
|
99 | 99 | """ |
|
100 | 100 | Runs the given ``cmd`` as a git command and returns a tuple
|
101 | 101 | (stdout, stderr). |
|
102 | 102 | |
|
103 | 103 | :param cmd: git command to be executed |
|
104 | 104 | :param opts: env options to pass into Subprocess command |
|
105 | 105 | """ |
|
106 | 106 | if not isinstance(cmd, list): |
|
107 | 107 | raise ValueError('cmd must be a list, got %s instead' % type(cmd)) |
|
108 | 108 | |
|
109 | 109 | skip_stderr_log = opts.pop('skip_stderr_log', False) |
|
110 | 110 | out, err = self._remote.run_git_command(cmd, **opts) |
|
111 | 111 | if err and not skip_stderr_log: |
|
112 | 112 | log.debug('Stderr output of git command "%s":\n%s', cmd, err) |
|
113 | 113 | return out, err |
|
114 | 114 | |
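A one-line usage sketch (``repo`` is assumed to be an initialized ``GitRepository``); note that ``cmd`` must be a list, never a shell string:

.. code-block:: python

    stdout, stderr = repo.run_git_command(
        ['rev-parse', '--abbrev-ref', 'HEAD'], skip_stderr_log=True)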
|
115 | 115 | @staticmethod |
|
116 | 116 | def check_url(url, config): |
|
117 | 117 | """ |
|
118 | 118 | Function will check the given url and try to verify that it's a valid

119 | 119 | link. Sometimes it may happen that git will issue a basic

120 | 120 | auth request that can cause the whole API to hang when used from python

121 | 121 | or other external calls.

122 | 122 |

123 | 123 | On failure it'll raise urllib2.HTTPError; the exception is also thrown

124 | 124 | when the return code is not 200
|
125 | 125 | """ |
|
126 | 126 | # first check if it's not a url
|
127 | 127 | if os.path.isdir(url) or url.startswith('file:'): |
|
128 | 128 | return True |
|
129 | 129 | |
|
130 | 130 | if '+' in url.split('://', 1)[0]: |
|
131 | 131 | url = url.split('+', 1)[1] |
|
132 | 132 | |
|
133 | 133 | # Request the _remote to verify the url |
|
134 | 134 | return connection.Git.check_url(url, config.serialize()) |
|
135 | 135 | |
|
136 | 136 | @staticmethod |
|
137 | 137 | def is_valid_repository(path): |
|
138 | 138 | if os.path.isdir(os.path.join(path, '.git')): |
|
139 | 139 | return True |
|
140 | 140 | # check case of bare repository |
|
141 | 141 | try: |
|
142 | 142 | GitRepository(path) |
|
143 | 143 | return True |
|
144 | 144 | except VCSError: |
|
145 | 145 | pass |
|
146 | 146 | return False |
|
147 | 147 | |
|
148 | 148 | def _init_repo(self, create, src_url=None, do_workspace_checkout=False, |
|
149 | 149 | bare=False): |
|
150 | 150 | if create and os.path.exists(self.path): |
|
151 | 151 | raise RepositoryError( |
|
152 | 152 | "Cannot create repository at %s, location already exist" |
|
153 | 153 | % self.path) |
|
154 | 154 | |
|
155 | 155 | if bare and do_workspace_checkout: |
|
156 | 156 | raise RepositoryError("Cannot update a bare repository") |
|
157 | 157 | try: |
|
158 | 158 | |
|
159 | 159 | if src_url: |
|
160 | 160 | # check URL before any actions |
|
161 | 161 | GitRepository.check_url(src_url, self.config) |
|
162 | 162 | |
|
163 | 163 | if create: |
|
164 | 164 | os.makedirs(self.path, mode=0o755) |
|
165 | 165 | |
|
166 | 166 | if bare: |
|
167 | 167 | self._remote.init_bare() |
|
168 | 168 | else: |
|
169 | 169 | self._remote.init() |
|
170 | 170 | |
|
171 | 171 | if src_url and bare: |
|
172 | 172 | # bare repository only allows a fetch and checkout is not allowed |
|
173 | 173 | self.fetch(src_url, commit_ids=None) |
|
174 | 174 | elif src_url: |
|
175 | 175 | self.pull(src_url, commit_ids=None, |
|
176 | 176 | update_after=do_workspace_checkout) |
|
177 | 177 | |
|
178 | 178 | else: |
|
179 | 179 | if not self._remote.assert_correct_path(): |
|
180 | 180 | raise RepositoryError( |
|
181 | 181 | 'Path "%s" does not contain a Git repository' % |
|
182 | 182 | (self.path,)) |
|
183 | 183 | |
|
184 | 184 | # TODO: johbo: check if we have to translate the OSError here |
|
185 | 185 | except OSError as err: |
|
186 | 186 | raise RepositoryError(err) |
|
187 | 187 | |
|
188 | 188 | def _get_all_commit_ids(self, filters=None): |
|
189 | 189 | # we must check if this repo is not empty, since later command |
|
190 | 190 | # fails if it is. And it's cheaper to ask than to handle the subprocess
|
191 | 191 | # errors |
|
192 | 192 | |
|
193 | 193 | head = self._remote.head(show_exc=False) |
|
194 | 194 | if not head: |
|
195 | 195 | return [] |
|
196 | 196 | |
|
197 | 197 | rev_filter = ['--branches', '--tags'] |
|
198 | 198 | extra_filter = [] |
|
199 | 199 | |
|
200 | 200 | if filters: |
|
201 | 201 | if filters.get('since'): |
|
202 | 202 | extra_filter.append('--since=%s' % (filters['since'])) |
|
203 | 203 | if filters.get('until'): |
|
204 | 204 | extra_filter.append('--until=%s' % (filters['until'])) |
|
205 | 205 | if filters.get('branch_name'): |
|
206 | 206 | rev_filter = ['--tags'] |
|
207 | 207 | extra_filter.append(filters['branch_name']) |
|
208 | 208 | rev_filter.extend(extra_filter) |
|
209 | 209 | |
|
210 | 210 | # if filters.get('start') or filters.get('end'): |
|
211 | 211 | # # skip is offset, max-count is limit |
|
212 | 212 | # if filters.get('start'): |
|
213 | 213 | # extra_filter += ' --skip=%s' % filters['start'] |
|
214 | 214 | # if filters.get('end'): |
|
215 | 215 | # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0)) |
|
216 | 216 | |
|
217 | 217 | cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter |
|
218 | 218 | try: |
|
219 | 219 | output, __ = self.run_git_command(cmd) |
|
220 | 220 | except RepositoryError: |
|
221 | 221 | # Can be raised for empty repositories |
|
222 | 222 | return [] |
|
223 | 223 | return output.splitlines() |
|
224 | 224 | |
|
225 | 225 | def _get_commit_id(self, commit_id_or_idx): |
|
226 | 226 | def is_null(value): |
|
227 | 227 | return len(value) == commit_id_or_idx.count('0') |
|
228 | 228 | |
|
229 | 229 | if self.is_empty(): |
|
230 | 230 | raise EmptyRepositoryError("There are no commits yet") |
|
231 | 231 | |
|
232 | 232 | if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1): |
|
233 | 233 | return self.commit_ids[-1] |
|
234 | 234 | |
|
235 | 235 | is_bstr = isinstance(commit_id_or_idx, (str, unicode)) |
|
236 | 236 | if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) |
|
237 | 237 | or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)): |
|
238 | 238 | try: |
|
239 | 239 | commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)] |
|
240 | 240 | except Exception: |
|
241 | 241 | msg = "Commit %s does not exist for %s" % ( |
|
242 | 242 | commit_id_or_idx, self) |
|
243 | 243 | raise CommitDoesNotExistError(msg) |
|
244 | 244 | |
|
245 | 245 | elif is_bstr: |
|
246 | 246 | # check full path ref, eg. refs/heads/master |
|
247 | 247 | ref_id = self._refs.get(commit_id_or_idx) |
|
248 | 248 | if ref_id: |
|
249 | 249 | return ref_id |
|
250 | 250 | |
|
251 | 251 | # check branch name |
|
252 | 252 | branch_ids = self.branches.values() |
|
253 | 253 | ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx) |
|
254 | 254 | if ref_id: |
|
255 | 255 | return ref_id |
|
256 | 256 | |
|
257 | 257 | # check tag name |
|
258 | 258 | ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx) |
|
259 | 259 | if ref_id: |
|
260 | 260 | return ref_id |
|
261 | 261 | |
|
262 | 262 | if (not SHA_PATTERN.match(commit_id_or_idx) or |
|
263 | 263 | commit_id_or_idx not in self.commit_ids): |
|
264 | 264 | msg = "Commit %s does not exist for %s" % ( |
|
265 | 265 | commit_id_or_idx, self) |
|
266 | 266 | raise CommitDoesNotExistError(msg) |
|
267 | 267 | |
|
268 | 268 | # Ensure we return full id |
|
269 | 269 | if not SHA_PATTERN.match(str(commit_id_or_idx)): |
|
270 | 270 | raise CommitDoesNotExistError( |
|
271 | 271 | "Given commit id %s not recognized" % commit_id_or_idx) |
|
272 | 272 | return commit_id_or_idx |
|
273 | 273 | |
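The resolution order implemented above, summarized as a sketch; ``repo`` is assumed to be an initialized, non-empty ``GitRepository`` and the ref names are illustrative:

.. code-block:: python

    repo._get_commit_id('tip')      # newest commit (same for None, '', 'HEAD', -1)
    repo._get_commit_id(0)          # numeric index into commit_ids
    repo._get_commit_id('master')   # resolved via refs/heads/master
    repo._get_commit_id('v1.0.0')   # resolved via refs/tags/v1.0.0
    repo._get_commit_id('a' * 40)   # full sha, validated against commit_ids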
|
274 | 274 | def get_hook_location(self): |
|
275 | 275 | """ |
|
276 | 276 | returns absolute path to location where hooks are stored |
|
277 | 277 | """ |
|
278 | 278 | loc = os.path.join(self.path, 'hooks') |
|
279 | 279 | if not self.bare: |
|
280 | 280 | loc = os.path.join(self.path, '.git', 'hooks') |
|
281 | 281 | return loc |
|
282 | 282 | |
|
283 | 283 | @LazyProperty |
|
284 | 284 | def last_change(self): |
|
285 | 285 | """ |
|
286 | 286 | Returns last change made on this repository as |
|
287 | 287 | `datetime.datetime` object. |
|
288 | 288 | """ |
|
289 | 289 | try: |
|
290 | 290 | return self.get_commit().date |
|
291 | 291 | except RepositoryError: |
|
292 | 292 | tzoffset = makedate()[1] |
|
293 | 293 | return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset) |
|
294 | 294 | |
|
295 | 295 | def _get_fs_mtime(self): |
|
296 | 296 | idx_loc = '' if self.bare else '.git' |
|
297 | 297 | # fallback to filesystem |
|
298 | 298 | in_path = os.path.join(self.path, idx_loc, "index") |
|
299 | 299 | he_path = os.path.join(self.path, idx_loc, "HEAD") |
|
300 | 300 | if os.path.exists(in_path): |
|
301 | 301 | return os.stat(in_path).st_mtime |
|
302 | 302 | else: |
|
303 | 303 | return os.stat(he_path).st_mtime |
|
304 | 304 | |
|
305 | 305 | @LazyProperty |
|
306 | 306 | def description(self): |
|
307 | 307 | description = self._remote.get_description() |
|
308 | 308 | return safe_unicode(description or self.DEFAULT_DESCRIPTION) |
|
309 | 309 | |
|
310 | 310 | def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True): |
|
311 | 311 | if self.is_empty(): |
|
312 | 312 | return OrderedDict() |
|
313 | 313 | |
|
314 | 314 | result = [] |
|
315 | 315 | for ref, sha in self._refs.iteritems(): |
|
316 | 316 | if ref.startswith(prefix): |
|
317 | 317 | ref_name = ref |
|
318 | 318 | if strip_prefix: |
|
319 | 319 | ref_name = ref[len(prefix):] |
|
320 | 320 | result.append((safe_unicode(ref_name), sha)) |
|
321 | 321 | |
|
322 | 322 | def get_name(entry): |
|
323 | 323 | return entry[0] |
|
324 | 324 | |
|
325 | 325 | return OrderedDict(sorted(result, key=get_name, reverse=reverse)) |
|
326 | 326 | |
|
327 | 327 | def _get_branches(self): |
|
328 | 328 | return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True) |
|
329 | 329 | |
|
330 | 330 | @LazyProperty |
|
331 | 331 | def branches(self): |
|
332 | 332 | return self._get_branches() |
|
333 | 333 | |
|
334 | 334 | @LazyProperty |
|
335 | 335 | def branches_closed(self): |
|
336 | 336 | return {} |
|
337 | 337 | |
|
338 | 338 | @LazyProperty |
|
339 | 339 | def bookmarks(self): |
|
340 | 340 | return {} |
|
341 | 341 | |
|
342 | 342 | @LazyProperty |
|
343 | 343 | def branches_all(self): |
|
344 | 344 | all_branches = {} |
|
345 | 345 | all_branches.update(self.branches) |
|
346 | 346 | all_branches.update(self.branches_closed) |
|
347 | 347 | return all_branches |
|
348 | 348 | |
|
349 | 349 | @LazyProperty |
|
350 | 350 | def tags(self): |
|
351 | 351 | return self._get_tags() |
|
352 | 352 | |
|
353 | 353 | def _get_tags(self): |
|
354 | 354 | return self._get_refs_entries( |
|
355 | 355 | prefix='refs/tags/', strip_prefix=True, reverse=True) |
|
356 | 356 | |
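``branches`` and ``tags`` are thus just prefix-filtered, prefix-stripped views over the same ``_refs`` mapping; a sketch of the relationship (``repo`` assumed initialized and non-empty):

.. code-block:: python

    sha = repo._refs.get('refs/heads/master')
    if sha is not None:
        assert repo.branches['master'] == sha   # same sha, prefix stripped

    # tags are built the same way, only sorted in reverse name order
    print(list(repo.tags)[:3])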
|
357 | 357 | def tag(self, name, user, commit_id=None, message=None, date=None, |
|
358 | 358 | **kwargs): |
|
359 | 359 | # TODO: fix this method to apply annotated tags correct with message |
|
360 | 360 | """ |
|
361 | 361 | Creates and returns a tag for the given ``commit_id``. |
|
362 | 362 | |
|
363 | 363 | :param name: name for new tag |
|
364 | 364 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
365 | 365 | :param commit_id: commit id for which new tag would be created |
|
366 | 366 | :param message: message of the tag's commit |
|
367 | 367 | :param date: date of tag's commit |
|
368 | 368 | |
|
369 | 369 | :raises TagAlreadyExistError: if tag with same name already exists |
|
370 | 370 | """ |
|
371 | 371 | if name in self.tags: |
|
372 | 372 | raise TagAlreadyExistError("Tag %s already exists" % name) |
|
373 | 373 | commit = self.get_commit(commit_id=commit_id) |
|
374 | 374 | message = message or "Added tag %s for commit %s" % ( |
|
375 | 375 | name, commit.raw_id) |
|
376 | 376 | self._remote.set_refs('refs/tags/%s' % name, commit._commit['id']) |
|
377 | 377 | |
|
378 | 378 | self._refs = self._get_refs() |
|
379 | 379 | self.tags = self._get_tags() |
|
380 | 380 | return commit |
|
381 | 381 | |
|
382 | 382 | def remove_tag(self, name, user, message=None, date=None): |
|
383 | 383 | """ |
|
384 | 384 | Removes tag with the given ``name``. |
|
385 | 385 | |
|
386 | 386 | :param name: name of the tag to be removed |
|
387 | 387 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
388 | 388 | :param message: message of the tag's removal commit |
|
389 | 389 | :param date: date of tag's removal commit |
|
390 | 390 | |
|
391 | 391 | :raises TagDoesNotExistError: if tag with given name does not exist
|
392 | 392 | """ |
|
393 | 393 | if name not in self.tags: |
|
394 | 394 | raise TagDoesNotExistError("Tag %s does not exist" % name) |
|
395 | 395 | tagpath = vcspath.join( |
|
396 | 396 | self._remote.get_refs_path(), 'refs', 'tags', name) |
|
397 | 397 | try: |
|
398 | 398 | os.remove(tagpath) |
|
399 | 399 | self._refs = self._get_refs() |
|
400 | 400 | self.tags = self._get_tags() |
|
401 | 401 | except OSError as e: |
|
402 | 402 | raise RepositoryError(e.strerror) |
|
403 | 403 | |
|
404 | 404 | def _get_refs(self): |
|
405 | 405 | return self._remote.get_refs() |
|
406 | 406 | |
|
407 | 407 | @LazyProperty |
|
408 | 408 | def _refs(self): |
|
409 | 409 | return self._get_refs() |
|
410 | 410 | |
|
411 | 411 | @property |
|
412 | 412 | def _ref_tree(self): |
|
413 | 413 | node = tree = {} |
|
414 | 414 | for ref, sha in self._refs.iteritems(): |
|
415 | 415 | path = ref.split('/') |
|
416 | 416 | for bit in path[:-1]: |
|
417 | 417 | node = node.setdefault(bit, {}) |
|
418 | 418 | node[path[-1]] = sha |
|
419 | 419 | node = tree |
|
420 | 420 | return tree |
|
421 | 421 | |
|
422 | 422 | def get_remote_ref(self, ref_name): |
|
423 | 423 | ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name)) |
|
424 | 424 | try: |
|
425 | 425 | return self._refs[ref_key] |
|
426 | 426 | except Exception: |
|
427 | 427 | return |
|
428 | 428 | |
|
429 | 429 | def get_commit(self, commit_id=None, commit_idx=None, pre_load=None): |
|
430 | 430 | """ |
|
431 | 431 | Returns `GitCommit` object representing commit from git repository |
|
432 | 432 | at the given `commit_id` or head (most recent commit) if None given. |
|
433 | 433 | """ |
|
434 | 434 | if commit_id is not None: |
|
435 | 435 | self._validate_commit_id(commit_id) |
|
436 | 436 | elif commit_idx is not None: |
|
437 | 437 | self._validate_commit_idx(commit_idx) |
|
438 | 438 | commit_id = commit_idx |
|
439 | 439 | commit_id = self._get_commit_id(commit_id) |
|
440 | 440 | try: |
|
441 | 441 | # Need to call remote to translate id for tagging scenario |
|
442 | 442 | commit_id = self._remote.get_object(commit_id)["commit_id"] |
|
443 | 443 | idx = self._commit_ids[commit_id] |
|
444 | 444 | except KeyError: |
|
445 | 445 | raise RepositoryError("Cannot get object with id %s" % commit_id) |
|
446 | 446 | |
|
447 | 447 | return GitCommit(self, commit_id, idx, pre_load=pre_load) |
|
448 | 448 | |
|
449 | 449 | def get_commits( |
|
450 | 450 | self, start_id=None, end_id=None, start_date=None, end_date=None, |
|
451 | 451 | branch_name=None, show_hidden=False, pre_load=None): |
|
452 | 452 | """ |
|
453 | 453 | Returns generator of `GitCommit` objects from start to end (both |
|
454 | 454 | are inclusive), in ascending date order. |
|
455 | 455 | |
|
456 | 456 | :param start_id: None, str(commit_id) |
|
457 | 457 | :param end_id: None, str(commit_id) |
|
458 | 458 | :param start_date: if specified, commits with commit date less than

459 | 459 | ``start_date`` will be filtered out of the returned set

460 | 460 | :param end_date: if specified, commits with commit date greater than

461 | 461 | ``end_date`` will be filtered out of the returned set
|
462 | 462 | :param branch_name: if specified, commits not reachable from given |
|
463 | 463 | branch would be filtered out from returned set |
|
464 | 464 | :param show_hidden: Show hidden commits such as obsolete or hidden from |
|
465 | 465 | Mercurial evolve |
|
466 | 466 | :raise BranchDoesNotExistError: If given `branch_name` does not |
|
467 | 467 | exist. |
|
468 | 468 | :raise CommitDoesNotExistError: If commits for given `start` or |
|
469 | 469 | `end` could not be found. |
|
470 | 470 | |
|
471 | 471 | """ |
|
472 | 472 | if self.is_empty(): |
|
473 | 473 | raise EmptyRepositoryError("There are no commits yet") |
|
474 | 474 | self._validate_branch_name(branch_name) |
|
475 | 475 | |
|
476 | 476 | if start_id is not None: |
|
477 | 477 | self._validate_commit_id(start_id) |
|
478 | 478 | if end_id is not None: |
|
479 | 479 | self._validate_commit_id(end_id) |
|
480 | 480 | |
|
481 | 481 | start_raw_id = self._get_commit_id(start_id) |
|
482 | 482 | start_pos = self._commit_ids[start_raw_id] if start_id else None |
|
483 | 483 | end_raw_id = self._get_commit_id(end_id) |
|
484 | 484 | end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None |
|
485 | 485 | |
|
486 | 486 | if None not in [start_id, end_id] and start_pos > end_pos: |
|
487 | 487 | raise RepositoryError( |
|
488 | 488 | "Start commit '%s' cannot be after end commit '%s'" % |
|
489 | 489 | (start_id, end_id)) |
|
490 | 490 | |
|
491 | 491 | if end_pos is not None: |
|
492 | 492 | end_pos += 1 |
|
493 | 493 | |
|
494 | 494 | filter_ = [] |
|
495 | 495 | if branch_name: |
|
496 | 496 | filter_.append({'branch_name': branch_name}) |
|
497 | 497 | if start_date and not end_date: |
|
498 | 498 | filter_.append({'since': start_date}) |
|
499 | 499 | if end_date and not start_date: |
|
500 | 500 | filter_.append({'until': end_date}) |
|
501 | 501 | if start_date and end_date: |
|
502 | 502 | filter_.append({'since': start_date}) |
|
503 | 503 | filter_.append({'until': end_date}) |
|
504 | 504 | |
|
505 | 505 | # if start_pos or end_pos: |
|
506 | 506 | # filter_.append({'start': start_pos}) |
|
507 | 507 | # filter_.append({'end': end_pos}) |
|
508 | 508 | |
|
509 | 509 | if filter_: |
|
510 | 510 | revfilters = { |
|
511 | 511 | 'branch_name': branch_name, |
|
512 | 512 | 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None, |
|
513 | 513 | 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None, |
|
514 | 514 | 'start': start_pos, |
|
515 | 515 | 'end': end_pos, |
|
516 | 516 | } |
|
517 | 517 | commit_ids = self._get_all_commit_ids(filters=revfilters) |
|
518 | 518 | |
|
519 | 519 | # pure python stuff, it's slow due to walker walking whole repo |
|
520 | 520 | # def get_revs(walker): |
|
521 | 521 | # for walker_entry in walker: |
|
522 | 522 | # yield walker_entry.commit.id |
|
523 | 523 | # revfilters = {} |
|
524 | 524 | # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters))))) |
|
525 | 525 | else: |
|
526 | 526 | commit_ids = self.commit_ids |
|
527 | 527 | |
|
528 | 528 | if start_pos or end_pos: |
|
529 | 529 | commit_ids = commit_ids[start_pos: end_pos] |
|
530 | 530 | |
|
531 | 531 | return CollectionGenerator(self, commit_ids, pre_load=pre_load) |
|
532 | 532 | |
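A usage sketch for the filters above; ``repo`` is assumed initialized, and the branch name and dates are illustrative:

.. code-block:: python

    import datetime

    since = datetime.datetime(2018, 1, 1)
    commits = repo.get_commits(branch_name='master', start_date=since)
    for commit in commits:              # CollectionGenerator is iterable
        print(commit.raw_id, commit.date)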
|
533 | 533 | def get_diff( |
|
534 | 534 | self, commit1, commit2, path='', ignore_whitespace=False, |
|
535 | 535 | context=3, path1=None): |
|
536 | 536 | """ |
|
537 | 537 | Returns (git like) *diff*, as plain text. Shows changes introduced by |
|
538 | 538 | ``commit2`` since ``commit1``. |
|
539 | 539 | |
|
540 | 540 | :param commit1: Entry point from which diff is shown. Can be |
|
541 | 541 | ``self.EMPTY_COMMIT`` - in this case, patch showing all |
|
542 | 542 | the changes since empty state of the repository until ``commit2`` |
|
543 | 543 | :param commit2: Commit until which changes should be shown.
|
544 | 544 | :param ignore_whitespace: If set to ``True``, would not show whitespace |
|
545 | 545 | changes. Defaults to ``False``. |
|
546 | 546 | :param context: How many lines before/after changed lines should be |
|
547 | 547 | shown. Defaults to ``3``. |
|
548 | 548 | """ |
|
549 | 549 | self._validate_diff_commits(commit1, commit2) |
|
550 | 550 | if path1 is not None and path1 != path: |
|
551 | 551 | raise ValueError("Diff of two different paths not supported.") |
|
552 | 552 | |
|
553 | 553 | flags = [ |
|
554 | 554 | '-U%s' % context, '--full-index', '--binary', '-p', |
|
555 | 555 | '-M', '--abbrev=40'] |
|
556 | 556 | if ignore_whitespace: |
|
557 | 557 | flags.append('-w') |
|
558 | 558 | |
|
559 | 559 | if commit1 == self.EMPTY_COMMIT: |
|
560 | 560 | cmd = ['show'] + flags + [commit2.raw_id] |
|
561 | 561 | else: |
|
562 | 562 | cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id] |
|
563 | 563 | |
|
564 | 564 | if path: |
|
565 | 565 | cmd.extend(['--', path]) |
|
566 | 566 | |
|
567 | 567 | stdout, __ = self.run_git_command(cmd) |
|
568 | 568 | # If we used 'show' command, strip first few lines (until actual diff |
|
569 | 569 | # starts) |
|
570 | 570 | if commit1 == self.EMPTY_COMMIT: |
|
571 | 571 | lines = stdout.splitlines() |
|
572 | 572 | x = 0 |
|
573 | 573 | for line in lines: |
|
574 | 574 | if line.startswith('diff'): |
|
575 | 575 | break |
|
576 | 576 | x += 1 |
|
577 | 577 | # Append a new line just like the 'diff' command does
|
578 | 578 | stdout = '\n'.join(lines[x:]) + '\n' |
|
579 | 579 | return GitDiff(stdout) |
|
580 | 580 | |
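A usage sketch for ``get_diff``; ``repo`` is assumed initialized and non-empty, and the path is illustrative:

.. code-block:: python

    old = repo.get_commit(commit_idx=0)
    new = repo.get_commit()                       # head
    diff = repo.get_diff(
        old, new, path='README.rst', ignore_whitespace=True, context=5)
    print(diff.raw)                               # plain-text git diff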
|
581 | 581 | def strip(self, commit_id, branch_name): |
|
582 | 582 | commit = self.get_commit(commit_id=commit_id) |
|
583 | 583 | if commit.merge: |
|
584 | 584 | raise Exception('Cannot reset to merge commit') |
|
585 | 585 | |
|
586 | 586 | # parent is going to be the new head now |
|
587 | 587 | commit = commit.parents[0] |
|
588 | 588 | self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id) |
|
589 | 589 | |
|
590 | 590 | self.commit_ids = self._get_all_commit_ids() |
|
591 | 591 | self._rebuild_cache(self.commit_ids) |
|
592 | 592 | |
|
593 | 593 | def get_common_ancestor(self, commit_id1, commit_id2, repo2): |
|
594 | 594 | if commit_id1 == commit_id2: |
|
595 | 595 | return commit_id1 |
|
596 | 596 | |
|
597 | 597 | if self != repo2: |
|
598 | 598 | commits = self._remote.get_missing_revs( |
|
599 | 599 | commit_id1, commit_id2, repo2.path) |
|
600 | 600 | if commits: |
|
601 | 601 | commit = repo2.get_commit(commits[-1]) |
|
602 | 602 | if commit.parents: |
|
603 | 603 | ancestor_id = commit.parents[0].raw_id |
|
604 | 604 | else: |
|
605 | 605 | ancestor_id = None |
|
606 | 606 | else: |
|
607 | 607 | # no commits from other repo, ancestor_id is the commit_id2 |
|
608 | 608 | ancestor_id = commit_id2 |
|
609 | 609 | else: |
|
610 | 610 | output, __ = self.run_git_command( |
|
611 | 611 | ['merge-base', commit_id1, commit_id2]) |
|
612 | 612 | ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0] |
|
613 | 613 | |
|
614 | 614 | return ancestor_id |
|
615 | 615 | |
|
616 | 616 | def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None): |
|
617 | 617 | repo1 = self |
|
618 | 618 | ancestor_id = None |
|
619 | 619 | |
|
620 | 620 | if commit_id1 == commit_id2: |
|
621 | 621 | commits = [] |
|
622 | 622 | elif repo1 != repo2: |
|
623 | 623 | missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2, |
|
624 | 624 | repo2.path) |
|
625 | 625 | commits = [ |
|
626 | 626 | repo2.get_commit(commit_id=commit_id, pre_load=pre_load) |
|
627 | 627 | for commit_id in reversed(missing_ids)] |
|
628 | 628 | else: |
|
629 | 629 | output, __ = repo1.run_git_command( |
|
630 | 630 | ['log', '--reverse', '--pretty=format: %H', '-s', |
|
631 | 631 | '%s..%s' % (commit_id1, commit_id2)]) |
|
632 | 632 | commits = [ |
|
633 | 633 | repo1.get_commit(commit_id=commit_id, pre_load=pre_load) |
|
634 | 634 | for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)] |
|
635 | 635 | |
|
636 | 636 | return commits |
|
637 | 637 | |
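Within a single repository both methods reduce to plain git plumbing; a sketch with ids taken from the repo itself (``repo`` assumed initialized and non-empty):

.. code-block:: python

    id1, id2 = repo.commit_ids[0], repo.commit_ids[-1]

    # the local-repo branch above is equivalent to `git merge-base id1 id2`
    ancestor = repo.get_common_ancestor(id1, id2, repo)

    # commits reachable from id2 but not from id1, oldest first
    extra = repo.compare(id1, id2, repo, merge=False)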
|
638 | 638 | @LazyProperty |
|
639 | 639 | def in_memory_commit(self): |
|
640 | 640 | """ |
|
641 | 641 | Returns ``GitInMemoryCommit`` object for this repository. |
|
642 | 642 | """ |
|
643 | 643 | return GitInMemoryCommit(self) |
|
644 | 644 | |
|
645 | 645 | def pull(self, url, commit_ids=None, update_after=False): |
|
646 | 646 | """ |
|
647 | 647 | Pull changes from an external location. Pull is different in GIT

648 | 648 | from fetch, since it also performs a checkout
|
649 | 649 | |
|
650 | 650 | :param commit_ids: Optional. Can be set to a list of commit ids |
|
651 | 651 | which shall be pulled from the other repository. |
|
652 | 652 | """ |
|
653 | 653 | refs = None |
|
654 | 654 | if commit_ids is not None: |
|
655 | 655 | remote_refs = self._remote.get_remote_refs(url) |
|
656 | 656 | refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids] |
|
657 | 657 | self._remote.pull(url, refs=refs, update_after=update_after) |
|
658 | 658 | self._remote.invalidate_vcs_cache() |
|
659 | 659 | |
|
660 | 660 | def fetch(self, url, commit_ids=None): |
|
661 | 661 | """ |
|
662 | 662 | Fetch all git objects from external location. |
|
663 | 663 | """ |
|
664 | 664 | self._remote.sync_fetch(url, refs=commit_ids) |
|
665 | 665 | self._remote.invalidate_vcs_cache() |
|
666 | 666 | |
|
667 | 667 | def push(self, url): |
|
668 | 668 | refs = None |
|
669 | 669 | self._remote.sync_push(url, refs=refs) |
|
670 | 670 | |
|
671 | 671 | def set_refs(self, ref_name, commit_id): |
|
672 | 672 | self._remote.set_refs(ref_name, commit_id) |
|
673 | 673 | |
|
674 | 674 | def remove_ref(self, ref_name): |
|
675 | 675 | self._remote.remove_ref(ref_name) |
|
676 | 676 | |
|
677 | 677 | def _update_server_info(self): |
|
678 | 678 | """ |
|
679 | 679 | runs git's update-server-info command on this repo instance
|
680 | 680 | """ |
|
681 | 681 | self._remote.update_server_info() |
|
682 | 682 | |
|
683 | 683 | def _current_branch(self): |
|
684 | 684 | """ |
|
685 | 685 | Return the name of the current branch. |
|
686 | 686 | |
|
687 | 687 | It only works for non-bare repositories (i.e. repositories with a
|
688 | 688 | working copy) |
|
689 | 689 | """ |
|
690 | 690 | if self.bare: |
|
691 | 691 | raise RepositoryError('Bare git repos do not have active branches') |
|
692 | 692 | |
|
693 | 693 | if self.is_empty(): |
|
694 | 694 | return None |
|
695 | 695 | |
|
696 | 696 | stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD']) |
|
697 | 697 | return stdout.strip() |
|
698 | 698 | |
|
699 | 699 | def _checkout(self, branch_name, create=False, force=False): |
|
700 | 700 | """ |
|
701 | 701 | Checkout a branch in the working directory. |
|
702 | 702 | |
|
703 | 703 | It tries to create the branch if create is True, failing if the branch |
|
704 | 704 | already exists. |
|
705 | 705 | |
|
706 | 706 | It only works for non-bare repositories (i.e. repositories with a
|
707 | 707 | working copy) |
|
708 | 708 | """ |
|
709 | 709 | if self.bare: |
|
710 | 710 | raise RepositoryError('Cannot checkout branches in a bare git repo') |
|
711 | 711 | |
|
712 | 712 | cmd = ['checkout'] |
|
713 | 713 | if force: |
|
714 | 714 | cmd.append('-f') |
|
715 | 715 | if create: |
|
716 | 716 | cmd.append('-b') |
|
717 | 717 | cmd.append(branch_name) |
|
718 | 718 | self.run_git_command(cmd, fail_on_stderr=False) |
|
719 | 719 | |
|
720 | 720 | def _identify(self): |
|
721 | 721 | """ |
|
722 | 722 | Return the current state of the working directory. |
|
723 | 723 | """ |
|
724 | 724 | if self.bare: |
|
725 | 725 | raise RepositoryError('Bare git repos do not have active branches') |
|
726 | 726 | |
|
727 | 727 | if self.is_empty(): |
|
728 | 728 | return None |
|
729 | 729 | |
|
730 | 730 | stdout, _ = self.run_git_command(['rev-parse', 'HEAD']) |
|
731 | 731 | return stdout.strip() |
|
732 | 732 | |
|
733 | 733 | def _local_clone(self, clone_path, branch_name, source_branch=None): |
|
734 | 734 | """ |
|
735 | 735 | Create a local clone of the current repo. |
|
736 | 736 | """ |
|
737 | 737 | # N.B.(skreft): the --branch option is required as otherwise the shallow |
|
738 | 738 | # clone will only fetch the active branch. |
|
739 | 739 | cmd = ['clone', '--branch', branch_name, |
|
740 | 740 | self.path, os.path.abspath(clone_path)] |
|
741 | 741 | |
|
742 | 742 | self.run_git_command(cmd, fail_on_stderr=False) |
|
743 | 743 | |
|
744 | 744 | # if we got a different source branch, make sure we also fetch it for |
|
745 | 745 | # merge conditions |
|
746 | 746 | if source_branch and source_branch != branch_name: |
|
747 | 747 | # check if the ref exists. |
|
748 | 748 | shadow_repo = GitRepository(os.path.abspath(clone_path)) |
|
749 | 749 | if shadow_repo.get_remote_ref(source_branch): |
|
750 | 750 | cmd = ['fetch', self.path, source_branch] |
|
751 | 751 | self.run_git_command(cmd, fail_on_stderr=False) |
|
752 | 752 | |
|
753 | 753 | def _local_fetch(self, repository_path, branch_name, use_origin=False): |
|
754 | 754 | """ |
|
755 | 755 | Fetch a branch from a local repository. |
|
756 | 756 | """ |
|
757 | 757 | repository_path = os.path.abspath(repository_path) |
|
758 | 758 | if repository_path == self.path: |
|
759 | 759 | raise ValueError('Cannot fetch from the same repository') |
|
760 | 760 | |
|
761 | 761 | if use_origin: |
|
762 | 762 | branch_name = '+{branch}:refs/heads/{branch}'.format( |
|
763 | 763 | branch=branch_name) |
|
764 | 764 | |
|
765 | 765 | cmd = ['fetch', '--no-tags', '--update-head-ok', |
|
766 | 766 | repository_path, branch_name] |
|
767 | 767 | self.run_git_command(cmd, fail_on_stderr=False) |
|
768 | 768 | |
|
769 | 769 | def _local_reset(self, branch_name): |
|
770 | 770 | branch_name = '{}'.format(branch_name) |
|
771 | 771 | cmd = ['reset', '--hard', branch_name, '--'] |
|
772 | 772 | self.run_git_command(cmd, fail_on_stderr=False) |
|
773 | 773 | |
|
774 | 774 | def _last_fetch_heads(self): |
|
775 | 775 | """ |
|
776 | 776 | Return the last fetched heads that need merging. |
|
777 | 777 | |
|
778 | 778 | The algorithm is defined at |
|
779 | 779 | https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283 |
|
780 | 780 | """ |
|
781 | 781 | if not self.bare: |
|
782 | 782 | fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD') |
|
783 | 783 | else: |
|
784 | 784 | fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD') |
|
785 | 785 | |
|
786 | 786 | heads = [] |
|
787 | 787 | with open(fetch_heads_path) as f: |
|
788 | 788 | for line in f: |
|
789 | 789 | if ' not-for-merge ' in line: |
|
790 | 790 | continue |
|
791 | 791 | line = re.sub('\t.*', '', line, flags=re.DOTALL) |
|
792 | 792 | heads.append(line) |
|
793 | 793 | |
|
794 | 794 | return heads |
|
795 | 795 | |
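The FETCH_HEAD parsing in _last_fetch_heads() boils down to the loop below; a
standalone sketch mirroring it (function name hypothetical):

    import re

    def parse_fetch_heads(lines):
        heads = []
        for line in lines:
            if ' not-for-merge ' in line:
                continue  # git marks refs that should not be merged
            # keep only the leading commit id, drop everything after the tab
            heads.append(re.sub('\t.*', '', line, flags=re.DOTALL))
        return heads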
|
796 | 796 | def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False): |
|
797 | 797 | return GitRepository(shadow_repository_path) |
|
798 | 798 | |
|
799 | 799 | def _local_pull(self, repository_path, branch_name, ff_only=True): |
|
800 | 800 | """ |
|
801 | 801 | Pull a branch from a local repository. |
|
802 | 802 | """ |
|
803 | 803 | if self.bare: |
|
804 | 804 | raise RepositoryError('Cannot pull into a bare git repository') |
|
805 | 805 | # N.B.(skreft): The --ff-only option is to make sure this is a |
|
806 | 806 | # fast-forward (i.e., we are only pulling new changes and there are no |
|
807 | 807 | # conflicts with our current branch) |
|
808 | 808 | # Additionally, that option needs to go before --no-tags, otherwise git |
|
809 | 809 | # pull complains about it being an unknown flag. |
|
810 | 810 | cmd = ['pull'] |
|
811 | 811 | if ff_only: |
|
812 | 812 | cmd.append('--ff-only') |
|
813 | 813 | cmd.extend(['--no-tags', repository_path, branch_name]) |
|
814 | 814 | self.run_git_command(cmd, fail_on_stderr=False) |
|
815 | 815 | |
|
816 | 816 | def _local_merge(self, merge_message, user_name, user_email, heads): |
|
817 | 817 | """ |
|
818 | 818 | Merge the given head into the checked out branch. |
|
819 | 819 | |
|
820 | 820 | It will force a merge commit. |
|
821 | 821 | |
|
822 | 822 | Currently it raises an error if the repo is empty, as it is not possible |
|
823 | 823 | to create a merge commit in an empty repo. |
|
824 | 824 | |
|
825 | 825 | :param merge_message: The message to use for the merge commit. |
|
826 | 826 | :param heads: the heads to merge. |
|
827 | 827 | """ |
|
828 | 828 | if self.bare: |
|
829 | 829 | raise RepositoryError('Cannot merge into a bare git repository') |
|
830 | 830 | |
|
831 | 831 | if not heads: |
|
832 | 832 | return |
|
833 | 833 | |
|
834 | 834 | if self.is_empty(): |
|
835 | 835 | # TODO(skreft): do something more robust in this case. |
|
836 | 836 | raise RepositoryError( |
|
837 | 837 | 'Do not know how to merge into empty repositories yet') |
|
838 | 838 | |
|
839 | 839 | # N.B.(skreft): the --no-ff option is used to enforce the creation of a |
|
840 | 840 | # merge commit. We also specify the user who is doing the merge. |
|
841 | 841 | cmd = ['-c', 'user.name="%s"' % safe_str(user_name), |
|
842 | 842 | '-c', 'user.email=%s' % safe_str(user_email), |
|
843 | 843 | 'merge', '--no-ff', '-m', safe_str(merge_message)] |
|
844 | 844 | cmd.extend(heads) |
|
845 | 845 | try: |
|
846 | 846 | output = self.run_git_command(cmd, fail_on_stderr=False) |
|
847 | 847 | except RepositoryError: |
|
848 | 848 | # Cleanup any merge leftovers |
|
849 | 849 | self.run_git_command(['merge', '--abort'], fail_on_stderr=False) |
|
850 | 850 | raise |
|
851 | 851 | |
|
852 | 852 | def _local_push( |
|
853 | 853 | self, source_branch, repository_path, target_branch, |
|
854 | 854 | enable_hooks=False, rc_scm_data=None): |
|
855 | 855 | """ |
|
856 | 856 | Push the source_branch to the given repository and target_branch. |
|
857 | 857 | |
|
858 | 858 | Currently, if the target_branch is not master and the target repo is |
|
859 | 859 | empty, the push will work, but then GitRepository won't be able to find |
|
860 | 860 | the pushed branch or the commits, as the HEAD will be corrupted (i.e., |
|
861 | 861 | pointing to master, which does not exist). |
|
862 | 862 | |
|
863 | 863 | It does not run the hooks in the target repo. |
|
864 | 864 | """ |
|
865 | 865 | # TODO(skreft): deal with the case in which the target repo is empty, |
|
866 | 866 | # and the target_branch is not master. |
|
867 | 867 | target_repo = GitRepository(repository_path) |
|
868 | 868 | if (not target_repo.bare and |
|
869 | 869 | target_repo._current_branch() == target_branch): |
|
870 | 870 | # Git prevents pushing to the checked out branch, so simulate it by |
|
871 | 871 | # pulling into the target repository. |
|
872 | 872 | target_repo._local_pull(self.path, source_branch) |
|
873 | 873 | else: |
|
874 | 874 | cmd = ['push', os.path.abspath(repository_path), |
|
875 | 875 | '%s:%s' % (source_branch, target_branch)] |
|
876 | 876 | gitenv = {} |
|
877 | 877 | if rc_scm_data: |
|
878 | 878 | gitenv.update({'RC_SCM_DATA': rc_scm_data}) |
|
879 | 879 | |
|
880 | 880 | if not enable_hooks: |
|
881 | 881 | gitenv['RC_SKIP_HOOKS'] = '1' |
|
882 | 882 | self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv) |
|
883 | 883 | |
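The hook toggling in _local_push() is plain environment plumbing; a standalone
sketch of the same logic (function name hypothetical):

    def build_push_env(rc_scm_data=None, enable_hooks=False):
        gitenv = {}
        if rc_scm_data:
            gitenv['RC_SCM_DATA'] = rc_scm_data
        if not enable_hooks:
            # flag honoured by the RhodeCode-installed hooks
            gitenv['RC_SKIP_HOOKS'] = '1'
        return gitenv

    assert build_push_env() == {'RC_SKIP_HOOKS': '1'}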
|
884 | 884 | def _get_new_pr_branch(self, source_branch, target_branch): |
|
885 | 885 | prefix = 'pr_%s-%s_' % (source_branch, target_branch) |
|
886 | 886 | pr_branches = [] |
|
887 | 887 | for branch in self.branches: |
|
888 | 888 | if branch.startswith(prefix): |
|
889 | 889 | pr_branches.append(int(branch[len(prefix):])) |
|
890 | 890 | |
|
891 | 891 | if not pr_branches: |
|
892 | 892 | branch_id = 0 |
|
893 | 893 | else: |
|
894 | 894 | branch_id = max(pr_branches) + 1 |
|
895 | 895 | |
|
896 | 896 | return '%s%d' % (prefix, branch_id) |
|
897 | 897 | |
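A worked example of the pull-request branch naming in _get_new_pr_branch()
above (standalone sketch; the branch names are invented):

    def next_pr_branch(branches, source, target):
        prefix = 'pr_%s-%s_' % (source, target)
        ids = [int(b[len(prefix):]) for b in branches if b.startswith(prefix)]
        return '%s%d' % (prefix, max(ids) + 1 if ids else 0)

    assert next_pr_branch([], 'feat', 'master') == 'pr_feat-master_0'
    assert next_pr_branch(['pr_feat-master_0', 'pr_feat-master_3'],
                          'feat', 'master') == 'pr_feat-master_4'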
|
898 | 898 | def _maybe_prepare_merge_workspace( |
|
899 | 899 | self, repo_id, workspace_id, target_ref, source_ref): |
|
900 | 900 | shadow_repository_path = self._get_shadow_repository_path( |
|
901 | 901 | repo_id, workspace_id) |
|
902 | 902 | if not os.path.exists(shadow_repository_path): |
|
903 | 903 | self._local_clone( |
|
904 | 904 | shadow_repository_path, target_ref.name, source_ref.name) |
|
905 | 905 | log.debug( |
|
906 | 906 | 'Prepared shadow repository in %s', shadow_repository_path) |
|
907 | 907 | |
|
908 | 908 | return shadow_repository_path |
|
909 | 909 | |
|
910 | 910 | def _merge_repo(self, repo_id, workspace_id, target_ref, |
|
911 | 911 | source_repo, source_ref, merge_message, |
|
912 | 912 | merger_name, merger_email, dry_run=False, |
|
913 | 913 | use_rebase=False, close_branch=False): |
|
914 | ||
|
915 | log.debug('Executing merge_repo with %s strategy, dry_run mode:%s', | |
|
916 | 'rebase' if use_rebase else 'merge', dry_run) | |
|
914 | 917 | if target_ref.commit_id != self.branches[target_ref.name]: |
|
915 | 918 | log.warning('Target ref %s commit mismatch %s vs %s', target_ref, |
|
916 | 919 | target_ref.commit_id, self.branches[target_ref.name]) |
|
917 | 920 | return MergeResponse( |
|
918 | False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD) | |
|
921 | False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD, | |
|
922 | metadata={'target_ref': target_ref}) | |
|
919 | 923 | |
|
920 | 924 | shadow_repository_path = self._maybe_prepare_merge_workspace( |
|
921 | 925 | repo_id, workspace_id, target_ref, source_ref) |
|
922 | 926 | shadow_repo = self._get_shadow_instance(shadow_repository_path) |
|
923 | 927 | |
|
924 | 928 | # checkout source, if it's different. Otherwise we could not |
|
925 | 929 | # fetch proper commits for merge testing |
|
926 | 930 | if source_ref.name != target_ref.name: |
|
927 | 931 | if shadow_repo.get_remote_ref(source_ref.name): |
|
928 | 932 | shadow_repo._checkout(source_ref.name, force=True) |
|
929 | 933 | |
|
930 | 934 | # checkout target, and fetch changes |
|
931 | 935 | shadow_repo._checkout(target_ref.name, force=True) |
|
932 | 936 | |
|
933 | 937 | # fetch/reset pull the target, in case it is changed |
|
934 | 938 | # this handles even force changes |
|
935 | 939 | shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True) |
|
936 | 940 | shadow_repo._local_reset(target_ref.name) |
|
937 | 941 | |
|
938 | 942 | # Need to reload repo to invalidate the cache, or otherwise we cannot |
|
939 | 943 | # retrieve the last target commit. |
|
940 | 944 | shadow_repo = self._get_shadow_instance(shadow_repository_path) |
|
941 | 945 | if target_ref.commit_id != shadow_repo.branches[target_ref.name]: |
|
942 | 946 | log.warning('Shadow Target ref %s commit mismatch %s vs %s', |
|
943 | 947 | target_ref, target_ref.commit_id, |
|
944 | 948 | shadow_repo.branches[target_ref.name]) |
|
945 | 949 | return MergeResponse( |
|
946 | False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD) | |
|
950 | False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD, | |
|
951 | metadata={'target_ref': target_ref}) | |
|
947 | 952 | |
|
948 | 953 | # calculate new branch |
|
949 | 954 | pr_branch = shadow_repo._get_new_pr_branch( |
|
950 | 955 | source_ref.name, target_ref.name) |
|
951 | 956 | log.debug('using pull-request merge branch: `%s`', pr_branch) |
|
952 | 957 | # checkout to temp branch, and fetch changes |
|
953 | 958 | shadow_repo._checkout(pr_branch, create=True) |
|
954 | 959 | try: |
|
955 | 960 | shadow_repo._local_fetch(source_repo.path, source_ref.name) |
|
956 | 961 | except RepositoryError: |
|
957 | log.exception('Failure when doing local fetch on git shadow repo') | |
|
962 | log.exception('Failure when doing local fetch on ' | |
|
963 | 'shadow repo: %s', shadow_repo) | |
|
958 | 964 | return MergeResponse( |
|
959 | False, False, None, MergeFailureReason.MISSING_SOURCE_REF) | |
|
965 | False, False, None, MergeFailureReason.MISSING_SOURCE_REF, | |
|
966 | metadata={'source_ref': source_ref}) | |
|
960 | 967 | |
|
961 | 968 | merge_ref = None |
|
962 | 969 | merge_failure_reason = MergeFailureReason.NONE |
|
970 | metadata = {} | |
|
963 | 971 | try: |
|
964 | 972 | shadow_repo._local_merge(merge_message, merger_name, merger_email, |
|
965 | 973 | [source_ref.commit_id]) |
|
966 | 974 | merge_possible = True |
|
967 | 975 | |
|
968 | 976 | # Need to reload repo to invalidate the cache, or otherwise we |
|
969 | 977 | # cannot retrieve the merge commit. |
|
970 | 978 | shadow_repo = GitRepository(shadow_repository_path) |
|
971 | 979 | merge_commit_id = shadow_repo.branches[pr_branch] |
|
972 | 980 | |
|
973 | 981 | # Set a reference pointing to the merge commit. This reference may |
|
974 | 982 | # be used to easily identify the last successful merge commit in |
|
975 | 983 | # the shadow repository. |
|
976 | 984 | shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id) |
|
977 | 985 | merge_ref = Reference('branch', 'pr-merge', merge_commit_id) |
|
978 | 986 | except RepositoryError: |
|
979 | 987 | log.exception('Failure when doing local merge on git shadow repo') |
|
980 | 988 | merge_possible = False |
|
981 | 989 | merge_failure_reason = MergeFailureReason.MERGE_FAILED |
|
982 | 990 | |
|
983 | 991 | if merge_possible and not dry_run: |
|
984 | 992 | try: |
|
985 | 993 | shadow_repo._local_push( |
|
986 | 994 | pr_branch, self.path, target_ref.name, enable_hooks=True, |
|
987 | 995 | rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA')) |
|
988 | 996 | merge_succeeded = True |
|
989 | 997 | except RepositoryError: |
|
990 | 998 | log.exception( |
|
991 | 'Failure when doing local push on git shadow repo') | |
|
999 | 'Failure when doing local push from the shadow ' | |
|
1000 | 'repository to the target repository at %s.', self.path) | |
|
992 | 1001 | merge_succeeded = False |
|
993 | 1002 | merge_failure_reason = MergeFailureReason.PUSH_FAILED |
|
1003 | metadata['target'] = 'git shadow repo' | |
|
1004 | metadata['merge_commit'] = pr_branch | |
|
994 | 1005 | else: |
|
995 | 1006 | merge_succeeded = False |
|
996 | 1007 | |
|
997 | 1008 | return MergeResponse( |
|
998 | merge_possible, merge_succeeded, merge_ref, | |
|
999 | merge_failure_reason) | |
|
1009 | merge_possible, merge_succeeded, merge_ref, merge_failure_reason, | |
|
1010 | metadata=metadata) | |
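For orientation, the shadow-repository merge flow that the hunk above extends
with metadata, condensed into the sequence of existing calls (error handling
and the dry-run bookkeeping omitted; this is a recap of the code shown, not
new behaviour):

    shadow_path = self._maybe_prepare_merge_workspace(
        repo_id, workspace_id, target_ref, source_ref)
    shadow_repo = self._get_shadow_instance(shadow_path)
    shadow_repo._checkout(target_ref.name, force=True)      # sync target
    shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
    shadow_repo._local_reset(target_ref.name)
    pr_branch = shadow_repo._get_new_pr_branch(source_ref.name, target_ref.name)
    shadow_repo._checkout(pr_branch, create=True)           # staging branch
    shadow_repo._local_fetch(source_repo.path, source_ref.name)
    shadow_repo._local_merge(merge_message, merger_name, merger_email,
                             [source_ref.commit_id])
    if not dry_run:
        shadow_repo._local_push(pr_branch, self.path, target_ref.name,
                                enable_hooks=True)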
@@ -1,924 +1,932 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2014-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | HG repository module |
|
23 | 23 | """ |
|
24 | 24 | import os |
|
25 | 25 | import logging |
|
26 | 26 | import binascii |
|
27 | 27 | import urllib |
|
28 | 28 | |
|
29 | 29 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
30 | 30 | |
|
31 | 31 | from rhodecode.lib.compat import OrderedDict |
|
32 | 32 | from rhodecode.lib.datelib import ( |
|
33 | 33 | date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate) |
|
34 | 34 | from rhodecode.lib.utils import safe_unicode, safe_str |
|
35 | 35 | from rhodecode.lib.vcs import connection, exceptions |
|
36 | 36 | from rhodecode.lib.vcs.backends.base import ( |
|
37 | 37 | BaseRepository, CollectionGenerator, Config, MergeResponse, |
|
38 | 38 | MergeFailureReason, Reference, BasePathPermissionChecker) |
|
39 | 39 | from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit |
|
40 | 40 | from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff |
|
41 | 41 | from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit |
|
42 | 42 | from rhodecode.lib.vcs.exceptions import ( |
|
43 | 43 | EmptyRepositoryError, RepositoryError, TagAlreadyExistError, |
|
44 | 44 | TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError) |
|
45 | 45 | from rhodecode.lib.vcs.compat import configparser |
|
46 | 46 | |
|
47 | 47 | hexlify = binascii.hexlify |
|
48 | 48 | nullid = "\0" * 20 |
|
49 | 49 | |
|
50 | 50 | log = logging.getLogger(__name__) |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | class MercurialRepository(BaseRepository): |
|
54 | 54 | """ |
|
55 | 55 | Mercurial repository backend |
|
56 | 56 | """ |
|
57 | 57 | DEFAULT_BRANCH_NAME = 'default' |
|
58 | 58 | |
|
59 | 59 | def __init__(self, repo_path, config=None, create=False, src_url=None, |
|
60 | 60 | do_workspace_checkout=False, with_wire=None, bare=False): |
|
61 | 61 | """ |
|
62 | 62 | Raises RepositoryError if repository could not be found at the given |
|
63 | 63 | ``repo_path``. |
|
64 | 64 | |
|
65 | 65 | :param repo_path: local path of the repository |
|
66 | 66 | :param config: config object containing the repo configuration |
|
67 | 67 | :param create=False: if set to True, would try to create repository if |
|
68 | 68 | it does not exist rather than raising exception |
|
69 | 69 | :param src_url=None: would try to clone repository from given location |
|
70 | 70 | :param do_workspace_checkout=False: sets update of working copy after |
|
71 | 71 | making a clone |
|
72 | 72 | :param bare: not used, compatible with other VCS |
|
73 | 73 | """ |
|
74 | 74 | |
|
75 | 75 | self.path = safe_str(os.path.abspath(repo_path)) |
|
76 | 76 | # mercurial since 4.4.X requires certain configuration to be present |
|
77 | 77 | # because sometimes we init the repos with config we need to meet |
|
78 | 78 | # special requirements |
|
79 | 79 | self.config = config if config else self.get_default_config( |
|
80 | 80 | default=[('extensions', 'largefiles', '1')]) |
|
81 | 81 | self.with_wire = with_wire |
|
82 | 82 | |
|
83 | 83 | self._init_repo(create, src_url, do_workspace_checkout) |
|
84 | 84 | |
|
85 | 85 | # caches |
|
86 | 86 | self._commit_ids = {} |
|
87 | 87 | |
|
88 | 88 | @LazyProperty |
|
89 | 89 | def _remote(self): |
|
90 | 90 | return connection.Hg(self.path, self.config, with_wire=self.with_wire) |
|
91 | 91 | |
|
92 | 92 | @LazyProperty |
|
93 | 93 | def commit_ids(self): |
|
94 | 94 | """ |
|
95 | 95 | Returns list of commit ids, in ascending order. Being a lazy |
|
96 | 96 | attribute allows external tools to inject shas from cache. |
|
97 | 97 | """ |
|
98 | 98 | commit_ids = self._get_all_commit_ids() |
|
99 | 99 | self._rebuild_cache(commit_ids) |
|
100 | 100 | return commit_ids |
|
101 | 101 | |
|
102 | 102 | def _rebuild_cache(self, commit_ids): |
|
103 | 103 | self._commit_ids = dict((commit_id, index) |
|
104 | 104 | for index, commit_id in enumerate(commit_ids)) |
|
105 | 105 | |
|
106 | 106 | @LazyProperty |
|
107 | 107 | def branches(self): |
|
108 | 108 | return self._get_branches() |
|
109 | 109 | |
|
110 | 110 | @LazyProperty |
|
111 | 111 | def branches_closed(self): |
|
112 | 112 | return self._get_branches(active=False, closed=True) |
|
113 | 113 | |
|
114 | 114 | @LazyProperty |
|
115 | 115 | def branches_all(self): |
|
116 | 116 | all_branches = {} |
|
117 | 117 | all_branches.update(self.branches) |
|
118 | 118 | all_branches.update(self.branches_closed) |
|
119 | 119 | return all_branches |
|
120 | 120 | |
|
121 | 121 | def _get_branches(self, active=True, closed=False): |
|
122 | 122 | """ |
|
123 | 123 | Gets branches for this repository |
|
124 | 124 | Returns only active (not closed) branches by default |
|
125 | 125 | |
|
126 | 126 | :param active: return also active branches |
|
127 | 127 | :param closed: return also closed branches |
|
128 | 128 | |
|
129 | 129 | """ |
|
130 | 130 | if self.is_empty(): |
|
131 | 131 | return {} |
|
132 | 132 | |
|
133 | 133 | def get_name(ctx): |
|
134 | 134 | return ctx[0] |
|
135 | 135 | |
|
136 | 136 | _branches = [(safe_unicode(n), hexlify(h),) for n, h in |
|
137 | 137 | self._remote.branches(active, closed).items()] |
|
138 | 138 | |
|
139 | 139 | return OrderedDict(sorted(_branches, key=get_name, reverse=False)) |
|
140 | 140 | |
|
141 | 141 | @LazyProperty |
|
142 | 142 | def tags(self): |
|
143 | 143 | """ |
|
144 | 144 | Gets tags for this repository |
|
145 | 145 | """ |
|
146 | 146 | return self._get_tags() |
|
147 | 147 | |
|
148 | 148 | def _get_tags(self): |
|
149 | 149 | if self.is_empty(): |
|
150 | 150 | return {} |
|
151 | 151 | |
|
152 | 152 | def get_name(ctx): |
|
153 | 153 | return ctx[0] |
|
154 | 154 | |
|
155 | 155 | _tags = [(safe_unicode(n), hexlify(h),) for n, h in |
|
156 | 156 | self._remote.tags().items()] |
|
157 | 157 | |
|
158 | 158 | return OrderedDict(sorted(_tags, key=get_name, reverse=True)) |
|
159 | 159 | |
|
160 | 160 | def tag(self, name, user, commit_id=None, message=None, date=None, |
|
161 | 161 | **kwargs): |
|
162 | 162 | """ |
|
163 | 163 | Creates and returns a tag for the given ``commit_id``. |
|
164 | 164 | |
|
165 | 165 | :param name: name for new tag |
|
166 | 166 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
167 | 167 | :param commit_id: commit id for which new tag would be created |
|
168 | 168 | :param message: message of the tag's commit |
|
169 | 169 | :param date: date of tag's commit |
|
170 | 170 | |
|
171 | 171 | :raises TagAlreadyExistError: if tag with same name already exists |
|
172 | 172 | """ |
|
173 | 173 | if name in self.tags: |
|
174 | 174 | raise TagAlreadyExistError("Tag %s already exists" % name) |
|
175 | 175 | commit = self.get_commit(commit_id=commit_id) |
|
176 | 176 | local = kwargs.setdefault('local', False) |
|
177 | 177 | |
|
178 | 178 | if message is None: |
|
179 | 179 | message = "Added tag %s for commit %s" % (name, commit.short_id) |
|
180 | 180 | |
|
181 | 181 | date, tz = date_to_timestamp_plus_offset(date) |
|
182 | 182 | |
|
183 | 183 | self._remote.tag( |
|
184 | 184 | name, commit.raw_id, message, local, user, date, tz) |
|
185 | 185 | self._remote.invalidate_vcs_cache() |
|
186 | 186 | |
|
187 | 187 | # Reinitialize tags |
|
188 | 188 | self.tags = self._get_tags() |
|
189 | 189 | tag_id = self.tags[name] |
|
190 | 190 | |
|
191 | 191 | return self.get_commit(commit_id=tag_id) |
|
192 | 192 | |
|
193 | 193 | def remove_tag(self, name, user, message=None, date=None): |
|
194 | 194 | """ |
|
195 | 195 | Removes tag with the given `name`. |
|
196 | 196 | |
|
197 | 197 | :param name: name of the tag to be removed |
|
198 | 198 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
199 | 199 | :param message: message of the tag's removal commit |
|
200 | 200 | :param date: date of tag's removal commit |
|
201 | 201 | |
|
202 | 202 | :raises TagDoesNotExistError: if tag with given name does not exist |
|
203 | 203 | """ |
|
204 | 204 | if name not in self.tags: |
|
205 | 205 | raise TagDoesNotExistError("Tag %s does not exist" % name) |
|
206 | 206 | if message is None: |
|
207 | 207 | message = "Removed tag %s" % name |
|
208 | 208 | local = False |
|
209 | 209 | |
|
210 | 210 | date, tz = date_to_timestamp_plus_offset(date) |
|
211 | 211 | |
|
212 | 212 | self._remote.tag(name, nullid, message, local, user, date, tz) |
|
213 | 213 | self._remote.invalidate_vcs_cache() |
|
214 | 214 | self.tags = self._get_tags() |
|
215 | 215 | |
|
216 | 216 | @LazyProperty |
|
217 | 217 | def bookmarks(self): |
|
218 | 218 | """ |
|
219 | 219 | Gets bookmarks for this repository |
|
220 | 220 | """ |
|
221 | 221 | return self._get_bookmarks() |
|
222 | 222 | |
|
223 | 223 | def _get_bookmarks(self): |
|
224 | 224 | if self.is_empty(): |
|
225 | 225 | return {} |
|
226 | 226 | |
|
227 | 227 | def get_name(ctx): |
|
228 | 228 | return ctx[0] |
|
229 | 229 | |
|
230 | 230 | _bookmarks = [ |
|
231 | 231 | (safe_unicode(n), hexlify(h)) for n, h in |
|
232 | 232 | self._remote.bookmarks().items()] |
|
233 | 233 | |
|
234 | 234 | return OrderedDict(sorted(_bookmarks, key=get_name)) |
|
235 | 235 | |
|
236 | 236 | def _get_all_commit_ids(self): |
|
237 | 237 | return self._remote.get_all_commit_ids('visible') |
|
238 | 238 | |
|
239 | 239 | def get_diff( |
|
240 | 240 | self, commit1, commit2, path='', ignore_whitespace=False, |
|
241 | 241 | context=3, path1=None): |
|
242 | 242 | """ |
|
243 | 243 | Returns (git like) *diff*, as plain text. Shows changes introduced by |
|
244 | 244 | `commit2` since `commit1`. |
|
245 | 245 | |
|
246 | 246 | :param commit1: Entry point from which diff is shown. Can be |
|
247 | 247 | ``self.EMPTY_COMMIT`` - in this case, patch showing all |
|
248 | 248 | the changes since empty state of the repository until `commit2` |
|
249 | 249 | :param commit2: Until which commit changes should be shown. |
|
250 | 250 | :param ignore_whitespace: If set to ``True``, would not show whitespace |
|
251 | 251 | changes. Defaults to ``False``. |
|
252 | 252 | :param context: How many lines before/after changed lines should be |
|
253 | 253 | shown. Defaults to ``3``. |
|
254 | 254 | """ |
|
255 | 255 | self._validate_diff_commits(commit1, commit2) |
|
256 | 256 | if path1 is not None and path1 != path: |
|
257 | 257 | raise ValueError("Diff of two different paths not supported.") |
|
258 | 258 | |
|
259 | 259 | if path: |
|
260 | 260 | file_filter = [self.path, path] |
|
261 | 261 | else: |
|
262 | 262 | file_filter = None |
|
263 | 263 | |
|
264 | 264 | diff = self._remote.diff( |
|
265 | 265 | commit1.raw_id, commit2.raw_id, file_filter=file_filter, |
|
266 | 266 | opt_git=True, opt_ignorews=ignore_whitespace, |
|
267 | 267 | context=context) |
|
268 | 268 | return MercurialDiff(diff) |
|
269 | 269 | |
|
270 | 270 | def strip(self, commit_id, branch=None): |
|
271 | 271 | self._remote.strip(commit_id, update=False, backup="none") |
|
272 | 272 | |
|
273 | 273 | self._remote.invalidate_vcs_cache() |
|
274 | 274 | self.commit_ids = self._get_all_commit_ids() |
|
275 | 275 | self._rebuild_cache(self.commit_ids) |
|
276 | 276 | |
|
277 | 277 | def verify(self): |
|
278 | 278 | verify = self._remote.verify() |
|
279 | 279 | |
|
280 | 280 | self._remote.invalidate_vcs_cache() |
|
281 | 281 | return verify |
|
282 | 282 | |
|
283 | 283 | def get_common_ancestor(self, commit_id1, commit_id2, repo2): |
|
284 | 284 | if commit_id1 == commit_id2: |
|
285 | 285 | return commit_id1 |
|
286 | 286 | |
|
287 | 287 | ancestors = self._remote.revs_from_revspec( |
|
288 | 288 | "ancestor(id(%s), id(%s))", commit_id1, commit_id2, |
|
289 | 289 | other_path=repo2.path) |
|
290 | 290 | return repo2[ancestors[0]].raw_id if ancestors else None |
|
291 | 291 | |
|
292 | 292 | def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None): |
|
293 | 293 | if commit_id1 == commit_id2: |
|
294 | 294 | commits = [] |
|
295 | 295 | else: |
|
296 | 296 | if merge: |
|
297 | 297 | indexes = self._remote.revs_from_revspec( |
|
298 | 298 | "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)", |
|
299 | 299 | commit_id2, commit_id1, commit_id1, other_path=repo2.path) |
|
300 | 300 | else: |
|
301 | 301 | indexes = self._remote.revs_from_revspec( |
|
302 | 302 | "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2, |
|
303 | 303 | commit_id1, other_path=repo2.path) |
|
304 | 304 | |
|
305 | 305 | commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load) |
|
306 | 306 | for idx in indexes] |
|
307 | 307 | |
|
308 | 308 | return commits |
|
309 | 309 | |
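The two revspecs used in compare() above, expanded with placeholder commit ids
(values illustrative):

    A, B = 'aaa111', 'bbb222'
    # merge=True: everything reachable from B but not from A, minus A itself
    merge_spec = "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)" % (B, A, A)
    # merge=False: the linear range A..B, excluding A
    range_spec = "id(%s)..id(%s) - id(%s)" % (A, B, A)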
|
310 | 310 | @staticmethod |
|
311 | 311 | def check_url(url, config): |
|
312 | 312 | """ |
|
313 | 313 | Function will check given url and try to verify if it's a valid |
|
314 | 314 | link. Sometimes it may happen that mercurial will issue a basic |
|
315 | 315 | auth request that can cause the whole API to hang when used from python |
|
316 | 316 | or other external calls. |
|
317 | 317 | |
|
318 | 318 | On failures it'll raise urllib2.HTTPError; an exception is also thrown |
|
319 | 319 | when the return code is not 200 |
|
320 | 320 | """ |
|
321 | 321 | # check first if it's not a local url |
|
322 | 322 | if os.path.isdir(url) or url.startswith('file:'): |
|
323 | 323 | return True |
|
324 | 324 | |
|
325 | 325 | # Request the _remote to verify the url |
|
326 | 326 | return connection.Hg.check_url(url, config.serialize()) |
|
327 | 327 | |
|
328 | 328 | @staticmethod |
|
329 | 329 | def is_valid_repository(path): |
|
330 | 330 | return os.path.isdir(os.path.join(path, '.hg')) |
|
331 | 331 | |
|
332 | 332 | def _init_repo(self, create, src_url=None, do_workspace_checkout=False): |
|
333 | 333 | """ |
|
334 | 334 | Function will check for mercurial repository in given path. If there |
|
335 | 335 | is no repository in that path it will raise an exception unless |
|
336 | 336 | `create` parameter is set to True - in that case repository would |
|
337 | 337 | be created. |
|
338 | 338 | |
|
339 | 339 | If `src_url` is given, would try to clone repository from the |
|
340 | 340 | location at given clone_point. Additionally it'll update the |
|
341 | 341 | working copy according to the `do_workspace_checkout` flag. |
|
342 | 342 | """ |
|
343 | 343 | if create and os.path.exists(self.path): |
|
344 | 344 | raise RepositoryError( |
|
345 | 345 | "Cannot create repository at %s, location already exist" |
|
346 | 346 | % self.path) |
|
347 | 347 | |
|
348 | 348 | if src_url: |
|
349 | 349 | url = str(self._get_url(src_url)) |
|
350 | 350 | MercurialRepository.check_url(url, self.config) |
|
351 | 351 | |
|
352 | 352 | self._remote.clone(url, self.path, do_workspace_checkout) |
|
353 | 353 | |
|
354 | 354 | # Don't try to create if we've already cloned repo |
|
355 | 355 | create = False |
|
356 | 356 | |
|
357 | 357 | if create: |
|
358 | 358 | os.makedirs(self.path, mode=0o755) |
|
359 | 359 | |
|
360 | 360 | self._remote.localrepository(create) |
|
361 | 361 | |
|
362 | 362 | @LazyProperty |
|
363 | 363 | def in_memory_commit(self): |
|
364 | 364 | return MercurialInMemoryCommit(self) |
|
365 | 365 | |
|
366 | 366 | @LazyProperty |
|
367 | 367 | def description(self): |
|
368 | 368 | description = self._remote.get_config_value( |
|
369 | 369 | 'web', 'description', untrusted=True) |
|
370 | 370 | return safe_unicode(description or self.DEFAULT_DESCRIPTION) |
|
371 | 371 | |
|
372 | 372 | @LazyProperty |
|
373 | 373 | def contact(self): |
|
374 | 374 | contact = ( |
|
375 | 375 | self._remote.get_config_value("web", "contact") or |
|
376 | 376 | self._remote.get_config_value("ui", "username")) |
|
377 | 377 | return safe_unicode(contact or self.DEFAULT_CONTACT) |
|
378 | 378 | |
|
379 | 379 | @LazyProperty |
|
380 | 380 | def last_change(self): |
|
381 | 381 | """ |
|
382 | 382 | Returns last change made on this repository as |
|
383 | 383 | `datetime.datetime` object. |
|
384 | 384 | """ |
|
385 | 385 | try: |
|
386 | 386 | return self.get_commit().date |
|
387 | 387 | except RepositoryError: |
|
388 | 388 | tzoffset = makedate()[1] |
|
389 | 389 | return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset) |
|
390 | 390 | |
|
391 | 391 | def _get_fs_mtime(self): |
|
392 | 392 | # fallback to filesystem |
|
393 | 393 | cl_path = os.path.join(self.path, '.hg', "00changelog.i") |
|
394 | 394 | st_path = os.path.join(self.path, '.hg', "store") |
|
395 | 395 | if os.path.exists(cl_path): |
|
396 | 396 | return os.stat(cl_path).st_mtime |
|
397 | 397 | else: |
|
398 | 398 | return os.stat(st_path).st_mtime |
|
399 | 399 | |
|
400 | 400 | def _get_url(self, url): |
|
401 | 401 | """ |
|
402 | 402 | Returns normalized url. If schema is not given, would fall back |
|
403 | 403 | to filesystem |
|
404 | 404 | (``file:///``) schema. |
|
405 | 405 | """ |
|
406 | 406 | url = url.encode('utf8') |
|
407 | 407 | if url != 'default' and '://' not in url: |
|
408 | 408 | url = "file:" + urllib.pathname2url(url) |
|
409 | 409 | return url |
|
410 | 410 | |
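What the normalization in _get_url() produces for a plain path, assuming the
Python 2 urllib imported in this module and a POSIX path:

    import urllib
    assert 'file:' + urllib.pathname2url('/srv/repos/foo') == 'file:/srv/repos/foo'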
|
411 | 411 | def get_hook_location(self): |
|
412 | 412 | """ |
|
413 | 413 | returns absolute path to location where hooks are stored |
|
414 | 414 | """ |
|
415 | 415 | return os.path.join(self.path, '.hg', '.hgrc') |
|
416 | 416 | |
|
417 | 417 | def get_commit(self, commit_id=None, commit_idx=None, pre_load=None): |
|
418 | 418 | """ |
|
419 | 419 | Returns ``MercurialCommit`` object representing repository's |
|
420 | 420 | commit at the given `commit_id` or `commit_idx`. |
|
421 | 421 | """ |
|
422 | 422 | if self.is_empty(): |
|
423 | 423 | raise EmptyRepositoryError("There are no commits yet") |
|
424 | 424 | |
|
425 | 425 | if commit_id is not None: |
|
426 | 426 | self._validate_commit_id(commit_id) |
|
427 | 427 | try: |
|
428 | 428 | idx = self._commit_ids[commit_id] |
|
429 | 429 | return MercurialCommit(self, commit_id, idx, pre_load=pre_load) |
|
430 | 430 | except KeyError: |
|
431 | 431 | pass |
|
432 | 432 | elif commit_idx is not None: |
|
433 | 433 | self._validate_commit_idx(commit_idx) |
|
434 | 434 | try: |
|
435 | 435 | id_ = self.commit_ids[commit_idx] |
|
436 | 436 | if commit_idx < 0: |
|
437 | 437 | commit_idx += len(self.commit_ids) |
|
438 | 438 | return MercurialCommit( |
|
439 | 439 | self, id_, commit_idx, pre_load=pre_load) |
|
440 | 440 | except IndexError: |
|
441 | 441 | commit_id = commit_idx |
|
442 | 442 | else: |
|
443 | 443 | commit_id = "tip" |
|
444 | 444 | |
|
445 | 445 | if isinstance(commit_id, unicode): |
|
446 | 446 | commit_id = safe_str(commit_id) |
|
447 | 447 | |
|
448 | 448 | try: |
|
449 | 449 | raw_id, idx = self._remote.lookup(commit_id, both=True) |
|
450 | 450 | except CommitDoesNotExistError: |
|
451 | 451 | msg = "Commit %s does not exist for %s" % ( |
|
452 | 452 | commit_id, self) |
|
453 | 453 | raise CommitDoesNotExistError(msg) |
|
454 | 454 | |
|
455 | 455 | return MercurialCommit(self, raw_id, idx, pre_load=pre_load) |
|
456 | 456 | |
|
457 | 457 | def get_commits( |
|
458 | 458 | self, start_id=None, end_id=None, start_date=None, end_date=None, |
|
459 | 459 | branch_name=None, show_hidden=False, pre_load=None): |
|
460 | 460 | """ |
|
461 | 461 | Returns generator of ``MercurialCommit`` objects from start to end |
|
462 | 462 | (both are inclusive) |
|
463 | 463 | |
|
464 | 464 | :param start_id: None, str(commit_id) |
|
465 | 465 | :param end_id: None, str(commit_id) |
|
466 | 466 | :param start_date: if specified, commits with commit date less than |
|
467 | 467 | ``start_date`` would be filtered out from returned set |
|
468 | 468 | :param end_date: if specified, commits with commit date greater than |
|
469 | 469 | ``end_date`` would be filtered out from returned set |
|
470 | 470 | :param branch_name: if specified, commits not reachable from given |
|
471 | 471 | branch would be filtered out from returned set |
|
472 | 472 | :param show_hidden: Show hidden commits such as obsolete or hidden from |
|
473 | 473 | Mercurial evolve |
|
474 | 474 | :raise BranchDoesNotExistError: If given ``branch_name`` does not |
|
475 | 475 | exist. |
|
476 | 476 | :raise CommitDoesNotExistError: If commit for given ``start`` or |
|
477 | 477 | ``end`` could not be found. |
|
478 | 478 | """ |
|
479 | 479 | # actually we should check now if it's not an empty repo |
|
480 | 480 | branch_ancestors = False |
|
481 | 481 | if self.is_empty(): |
|
482 | 482 | raise EmptyRepositoryError("There are no commits yet") |
|
483 | 483 | self._validate_branch_name(branch_name) |
|
484 | 484 | |
|
485 | 485 | if start_id is not None: |
|
486 | 486 | self._validate_commit_id(start_id) |
|
487 | 487 | c_start = self.get_commit(commit_id=start_id) |
|
488 | 488 | start_pos = self._commit_ids[c_start.raw_id] |
|
489 | 489 | else: |
|
490 | 490 | start_pos = None |
|
491 | 491 | |
|
492 | 492 | if end_id is not None: |
|
493 | 493 | self._validate_commit_id(end_id) |
|
494 | 494 | c_end = self.get_commit(commit_id=end_id) |
|
495 | 495 | end_pos = max(0, self._commit_ids[c_end.raw_id]) |
|
496 | 496 | else: |
|
497 | 497 | end_pos = None |
|
498 | 498 | |
|
499 | 499 | if None not in [start_id, end_id] and start_pos > end_pos: |
|
500 | 500 | raise RepositoryError( |
|
501 | 501 | "Start commit '%s' cannot be after end commit '%s'" % |
|
502 | 502 | (start_id, end_id)) |
|
503 | 503 | |
|
504 | 504 | if end_pos is not None: |
|
505 | 505 | end_pos += 1 |
|
506 | 506 | |
|
507 | 507 | commit_filter = [] |
|
508 | 508 | |
|
509 | 509 | if branch_name and not branch_ancestors: |
|
510 | 510 | commit_filter.append('branch("%s")' % (branch_name,)) |
|
511 | 511 | elif branch_name and branch_ancestors: |
|
512 | 512 | commit_filter.append('ancestors(branch("%s"))' % (branch_name,)) |
|
513 | 513 | |
|
514 | 514 | if start_date and not end_date: |
|
515 | 515 | commit_filter.append('date(">%s")' % (start_date,)) |
|
516 | 516 | if end_date and not start_date: |
|
517 | 517 | commit_filter.append('date("<%s")' % (end_date,)) |
|
518 | 518 | if start_date and end_date: |
|
519 | 519 | commit_filter.append( |
|
520 | 520 | 'date(">%s") and date("<%s")' % (start_date, end_date)) |
|
521 | 521 | |
|
522 | 522 | if not show_hidden: |
|
523 | 523 | commit_filter.append('not obsolete()') |
|
524 | 524 | commit_filter.append('not hidden()') |
|
525 | 525 | |
|
526 | 526 | # TODO: johbo: Figure out a simpler way for this solution |
|
527 | 527 | collection_generator = CollectionGenerator |
|
528 | 528 | if commit_filter: |
|
529 | 529 | commit_filter = ' and '.join(map(safe_str, commit_filter)) |
|
530 | 530 | revisions = self._remote.rev_range([commit_filter]) |
|
531 | 531 | collection_generator = MercurialIndexBasedCollectionGenerator |
|
532 | 532 | else: |
|
533 | 533 | revisions = self.commit_ids |
|
534 | 534 | |
|
535 | 535 | if start_pos or end_pos: |
|
536 | 536 | revisions = revisions[start_pos:end_pos] |
|
537 | 537 | |
|
538 | 538 | return collection_generator(self, revisions, pre_load=pre_load) |
|
539 | 539 | |
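The individual filters collected in get_commits() above are joined into a
single Mercurial revset; for example, a branch plus a date window plus the
default hidden/obsolete exclusion becomes:

    filters = ['branch("default")',
               'date(">2018-01-01") and date("<2018-06-01")',
               'not obsolete()', 'not hidden()']
    revset = ' and '.join(filters)
    # branch("default") and date(">2018-01-01") and date("<2018-06-01")
    # and not obsolete() and not hidden()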
|
540 | 540 | def pull(self, url, commit_ids=None): |
|
541 | 541 | """ |
|
542 | 542 | Pull changes from external location. |
|
543 | 543 | |
|
544 | 544 | :param commit_ids: Optional. Can be set to a list of commit ids |
|
545 | 545 | which shall be pulled from the other repository. |
|
546 | 546 | """ |
|
547 | 547 | url = self._get_url(url) |
|
548 | 548 | self._remote.pull(url, commit_ids=commit_ids) |
|
549 | 549 | self._remote.invalidate_vcs_cache() |
|
550 | 550 | |
|
551 | 551 | def fetch(self, url, commit_ids=None): |
|
552 | 552 | """ |
|
553 | 553 | Backward compatibility with GIT fetch==pull |
|
554 | 554 | """ |
|
555 | 555 | return self.pull(url, commit_ids=commit_ids) |
|
556 | 556 | |
|
557 | 557 | def push(self, url): |
|
558 | 558 | url = self._get_url(url) |
|
559 | 559 | self._remote.sync_push(url) |
|
560 | 560 | |
|
561 | 561 | def _local_clone(self, clone_path): |
|
562 | 562 | """ |
|
563 | 563 | Create a local clone of the current repo. |
|
564 | 564 | """ |
|
565 | 565 | self._remote.clone(self.path, clone_path, update_after_clone=True, |
|
566 | 566 | hooks=False) |
|
567 | 567 | |
|
568 | 568 | def _update(self, revision, clean=False): |
|
569 | 569 | """ |
|
570 | 570 | Update the working copy to the specified revision. |
|
571 | 571 | """ |
|
572 | 572 | log.debug('Doing checkout to commit: `%s` for %s', revision, self) |
|
573 | 573 | self._remote.update(revision, clean=clean) |
|
574 | 574 | |
|
575 | 575 | def _identify(self): |
|
576 | 576 | """ |
|
577 | 577 | Return the current state of the working directory. |
|
578 | 578 | """ |
|
579 | 579 | return self._remote.identify().strip().rstrip('+') |
|
580 | 580 | |
|
581 | 581 | def _heads(self, branch=None): |
|
582 | 582 | """ |
|
583 | 583 | Return the commit ids of the repository heads. |
|
584 | 584 | """ |
|
585 | 585 | return self._remote.heads(branch=branch).strip().split(' ') |
|
586 | 586 | |
|
587 | 587 | def _ancestor(self, revision1, revision2): |
|
588 | 588 | """ |
|
589 | 589 | Return the common ancestor of the two revisions. |
|
590 | 590 | """ |
|
591 | 591 | return self._remote.ancestor(revision1, revision2) |
|
592 | 592 | |
|
593 | 593 | def _local_push( |
|
594 | 594 | self, revision, repository_path, push_branches=False, |
|
595 | 595 | enable_hooks=False): |
|
596 | 596 | """ |
|
597 | 597 | Push the given revision to the specified repository. |
|
598 | 598 | |
|
599 | 599 | :param push_branches: allow to create branches in the target repo. |
|
600 | 600 | """ |
|
601 | 601 | self._remote.push( |
|
602 | 602 | [revision], repository_path, hooks=enable_hooks, |
|
603 | 603 | push_branches=push_branches) |
|
604 | 604 | |
|
605 | 605 | def _local_merge(self, target_ref, merge_message, user_name, user_email, |
|
606 | 606 | source_ref, use_rebase=False, dry_run=False): |
|
607 | 607 | """ |
|
608 | 608 | Merge the given source_revision into the checked out revision. |
|
609 | 609 | |
|
610 | 610 | Returns the commit id of the merge and a boolean indicating if the |
|
611 | 611 | commit needs to be pushed. |
|
612 | 612 | """ |
|
613 | self._update(target_ref.commit_id) | |
|
613 | self._update(target_ref.commit_id, clean=True) | |
|
614 | 614 | |
|
615 | 615 | ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id) |
|
616 | 616 | is_the_same_branch = self._is_the_same_branch(target_ref, source_ref) |
|
617 | 617 | |
|
618 | 618 | if ancestor == source_ref.commit_id: |
|
619 | 619 | # Nothing to do, the changes were already integrated |
|
620 | 620 | return target_ref.commit_id, False |
|
621 | 621 | |
|
622 | 622 | elif ancestor == target_ref.commit_id and is_the_same_branch: |
|
623 | 623 | # In this case we should force a commit message |
|
624 | 624 | return source_ref.commit_id, True |
|
625 | 625 | |
|
626 | 626 | if use_rebase: |
|
627 | 627 | try: |
|
628 | 628 | bookmark_name = 'rcbook%s%s' % (source_ref.commit_id, |
|
629 | 629 | target_ref.commit_id) |
|
630 | 630 | self.bookmark(bookmark_name, revision=source_ref.commit_id) |
|
631 | 631 | self._remote.rebase( |
|
632 | 632 | source=source_ref.commit_id, dest=target_ref.commit_id) |
|
633 | 633 | self._remote.invalidate_vcs_cache() |
|
634 | self._update(bookmark_name) | |
|
634 | self._update(bookmark_name, clean=True) | |
|
635 | 635 | return self._identify(), True |
|
636 | 636 | except RepositoryError: |
|
637 | 637 | # The rebase-abort may raise another exception which 'hides' |
|
638 | 638 | # the original one, therefore we log it here. |
|
639 | 639 | log.exception('Error while rebasing shadow repo during merge.') |
|
640 | 640 | |
|
641 | 641 | # Cleanup any rebase leftovers |
|
642 | 642 | self._remote.invalidate_vcs_cache() |
|
643 | 643 | self._remote.rebase(abort=True) |
|
644 | 644 | self._remote.invalidate_vcs_cache() |
|
645 | 645 | self._remote.update(clean=True) |
|
646 | 646 | raise |
|
647 | 647 | else: |
|
648 | 648 | try: |
|
649 | 649 | self._remote.merge(source_ref.commit_id) |
|
650 | 650 | self._remote.invalidate_vcs_cache() |
|
651 | 651 | self._remote.commit( |
|
652 | 652 | message=safe_str(merge_message), |
|
653 | 653 | username=safe_str('%s <%s>' % (user_name, user_email))) |
|
654 | 654 | self._remote.invalidate_vcs_cache() |
|
655 | 655 | return self._identify(), True |
|
656 | 656 | except RepositoryError: |
|
657 | 657 | # Cleanup any merge leftovers |
|
658 | 658 | self._remote.update(clean=True) |
|
659 | 659 | raise |
|
660 | 660 | |
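The ancestor checks at the top of _local_merge() above amount to a three-way
decision; a standalone sketch (helper name hypothetical):

    def merge_action(ancestor, source_id, target_id, same_branch):
        if ancestor == source_id:
            return 'noop'           # target already contains the source
        if ancestor == target_id and same_branch:
            return 'fast-forward'   # source simply extends the target
        return 'merge-or-rebase'    # a real merge commit (or rebase) is needed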
|
661 | 661 | def _local_close(self, target_ref, user_name, user_email, |
|
662 | 662 | source_ref, close_message=''): |
|
663 | 663 | """ |
|
664 | 664 | Close the branch of the given source_revision |
|
665 | 665 | |
|
666 | 666 | Returns the commit id of the close and a boolean indicating if the |
|
667 | 667 | commit needs to be pushed. |
|
668 | 668 | """ |
|
669 | 669 | self._update(source_ref.commit_id) |
|
670 | 670 | message = close_message or "Closing branch: `{}`".format(source_ref.name) |
|
671 | 671 | try: |
|
672 | 672 | self._remote.commit( |
|
673 | 673 | message=safe_str(message), |
|
674 | 674 | username=safe_str('%s <%s>' % (user_name, user_email)), |
|
675 | 675 | close_branch=True) |
|
676 | 676 | self._remote.invalidate_vcs_cache() |
|
677 | 677 | return self._identify(), True |
|
678 | 678 | except RepositoryError: |
|
679 | 679 | # Cleanup any commit leftovers |
|
680 | 680 | self._remote.update(clean=True) |
|
681 | 681 | raise |
|
682 | 682 | |
|
683 | 683 | def _is_the_same_branch(self, target_ref, source_ref): |
|
684 | 684 | return ( |
|
685 | 685 | self._get_branch_name(target_ref) == |
|
686 | 686 | self._get_branch_name(source_ref)) |
|
687 | 687 | |
|
688 | 688 | def _get_branch_name(self, ref): |
|
689 | 689 | if ref.type == 'branch': |
|
690 | 690 | return ref.name |
|
691 | 691 | return self._remote.ctx_branch(ref.commit_id) |
|
692 | 692 | |
|
693 | 693 | def _maybe_prepare_merge_workspace( |
|
694 | 694 | self, repo_id, workspace_id, unused_target_ref, unused_source_ref): |
|
695 | 695 | shadow_repository_path = self._get_shadow_repository_path( |
|
696 | 696 | repo_id, workspace_id) |
|
697 | 697 | if not os.path.exists(shadow_repository_path): |
|
698 | 698 | self._local_clone(shadow_repository_path) |
|
699 | 699 | log.debug( |
|
700 | 700 | 'Prepared shadow repository in %s', shadow_repository_path) |
|
701 | 701 | |
|
702 | 702 | return shadow_repository_path |
|
703 | 703 | |
|
704 | 704 | def _merge_repo(self, repo_id, workspace_id, target_ref, |
|
705 | 705 | source_repo, source_ref, merge_message, |
|
706 | 706 | merger_name, merger_email, dry_run=False, |
|
707 | 707 | use_rebase=False, close_branch=False): |
|
708 | 708 | |
|
709 | 709 | log.debug('Executing merge_repo with %s strategy, dry_run mode:%s', |
|
710 | 710 | 'rebase' if use_rebase else 'merge', dry_run) |
|
711 | 711 | if target_ref.commit_id not in self._heads(): |
|
712 | 712 | return MergeResponse( |
|
713 | False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD) | |
|
713 | False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD, | |
|
714 | metadata={'target_ref': target_ref}) | |
|
714 | 715 | |
|
715 | 716 | try: |
|
716 | if (target_ref.type == 'branch' and | |
|
717 | len(self._heads(target_ref.name)) != 1): | |
|
717 | if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1: | |
|
718 | heads = ','.join(self._heads(target_ref.name)) | |
|
718 | 719 | return MergeResponse( |
|
719 | 720 | False, False, None, |
|
720 | MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS) | |
|
721 | MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS, | |
|
722 | metadata={'heads': heads}) | |
|
721 | 723 | except CommitDoesNotExistError: |
|
722 | 724 | log.exception('Failure when looking up branch heads on hg target') |
|
723 | 725 | return MergeResponse( |
|
724 | False, False, None, MergeFailureReason.MISSING_TARGET_REF) | |
|
726 | False, False, None, MergeFailureReason.MISSING_TARGET_REF, | |
|
727 | metadata={'target_ref': target_ref}) | |
|
725 | 728 | |
|
726 | 729 | shadow_repository_path = self._maybe_prepare_merge_workspace( |
|
727 | 730 | repo_id, workspace_id, target_ref, source_ref) |
|
728 | 731 | shadow_repo = self._get_shadow_instance(shadow_repository_path) |
|
729 | 732 | |
|
730 | 733 | log.debug('Pulling in target reference %s', target_ref) |
|
731 | 734 | self._validate_pull_reference(target_ref) |
|
732 | 735 | shadow_repo._local_pull(self.path, target_ref) |
|
736 | ||
|
733 | 737 | try: |
|
734 | 738 | log.debug('Pulling in source reference %s', source_ref) |
|
735 | 739 | source_repo._validate_pull_reference(source_ref) |
|
736 | 740 | shadow_repo._local_pull(source_repo.path, source_ref) |
|
737 | 741 | except CommitDoesNotExistError: |
|
738 | 742 | log.exception('Failure when doing local pull on hg shadow repo') |
|
739 | 743 | return MergeResponse( |
|
740 | False, False, None, MergeFailureReason.MISSING_SOURCE_REF) | |
|
744 | False, False, None, MergeFailureReason.MISSING_SOURCE_REF, | |
|
745 | metadata={'source_ref': source_ref}) | |
|
741 | 746 | |
|
742 | 747 | merge_ref = None |
|
743 | 748 | merge_commit_id = None |
|
744 | 749 | close_commit_id = None |
|
745 | 750 | merge_failure_reason = MergeFailureReason.NONE |
|
751 | metadata = {} | |
|
746 | 752 | |
|
747 | 753 | # enforce that close branch should be used only in case we source from |
|
748 | 754 | # an actual Branch |
|
749 | 755 | close_branch = close_branch and source_ref.type == 'branch' |
|
750 | 756 | |
|
751 | 757 | # don't allow to close branch if source and target are the same |
|
752 | 758 | close_branch = close_branch and source_ref.name != target_ref.name |
|
753 | 759 | |
|
754 | 760 | needs_push_on_close = False |
|
755 | 761 | if close_branch and not use_rebase and not dry_run: |
|
756 | 762 | try: |
|
757 | 763 | close_commit_id, needs_push_on_close = shadow_repo._local_close( |
|
758 | 764 | target_ref, merger_name, merger_email, source_ref) |
|
759 | 765 | merge_possible = True |
|
760 | 766 | except RepositoryError: |
|
761 | log.exception( | |
|
762 | 'Failure when doing close branch on hg shadow repo') | |
|
767 | log.exception('Failure when doing close branch on ' | |
|
768 | 'shadow repo: %s', shadow_repo) | |
|
763 | 769 | merge_possible = False |
|
764 | 770 | merge_failure_reason = MergeFailureReason.MERGE_FAILED |
|
765 | 771 | else: |
|
766 | 772 | merge_possible = True |
|
767 | 773 | |
|
768 | 774 | needs_push = False |
|
769 | 775 | if merge_possible: |
|
770 | 776 | try: |
|
771 | 777 | merge_commit_id, needs_push = shadow_repo._local_merge( |
|
772 | 778 | target_ref, merge_message, merger_name, merger_email, |
|
773 | 779 | source_ref, use_rebase=use_rebase, dry_run=dry_run) |
|
774 | 780 | merge_possible = True |
|
775 | 781 | |
|
776 | 782 | # read the state of the close action, if it |
|
777 | 783 | # maybe required a push |
|
778 | 784 | needs_push = needs_push or needs_push_on_close |
|
779 | 785 | |
|
780 | 786 | # Set a bookmark pointing to the merge commit. This bookmark |
|
781 | 787 | # may be used to easily identify the last successful merge |
|
782 | 788 | # commit in the shadow repository. |
|
783 | 789 | shadow_repo.bookmark('pr-merge', revision=merge_commit_id) |
|
784 | 790 | merge_ref = Reference('book', 'pr-merge', merge_commit_id) |
|
785 | 791 | except SubrepoMergeError: |
|
786 | 792 | log.exception( |
|
787 | 793 | 'Subrepo merge error during local merge on hg shadow repo.') |
|
788 | 794 | merge_possible = False |
|
789 | 795 | merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED |
|
790 | 796 | needs_push = False |
|
791 | 797 | except RepositoryError: |
|
792 | 798 | log.exception('Failure when doing local merge on hg shadow repo') |
|
793 | 799 | merge_possible = False |
|
794 | 800 | merge_failure_reason = MergeFailureReason.MERGE_FAILED |
|
795 | 801 | needs_push = False |
|
796 | 802 | |
|
797 | 803 | if merge_possible and not dry_run: |
|
798 | 804 | if needs_push: |
|
799 | 805 | # In case the target is a bookmark, update it, so after pushing |
|
800 | 806 | # the bookmarks is also updated in the target. |
|
801 | 807 | if target_ref.type == 'book': |
|
802 | 808 | shadow_repo.bookmark( |
|
803 | 809 | target_ref.name, revision=merge_commit_id) |
|
804 | 810 | try: |
|
805 | 811 | shadow_repo_with_hooks = self._get_shadow_instance( |
|
806 | 812 | shadow_repository_path, |
|
807 | 813 | enable_hooks=True) |
|
808 | 814 | # This is the actual merge action, we push from shadow |
|
809 | 815 | # into origin. |
|
810 | 816 | # Note: the push_branches option will push any new branch |
|
811 | 817 | # defined in the source repository to the target. This may |
|
812 | 818 | # be dangerous as branches are permanent in Mercurial. |
|
813 | 819 | # This feature was requested in issue #441. |
|
814 | 820 | shadow_repo_with_hooks._local_push( |
|
815 | 821 | merge_commit_id, self.path, push_branches=True, |
|
816 | 822 | enable_hooks=True) |
|
817 | 823 | |
|
818 | 824 | # maybe we also need to push the close_commit_id |
|
819 | 825 | if close_commit_id: |
|
820 | 826 | shadow_repo_with_hooks._local_push( |
|
821 | 827 | close_commit_id, self.path, push_branches=True, |
|
822 | 828 | enable_hooks=True) |
|
823 | 829 | merge_succeeded = True |
|
824 | 830 | except RepositoryError: |
|
825 | 831 | log.exception( |
|
826 | 832 | 'Failure when doing local push from the shadow ' |
|
827 | 'repository to the target repository.') | |
|
833 | 'repository to the target repository at %s.', self.path) | |
|
828 | 834 | merge_succeeded = False |
|
829 | 835 | merge_failure_reason = MergeFailureReason.PUSH_FAILED |
|
836 | metadata['target'] = 'hg shadow repo' | |
|
837 | metadata['merge_commit'] = merge_commit_id | |
|
830 | 838 | else: |
|
831 | 839 | merge_succeeded = True |
|
832 | 840 | else: |
|
833 | 841 | merge_succeeded = False |
|
834 | 842 | |
|
835 | 843 | return MergeResponse( |
|
836 | merge_possible, merge_succeeded, merge_ref, merge_failure_reason) | |
|
844 | merge_possible, merge_succeeded, merge_ref, merge_failure_reason, | |
|
845 | metadata=metadata) | |
|
837 | 846 | |
|
838 | def _get_shadow_instance( | |
|
839 | self, shadow_repository_path, enable_hooks=False): | |
|
847 | def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False): | |
|
840 | 848 | config = self.config.copy() |
|
841 | 849 | if not enable_hooks: |
|
842 | 850 | config.clear_section('hooks') |
|
843 | 851 | return MercurialRepository(shadow_repository_path, config) |
|
844 | 852 | |
|
845 | 853 | def _validate_pull_reference(self, reference): |
|
846 | 854 | if not (reference.name in self.bookmarks or |
|
847 | 855 | reference.name in self.branches or |
|
848 | 856 | self.get_commit(reference.commit_id)): |
|
849 | 857 | raise CommitDoesNotExistError( |
|
850 | 858 | 'Unknown branch, bookmark or commit id') |
|
851 | 859 | |
|
852 | 860 | def _local_pull(self, repository_path, reference): |
|
853 | 861 | """ |
|
854 | 862 | Fetch a branch, bookmark or commit from a local repository. |
|
855 | 863 | """ |
|
856 | 864 | repository_path = os.path.abspath(repository_path) |
|
857 | 865 | if repository_path == self.path: |
|
858 | 866 | raise ValueError('Cannot pull from the same repository') |
|
859 | 867 | |
|
860 | 868 | reference_type_to_option_name = { |
|
861 | 869 | 'book': 'bookmark', |
|
862 | 870 | 'branch': 'branch', |
|
863 | 871 | } |
|
864 | 872 | option_name = reference_type_to_option_name.get( |
|
865 | 873 | reference.type, 'revision') |
|
866 | 874 | |
|
867 | 875 | if option_name == 'revision': |
|
868 | 876 | ref = reference.commit_id |
|
869 | 877 | else: |
|
870 | 878 | ref = reference.name |
|
871 | 879 | |
|
872 | 880 | options = {option_name: [ref]} |
|
873 | 881 | self._remote.pull_cmd(repository_path, hooks=False, **options) |
|
874 | 882 | self._remote.invalidate_vcs_cache() |
|
875 | 883 | |
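The reference-to-option mapping in _local_pull() above, as a standalone sketch
(function name hypothetical; sample values invented):

    def pull_options(ref_type, name, commit_id):
        option = {'book': 'bookmark', 'branch': 'branch'}.get(
            ref_type, 'revision')
        return {option: [commit_id if option == 'revision' else name]}

    assert pull_options('book', 'stable', 'deadbeef') == {'bookmark': ['stable']}
    assert pull_options('tag', 'v1.0', 'deadbeef') == {'revision': ['deadbeef']}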
|
876 | 884 | def bookmark(self, bookmark, revision=None): |
|
877 | 885 | if isinstance(bookmark, unicode): |
|
878 | 886 | bookmark = safe_str(bookmark) |
|
879 | 887 | self._remote.bookmark(bookmark, revision=revision) |
|
880 | 888 | self._remote.invalidate_vcs_cache() |
|
881 | 889 | |
|
882 | 890 | def get_path_permissions(self, username): |
|
883 | 891 | hgacl_file = os.path.join(self.path, '.hg/hgacl') |
|
884 | 892 | |
|
885 | 893 | def read_patterns(suffix): |
|
886 | 894 | svalue = None |
|
887 | 895 | try: |
|
888 | 896 | svalue = hgacl.get('narrowhgacl', username + suffix) |
|
889 | 897 | except configparser.NoOptionError: |
|
890 | 898 | try: |
|
891 | 899 | svalue = hgacl.get('narrowhgacl', 'default' + suffix) |
|
892 | 900 | except configparser.NoOptionError: |
|
893 | 901 | pass |
|
894 | 902 | if not svalue: |
|
895 | 903 | return None |
|
896 | 904 | result = ['/'] |
|
897 | 905 | for pattern in svalue.split(): |
|
898 | 906 | result.append(pattern) |
|
899 | 907 | if '*' not in pattern and '?' not in pattern: |
|
900 | 908 | result.append(pattern + '/*') |
|
901 | 909 | return result |
|
902 | 910 | |
|
903 | 911 | if os.path.exists(hgacl_file): |
|
904 | 912 | try: |
|
905 | 913 | hgacl = configparser.RawConfigParser() |
|
906 | 914 | hgacl.read(hgacl_file) |
|
907 | 915 | |
|
908 | 916 | includes = read_patterns('.includes') |
|
909 | 917 | excludes = read_patterns('.excludes') |
|
910 | 918 | return BasePathPermissionChecker.create_from_patterns( |
|
911 | 919 | includes, excludes) |
|
912 | 920 | except BaseException as e: |
|
913 | 921 | msg = 'Cannot read ACL settings from {} on {}: {}'.format( |
|
914 | 922 | hgacl_file, self.name, e) |
|
915 | 923 | raise exceptions.RepositoryRequirementError(msg) |
|
916 | 924 | else: |
|
917 | 925 | return None |
|
918 | 926 | |
|
919 | 927 | |
|
920 | 928 | class MercurialIndexBasedCollectionGenerator(CollectionGenerator): |
|
921 | 929 | |
|
922 | 930 | def _commit_factory(self, commit_id): |
|
923 | 931 | return self.repo.get_commit( |
|
924 | 932 | commit_idx=commit_id, pre_load=self.pre_load) |
@@ -1,1737 +1,1694 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | """ |
|
23 | 23 | pull request model for RhodeCode |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | import json |
|
28 | 28 | import logging |
|
29 | 29 | import datetime |
|
30 | 30 | import urllib |
|
31 | 31 | import collections |
|
32 | 32 | |
|
33 | 33 | from pyramid.threadlocal import get_current_request |
|
34 | 34 | |
|
35 | 35 | from rhodecode import events |
|
36 | from rhodecode.translation import lazy_ugettext |
|
|
36 | from rhodecode.translation import lazy_ugettext | |
|
37 | 37 | from rhodecode.lib import helpers as h, hooks_utils, diffs |
|
38 | 38 | from rhodecode.lib import audit_logger |
|
39 | 39 | from rhodecode.lib.compat import OrderedDict |
|
40 | 40 | from rhodecode.lib.hooks_daemon import prepare_callback_daemon |
|
41 | 41 | from rhodecode.lib.markup_renderer import ( |
|
42 | 42 | DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer) |
|
43 | 43 | from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe |
|
44 | 44 | from rhodecode.lib.vcs.backends.base import ( |
|
45 | 45 | Reference, MergeResponse, MergeFailureReason, UpdateFailureReason) |
|
46 | 46 | from rhodecode.lib.vcs.conf import settings as vcs_settings |
|
47 | 47 | from rhodecode.lib.vcs.exceptions import ( |
|
48 | 48 | CommitDoesNotExistError, EmptyRepositoryError) |
|
49 | 49 | from rhodecode.model import BaseModel |
|
50 | 50 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
51 | 51 | from rhodecode.model.comment import CommentsModel |
|
52 | 52 | from rhodecode.model.db import ( |
|
53 | 53 | or_, PullRequest, PullRequestReviewers, ChangesetStatus, |
|
54 | 54 | PullRequestVersion, ChangesetComment, Repository, RepoReviewRule) |
|
55 | 55 | from rhodecode.model.meta import Session |
|
56 | 56 | from rhodecode.model.notification import NotificationModel, \ |
|
57 | 57 | EmailNotificationModel |
|
58 | 58 | from rhodecode.model.scm import ScmModel |
|
59 | 59 | from rhodecode.model.settings import VcsSettingsModel |
|
60 | 60 | |
|
61 | 61 | |
|
62 | 62 | log = logging.getLogger(__name__) |
|
63 | 63 | |
|
64 | 64 | |
|
65 | 65 | # Data structure to hold the response data when updating commits during a pull |
|
66 | 66 | # request update. |
|
67 | 67 | UpdateResponse = collections.namedtuple('UpdateResponse', [ |
|
68 | 68 | 'executed', 'reason', 'new', 'old', 'changes', |
|
69 | 69 | 'source_changed', 'target_changed']) |
|
70 | 70 | |
|
71 | 71 | |
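As a quick orientation for the namedtuple above, a hedged sketch of how a caller typically consumes it; the field values here are invented:

import collections

UpdateResponse = collections.namedtuple('UpdateResponse', [
    'executed', 'reason', 'new', 'old', 'changes',
    'source_changed', 'target_changed'])

# even a skipped update returns a full response, so callers can branch
# on .executed and surface .reason uniformly
resp = UpdateResponse(
    executed=False, reason='no_change', new=None, old=None,
    changes=None, source_changed=False, target_changed=False)
if not resp.executed:
    print('update skipped: %s' % (resp.reason,))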
|
72 | 72 | class PullRequestModel(BaseModel): |
|
73 | 73 | |
|
74 | 74 | cls = PullRequest |
|
75 | 75 | |
|
76 | 76 | DIFF_CONTEXT = diffs.DEFAULT_CONTEXT |
|
77 | 77 | |
|
78 | MERGE_STATUS_MESSAGES = { | |
|
79 | MergeFailureReason.NONE: lazy_ugettext( | |
|
80 | 'This pull request can be automatically merged.'), | |
|
81 | MergeFailureReason.UNKNOWN: lazy_ugettext( | |
|
82 | 'This pull request cannot be merged because of an unhandled' | |
|
83 | ' exception.'), | |
|
84 | MergeFailureReason.MERGE_FAILED: lazy_ugettext( | |
|
85 | 'This pull request cannot be merged because of merge conflicts.'), | |
|
86 | MergeFailureReason.PUSH_FAILED: lazy_ugettext( | |
|
87 | 'This pull request could not be merged because push to target' | |
|
88 | ' failed.'), | |
|
89 | MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext( | |
|
90 | 'This pull request cannot be merged because the target is not a' | |
|
91 | ' head.'), | |
|
92 | MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext( | |
|
93 | 'This pull request cannot be merged because the source contains' | |
|
94 | ' more branches than the target.'), | |
|
95 | MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext( | |
|
96 | 'This pull request cannot be merged because the target has' | |
|
97 | ' multiple heads.'), | |
|
98 | MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext( | |
|
99 | 'This pull request cannot be merged because the target repository' | |
|
100 | ' is locked.'), | |
|
101 | MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext( | |
|
102 | 'This pull request cannot be merged because the target or the ' | |
|
103 | 'source reference is missing.'), | |
|
104 | MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext( | |
|
105 | 'This pull request cannot be merged because the target ' | |
|
106 | 'reference is missing.'), | |
|
107 | MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext( | |
|
108 | 'This pull request cannot be merged because the source ' | |
|
109 | 'reference is missing.'), | |
|
110 | MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext( | |
|
111 | 'This pull request cannot be merged because of conflicts related ' | |
|
112 | 'to sub repositories.'), | |
|
113 | } | |
|
114 | ||
|
115 | 78 | UPDATE_STATUS_MESSAGES = { |
|
116 | 79 | UpdateFailureReason.NONE: lazy_ugettext( |
|
117 | 80 | 'Pull request update successful.'), |
|
118 | 81 | UpdateFailureReason.UNKNOWN: lazy_ugettext( |
|
119 | 82 | 'Pull request update failed because of an unknown error.'), |
|
120 | 83 | UpdateFailureReason.NO_CHANGE: lazy_ugettext( |
|
121 | 84 | 'No update needed because the source and target have not changed.'), |
|
122 | 85 | UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext( |
|
123 | 86 | 'Pull request cannot be updated because the reference type is ' |
|
124 | 87 | 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'), |
|
125 | 88 | UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext( |
|
126 | 89 | 'This pull request cannot be updated because the target ' |
|
127 | 90 | 'reference is missing.'), |
|
128 | 91 | UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext( |
|
129 | 92 | 'This pull request cannot be updated because the source ' |
|
130 | 93 | 'reference is missing.'), |
|
131 | 94 | } |
|
132 | 95 | |
|
133 | 96 | def __get_pull_request(self, pull_request): |
|
134 | 97 | return self._get_instance(( |
|
135 | 98 | PullRequest, PullRequestVersion), pull_request) |
|
136 | 99 | |
|
137 | 100 | def _check_perms(self, perms, pull_request, user, api=False): |
|
138 | 101 | if not api: |
|
139 | 102 | return h.HasRepoPermissionAny(*perms)( |
|
140 | 103 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
141 | 104 | else: |
|
142 | 105 | return h.HasRepoPermissionAnyApi(*perms)( |
|
143 | 106 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
144 | 107 | |
|
145 | 108 | def check_user_read(self, pull_request, user, api=False): |
|
146 | 109 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
147 | 110 | return self._check_perms(_perms, pull_request, user, api) |
|
148 | 111 | |
|
149 | 112 | def check_user_merge(self, pull_request, user, api=False): |
|
150 | 113 | _perms = ('repository.admin', 'repository.write', 'hg.admin',) |
|
151 | 114 | return self._check_perms(_perms, pull_request, user, api) |
|
152 | 115 | |
|
153 | 116 | def check_user_update(self, pull_request, user, api=False): |
|
154 | 117 | owner = user.user_id == pull_request.user_id |
|
155 | 118 | return self.check_user_merge(pull_request, user, api) or owner |
|
156 | 119 | |
|
157 | 120 | def check_user_delete(self, pull_request, user): |
|
158 | 121 | owner = user.user_id == pull_request.user_id |
|
159 | 122 | _perms = ('repository.admin',) |
|
160 | 123 | return self._check_perms(_perms, pull_request, user) or owner |
|
161 | 124 | |
|
162 | 125 | def check_user_change_status(self, pull_request, user, api=False): |
|
163 | 126 | reviewer = user.user_id in [x.user_id for x in |
|
164 | 127 | pull_request.reviewers] |
|
165 | 128 | return self.check_user_update(pull_request, user, api) or reviewer |
|
166 | 129 | |
|
167 | 130 | def check_user_comment(self, pull_request, user): |
|
168 | 131 | owner = user.user_id == pull_request.user_id |
|
169 | 132 | return self.check_user_read(pull_request, user) or owner |
|
170 | 133 | |
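The checks above layer a permission test over an ownership test. A condensed standalone model of that composition, where the permission strings match the code but everything else is invented:

def has_any(user_perms, *wanted):
    return bool(set(wanted) & set(user_perms))

def check_user_merge(user_perms):
    return has_any(user_perms, 'repository.admin', 'repository.write', 'hg.admin')

def check_user_update(user_perms, is_owner):
    # merge rights OR being the pull request author, as in the model above
    return check_user_merge(user_perms) or is_owner

print(check_user_update(['repository.read'], is_owner=True))   # True
print(check_user_update(['repository.read'], is_owner=False))  # False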
|
171 | 134 | def get(self, pull_request): |
|
172 | 135 | return self.__get_pull_request(pull_request) |
|
173 | 136 | |
|
174 | 137 | def _prepare_get_all_query(self, repo_name, source=False, statuses=None, |
|
175 | 138 | opened_by=None, order_by=None, |
|
176 | 139 | order_dir='desc'): |
|
177 | 140 | repo = None |
|
178 | 141 | if repo_name: |
|
179 | 142 | repo = self._get_repo(repo_name) |
|
180 | 143 | |
|
181 | 144 | q = PullRequest.query() |
|
182 | 145 | |
|
183 | 146 | # source or target |
|
184 | 147 | if repo and source: |
|
185 | 148 | q = q.filter(PullRequest.source_repo == repo) |
|
186 | 149 | elif repo: |
|
187 | 150 | q = q.filter(PullRequest.target_repo == repo) |
|
188 | 151 | |
|
189 | 152 | # closed,opened |
|
190 | 153 | if statuses: |
|
191 | 154 | q = q.filter(PullRequest.status.in_(statuses)) |
|
192 | 155 | |
|
193 | 156 | # opened by filter |
|
194 | 157 | if opened_by: |
|
195 | 158 | q = q.filter(PullRequest.user_id.in_(opened_by)) |
|
196 | 159 | |
|
197 | 160 | if order_by: |
|
198 | 161 | order_map = { |
|
199 | 162 | 'name_raw': PullRequest.pull_request_id, |
|
200 | 163 | 'title': PullRequest.title, |
|
201 | 164 | 'updated_on_raw': PullRequest.updated_on, |
|
202 | 165 | 'target_repo': PullRequest.target_repo_id |
|
203 | 166 | } |
|
204 | 167 | if order_dir == 'asc': |
|
205 | 168 | q = q.order_by(order_map[order_by].asc()) |
|
206 | 169 | else: |
|
207 | 170 | q = q.order_by(order_map[order_by].desc()) |
|
208 | 171 | |
|
209 | 172 | return q |
|
210 | 173 | |
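Note the order_map lookup in the query builder: user-supplied sort keys are mapped onto known columns instead of being interpolated into the query. A tiny standalone illustration of the same dispatch, with plain strings standing in for the SQLAlchemy column objects:

def order_clause(order_by, order_dir='desc'):
    order_map = {
        'name_raw': 'pull_request_id',
        'title': 'title',
        'updated_on_raw': 'updated_on',
        'target_repo': 'target_repo_id',
    }
    column = order_map[order_by]  # unknown sort keys fail fast with KeyError
    return '%s %s' % (column, 'asc' if order_dir == 'asc' else 'desc')

print(order_clause('title', 'asc'))  # -> 'title asc'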
|
211 | 174 | def count_all(self, repo_name, source=False, statuses=None, |
|
212 | 175 | opened_by=None): |
|
213 | 176 | """ |
|
214 | 177 | Count the number of pull requests for a specific repository. |
|
215 | 178 | |
|
216 | 179 | :param repo_name: target or source repo |
|
217 | 180 | :param source: boolean flag to specify if repo_name refers to source |
|
218 | 181 | :param statuses: list of pull request statuses |
|
219 | 182 | :param opened_by: author user of the pull request |
|
220 | 183 | :returns: int number of pull requests |
|
221 | 184 | """ |
|
222 | 185 | q = self._prepare_get_all_query( |
|
223 | 186 | repo_name, source=source, statuses=statuses, opened_by=opened_by) |
|
224 | 187 | |
|
225 | 188 | return q.count() |
|
226 | 189 | |
|
227 | 190 | def get_all(self, repo_name, source=False, statuses=None, opened_by=None, |
|
228 | 191 | offset=0, length=None, order_by=None, order_dir='desc'): |
|
229 | 192 | """ |
|
230 | 193 | Get all pull requests for a specific repository. |
|
231 | 194 | |
|
232 | 195 | :param repo_name: target or source repo |
|
233 | 196 | :param source: boolean flag to specify if repo_name refers to source |
|
234 | 197 | :param statuses: list of pull request statuses |
|
235 | 198 | :param opened_by: author user of the pull request |
|
236 | 199 | :param offset: pagination offset |
|
237 | 200 | :param length: length of returned list |
|
238 | 201 | :param order_by: order of the returned list |
|
239 | 202 | :param order_dir: 'asc' or 'desc' ordering direction |
|
240 | 203 | :returns: list of pull requests |
|
241 | 204 | """ |
|
242 | 205 | q = self._prepare_get_all_query( |
|
243 | 206 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
244 | 207 | order_by=order_by, order_dir=order_dir) |
|
245 | 208 | |
|
246 | 209 | if length: |
|
247 | 210 | pull_requests = q.limit(length).offset(offset).all() |
|
248 | 211 | else: |
|
249 | 212 | pull_requests = q.all() |
|
250 | 213 | |
|
251 | 214 | return pull_requests |
|
252 | 215 | |
|
253 | 216 | def count_awaiting_review(self, repo_name, source=False, statuses=None, |
|
254 | 217 | opened_by=None): |
|
255 | 218 | """ |
|
256 | 219 | Count the number of pull requests for a specific repository that are |
|
257 | 220 | awaiting review. |
|
258 | 221 | |
|
259 | 222 | :param repo_name: target or source repo |
|
260 | 223 | :param source: boolean flag to specify if repo_name refers to source |
|
261 | 224 | :param statuses: list of pull request statuses |
|
262 | 225 | :param opened_by: author user of the pull request |
|
263 | 226 | :returns: int number of pull requests |
|
264 | 227 | """ |
|
265 | 228 | pull_requests = self.get_awaiting_review( |
|
266 | 229 | repo_name, source=source, statuses=statuses, opened_by=opened_by) |
|
267 | 230 | |
|
268 | 231 | return len(pull_requests) |
|
269 | 232 | |
|
270 | 233 | def get_awaiting_review(self, repo_name, source=False, statuses=None, |
|
271 | 234 | opened_by=None, offset=0, length=None, |
|
272 | 235 | order_by=None, order_dir='desc'): |
|
273 | 236 | """ |
|
274 | 237 | Get all pull requests for a specific repository that are awaiting |
|
275 | 238 | review. |
|
276 | 239 | |
|
277 | 240 | :param repo_name: target or source repo |
|
278 | 241 | :param source: boolean flag to specify if repo_name refers to source |
|
279 | 242 | :param statuses: list of pull request statuses |
|
280 | 243 | :param opened_by: author user of the pull request |
|
281 | 244 | :param offset: pagination offset |
|
282 | 245 | :param length: length of returned list |
|
283 | 246 | :param order_by: order of the returned list |
|
284 | 247 | :param order_dir: 'asc' or 'desc' ordering direction |
|
285 | 248 | :returns: list of pull requests |
|
286 | 249 | """ |
|
287 | 250 | pull_requests = self.get_all( |
|
288 | 251 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
289 | 252 | order_by=order_by, order_dir=order_dir) |
|
290 | 253 | |
|
291 | 254 | _filtered_pull_requests = [] |
|
292 | 255 | for pr in pull_requests: |
|
293 | 256 | status = pr.calculated_review_status() |
|
294 | 257 | if status in [ChangesetStatus.STATUS_NOT_REVIEWED, |
|
295 | 258 | ChangesetStatus.STATUS_UNDER_REVIEW]: |
|
296 | 259 | _filtered_pull_requests.append(pr) |
|
297 | 260 | if length: |
|
298 | 261 | return _filtered_pull_requests[offset:offset+length] |
|
299 | 262 | else: |
|
300 | 263 | return _filtered_pull_requests |
|
301 | 264 | |
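Because calculated_review_status() is computed per object, the awaiting-review filter runs in Python and pagination becomes a plain list slice. A quick sketch of that slice semantics, with integers standing in for pull request objects:

prs = list(range(10))
offset, length = 3, 4
print(prs[offset:offset + length])  # [3, 4, 5, 6]
# slices never raise on overrun: prs[8:8 + 4] simply yields [8, 9]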
|
302 | 265 | def count_awaiting_my_review(self, repo_name, source=False, statuses=None, |
|
303 | 266 | opened_by=None, user_id=None): |
|
304 | 267 | """ |
|
305 | 268 | Count the number of pull requests for a specific repository that are |
|
306 | 269 | awaiting review from a specific user. |
|
307 | 270 | |
|
308 | 271 | :param repo_name: target or source repo |
|
309 | 272 | :param source: boolean flag to specify if repo_name refers to source |
|
310 | 273 | :param statuses: list of pull request statuses |
|
311 | 274 | :param opened_by: author user of the pull request |
|
312 | 275 | :param user_id: reviewer user of the pull request |
|
313 | 276 | :returns: int number of pull requests |
|
314 | 277 | """ |
|
315 | 278 | pull_requests = self.get_awaiting_my_review( |
|
316 | 279 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
317 | 280 | user_id=user_id) |
|
318 | 281 | |
|
319 | 282 | return len(pull_requests) |
|
320 | 283 | |
|
321 | 284 | def get_awaiting_my_review(self, repo_name, source=False, statuses=None, |
|
322 | 285 | opened_by=None, user_id=None, offset=0, |
|
323 | 286 | length=None, order_by=None, order_dir='desc'): |
|
324 | 287 | """ |
|
325 | 288 | Get all pull requests for a specific repository that are awaiting |
|
326 | 289 | review from a specific user. |
|
327 | 290 | |
|
328 | 291 | :param repo_name: target or source repo |
|
329 | 292 | :param source: boolean flag to specify if repo_name refers to source |
|
330 | 293 | :param statuses: list of pull request statuses |
|
331 | 294 | :param opened_by: author user of the pull request |
|
332 | 295 | :param user_id: reviewer user of the pull request |
|
333 | 296 | :param offset: pagination offset |
|
334 | 297 | :param length: length of returned list |
|
335 | 298 | :param order_by: order of the returned list |
|
336 | 299 | :param order_dir: 'asc' or 'desc' ordering direction |
|
337 | 300 | :returns: list of pull requests |
|
338 | 301 | """ |
|
339 | 302 | pull_requests = self.get_all( |
|
340 | 303 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
341 | 304 | order_by=order_by, order_dir=order_dir) |
|
342 | 305 | |
|
343 | 306 | _my = PullRequestModel().get_not_reviewed(user_id) |
|
344 | 307 | my_participation = [] |
|
345 | 308 | for pr in pull_requests: |
|
346 | 309 | if pr in _my: |
|
347 | 310 | my_participation.append(pr) |
|
348 | 311 | _filtered_pull_requests = my_participation |
|
349 | 312 | if length: |
|
350 | 313 | return _filtered_pull_requests[offset:offset+length] |
|
351 | 314 | else: |
|
352 | 315 | return _filtered_pull_requests |
|
353 | 316 | |
|
354 | 317 | def get_not_reviewed(self, user_id): |
|
355 | 318 | return [ |
|
356 | 319 | x.pull_request for x in PullRequestReviewers.query().filter( |
|
357 | 320 | PullRequestReviewers.user_id == user_id).all() |
|
358 | 321 | ] |
|
359 | 322 | |
|
360 | 323 | def _prepare_participating_query(self, user_id=None, statuses=None, |
|
361 | 324 | order_by=None, order_dir='desc'): |
|
362 | 325 | q = PullRequest.query() |
|
363 | 326 | if user_id: |
|
364 | 327 | reviewers_subquery = Session().query( |
|
365 | 328 | PullRequestReviewers.pull_request_id).filter( |
|
366 | 329 | PullRequestReviewers.user_id == user_id).subquery() |
|
367 | 330 | user_filter = or_( |
|
368 | 331 | PullRequest.user_id == user_id, |
|
369 | 332 | PullRequest.pull_request_id.in_(reviewers_subquery) |
|
370 | 333 | ) |
|
371 | 334 | q = PullRequest.query().filter(user_filter) |
|
372 | 335 | |
|
373 | 336 | # closed,opened |
|
374 | 337 | if statuses: |
|
375 | 338 | q = q.filter(PullRequest.status.in_(statuses)) |
|
376 | 339 | |
|
377 | 340 | if order_by: |
|
378 | 341 | order_map = { |
|
379 | 342 | 'name_raw': PullRequest.pull_request_id, |
|
380 | 343 | 'title': PullRequest.title, |
|
381 | 344 | 'updated_on_raw': PullRequest.updated_on, |
|
382 | 345 | 'target_repo': PullRequest.target_repo_id |
|
383 | 346 | } |
|
384 | 347 | if order_dir == 'asc': |
|
385 | 348 | q = q.order_by(order_map[order_by].asc()) |
|
386 | 349 | else: |
|
387 | 350 | q = q.order_by(order_map[order_by].desc()) |
|
388 | 351 | |
|
389 | 352 | return q |
|
390 | 353 | |
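The or_ filter above selects pull requests where the user is either the author or one of the reviewers. The same idea in isolation, with invented in-memory data instead of the SQL subquery:

prs = [
    {'id': 1, 'author': 2, 'reviewers': [3, 4]},
    {'id': 2, 'author': 3, 'reviewers': [2]},
    {'id': 3, 'author': 4, 'reviewers': [5]},
]
user_id = 2
mine = [pr['id'] for pr in prs
        if pr['author'] == user_id or user_id in pr['reviewers']]
print(mine)  # [1, 2]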
|
391 | 354 | def count_im_participating_in(self, user_id=None, statuses=None): |
|
392 | 355 | q = self._prepare_participating_query(user_id, statuses=statuses) |
|
393 | 356 | return q.count() |
|
394 | 357 | |
|
395 | 358 | def get_im_participating_in( |
|
396 | 359 | self, user_id=None, statuses=None, offset=0, |
|
397 | 360 | length=None, order_by=None, order_dir='desc'): |
|
398 | 361 | """ |
|
399 | 362 | Get all pull requests that I'm participating in, or I have opened
|
400 | 363 | """ |
|
401 | 364 | |
|
402 | 365 | q = self._prepare_participating_query( |
|
403 | 366 | user_id, statuses=statuses, order_by=order_by, |
|
404 | 367 | order_dir=order_dir) |
|
405 | 368 | |
|
406 | 369 | if length: |
|
407 | 370 | pull_requests = q.limit(length).offset(offset).all() |
|
408 | 371 | else: |
|
409 | 372 | pull_requests = q.all() |
|
410 | 373 | |
|
411 | 374 | return pull_requests |
|
412 | 375 | |
|
413 | 376 | def get_versions(self, pull_request): |
|
414 | 377 | """ |
|
415 | 378 | returns versions of the pull request sorted by version ID ascending
|
416 | 379 | """ |
|
417 | 380 | return PullRequestVersion.query()\ |
|
418 | 381 | .filter(PullRequestVersion.pull_request == pull_request)\ |
|
419 | 382 | .order_by(PullRequestVersion.pull_request_version_id.asc())\ |
|
420 | 383 | .all() |
|
421 | 384 | |
|
422 | 385 | def get_pr_version(self, pull_request_id, version=None): |
|
423 | 386 | at_version = None |
|
424 | 387 | |
|
425 | 388 | if version and version == 'latest': |
|
426 | 389 | pull_request_ver = PullRequest.get(pull_request_id) |
|
427 | 390 | pull_request_obj = pull_request_ver |
|
428 | 391 | _org_pull_request_obj = pull_request_obj |
|
429 | 392 | at_version = 'latest' |
|
430 | 393 | elif version: |
|
431 | 394 | pull_request_ver = PullRequestVersion.get_or_404(version) |
|
432 | 395 | pull_request_obj = pull_request_ver |
|
433 | 396 | _org_pull_request_obj = pull_request_ver.pull_request |
|
434 | 397 | at_version = pull_request_ver.pull_request_version_id |
|
435 | 398 | else: |
|
436 | 399 | _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404( |
|
437 | 400 | pull_request_id) |
|
438 | 401 | |
|
439 | 402 | pull_request_display_obj = PullRequest.get_pr_display_object( |
|
440 | 403 | pull_request_obj, _org_pull_request_obj) |
|
441 | 404 | |
|
442 | 405 | return _org_pull_request_obj, pull_request_obj, \ |
|
443 | 406 | pull_request_display_obj, at_version |
|
444 | 407 | |
|
445 | 408 | def create(self, created_by, source_repo, source_ref, target_repo, |
|
446 | 409 | target_ref, revisions, reviewers, title, description=None, |
|
447 | 410 | description_renderer=None, |
|
448 | 411 | reviewer_data=None, translator=None, auth_user=None): |
|
449 | 412 | translator = translator or get_current_request().translate |
|
450 | 413 | |
|
451 | 414 | created_by_user = self._get_user(created_by) |
|
452 | 415 | auth_user = auth_user or created_by_user.AuthUser() |
|
453 | 416 | source_repo = self._get_repo(source_repo) |
|
454 | 417 | target_repo = self._get_repo(target_repo) |
|
455 | 418 | |
|
456 | 419 | pull_request = PullRequest() |
|
457 | 420 | pull_request.source_repo = source_repo |
|
458 | 421 | pull_request.source_ref = source_ref |
|
459 | 422 | pull_request.target_repo = target_repo |
|
460 | 423 | pull_request.target_ref = target_ref |
|
461 | 424 | pull_request.revisions = revisions |
|
462 | 425 | pull_request.title = title |
|
463 | 426 | pull_request.description = description |
|
464 | 427 | pull_request.description_renderer = description_renderer |
|
465 | 428 | pull_request.author = created_by_user |
|
466 | 429 | pull_request.reviewer_data = reviewer_data |
|
467 | 430 | |
|
468 | 431 | Session().add(pull_request) |
|
469 | 432 | Session().flush() |
|
470 | 433 | |
|
471 | 434 | reviewer_ids = set() |
|
472 | 435 | # members / reviewers |
|
473 | 436 | for reviewer_object in reviewers: |
|
474 | 437 | user_id, reasons, mandatory, rules = reviewer_object |
|
475 | 438 | user = self._get_user(user_id) |
|
476 | 439 | |
|
477 | 440 | # skip duplicates |
|
478 | 441 | if user.user_id in reviewer_ids: |
|
479 | 442 | continue |
|
480 | 443 | |
|
481 | 444 | reviewer_ids.add(user.user_id) |
|
482 | 445 | |
|
483 | 446 | reviewer = PullRequestReviewers() |
|
484 | 447 | reviewer.user = user |
|
485 | 448 | reviewer.pull_request = pull_request |
|
486 | 449 | reviewer.reasons = reasons |
|
487 | 450 | reviewer.mandatory = mandatory |
|
488 | 451 | |
|
489 | 452 | # NOTE(marcink): pick only first rule for now |
|
490 | 453 | rule_id = list(rules)[0] if rules else None |
|
491 | 454 | rule = RepoReviewRule.get(rule_id) if rule_id else None |
|
492 | 455 | if rule: |
|
493 | 456 | review_group = rule.user_group_vote_rule(user_id) |
|
494 | 457 | # we check if this particular reviewer is member of a voting group |
|
495 | 458 | if review_group: |
|
496 | 459 | # NOTE(marcink): |
|
497 | 460 | # it can be that the user is a member of more groups, but we pick

498 | 461 | # the first one, same as the default reviewers algo
|
499 | 462 | review_group = review_group[0] |
|
500 | 463 | |
|
501 | 464 | rule_data = { |
|
502 | 465 | 'rule_name': |
|
503 | 466 | rule.review_rule_name, |
|
504 | 467 | 'rule_user_group_entry_id': |
|
505 | 468 | review_group.repo_review_rule_users_group_id, |
|
506 | 469 | 'rule_user_group_name': |
|
507 | 470 | review_group.users_group.users_group_name, |
|
508 | 471 | 'rule_user_group_members': |
|
509 | 472 | [x.user.username for x in review_group.users_group.members], |
|
510 | 473 | 'rule_user_group_members_id': |
|
511 | 474 | [x.user.user_id for x in review_group.users_group.members], |
|
512 | 475 | } |
|
513 | 476 | # e.g {'vote_rule': -1, 'mandatory': True} |
|
514 | 477 | rule_data.update(review_group.rule_data()) |
|
515 | 478 | |
|
516 | 479 | reviewer.rule_data = rule_data |
|
517 | 480 | |
|
518 | 481 | Session().add(reviewer) |
|
519 | 482 | Session().flush() |
|
520 | 483 | |
|
521 | 484 | # Set approval status to "Under Review" for all commits which are |
|
522 | 485 | # part of this pull request. |
|
523 | 486 | ChangesetStatusModel().set_status( |
|
524 | 487 | repo=target_repo, |
|
525 | 488 | status=ChangesetStatus.STATUS_UNDER_REVIEW, |
|
526 | 489 | user=created_by_user, |
|
527 | 490 | pull_request=pull_request |
|
528 | 491 | ) |
|
529 | 492 | # we commit early at this point. This has to do with the fact

530 | 493 | # that the queries before do some row-locking. And because of that

531 | 494 | # we need to commit and finish the transaction before the validate call

532 | 495 | # below, which for large repos could take long, resulting in long row locks
|
533 | 496 | Session().commit() |
|
534 | 497 | |
|
535 | 498 | # prepare workspace, and run initial merge simulation |
|
536 | 499 | MergeCheck.validate( |
|
537 | 500 | pull_request, auth_user=auth_user, translator=translator) |
|
538 | 501 | |
|
539 | 502 | self.notify_reviewers(pull_request, reviewer_ids) |
|
540 | 503 | self._trigger_pull_request_hook( |
|
541 | 504 | pull_request, created_by_user, 'create') |
|
542 | 505 | |
|
543 | 506 | creation_data = pull_request.get_api_data(with_merge_state=False) |
|
544 | 507 | self._log_audit_action( |
|
545 | 508 | 'repo.pull_request.create', {'data': creation_data}, |
|
546 | 509 | auth_user, pull_request) |
|
547 | 510 | |
|
548 | 511 | return pull_request |
|
549 | 512 | |
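For readers tracing create(), each reviewer entry unpacks into four values, and duplicate user ids are skipped through a seen-set. A hedged sketch of that shape; the ids, reasons and empty rules lists are invented:

reviewers = [
    (2, ['default reviewer'], True, []),     # (user_id, reasons, mandatory, rules)
    (7, ['added manually'], False, []),
    (2, ['duplicate, skipped'], False, []),  # same user_id as the first entry
]

seen = set()
for user_id, reasons, mandatory, rules in reviewers:
    if user_id in seen:  # mirrors the duplicate check in create()
        continue
    seen.add(user_id)
    print(user_id, reasons, mandatory)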
|
550 | 513 | def _trigger_pull_request_hook(self, pull_request, user, action): |
|
551 | 514 | pull_request = self.__get_pull_request(pull_request) |
|
552 | 515 | target_scm = pull_request.target_repo.scm_instance() |
|
553 | 516 | if action == 'create': |
|
554 | 517 | trigger_hook = hooks_utils.trigger_log_create_pull_request_hook |
|
555 | 518 | elif action == 'merge': |
|
556 | 519 | trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook |
|
557 | 520 | elif action == 'close': |
|
558 | 521 | trigger_hook = hooks_utils.trigger_log_close_pull_request_hook |
|
559 | 522 | elif action == 'review_status_change': |
|
560 | 523 | trigger_hook = hooks_utils.trigger_log_review_pull_request_hook |
|
561 | 524 | elif action == 'update': |
|
562 | 525 | trigger_hook = hooks_utils.trigger_log_update_pull_request_hook |
|
563 | 526 | else: |
|
564 | 527 | return |
|
565 | 528 | |
|
566 | 529 | trigger_hook( |
|
567 | 530 | username=user.username, |
|
568 | 531 | repo_name=pull_request.target_repo.repo_name, |
|
569 | 532 | repo_alias=target_scm.alias, |
|
570 | 533 | pull_request=pull_request) |
|
571 | 534 | |
|
572 | 535 | def _get_commit_ids(self, pull_request): |
|
573 | 536 | """ |
|
574 | 537 | Return the commit ids of the merged pull request. |
|
575 | 538 | |
|
576 | 539 | This method does not yet deal correctly with the lack of autoupdates

577 | 540 | nor with implicit target updates.

578 | 541 | For example: if a commit in the source repo is already in the target, it

579 | 542 | will be reported anyway.
|
580 | 543 | """ |
|
581 | 544 | merge_rev = pull_request.merge_rev |
|
582 | 545 | if merge_rev is None: |
|
583 | 546 | raise ValueError('This pull request was not merged yet') |
|
584 | 547 | |
|
585 | 548 | commit_ids = list(pull_request.revisions) |
|
586 | 549 | if merge_rev not in commit_ids: |
|
587 | 550 | commit_ids.append(merge_rev) |
|
588 | 551 | |
|
589 | 552 | return commit_ids |
|
590 | 553 | |
|
591 | 554 | def merge_repo(self, pull_request, user, extras): |
|
592 | 555 | log.debug("Merging pull request %s", pull_request.pull_request_id) |
|
593 | 556 | extras['user_agent'] = 'internal-merge' |
|
594 | 557 | merge_state = self._merge_pull_request(pull_request, user, extras) |
|
595 | 558 | if merge_state.executed: |
|
596 | log.debug( | |
|
597 | "Merge was successful, updating the pull request comments.") | |
|
559 | log.debug("Merge was successful, updating the pull request comments.") | |
|
598 | 560 | self._comment_and_close_pr(pull_request, user, merge_state) |
|
599 | 561 | |
|
600 | 562 | self._log_audit_action( |
|
601 | 563 | 'repo.pull_request.merge', |
|
602 | 564 | {'merge_state': merge_state.__dict__}, |
|
603 | 565 | user, pull_request) |
|
604 | 566 | |
|
605 | 567 | else: |
|
606 | 568 | log.warn("Merge failed, not updating the pull request.") |
|
607 | 569 | return merge_state |
|
608 | 570 | |
|
609 | 571 | def _merge_pull_request(self, pull_request, user, extras, merge_msg=None): |
|
610 | 572 | target_vcs = pull_request.target_repo.scm_instance() |
|
611 | 573 | source_vcs = pull_request.source_repo.scm_instance() |
|
612 | 574 | |
|
613 | 575 | message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format( |
|
614 | 576 | pr_id=pull_request.pull_request_id, |
|
615 | 577 | pr_title=pull_request.title, |
|
616 | 578 | source_repo=source_vcs.name, |
|
617 | 579 | source_ref_name=pull_request.source_ref_parts.name, |
|
618 | 580 | target_repo=target_vcs.name, |
|
619 | 581 | target_ref_name=pull_request.target_ref_parts.name, |
|
620 | 582 | ) |
|
621 | 583 | |
|
622 | 584 | workspace_id = self._workspace_id(pull_request) |
|
623 | 585 | repo_id = pull_request.target_repo.repo_id |
|
624 | 586 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
625 | 587 | close_branch = self._close_branch_before_merging(pull_request) |
|
626 | 588 | |
|
627 | 589 | target_ref = self._refresh_reference( |
|
628 | 590 | pull_request.target_ref_parts, target_vcs) |
|
629 | 591 | |
|
630 | 592 | callback_daemon, extras = prepare_callback_daemon( |
|
631 | 593 | extras, protocol=vcs_settings.HOOKS_PROTOCOL, |
|
632 | 594 | host=vcs_settings.HOOKS_HOST, |
|
633 | 595 | use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS) |
|
634 | 596 | |
|
635 | 597 | with callback_daemon: |
|
636 | 598 | # TODO: johbo: Implement a clean way to run a config_override |
|
637 | 599 | # for a single call. |
|
638 | 600 | target_vcs.config.set( |
|
639 | 601 | 'rhodecode', 'RC_SCM_DATA', json.dumps(extras)) |
|
640 | 602 | |
|
641 | 603 | user_name = user.short_contact |
|
642 | 604 | merge_state = target_vcs.merge( |
|
643 | 605 | repo_id, workspace_id, target_ref, source_vcs, |
|
644 | 606 | pull_request.source_ref_parts, |
|
645 | 607 | user_name=user_name, user_email=user.email, |
|
646 | 608 | message=message, use_rebase=use_rebase, |
|
647 | 609 | close_branch=close_branch) |
|
648 | 610 | return merge_state |
|
649 | 611 | |
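The merge message is produced with str.format over a settings template; the placeholder names below match the call above, while the template text itself is an invented stand-in for vcs_settings.MERGE_MESSAGE_TMPL:

MERGE_MESSAGE_TMPL = (  # invented example template, not the shipped one
    u'Merge pull request #{pr_id} from {source_repo} branch {source_ref_name}\n'
    u'\n'
    u'{pr_title}')

print(MERGE_MESSAGE_TMPL.format(
    pr_id=42, pr_title=u'Fix login redirect',
    source_repo=u'fork/app', source_ref_name=u'bugfix',
    target_repo=u'app', target_ref_name=u'default'))
# extra keyword arguments (the target_* ones here) are simply ignored by str.format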
|
650 | 612 | def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None): |
|
651 | 613 | pull_request.merge_rev = merge_state.merge_ref.commit_id |
|
652 | 614 | pull_request.updated_on = datetime.datetime.now() |
|
653 | 615 | close_msg = close_msg or 'Pull request merged and closed' |
|
654 | 616 | |
|
655 | 617 | CommentsModel().create( |
|
656 | 618 | text=safe_unicode(close_msg), |
|
657 | 619 | repo=pull_request.target_repo.repo_id, |
|
658 | 620 | user=user.user_id, |
|
659 | 621 | pull_request=pull_request.pull_request_id, |
|
660 | 622 | f_path=None, |
|
661 | 623 | line_no=None, |
|
662 | 624 | closing_pr=True |
|
663 | 625 | ) |
|
664 | 626 | |
|
665 | 627 | Session().add(pull_request) |
|
666 | 628 | Session().flush() |
|
667 | 629 | # TODO: paris: replace invalidation with less radical solution |
|
668 | 630 | ScmModel().mark_for_invalidation( |
|
669 | 631 | pull_request.target_repo.repo_name) |
|
670 | 632 | self._trigger_pull_request_hook(pull_request, user, 'merge') |
|
671 | 633 | |
|
672 | 634 | def has_valid_update_type(self, pull_request): |
|
673 | 635 | source_ref_type = pull_request.source_ref_parts.type |
|
674 | 636 | return source_ref_type in ['book', 'branch', 'tag'] |
|
675 | 637 | |
|
676 | 638 | def update_commits(self, pull_request): |
|
677 | 639 | """ |
|
678 | 640 | Get the updated list of commits for the pull request |
|
679 | 641 | and return the new pull request version and the list |
|
680 | 642 | of commits processed by this update action |
|
681 | 643 | """ |
|
682 | 644 | pull_request = self.__get_pull_request(pull_request) |
|
683 | 645 | source_ref_type = pull_request.source_ref_parts.type |
|
684 | 646 | source_ref_name = pull_request.source_ref_parts.name |
|
685 | 647 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
686 | 648 | |
|
687 | 649 | target_ref_type = pull_request.target_ref_parts.type |
|
688 | 650 | target_ref_name = pull_request.target_ref_parts.name |
|
689 | 651 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
690 | 652 | |
|
691 | 653 | if not self.has_valid_update_type(pull_request): |
|
692 | 654 | log.debug( |
|
693 | 655 | "Skipping update of pull request %s due to ref type: %s", |
|
694 | 656 | pull_request, source_ref_type) |
|
695 | 657 | return UpdateResponse( |
|
696 | 658 | executed=False, |
|
697 | 659 | reason=UpdateFailureReason.WRONG_REF_TYPE, |
|
698 | 660 | old=pull_request, new=None, changes=None, |
|
699 | 661 | source_changed=False, target_changed=False) |
|
700 | 662 | |
|
701 | 663 | # source repo |
|
702 | 664 | source_repo = pull_request.source_repo.scm_instance() |
|
703 | 665 | try: |
|
704 | 666 | source_commit = source_repo.get_commit(commit_id=source_ref_name) |
|
705 | 667 | except CommitDoesNotExistError: |
|
706 | 668 | return UpdateResponse( |
|
707 | 669 | executed=False, |
|
708 | 670 | reason=UpdateFailureReason.MISSING_SOURCE_REF, |
|
709 | 671 | old=pull_request, new=None, changes=None, |
|
710 | 672 | source_changed=False, target_changed=False) |
|
711 | 673 | |
|
712 | 674 | source_changed = source_ref_id != source_commit.raw_id |
|
713 | 675 | |
|
714 | 676 | # target repo |
|
715 | 677 | target_repo = pull_request.target_repo.scm_instance() |
|
716 | 678 | try: |
|
717 | 679 | target_commit = target_repo.get_commit(commit_id=target_ref_name) |
|
718 | 680 | except CommitDoesNotExistError: |
|
719 | 681 | return UpdateResponse( |
|
720 | 682 | executed=False, |
|
721 | 683 | reason=UpdateFailureReason.MISSING_TARGET_REF, |
|
722 | 684 | old=pull_request, new=None, changes=None, |
|
723 | 685 | source_changed=False, target_changed=False) |
|
724 | 686 | target_changed = target_ref_id != target_commit.raw_id |
|
725 | 687 | |
|
726 | 688 | if not (source_changed or target_changed): |
|
727 | 689 | log.debug("Nothing changed in pull request %s", pull_request) |
|
728 | 690 | return UpdateResponse( |
|
729 | 691 | executed=False, |
|
730 | 692 | reason=UpdateFailureReason.NO_CHANGE, |
|
731 | 693 | old=pull_request, new=None, changes=None, |
|
732 | 694 | source_changed=target_changed, target_changed=source_changed) |
|
733 | 695 | |
|
734 | 696 | change_in_found = 'target repo' if target_changed else 'source repo' |
|
735 | 697 | log.debug('Updating pull request because of change in %s detected', |
|
736 | 698 | change_in_found) |
|
737 | 699 | |
|
738 | 700 | # Finally there is a need for an update, in case of source change |
|
739 | 701 | # we create a new version, else just an update |
|
740 | 702 | if source_changed: |
|
741 | 703 | pull_request_version = self._create_version_from_snapshot(pull_request) |
|
742 | 704 | self._link_comments_to_version(pull_request_version) |
|
743 | 705 | else: |
|
744 | 706 | try: |
|
745 | 707 | ver = pull_request.versions[-1] |
|
746 | 708 | except IndexError: |
|
747 | 709 | ver = None |
|
748 | 710 | |
|
749 | 711 | pull_request.pull_request_version_id = \ |
|
750 | 712 | ver.pull_request_version_id if ver else None |
|
751 | 713 | pull_request_version = pull_request |
|
752 | 714 | |
|
753 | 715 | try: |
|
754 | 716 | if target_ref_type in ('tag', 'branch', 'book'): |
|
755 | 717 | target_commit = target_repo.get_commit(target_ref_name) |
|
756 | 718 | else: |
|
757 | 719 | target_commit = target_repo.get_commit(target_ref_id) |
|
758 | 720 | except CommitDoesNotExistError: |
|
759 | 721 | return UpdateResponse( |
|
760 | 722 | executed=False, |
|
761 | 723 | reason=UpdateFailureReason.MISSING_TARGET_REF, |
|
762 | 724 | old=pull_request, new=None, changes=None, |
|
763 | 725 | source_changed=source_changed, target_changed=target_changed) |
|
764 | 726 | |
|
765 | 727 | # re-compute commit ids |
|
766 | 728 | old_commit_ids = pull_request.revisions |
|
767 | 729 | pre_load = ["author", "branch", "date", "message"] |
|
768 | 730 | commit_ranges = target_repo.compare( |
|
769 | 731 | target_commit.raw_id, source_commit.raw_id, source_repo, merge=True, |
|
770 | 732 | pre_load=pre_load) |
|
771 | 733 | |
|
772 | 734 | ancestor = target_repo.get_common_ancestor( |
|
773 | 735 | target_commit.raw_id, source_commit.raw_id, source_repo) |
|
774 | 736 | |
|
775 | 737 | pull_request.source_ref = '%s:%s:%s' % ( |
|
776 | 738 | source_ref_type, source_ref_name, source_commit.raw_id) |
|
777 | 739 | pull_request.target_ref = '%s:%s:%s' % ( |
|
778 | 740 | target_ref_type, target_ref_name, ancestor) |
|
779 | 741 | |
|
780 | 742 | pull_request.revisions = [ |
|
781 | 743 | commit.raw_id for commit in reversed(commit_ranges)] |
|
782 | 744 | pull_request.updated_on = datetime.datetime.now() |
|
783 | 745 | Session().add(pull_request) |
|
784 | 746 | new_commit_ids = pull_request.revisions |
|
785 | 747 | |
|
786 | 748 | old_diff_data, new_diff_data = self._generate_update_diffs( |
|
787 | 749 | pull_request, pull_request_version) |
|
788 | 750 | |
|
789 | 751 | # calculate commit and file changes |
|
790 | 752 | changes = self._calculate_commit_id_changes( |
|
791 | 753 | old_commit_ids, new_commit_ids) |
|
792 | 754 | file_changes = self._calculate_file_changes( |
|
793 | 755 | old_diff_data, new_diff_data) |
|
794 | 756 | |
|
795 | 757 | # set comments as outdated if DIFFS changed |
|
796 | 758 | CommentsModel().outdate_comments( |
|
797 | 759 | pull_request, old_diff_data=old_diff_data, |
|
798 | 760 | new_diff_data=new_diff_data) |
|
799 | 761 | |
|
800 | 762 | commit_changes = (changes.added or changes.removed) |
|
801 | 763 | file_node_changes = ( |
|
802 | 764 | file_changes.added or file_changes.modified or file_changes.removed) |
|
803 | 765 | pr_has_changes = commit_changes or file_node_changes |
|
804 | 766 | |
|
805 | 767 | # Add an automatic comment to the pull request, in case |
|
806 | 768 | # anything has changed |
|
807 | 769 | if pr_has_changes: |
|
808 | 770 | update_comment = CommentsModel().create( |
|
809 | 771 | text=self._render_update_message(changes, file_changes), |
|
810 | 772 | repo=pull_request.target_repo, |
|
811 | 773 | user=pull_request.author, |
|
812 | 774 | pull_request=pull_request, |
|
813 | 775 | send_email=False, renderer=DEFAULT_COMMENTS_RENDERER) |
|
814 | 776 | |
|
815 | 777 | # Update status to "Under Review" for added commits |
|
816 | 778 | for commit_id in changes.added: |
|
817 | 779 | ChangesetStatusModel().set_status( |
|
818 | 780 | repo=pull_request.source_repo, |
|
819 | 781 | status=ChangesetStatus.STATUS_UNDER_REVIEW, |
|
820 | 782 | comment=update_comment, |
|
821 | 783 | user=pull_request.author, |
|
822 | 784 | pull_request=pull_request, |
|
823 | 785 | revision=commit_id) |
|
824 | 786 | |
|
825 | 787 | log.debug( |
|
826 | 788 | 'Updated pull request %s, added_ids: %s, common_ids: %s, ' |
|
827 | 789 | 'removed_ids: %s', pull_request.pull_request_id, |
|
828 | 790 | changes.added, changes.common, changes.removed) |
|
829 | 791 | log.debug( |
|
830 | 792 | 'Updated pull request with the following file changes: %s', |
|
831 | 793 | file_changes) |
|
832 | 794 | |
|
833 | 795 | log.info( |
|
834 | 796 | "Updated pull request %s from commit %s to commit %s, " |
|
835 | 797 | "stored new version %s of this pull request.", |
|
836 | 798 | pull_request.pull_request_id, source_ref_id, |
|
837 | 799 | pull_request.source_ref_parts.commit_id, |
|
838 | 800 | pull_request_version.pull_request_version_id) |
|
839 | 801 | Session().commit() |
|
840 | 802 | self._trigger_pull_request_hook( |
|
841 | 803 | pull_request, pull_request.author, 'update') |
|
842 | 804 | |
|
843 | 805 | return UpdateResponse( |
|
844 | 806 | executed=True, reason=UpdateFailureReason.NONE, |
|
845 | 807 | old=pull_request, new=pull_request_version, changes=changes, |
|
846 | 808 | source_changed=source_changed, target_changed=target_changed) |
|
847 | 809 | |
|
848 | 810 | def _create_version_from_snapshot(self, pull_request): |
|
849 | 811 | version = PullRequestVersion() |
|
850 | 812 | version.title = pull_request.title |
|
851 | 813 | version.description = pull_request.description |
|
852 | 814 | version.status = pull_request.status |
|
853 | 815 | version.created_on = datetime.datetime.now() |
|
854 | 816 | version.updated_on = pull_request.updated_on |
|
855 | 817 | version.user_id = pull_request.user_id |
|
856 | 818 | version.source_repo = pull_request.source_repo |
|
857 | 819 | version.source_ref = pull_request.source_ref |
|
858 | 820 | version.target_repo = pull_request.target_repo |
|
859 | 821 | version.target_ref = pull_request.target_ref |
|
860 | 822 | |
|
861 | 823 | version._last_merge_source_rev = pull_request._last_merge_source_rev |
|
862 | 824 | version._last_merge_target_rev = pull_request._last_merge_target_rev |
|
863 | 825 | version.last_merge_status = pull_request.last_merge_status |
|
864 | 826 | version.shadow_merge_ref = pull_request.shadow_merge_ref |
|
865 | 827 | version.merge_rev = pull_request.merge_rev |
|
866 | 828 | version.reviewer_data = pull_request.reviewer_data |
|
867 | 829 | |
|
868 | 830 | version.revisions = pull_request.revisions |
|
869 | 831 | version.pull_request = pull_request |
|
870 | 832 | Session().add(version) |
|
871 | 833 | Session().flush() |
|
872 | 834 | |
|
873 | 835 | return version |
|
874 | 836 | |
|
875 | 837 | def _generate_update_diffs(self, pull_request, pull_request_version): |
|
876 | 838 | |
|
877 | 839 | diff_context = ( |
|
878 | 840 | self.DIFF_CONTEXT + |
|
879 | 841 | CommentsModel.needed_extra_diff_context()) |
|
880 | 842 | hide_whitespace_changes = False |
|
881 | 843 | source_repo = pull_request_version.source_repo |
|
882 | 844 | source_ref_id = pull_request_version.source_ref_parts.commit_id |
|
883 | 845 | target_ref_id = pull_request_version.target_ref_parts.commit_id |
|
884 | 846 | old_diff = self._get_diff_from_pr_or_version( |
|
885 | 847 | source_repo, source_ref_id, target_ref_id, |
|
886 | 848 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) |
|
887 | 849 | |
|
888 | 850 | source_repo = pull_request.source_repo |
|
889 | 851 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
890 | 852 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
891 | 853 | |
|
892 | 854 | new_diff = self._get_diff_from_pr_or_version( |
|
893 | 855 | source_repo, source_ref_id, target_ref_id, |
|
894 | 856 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) |
|
895 | 857 | |
|
896 | 858 | old_diff_data = diffs.DiffProcessor(old_diff) |
|
897 | 859 | old_diff_data.prepare() |
|
898 | 860 | new_diff_data = diffs.DiffProcessor(new_diff) |
|
899 | 861 | new_diff_data.prepare() |
|
900 | 862 | |
|
901 | 863 | return old_diff_data, new_diff_data |
|
902 | 864 | |
|
903 | 865 | def _link_comments_to_version(self, pull_request_version): |
|
904 | 866 | """ |
|
905 | 867 | Link all unlinked comments of this pull request to the given version. |
|
906 | 868 | |
|
907 | 869 | :param pull_request_version: The `PullRequestVersion` to which |
|
908 | 870 | the comments shall be linked. |
|
909 | 871 | |
|
910 | 872 | """ |
|
911 | 873 | pull_request = pull_request_version.pull_request |
|
912 | 874 | comments = ChangesetComment.query()\ |
|
913 | 875 | .filter( |
|
914 | 876 | # TODO: johbo: Should we query for the repo at all here? |
|
915 | 877 | # Pending decision on how comments of PRs are to be related |
|
916 | 878 | # to either the source repo, the target repo or no repo at all. |
|
917 | 879 | ChangesetComment.repo_id == pull_request.target_repo.repo_id, |
|
918 | 880 | ChangesetComment.pull_request == pull_request, |
|
919 | 881 | ChangesetComment.pull_request_version == None)\ |
|
920 | 882 | .order_by(ChangesetComment.comment_id.asc()) |
|
921 | 883 | |
|
922 | 884 | # TODO: johbo: Find out why this breaks if it is done in a bulk |
|
923 | 885 | # operation. |
|
924 | 886 | for comment in comments: |
|
925 | 887 | comment.pull_request_version_id = ( |
|
926 | 888 | pull_request_version.pull_request_version_id) |
|
927 | 889 | Session().add(comment) |
|
928 | 890 | |
|
929 | 891 | def _calculate_commit_id_changes(self, old_ids, new_ids): |
|
930 | 892 | added = [x for x in new_ids if x not in old_ids] |
|
931 | 893 | common = [x for x in new_ids if x in old_ids] |
|
932 | 894 | removed = [x for x in old_ids if x not in new_ids] |
|
933 | 895 | total = new_ids |
|
934 | 896 | return ChangeTuple(added, common, removed, total) |
|
935 | 897 | |
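The commit-id diffing above deliberately uses list comprehensions rather than set operations, so the original commit order is preserved. A runnable miniature, with short letters standing in for commit hashes and a ChangeTuple assumed to match the one the model uses:

import collections

ChangeTuple = collections.namedtuple(
    'ChangeTuple', ['added', 'common', 'removed', 'total'])

def calculate(old_ids, new_ids):
    added = [x for x in new_ids if x not in old_ids]
    common = [x for x in new_ids if x in old_ids]
    removed = [x for x in old_ids if x not in new_ids]
    return ChangeTuple(added, common, removed, new_ids)

print(calculate(['a', 'b', 'c'], ['b', 'c', 'd']))
# ChangeTuple(added=['d'], common=['b', 'c'], removed=['a'], total=['b', 'c', 'd'])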
|
936 | 898 | def _calculate_file_changes(self, old_diff_data, new_diff_data): |
|
937 | 899 | |
|
938 | 900 | old_files = OrderedDict() |
|
939 | 901 | for diff_data in old_diff_data.parsed_diff: |
|
940 | 902 | old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff']) |
|
941 | 903 | |
|
942 | 904 | added_files = [] |
|
943 | 905 | modified_files = [] |
|
944 | 906 | removed_files = [] |
|
945 | 907 | for diff_data in new_diff_data.parsed_diff: |
|
946 | 908 | new_filename = diff_data['filename'] |
|
947 | 909 | new_hash = md5_safe(diff_data['raw_diff']) |
|
948 | 910 | |
|
949 | 911 | old_hash = old_files.get(new_filename) |
|
950 | 912 | if not old_hash: |
|
951 | 913 | # file is not present in old diff, means it's added |
|
952 | 914 | added_files.append(new_filename) |
|
953 | 915 | else: |
|
954 | 916 | if new_hash != old_hash: |
|
955 | 917 | modified_files.append(new_filename) |
|
956 | 918 | # now remove a file from old, since we have seen it already |
|
957 | 919 | del old_files[new_filename] |
|
958 | 920 | |
|
959 | 921 | # removed files are those present in old, but not in NEW;

960 | 922 | # since we remove old files that are present in the new diff, any

961 | 923 | # left-overs are the removed files
|
962 | 924 | removed_files.extend(old_files.keys()) |
|
963 | 925 | |
|
964 | 926 | return FileChangeTuple(added_files, modified_files, removed_files) |
|
965 | 927 | |
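File-level changes are classified by hashing each file's raw diff text. A condensed, runnable sketch of the same added/modified/removed logic; the filenames and 'diff' payloads are invented:

import hashlib

def md5_safe(s):
    return hashlib.md5(s.encode('utf-8')).hexdigest()

old = {'a.py': md5_safe('old diff a'), 'b.py': md5_safe('diff b')}
new = {'b.py': md5_safe('diff b'), 'c.py': md5_safe('diff c')}

added, modified = [], []
for name in new:
    if name not in old:
        added.append(name)           # not in the old diff -> added
    elif old.pop(name) != new[name]:
        modified.append(name)        # hash changed -> modified
removed = sorted(old)                # left-overs in old -> removed

print(added, modified, removed)      # ['c.py'] [] ['a.py']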
|
966 | 928 | def _render_update_message(self, changes, file_changes): |
|
967 | 929 | """ |
|
968 | 930 | render the message using DEFAULT_COMMENTS_RENDERER (RST renderer), |
|
969 | 931 | so it always looks the same regardless of which default

970 | 932 | renderer the system is using.
|
971 | 933 | |
|
972 | 934 | :param changes: changes named tuple |
|
973 | 935 | :param file_changes: file changes named tuple |
|
974 | 936 | |
|
975 | 937 | """ |
|
976 | 938 | new_status = ChangesetStatus.get_status_lbl( |
|
977 | 939 | ChangesetStatus.STATUS_UNDER_REVIEW) |
|
978 | 940 | |
|
979 | 941 | changed_files = ( |
|
980 | 942 | file_changes.added + file_changes.modified + file_changes.removed) |
|
981 | 943 | |
|
982 | 944 | params = { |
|
983 | 945 | 'under_review_label': new_status, |
|
984 | 946 | 'added_commits': changes.added, |
|
985 | 947 | 'removed_commits': changes.removed, |
|
986 | 948 | 'changed_files': changed_files, |
|
987 | 949 | 'added_files': file_changes.added, |
|
988 | 950 | 'modified_files': file_changes.modified, |
|
989 | 951 | 'removed_files': file_changes.removed, |
|
990 | 952 | } |
|
991 | 953 | renderer = RstTemplateRenderer() |
|
992 | 954 | return renderer.render('pull_request_update.mako', **params) |
|
993 | 955 | |
|
994 | 956 | def edit(self, pull_request, title, description, description_renderer, user): |
|
995 | 957 | pull_request = self.__get_pull_request(pull_request) |
|
996 | 958 | old_data = pull_request.get_api_data(with_merge_state=False) |
|
997 | 959 | if pull_request.is_closed(): |
|
998 | 960 | raise ValueError('This pull request is closed') |
|
999 | 961 | if title: |
|
1000 | 962 | pull_request.title = title |
|
1001 | 963 | pull_request.description = description |
|
1002 | 964 | pull_request.updated_on = datetime.datetime.now() |
|
1003 | 965 | pull_request.description_renderer = description_renderer |
|
1004 | 966 | Session().add(pull_request) |
|
1005 | 967 | self._log_audit_action( |
|
1006 | 968 | 'repo.pull_request.edit', {'old_data': old_data}, |
|
1007 | 969 | user, pull_request) |
|
1008 | 970 | |
|
1009 | 971 | def update_reviewers(self, pull_request, reviewer_data, user): |
|
1010 | 972 | """ |
|
1011 | 973 | Update the reviewers in the pull request |
|
1012 | 974 | |
|
1013 | 975 | :param pull_request: the pr to update |
|
1014 | 976 | :param reviewer_data: list of tuples |
|
1015 | 977 | [(user, ['reason1', 'reason2'], mandatory_flag, [rules])] |
|
1016 | 978 | """ |
|
1017 | 979 | pull_request = self.__get_pull_request(pull_request) |
|
1018 | 980 | if pull_request.is_closed(): |
|
1019 | 981 | raise ValueError('This pull request is closed') |
|
1020 | 982 | |
|
1021 | 983 | reviewers = {} |
|
1022 | 984 | for user_id, reasons, mandatory, rules in reviewer_data: |
|
1023 | 985 | if isinstance(user_id, (int, basestring)): |
|
1024 | 986 | user_id = self._get_user(user_id).user_id |
|
1025 | 987 | reviewers[user_id] = { |
|
1026 | 988 | 'reasons': reasons, 'mandatory': mandatory} |
|
1027 | 989 | |
|
1028 | 990 | reviewers_ids = set(reviewers.keys()) |
|
1029 | 991 | current_reviewers = PullRequestReviewers.query()\ |
|
1030 | 992 | .filter(PullRequestReviewers.pull_request == |
|
1031 | 993 | pull_request).all() |
|
1032 | 994 | current_reviewers_ids = set([x.user.user_id for x in current_reviewers]) |
|
1033 | 995 | |
|
1034 | 996 | ids_to_add = reviewers_ids.difference(current_reviewers_ids) |
|
1035 | 997 | ids_to_remove = current_reviewers_ids.difference(reviewers_ids) |
|
1036 | 998 | |
|
1037 | 999 | log.debug("Adding %s reviewers", ids_to_add) |
|
1038 | 1000 | log.debug("Removing %s reviewers", ids_to_remove) |
|
1039 | 1001 | changed = False |
|
1040 | 1002 | for uid in ids_to_add: |
|
1041 | 1003 | changed = True |
|
1042 | 1004 | _usr = self._get_user(uid) |
|
1043 | 1005 | reviewer = PullRequestReviewers() |
|
1044 | 1006 | reviewer.user = _usr |
|
1045 | 1007 | reviewer.pull_request = pull_request |
|
1046 | 1008 | reviewer.reasons = reviewers[uid]['reasons'] |
|
1047 | 1009 | # NOTE(marcink): mandatory shouldn't be changed now |
|
1048 | 1010 | # reviewer.mandatory = reviewers[uid]['reasons'] |
|
1049 | 1011 | Session().add(reviewer) |
|
1050 | 1012 | self._log_audit_action( |
|
1051 | 1013 | 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()}, |
|
1052 | 1014 | user, pull_request) |
|
1053 | 1015 | |
|
1054 | 1016 | for uid in ids_to_remove: |
|
1055 | 1017 | changed = True |
|
1056 | 1018 | reviewers = PullRequestReviewers.query()\ |
|
1057 | 1019 | .filter(PullRequestReviewers.user_id == uid, |
|
1058 | 1020 | PullRequestReviewers.pull_request == pull_request)\ |
|
1059 | 1021 | .all() |
|
1060 | 1022 | # use .all() in case we accidentally added the same person twice |
|
1061 | 1023 | # this CAN happen due to the lack of DB checks |
|
1062 | 1024 | for obj in reviewers: |
|
1063 | 1025 | old_data = obj.get_dict() |
|
1064 | 1026 | Session().delete(obj) |
|
1065 | 1027 | self._log_audit_action( |
|
1066 | 1028 | 'repo.pull_request.reviewer.delete', |
|
1067 | 1029 | {'old_data': old_data}, user, pull_request) |
|
1068 | 1030 | |
|
1069 | 1031 | if changed: |
|
1070 | 1032 | pull_request.updated_on = datetime.datetime.now() |
|
1071 | 1033 | Session().add(pull_request) |
|
1072 | 1034 | |
|
1073 | 1035 | self.notify_reviewers(pull_request, ids_to_add) |
|
1074 | 1036 | return ids_to_add, ids_to_remove |
|
1075 | 1037 | |
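The reviewer reconciliation above boils down to two set differences; here is that step in isolation, with invented ids:

desired = {2, 5, 7}  # reviewer ids arriving with the update call
current = {2, 3, 5}  # reviewer rows already stored for the pull request

ids_to_add = desired.difference(current)     # {7}  -> new reviewer rows
ids_to_remove = current.difference(desired)  # {3}  -> rows to delete
print(sorted(ids_to_add), sorted(ids_to_remove))  # [7] [3]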
|
1076 | 1038 | def get_url(self, pull_request, request=None, permalink=False): |
|
1077 | 1039 | if not request: |
|
1078 | 1040 | request = get_current_request() |
|
1079 | 1041 | |
|
1080 | 1042 | if permalink: |
|
1081 | 1043 | return request.route_url( |
|
1082 | 1044 | 'pull_requests_global', |
|
1083 | 1045 | pull_request_id=pull_request.pull_request_id,) |
|
1084 | 1046 | else: |
|
1085 | 1047 | return request.route_url('pullrequest_show', |
|
1086 | 1048 | repo_name=safe_str(pull_request.target_repo.repo_name), |
|
1087 | 1049 | pull_request_id=pull_request.pull_request_id,) |
|
1088 | 1050 | |
|
1089 | 1051 | def get_shadow_clone_url(self, pull_request, request=None): |
|
1090 | 1052 | """ |
|
1091 | 1053 | Returns qualified url pointing to the shadow repository. If this pull |
|
1092 | 1054 | request is closed there is no shadow repository and ``None`` will be |
|
1093 | 1055 | returned. |
|
1094 | 1056 | """ |
|
1095 | 1057 | if pull_request.is_closed(): |
|
1096 | 1058 | return None |
|
1097 | 1059 | else: |
|
1098 | 1060 | pr_url = urllib.unquote(self.get_url(pull_request, request=request)) |
|
1099 | 1061 | return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url)) |
|
1100 | 1062 | |
|
1101 | 1063 | def notify_reviewers(self, pull_request, reviewers_ids): |
|
1102 | 1064 | # notification to reviewers |
|
1103 | 1065 | if not reviewers_ids: |
|
1104 | 1066 | return |
|
1105 | 1067 | |
|
1106 | 1068 | pull_request_obj = pull_request |
|
1107 | 1069 | # get the current participants of this pull request |
|
1108 | 1070 | recipients = reviewers_ids |
|
1109 | 1071 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST |
|
1110 | 1072 | |
|
1111 | 1073 | pr_source_repo = pull_request_obj.source_repo |
|
1112 | 1074 | pr_target_repo = pull_request_obj.target_repo |
|
1113 | 1075 | |
|
1114 | 1076 | pr_url = h.route_url('pullrequest_show', |
|
1115 | 1077 | repo_name=pr_target_repo.repo_name, |
|
1116 | 1078 | pull_request_id=pull_request_obj.pull_request_id,) |
|
1117 | 1079 | |
|
1118 | 1080 | # set some variables for email notification |
|
1119 | 1081 | pr_target_repo_url = h.route_url( |
|
1120 | 1082 | 'repo_summary', repo_name=pr_target_repo.repo_name) |
|
1121 | 1083 | |
|
1122 | 1084 | pr_source_repo_url = h.route_url( |
|
1123 | 1085 | 'repo_summary', repo_name=pr_source_repo.repo_name) |
|
1124 | 1086 | |
|
1125 | 1087 | # pull request specifics |
|
1126 | 1088 | pull_request_commits = [ |
|
1127 | 1089 | (x.raw_id, x.message) |
|
1128 | 1090 | for x in map(pr_source_repo.get_commit, pull_request.revisions)] |
|
1129 | 1091 | |
|
1130 | 1092 | kwargs = { |
|
1131 | 1093 | 'user': pull_request.author, |
|
1132 | 1094 | 'pull_request': pull_request_obj, |
|
1133 | 1095 | 'pull_request_commits': pull_request_commits, |
|
1134 | 1096 | |
|
1135 | 1097 | 'pull_request_target_repo': pr_target_repo, |
|
1136 | 1098 | 'pull_request_target_repo_url': pr_target_repo_url, |
|
1137 | 1099 | |
|
1138 | 1100 | 'pull_request_source_repo': pr_source_repo, |
|
1139 | 1101 | 'pull_request_source_repo_url': pr_source_repo_url, |
|
1140 | 1102 | |
|
1141 | 1103 | 'pull_request_url': pr_url, |
|
1142 | 1104 | } |
|
1143 | 1105 | |
|
1144 | 1106 | # pre-generate the subject for notification itself |
|
1145 | 1107 | (subject, |
|
1146 | 1108 | _h, _e, # we don't care about those |
|
1147 | 1109 | body_plaintext) = EmailNotificationModel().render_email( |
|
1148 | 1110 | notification_type, **kwargs) |
|
1149 | 1111 | |
|
1150 | 1112 | # create notification objects, and emails |
|
1151 | 1113 | NotificationModel().create( |
|
1152 | 1114 | created_by=pull_request.author, |
|
1153 | 1115 | notification_subject=subject, |
|
1154 | 1116 | notification_body=body_plaintext, |
|
1155 | 1117 | notification_type=notification_type, |
|
1156 | 1118 | recipients=recipients, |
|
1157 | 1119 | email_kwargs=kwargs, |
|
1158 | 1120 | ) |
|
1159 | 1121 | |
|
1160 | 1122 | def delete(self, pull_request, user): |
|
1161 | 1123 | pull_request = self.__get_pull_request(pull_request) |
|
1162 | 1124 | old_data = pull_request.get_api_data(with_merge_state=False) |
|
1163 | 1125 | self._cleanup_merge_workspace(pull_request) |
|
1164 | 1126 | self._log_audit_action( |
|
1165 | 1127 | 'repo.pull_request.delete', {'old_data': old_data}, |
|
1166 | 1128 | user, pull_request) |
|
1167 | 1129 | Session().delete(pull_request) |
|
1168 | 1130 | |
|
1169 | 1131 | def close_pull_request(self, pull_request, user): |
|
1170 | 1132 | pull_request = self.__get_pull_request(pull_request) |
|
1171 | 1133 | self._cleanup_merge_workspace(pull_request) |
|
1172 | 1134 | pull_request.status = PullRequest.STATUS_CLOSED |
|
1173 | 1135 | pull_request.updated_on = datetime.datetime.now() |
|
1174 | 1136 | Session().add(pull_request) |
|
1175 | 1137 | self._trigger_pull_request_hook( |
|
1176 | 1138 | pull_request, pull_request.author, 'close') |
|
1177 | 1139 | |
|
1178 | 1140 | pr_data = pull_request.get_api_data(with_merge_state=False) |
|
1179 | 1141 | self._log_audit_action( |
|
1180 | 1142 | 'repo.pull_request.close', {'data': pr_data}, user, pull_request) |
|
1181 | 1143 | |
|
1182 | 1144 | def close_pull_request_with_comment( |
|
1183 | 1145 | self, pull_request, user, repo, message=None, auth_user=None): |
|
1184 | 1146 | |
|
1185 | 1147 | pull_request_review_status = pull_request.calculated_review_status() |
|
1186 | 1148 | |
|
1187 | 1149 | if pull_request_review_status == ChangesetStatus.STATUS_APPROVED: |
|
1188 | 1150 | # approved only if we have voting consent |
|
1189 | 1151 | status = ChangesetStatus.STATUS_APPROVED |
|
1190 | 1152 | else: |
|
1191 | 1153 | status = ChangesetStatus.STATUS_REJECTED |
|
1192 | 1154 | status_lbl = ChangesetStatus.get_status_lbl(status) |
|
1193 | 1155 | |
|
1194 | 1156 | default_message = ( |
|
1195 | 1157 | 'Closing with status change {transition_icon} {status}.' |
|
1196 | 1158 | ).format(transition_icon='>', status=status_lbl) |
|
1197 | 1159 | text = message or default_message |
|
1198 | 1160 | |
|
1199 | 1161 | # create a comment, and link it to new status |
|
1200 | 1162 | comment = CommentsModel().create( |
|
1201 | 1163 | text=text, |
|
1202 | 1164 | repo=repo.repo_id, |
|
1203 | 1165 | user=user.user_id, |
|
1204 | 1166 | pull_request=pull_request.pull_request_id, |
|
1205 | 1167 | status_change=status_lbl, |
|
1206 | 1168 | status_change_type=status, |
|
1207 | 1169 | closing_pr=True, |
|
1208 | 1170 | auth_user=auth_user, |
|
1209 | 1171 | ) |
|
1210 | 1172 | |
|
1211 | 1173 | # calculate old status before we change it |
|
1212 | 1174 | old_calculated_status = pull_request.calculated_review_status() |
|
1213 | 1175 | ChangesetStatusModel().set_status( |
|
1214 | 1176 | repo.repo_id, |
|
1215 | 1177 | status, |
|
1216 | 1178 | user.user_id, |
|
1217 | 1179 | comment=comment, |
|
1218 | 1180 | pull_request=pull_request.pull_request_id |
|
1219 | 1181 | ) |
|
1220 | 1182 | |
|
1221 | 1183 | Session().flush() |
|
1222 | 1184 | events.trigger(events.PullRequestCommentEvent(pull_request, comment)) |
|
1223 | 1185 | # we now calculate the status of the pull request again, and based on 

1224 | 1186 | # that calculation trigger a status change. This matters in cases 

1225 | 1187 | # where a non-reviewer admin closes a PR: their vote doesn't change 

1226 | 1188 | # the status, while a reviewer's vote might. 
|
1227 | 1189 | calculated_status = pull_request.calculated_review_status() |
|
1228 | 1190 | if old_calculated_status != calculated_status: |
|
1229 | 1191 | self._trigger_pull_request_hook( |
|
1230 | 1192 | pull_request, user, 'review_status_change') |
|
1231 | 1193 | |
|
1232 | 1194 | # finally close the PR |
|
1233 | 1195 | PullRequestModel().close_pull_request( |
|
1234 | 1196 | pull_request.pull_request_id, user) |
|
1235 | 1197 | |
|
1236 | 1198 | return comment, status |
|
1237 | 1199 | |
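Aside: the closing-status rule above reduces to one conditional: anything short of an approved calculated review status is recorded as rejected when the pull request is closed. A standalone sketch, with plain strings standing in for the ChangesetStatus constants:

def closing_status(calculated_review_status):
    # Approved only with voting consent; everything else is rejected.
    if calculated_review_status == 'approved':
        return 'approved'
    return 'rejected'

assert closing_status('approved') == 'approved'
assert closing_status('under_review') == 'rejected'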
|
1238 | 1200 | def merge_status(self, pull_request, translator=None, |
|
1239 | 1201 | force_shadow_repo_refresh=False): |
|
1240 | 1202 | _ = translator or get_current_request().translate |
|
1241 | 1203 | |
|
1242 | 1204 | if not self._is_merge_enabled(pull_request): |
|
1243 | 1205 | return False, _('Server-side pull request merging is disabled.') |
|
1244 | 1206 | if pull_request.is_closed(): |
|
1245 | 1207 | return False, _('This pull request is closed.') |
|
1246 | 1208 | merge_possible, msg = self._check_repo_requirements( |
|
1247 | 1209 | target=pull_request.target_repo, source=pull_request.source_repo, |
|
1248 | 1210 | translator=_) |
|
1249 | 1211 | if not merge_possible: |
|
1250 | 1212 | return merge_possible, msg |
|
1251 | 1213 | |
|
1252 | 1214 | try: |
|
1253 | 1215 | resp = self._try_merge( |
|
1254 | 1216 | pull_request, |
|
1255 | 1217 | force_shadow_repo_refresh=force_shadow_repo_refresh) |
|
1256 | 1218 | log.debug("Merge response: %s", resp) |
|
1257 | status = resp.possible, self.merge_status_message( | 

1258 | resp.failure_reason) | 
|
1219 | status = resp.possible, resp.merge_status_message | |
|
1259 | 1220 | except NotImplementedError: |
|
1260 | 1221 | status = False, _('Pull request merging is not supported.') |
|
1261 | 1222 | |
|
1262 | 1223 | return status |
|
1263 | 1224 | |
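Aside: merge_status() always hands back a (possible, message) pair, so callers can branch on the boolean and surface the message verbatim. A hypothetical consumer of that contract (illustrative only, not a RhodeCode helper):

def render_merge_banner(possible, message):
    prefix = 'OK' if possible else 'BLOCKED'
    return '[{0}] {1}'.format(prefix, message)

assert render_merge_banner(False, 'This pull request is closed.') == '[BLOCKED] This pull request is closed.'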
|
1264 | 1225 | def _check_repo_requirements(self, target, source, translator): |
|
1265 | 1226 | """ |
|
1266 | 1227 | Check if `target` and `source` have compatible requirements. |
|
1267 | 1228 | |
|
1268 | 1229 | Currently this is just checking for largefiles. |
|
1269 | 1230 | """ |
|
1270 | 1231 | _ = translator |
|
1271 | 1232 | target_has_largefiles = self._has_largefiles(target) |
|
1272 | 1233 | source_has_largefiles = self._has_largefiles(source) |
|
1273 | 1234 | merge_possible = True |
|
1274 | 1235 | message = u'' |
|
1275 | 1236 | |
|
1276 | 1237 | if target_has_largefiles != source_has_largefiles: |
|
1277 | 1238 | merge_possible = False |
|
1278 | 1239 | if source_has_largefiles: |
|
1279 | 1240 | message = _( |
|
1280 | 1241 | 'Target repository large files support is disabled.') |
|
1281 | 1242 | else: |
|
1282 | 1243 | message = _( |
|
1283 | 1244 | 'Source repository large files support is disabled.') |
|
1284 | 1245 | |
|
1285 | 1246 | return merge_possible, message |
|
1286 | 1247 | |
|
1287 | 1248 | def _has_largefiles(self, repo): |
|
1288 | 1249 | largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings( |
|
1289 | 1250 | 'extensions', 'largefiles') |
|
1290 | 1251 | return largefiles_ui and largefiles_ui[0].active |
|
1291 | 1252 | |
|
1292 | 1253 | def _try_merge(self, pull_request, force_shadow_repo_refresh=False): |
|
1293 | 1254 | """ |
|
1294 | 1255 | Try to merge the pull request and return the merge status. |
|
1295 | 1256 | """ |
|
1296 | 1257 | log.debug( |
|
1297 | 1258 | "Trying out if the pull request %s can be merged. Force_refresh=%s", |
|
1298 | 1259 | pull_request.pull_request_id, force_shadow_repo_refresh) |
|
1299 | 1260 | target_vcs = pull_request.target_repo.scm_instance() |
|
1300 | ||
|
1301 | 1261 | # Refresh the target reference. |
|
1302 | 1262 | try: |
|
1303 | 1263 | target_ref = self._refresh_reference( |
|
1304 | 1264 | pull_request.target_ref_parts, target_vcs) |
|
1305 | 1265 | except CommitDoesNotExistError: |
|
1306 | 1266 | merge_state = MergeResponse( |
|
1307 | False, False, None, MergeFailureReason.MISSING_TARGET_REF) | 

1267 | False, False, None, MergeFailureReason.MISSING_TARGET_REF, | 
|
1268 | metadata={'target_ref': pull_request.target_ref_parts}) | |
|
1308 | 1269 | return merge_state |
|
1309 | 1270 | |
|
1310 | 1271 | target_locked = pull_request.target_repo.locked |
|
1311 | 1272 | if target_locked and target_locked[0]: |
|
1312 | log.debug("The target repository is locked.") | |
|
1273 | locked_by = 'user:{}'.format(target_locked[0]) | |
|
1274 | log.debug("The target repository is locked by %s.", locked_by) | |
|
1313 | 1275 | merge_state = MergeResponse( |
|
1314 | False, False, None, MergeFailureReason.TARGET_IS_LOCKED) | 

1276 | False, False, None, MergeFailureReason.TARGET_IS_LOCKED, | 
|
1277 | metadata={'locked_by': locked_by}) | |
|
1315 | 1278 | elif force_shadow_repo_refresh or self._needs_merge_state_refresh( |
|
1316 | 1279 | pull_request, target_ref): |
|
1317 | 1280 | log.debug("Refreshing the merge status of the repository.") |
|
1318 | 1281 | merge_state = self._refresh_merge_state( |
|
1319 | 1282 | pull_request, target_vcs, target_ref) |
|
1320 | 1283 | else: |
|
1321 | 1284 | possible = pull_request.\ |
|
1322 | 1285 | last_merge_status == MergeFailureReason.NONE |
|
1323 | 1286 | merge_state = MergeResponse( |
|
1324 | 1287 | possible, False, None, pull_request.last_merge_status) |
|
1325 | 1288 | |
|
1326 | 1289 | return merge_state |
|
1327 | 1290 | |
|
1328 | 1291 | def _refresh_reference(self, reference, vcs_repository): |
|
1329 | 1292 | if reference.type in ('branch', 'book'): |
|
1330 | 1293 | name_or_id = reference.name |
|
1331 | 1294 | else: |
|
1332 | 1295 | name_or_id = reference.commit_id |
|
1333 | 1296 | refreshed_commit = vcs_repository.get_commit(name_or_id) |
|
1334 | 1297 | refreshed_reference = Reference( |
|
1335 | 1298 | reference.type, reference.name, refreshed_commit.raw_id) |
|
1336 | 1299 | return refreshed_reference |
|
1337 | 1300 | |
|
1338 | 1301 | def _needs_merge_state_refresh(self, pull_request, target_reference): |
|
1339 | 1302 | return not ( 
|
1340 | 1303 | pull_request.revisions and |
|
1341 | 1304 | pull_request.revisions[0] == pull_request._last_merge_source_rev and |
|
1342 | 1305 | target_reference.commit_id == pull_request._last_merge_target_rev) |
|
1343 | 1306 | |
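Aside: the condition above is the cache key for the dry-run merge: the stored merge state is reused only while the source tip and the refreshed target commit both still match what was recorded last time. A standalone restatement:

def needs_refresh(revisions, last_source_rev, target_commit_id, last_target_rev):
    # True unless both tips are unchanged since the previous dry-run merge.
    return not (
        revisions and
        revisions[0] == last_source_rev and
        target_commit_id == last_target_rev)

assert needs_refresh(['abc'], 'abc', 'def', 'def') is False
assert needs_refresh(['abc'], 'abc', 'new-tip', 'def') is True
assert needs_refresh([], None, 'def', 'def') is True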
|
1344 | 1307 | def _refresh_merge_state(self, pull_request, target_vcs, target_reference): |
|
1345 | 1308 | workspace_id = self._workspace_id(pull_request) |
|
1346 | 1309 | source_vcs = pull_request.source_repo.scm_instance() |
|
1347 | 1310 | repo_id = pull_request.target_repo.repo_id |
|
1348 | 1311 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
1349 | 1312 | close_branch = self._close_branch_before_merging(pull_request) |
|
1350 | 1313 | merge_state = target_vcs.merge( |
|
1351 | 1314 | repo_id, workspace_id, |
|
1352 | 1315 | target_reference, source_vcs, pull_request.source_ref_parts, |
|
1353 | 1316 | dry_run=True, use_rebase=use_rebase, |
|
1354 | 1317 | close_branch=close_branch) |
|
1355 | 1318 | |
|
1356 | 1319 | # Do not store the response if there was an unknown error. |
|
1357 | 1320 | if merge_state.failure_reason != MergeFailureReason.UNKNOWN: |
|
1358 | 1321 | pull_request._last_merge_source_rev = \ |
|
1359 | 1322 | pull_request.source_ref_parts.commit_id |
|
1360 | 1323 | pull_request._last_merge_target_rev = target_reference.commit_id |
|
1361 | 1324 | pull_request.last_merge_status = merge_state.failure_reason |
|
1362 | 1325 | pull_request.shadow_merge_ref = merge_state.merge_ref |
|
1363 | 1326 | Session().add(pull_request) |
|
1364 | 1327 | Session().commit() |
|
1365 | 1328 | |
|
1366 | 1329 | return merge_state |
|
1367 | 1330 | |
|
1368 | 1331 | def _workspace_id(self, pull_request): |
|
1369 | 1332 | workspace_id = 'pr-%s' % pull_request.pull_request_id |
|
1370 | 1333 | return workspace_id |
|
1371 | 1334 | |
|
1372 | def merge_status_message(self, status_code): | |
|
1373 | """ | |
|
1374 | Return a human friendly error message for the given merge status code. | |
|
1375 | """ | |
|
1376 | return self.MERGE_STATUS_MESSAGES[status_code] | |
|
1377 | ||
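Aside: this deletion is the core of the change. The static MERGE_STATUS_MESSAGES lookup on the model is replaced by a merge_status_message property on the response object, which can interpolate per-attempt details from the metadata dict passed at the MergeResponse(...) call sites above. A simplified standalone sketch of that design; the class name and templates here are illustrative, not the real vcs API:

class SketchMergeResponse(object):
    MESSAGES = {
        'NONE': u'This pull request can be automatically merged.',
        'TARGET_IS_LOCKED': (
            u'This pull request cannot be merged because the target '
            u'repository is locked by {locked_by}.'),
    }

    def __init__(self, failure_reason, metadata=None):
        self.failure_reason = failure_reason
        self.metadata = metadata or {}

    @property
    def merge_status_message(self):
        # Fill the static template with response-specific metadata.
        return self.MESSAGES[self.failure_reason].format(**self.metadata)

resp = SketchMergeResponse('TARGET_IS_LOCKED', metadata={'locked_by': 'user:1'})
assert resp.merge_status_message.endswith('locked by user:1.')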
|
1378 | 1335 | def generate_repo_data(self, repo, commit_id=None, branch=None, |
|
1379 | 1336 | bookmark=None, translator=None): |
|
1380 | 1337 | from rhodecode.model.repo import RepoModel |
|
1381 | 1338 | |
|
1382 | 1339 | all_refs, selected_ref = \ |
|
1383 | 1340 | self._get_repo_pullrequest_sources( |
|
1384 | 1341 | repo.scm_instance(), commit_id=commit_id, |
|
1385 | 1342 | branch=branch, bookmark=bookmark, translator=translator) |
|
1386 | 1343 | |
|
1387 | 1344 | refs_select2 = [] |
|
1388 | 1345 | for element in all_refs: |
|
1389 | 1346 | children = [{'id': x[0], 'text': x[1]} for x in element[0]] |
|
1390 | 1347 | refs_select2.append({'text': element[1], 'children': children}) |
|
1391 | 1348 | |
|
1392 | 1349 | return { |
|
1393 | 1350 | 'user': { |
|
1394 | 1351 | 'user_id': repo.user.user_id, |
|
1395 | 1352 | 'username': repo.user.username, |
|
1396 | 1353 | 'firstname': repo.user.first_name, |
|
1397 | 1354 | 'lastname': repo.user.last_name, |
|
1398 | 1355 | 'gravatar_link': h.gravatar_url(repo.user.email, 14), |
|
1399 | 1356 | }, |
|
1400 | 1357 | 'name': repo.repo_name, |
|
1401 | 1358 | 'link': RepoModel().get_url(repo), |
|
1402 | 1359 | 'description': h.chop_at_smart(repo.description_safe, '\n'), |
|
1403 | 1360 | 'refs': { |
|
1404 | 1361 | 'all_refs': all_refs, |
|
1405 | 1362 | 'selected_ref': selected_ref, |
|
1406 | 1363 | 'select2_refs': refs_select2 |
|
1407 | 1364 | } |
|
1408 | 1365 | } |
|
1409 | 1366 | |
|
1410 | 1367 | def generate_pullrequest_title(self, source, source_ref, target): |
|
1411 | 1368 | return u'{source}#{at_ref} to {target}'.format( |
|
1412 | 1369 | source=source, |
|
1413 | 1370 | at_ref=source_ref, |
|
1414 | 1371 | target=target, |
|
1415 | 1372 | ) |
|
1416 | 1373 | |
|
1417 | 1374 | def _cleanup_merge_workspace(self, pull_request): |
|
1418 | 1375 | # Merging related cleanup |
|
1419 | 1376 | repo_id = pull_request.target_repo.repo_id |
|
1420 | 1377 | target_scm = pull_request.target_repo.scm_instance() |
|
1421 | 1378 | workspace_id = self._workspace_id(pull_request) |
|
1422 | 1379 | |
|
1423 | 1380 | try: |
|
1424 | 1381 | target_scm.cleanup_merge_workspace(repo_id, workspace_id) |
|
1425 | 1382 | except NotImplementedError: |
|
1426 | 1383 | pass |
|
1427 | 1384 | |
|
1428 | 1385 | def _get_repo_pullrequest_sources( |
|
1429 | 1386 | self, repo, commit_id=None, branch=None, bookmark=None, |
|
1430 | 1387 | translator=None): |
|
1431 | 1388 | """ |
|
1432 | 1389 | Return a structure with repo's interesting commits, suitable for |
|
1433 | 1390 | the selectors in pullrequest controller |
|
1434 | 1391 | |
|
1435 | 1392 | :param commit_id: a commit that must be in the list somehow |
|
1436 | 1393 | and selected by default |
|
1437 | 1394 | :param branch: a branch that must be in the list and selected |
|
1438 | 1395 | by default - even if closed |
|
1439 | 1396 | :param bookmark: a bookmark that must be in the list and selected |
|
1440 | 1397 | """ |
|
1441 | 1398 | _ = translator or get_current_request().translate |
|
1442 | 1399 | |
|
1443 | 1400 | commit_id = safe_str(commit_id) if commit_id else None |
|
1444 | 1401 | branch = safe_str(branch) if branch else None |
|
1445 | 1402 | bookmark = safe_str(bookmark) if bookmark else None |
|
1446 | 1403 | |
|
1447 | 1404 | selected = None |
|
1448 | 1405 | |
|
1449 | 1406 | # order matters: first source that has commit_id in it will be selected |
|
1450 | 1407 | sources = [] |
|
1451 | 1408 | sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark)) |
|
1452 | 1409 | sources.append(('branch', repo.branches.items(), _('Branches'), branch)) |
|
1453 | 1410 | |
|
1454 | 1411 | if commit_id: |
|
1455 | 1412 | ref_commit = (h.short_id(commit_id), commit_id) |
|
1456 | 1413 | sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id)) |
|
1457 | 1414 | |
|
1458 | 1415 | sources.append( |
|
1459 | 1416 | ('branch', repo.branches_closed.items(), _('Closed Branches'), branch), |
|
1460 | 1417 | ) |
|
1461 | 1418 | |
|
1462 | 1419 | groups = [] |
|
1463 | 1420 | for group_key, ref_list, group_name, match in sources: |
|
1464 | 1421 | group_refs = [] |
|
1465 | 1422 | for ref_name, ref_id in ref_list: |
|
1466 | 1423 | ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id) |
|
1467 | 1424 | group_refs.append((ref_key, ref_name)) |
|
1468 | 1425 | |
|
1469 | 1426 | if not selected: |
|
1470 | 1427 | if set([commit_id, match]) & set([ref_id, ref_name]): |
|
1471 | 1428 | selected = ref_key |
|
1472 | 1429 | |
|
1473 | 1430 | if group_refs: |
|
1474 | 1431 | groups.append((group_refs, group_name)) |
|
1475 | 1432 | |
|
1476 | 1433 | if not selected: |
|
1477 | 1434 | ref = commit_id or branch or bookmark |
|
1478 | 1435 | if ref: |
|
1479 | 1436 | raise CommitDoesNotExistError( |
|
1480 | 1437 | 'No commit refs could be found matching: %s' % ref) |
|
1481 | 1438 | elif repo.DEFAULT_BRANCH_NAME in repo.branches: |
|
1482 | 1439 | selected = 'branch:%s:%s' % ( |
|
1483 | 1440 | repo.DEFAULT_BRANCH_NAME, |
|
1484 | 1441 | repo.branches[repo.DEFAULT_BRANCH_NAME] |
|
1485 | 1442 | ) |
|
1486 | 1443 | elif repo.commit_ids: |
|
1487 | 1444 | # make the user select in this case |
|
1488 | 1445 | selected = None |
|
1489 | 1446 | else: |
|
1490 | 1447 | raise EmptyRepositoryError() |
|
1491 | 1448 | return groups, selected |
|
1492 | 1449 | |
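Aside: each entry in the returned groups encodes a ref as a 'type:name:commit_id' key next to its display label, which is what makes the selected-ref matching above work. A sketch with dummy data in the same shape:

groups = [
    ([('branch:default:abc123', 'default')], 'Branches'),
    ([('book:feature-x:def456', 'feature-x')], 'Bookmarks'),
]
selected = 'branch:default:abc123'

for group_refs, group_name in groups:
    for ref_key, label in group_refs:
        ref_type, name, commit_id = ref_key.split(':', 2)
        print(group_name, label, commit_id, ref_key == selected)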
|
1493 | 1450 | def get_diff(self, source_repo, source_ref_id, target_ref_id, |
|
1494 | 1451 | hide_whitespace_changes, diff_context): |
|
1495 | 1452 | |
|
1496 | 1453 | return self._get_diff_from_pr_or_version( |
|
1497 | 1454 | source_repo, source_ref_id, target_ref_id, |
|
1498 | 1455 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) |
|
1499 | 1456 | |
|
1500 | 1457 | def _get_diff_from_pr_or_version( |
|
1501 | 1458 | self, source_repo, source_ref_id, target_ref_id, |
|
1502 | 1459 | hide_whitespace_changes, diff_context): |
|
1503 | 1460 | |
|
1504 | 1461 | target_commit = source_repo.get_commit( |
|
1505 | 1462 | commit_id=safe_str(target_ref_id)) |
|
1506 | 1463 | source_commit = source_repo.get_commit( |
|
1507 | 1464 | commit_id=safe_str(source_ref_id)) |
|
1508 | 1465 | if isinstance(source_repo, Repository): |
|
1509 | 1466 | vcs_repo = source_repo.scm_instance() |
|
1510 | 1467 | else: |
|
1511 | 1468 | vcs_repo = source_repo |
|
1512 | 1469 | |
|
1513 | 1470 | # TODO: johbo: In the context of an update, we cannot reach |
|
1514 | 1471 | # the old commit anymore with our normal mechanisms. It needs |
|
1515 | 1472 | # some sort of special support in the vcs layer to avoid this |
|
1516 | 1473 | # workaround. |
|
1517 | 1474 | if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and |
|
1518 | 1475 | vcs_repo.alias == 'git'): |
|
1519 | 1476 | source_commit.raw_id = safe_str(source_ref_id) |
|
1520 | 1477 | |
|
1521 | 1478 | log.debug('calculating diff between ' |
|
1522 | 1479 | 'source_ref:%s and target_ref:%s for repo `%s`', |
|
1523 | 1480 | target_ref_id, source_ref_id, |
|
1524 | 1481 | safe_unicode(vcs_repo.path)) |
|
1525 | 1482 | |
|
1526 | 1483 | vcs_diff = vcs_repo.get_diff( |
|
1527 | 1484 | commit1=target_commit, commit2=source_commit, |
|
1528 | 1485 | ignore_whitespace=hide_whitespace_changes, context=diff_context) |
|
1529 | 1486 | return vcs_diff |
|
1530 | 1487 | |
|
1531 | 1488 | def _is_merge_enabled(self, pull_request): |
|
1532 | 1489 | return self._get_general_setting( |
|
1533 | 1490 | pull_request, 'rhodecode_pr_merge_enabled') |
|
1534 | 1491 | |
|
1535 | 1492 | def _use_rebase_for_merging(self, pull_request): |
|
1536 | 1493 | repo_type = pull_request.target_repo.repo_type |
|
1537 | 1494 | if repo_type == 'hg': |
|
1538 | 1495 | return self._get_general_setting( |
|
1539 | 1496 | pull_request, 'rhodecode_hg_use_rebase_for_merging') |
|
1540 | 1497 | elif repo_type == 'git': |
|
1541 | 1498 | return self._get_general_setting( |
|
1542 | 1499 | pull_request, 'rhodecode_git_use_rebase_for_merging') |
|
1543 | 1500 | |
|
1544 | 1501 | return False |
|
1545 | 1502 | |
|
1546 | 1503 | def _close_branch_before_merging(self, pull_request): |
|
1547 | 1504 | repo_type = pull_request.target_repo.repo_type |
|
1548 | 1505 | if repo_type == 'hg': |
|
1549 | 1506 | return self._get_general_setting( |
|
1550 | 1507 | pull_request, 'rhodecode_hg_close_branch_before_merging') |
|
1551 | 1508 | elif repo_type == 'git': |
|
1552 | 1509 | return self._get_general_setting( |
|
1553 | 1510 | pull_request, 'rhodecode_git_close_branch_before_merging') |
|
1554 | 1511 | |
|
1555 | 1512 | return False |
|
1556 | 1513 | |
|
1557 | 1514 | def _get_general_setting(self, pull_request, settings_key, default=False): |
|
1558 | 1515 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
1559 | 1516 | settings = settings_model.get_general_settings() |
|
1560 | 1517 | return settings.get(settings_key, default) |
|
1561 | 1518 | |
|
1562 | 1519 | def _log_audit_action(self, action, action_data, user, pull_request): |
|
1563 | 1520 | audit_logger.store( |
|
1564 | 1521 | action=action, |
|
1565 | 1522 | action_data=action_data, |
|
1566 | 1523 | user=user, |
|
1567 | 1524 | repo=pull_request.target_repo) |
|
1568 | 1525 | |
|
1569 | 1526 | def get_reviewer_functions(self): |
|
1570 | 1527 | """ |
|
1571 | 1528 | Fetches functions for validation and fetching default reviewers. |
|
1572 | 1529 | If available we use the EE package, else we fall back to the CE 

1573 | 1530 | package functions. 
|
1574 | 1531 | """ |
|
1575 | 1532 | try: |
|
1576 | 1533 | from rc_reviewers.utils import get_default_reviewers_data |
|
1577 | 1534 | from rc_reviewers.utils import validate_default_reviewers |
|
1578 | 1535 | except ImportError: |
|
1579 | 1536 | from rhodecode.apps.repository.utils import get_default_reviewers_data |
|
1580 | 1537 | from rhodecode.apps.repository.utils import validate_default_reviewers |
|
1581 | 1538 | |
|
1582 | 1539 | return get_default_reviewers_data, validate_default_reviewers |
|
1583 | 1540 | |
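Aside: this is the standard optional-dependency import pattern: try the enterprise (EE) package first and fall back to the bundled (CE) implementation on ImportError. The same pattern with stdlib stand-ins, since the preferred module may simply not be installed:

try:
    import simplejson as json_impl  # preferred module, may be missing
except ImportError:
    import json as json_impl        # bundled fallback

print(json_impl.dumps({'fallback_works': True}))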
|
1584 | 1541 | |
|
1585 | 1542 | class MergeCheck(object): |
|
1586 | 1543 | """ |
|
1587 | 1544 | Performs merge checks and returns a check object which stores information 

1588 | 1545 | about merge errors and merge conditions. 
|
1589 | 1546 | """ |
|
1590 | 1547 | TODO_CHECK = 'todo' |
|
1591 | 1548 | PERM_CHECK = 'perm' |
|
1592 | 1549 | REVIEW_CHECK = 'review' |
|
1593 | 1550 | MERGE_CHECK = 'merge' |
|
1594 | 1551 | |
|
1595 | 1552 | def __init__(self): |
|
1596 | 1553 | self.review_status = None |
|
1597 | 1554 | self.merge_possible = None |
|
1598 | 1555 | self.merge_msg = '' |
|
1599 | 1556 | self.failed = None |
|
1600 | 1557 | self.errors = [] |
|
1601 | 1558 | self.error_details = OrderedDict() |
|
1602 | 1559 | |
|
1603 | 1560 | def push_error(self, error_type, message, error_key, details): |
|
1604 | 1561 | self.failed = True |
|
1605 | 1562 | self.errors.append([error_type, message]) |
|
1606 | 1563 | self.error_details[error_key] = dict( |
|
1607 | 1564 | details=details, |
|
1608 | 1565 | error_type=error_type, |
|
1609 | 1566 | message=message |
|
1610 | 1567 | ) |
|
1611 | 1568 | |
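Aside: push_error() is a one-way accumulator: the first failed check flips failed for the whole object, while errors and error_details keep every problem for display. A condensed, runnable restatement of the class above:

import collections

class SketchMergeCheck(object):
    REVIEW_CHECK = 'review'

    def __init__(self):
        self.failed = None
        self.errors = []
        self.error_details = collections.OrderedDict()

    def push_error(self, error_type, message, error_key, details):
        self.failed = True  # never reset once any check fails
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details, error_type=error_type, message=message)

check = SketchMergeCheck()
check.push_error('warning', 'Approval is pending.',
                 SketchMergeCheck.REVIEW_CHECK, 'under_review')
assert check.failed is True
assert check.error_details['review']['details'] == 'under_review'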
|
1612 | 1569 | @classmethod |
|
1613 | 1570 | def validate(cls, pull_request, auth_user, translator, fail_early=False, |
|
1614 | 1571 | force_shadow_repo_refresh=False): |
|
1615 | 1572 | _ = translator |
|
1616 | 1573 | merge_check = cls() |
|
1617 | 1574 | |
|
1618 | 1575 | # permissions to merge |
|
1619 | 1576 | user_allowed_to_merge = PullRequestModel().check_user_merge( |
|
1620 | 1577 | pull_request, auth_user) |
|
1621 | 1578 | if not user_allowed_to_merge: |
|
1622 | 1579 | log.debug("MergeCheck: cannot merge, user is not allowed to merge.") 
|
1623 | 1580 | |
|
1624 | 1581 | msg = _('User `{}` not allowed to perform merge.').format(auth_user.username) |
|
1625 | 1582 | merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username) |
|
1626 | 1583 | if fail_early: |
|
1627 | 1584 | return merge_check |
|
1628 | 1585 | |
|
1629 | 1586 | # permission to merge into the target branch |
|
1630 | 1587 | target_commit_id = pull_request.target_ref_parts.commit_id |
|
1631 | 1588 | if pull_request.target_ref_parts.type == 'branch': |
|
1632 | 1589 | branch_name = pull_request.target_ref_parts.name |
|
1633 | 1590 | else: |
|
1634 | 1591 | # for mercurial we can always figure out the branch from the commit |
|
1635 | 1592 | # in case of bookmark |
|
1636 | 1593 | target_commit = pull_request.target_repo.get_commit(target_commit_id) |
|
1637 | 1594 | branch_name = target_commit.branch |
|
1638 | 1595 | |
|
1639 | 1596 | rule, branch_perm = auth_user.get_rule_and_branch_permission( |
|
1640 | 1597 | pull_request.target_repo.repo_name, branch_name) |
|
1641 | 1598 | if branch_perm and branch_perm == 'branch.none': |
|
1642 | 1599 | msg = _('Target branch `{}` changes rejected by rule {}.').format( |
|
1643 | 1600 | branch_name, rule) |
|
1644 | 1601 | merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username) |
|
1645 | 1602 | if fail_early: |
|
1646 | 1603 | return merge_check |
|
1647 | 1604 | |
|
1648 | 1605 | # review status, must be always present |
|
1649 | 1606 | review_status = pull_request.calculated_review_status() |
|
1650 | 1607 | merge_check.review_status = review_status |
|
1651 | 1608 | |
|
1652 | 1609 | status_approved = review_status == ChangesetStatus.STATUS_APPROVED |
|
1653 | 1610 | if not status_approved: |
|
1654 | 1611 | log.debug("MergeCheck: cannot merge, approval is pending.") |
|
1655 | 1612 | |
|
1656 | 1613 | msg = _('Pull request reviewer approval is pending.') |
|
1657 | 1614 | |
|
1658 | 1615 | merge_check.push_error( |
|
1659 | 1616 | 'warning', msg, cls.REVIEW_CHECK, review_status) |
|
1660 | 1617 | |
|
1661 | 1618 | if fail_early: |
|
1662 | 1619 | return merge_check |
|
1663 | 1620 | |
|
1664 | 1621 | # left over TODOs |
|
1665 | 1622 | todos = CommentsModel().get_unresolved_todos(pull_request) |
|
1666 | 1623 | if todos: |
|
1667 | 1624 | log.debug("MergeCheck: cannot merge, {} " |
|
1668 | 1625 | "unresolved todos left.".format(len(todos))) |
|
1669 | 1626 | |
|
1670 | 1627 | if len(todos) == 1: |
|
1671 | 1628 | msg = _('Cannot merge, {} TODO still not resolved.').format( |
|
1672 | 1629 | len(todos)) |
|
1673 | 1630 | else: |
|
1674 | 1631 | msg = _('Cannot merge, {} TODOs still not resolved.').format( |
|
1675 | 1632 | len(todos)) |
|
1676 | 1633 | |
|
1677 | 1634 | merge_check.push_error('warning', msg, cls.TODO_CHECK, todos) |
|
1678 | 1635 | |
|
1679 | 1636 | if fail_early: |
|
1680 | 1637 | return merge_check |
|
1681 | 1638 | |
|
1682 | 1639 | # merge possible, here is the filesystem simulation + shadow repo |
|
1683 | 1640 | merge_status, msg = PullRequestModel().merge_status( |
|
1684 | 1641 | pull_request, translator=translator, |
|
1685 | 1642 | force_shadow_repo_refresh=force_shadow_repo_refresh) |
|
1686 | 1643 | merge_check.merge_possible = merge_status |
|
1687 | 1644 | merge_check.merge_msg = msg |
|
1688 | 1645 | if not merge_status: |
|
1689 | 1646 | log.debug( |
|
1690 | 1647 | "MergeCheck: cannot merge, pull request merge not possible.") |
|
1691 | 1648 | merge_check.push_error('warning', msg, cls.MERGE_CHECK, None) |
|
1692 | 1649 | |
|
1693 | 1650 | if fail_early: |
|
1694 | 1651 | return merge_check |
|
1695 | 1652 | |
|
1696 | 1653 | log.debug('MergeCheck: is failed: %s', merge_check.failed) |
|
1697 | 1654 | return merge_check |
|
1698 | 1655 | |
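Aside: validate() runs its checks in a fixed order (merge permission, target-branch permission, review status, unresolved TODOs, merge simulation), and fail_early short-circuits after the first failure so the expensive shadow-repository check can be skipped. A sketch of that control flow:

def run_checks(checks, fail_early=False):
    # checks: ordered (name, passed) pairs, cheapest checks first.
    failures = []
    for name, passed in checks:
        if not passed:
            failures.append(name)
            if fail_early:
                break  # skip remaining, possibly expensive, checks
    return failures

checks = [('perm', True), ('review', False), ('todo', False), ('merge', True)]
assert run_checks(checks, fail_early=True) == ['review']
assert run_checks(checks) == ['review', 'todo']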
|
1699 | 1656 | @classmethod |
|
1700 | 1657 | def get_merge_conditions(cls, pull_request, translator): |
|
1701 | 1658 | _ = translator |
|
1702 | 1659 | merge_details = {} |
|
1703 | 1660 | |
|
1704 | 1661 | model = PullRequestModel() |
|
1705 | 1662 | use_rebase = model._use_rebase_for_merging(pull_request) |
|
1706 | 1663 | |
|
1707 | 1664 | if use_rebase: |
|
1708 | 1665 | merge_details['merge_strategy'] = dict( |
|
1709 | 1666 | details={}, |
|
1710 | 1667 | message=_('Merge strategy: rebase') |
|
1711 | 1668 | ) |
|
1712 | 1669 | else: |
|
1713 | 1670 | merge_details['merge_strategy'] = dict( |
|
1714 | 1671 | details={}, |
|
1715 | 1672 | message=_('Merge strategy: explicit merge commit') |
|
1716 | 1673 | ) |
|
1717 | 1674 | |
|
1718 | 1675 | close_branch = model._close_branch_before_merging(pull_request) |
|
1719 | 1676 | if close_branch: |
|
1720 | 1677 | repo_type = pull_request.target_repo.repo_type |
|
1721 | 1678 | if repo_type == 'hg': |
|
1722 | 1679 | close_msg = _('Source branch will be closed after merge.') |
|
1723 | 1680 | elif repo_type == 'git': |
|
1724 | 1681 | close_msg = _('Source branch will be deleted after merge.') |
|
1725 | 1682 | |
|
1726 | 1683 | merge_details['close_branch'] = dict( |
|
1727 | 1684 | details={}, |
|
1728 | 1685 | message=close_msg |
|
1729 | 1686 | ) |
|
1730 | 1687 | |
|
1731 | 1688 | return merge_details |
|
1732 | 1689 | |
|
1733 | 1690 | ChangeTuple = collections.namedtuple( |
|
1734 | 1691 | 'ChangeTuple', ['added', 'common', 'removed', 'total']) |
|
1735 | 1692 | |
|
1736 | 1693 | FileChangeTuple = collections.namedtuple( |
|
1737 | 1694 | 'FileChangeTuple', ['added', 'modified', 'removed']) |
@@ -1,871 +1,910 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import mock |
|
22 | 22 | import pytest |
|
23 | 23 | import textwrap |
|
24 | 24 | |
|
25 | 25 | import rhodecode |
|
26 | 26 | from rhodecode.lib.utils2 import safe_unicode |
|
27 | 27 | from rhodecode.lib.vcs.backends import get_backend |
|
28 | 28 | from rhodecode.lib.vcs.backends.base import ( |
|
29 | 29 | MergeResponse, MergeFailureReason, Reference) |
|
30 | 30 | from rhodecode.lib.vcs.exceptions import RepositoryError |
|
31 | 31 | from rhodecode.lib.vcs.nodes import FileNode |
|
32 | 32 | from rhodecode.model.comment import CommentsModel |
|
33 | 33 | from rhodecode.model.db import PullRequest, Session |
|
34 | 34 | from rhodecode.model.pull_request import PullRequestModel |
|
35 | 35 | from rhodecode.model.user import UserModel |
|
36 | 36 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN |
|
37 | 37 | |
|
38 | 38 | |
|
39 | 39 | pytestmark = [ |
|
40 | 40 | pytest.mark.backends("git", "hg"), |
|
41 | 41 | ] |
|
42 | 42 | |
|
43 | 43 | |
|
44 | 44 | @pytest.mark.usefixtures('config_stub') |
|
45 | 45 | class TestPullRequestModel(object): |
|
46 | 46 | |
|
47 | 47 | @pytest.fixture |
|
48 | 48 | def pull_request(self, request, backend, pr_util): |
|
49 | 49 | """ |
|
50 | 50 | A pull request combined with multiple patches. 
|
51 | 51 | """ |
|
52 | 52 | BackendClass = get_backend(backend.alias) |
|
53 | merge_resp = MergeResponse( | |
|
54 | False, False, None, MergeFailureReason.UNKNOWN, | |
|
55 | metadata={'exception': 'MockError'}) | |
|
53 | 56 | self.merge_patcher = mock.patch.object( |
|
54 | BackendClass, 'merge', return_value=MergeResponse( | 

55 | False, False, None, MergeFailureReason.UNKNOWN)) | 
|
57 | BackendClass, 'merge', return_value=merge_resp) | |
|
56 | 58 | self.workspace_remove_patcher = mock.patch.object( |
|
57 | 59 | BackendClass, 'cleanup_merge_workspace') |
|
58 | 60 | |
|
59 | 61 | self.workspace_remove_mock = self.workspace_remove_patcher.start() |
|
60 | 62 | self.merge_mock = self.merge_patcher.start() |
|
61 | 63 | self.comment_patcher = mock.patch( |
|
62 | 64 | 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status') |
|
63 | 65 | self.comment_patcher.start() |
|
64 | 66 | self.notification_patcher = mock.patch( |
|
65 | 67 | 'rhodecode.model.notification.NotificationModel.create') |
|
66 | 68 | self.notification_patcher.start() |
|
67 | 69 | self.helper_patcher = mock.patch( |
|
68 | 70 | 'rhodecode.lib.helpers.route_path') |
|
69 | 71 | self.helper_patcher.start() |
|
70 | 72 | |
|
71 | 73 | self.hook_patcher = mock.patch.object(PullRequestModel, |
|
72 | 74 | '_trigger_pull_request_hook') |
|
73 | 75 | self.hook_mock = self.hook_patcher.start() |
|
74 | 76 | |
|
75 | 77 | self.invalidation_patcher = mock.patch( |
|
76 | 78 | 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation') |
|
77 | 79 | self.invalidation_mock = self.invalidation_patcher.start() |
|
78 | 80 | |
|
79 | 81 | self.pull_request = pr_util.create_pull_request( |
|
80 | 82 | mergeable=True, name_suffix=u'ąć') |
|
81 | 83 | self.source_commit = self.pull_request.source_ref_parts.commit_id |
|
82 | 84 | self.target_commit = self.pull_request.target_ref_parts.commit_id |
|
83 | 85 | self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id |
|
84 | 86 | self.repo_id = self.pull_request.target_repo.repo_id |
|
85 | 87 | |
|
86 | 88 | @request.addfinalizer |
|
87 | 89 | def cleanup_pull_request(): |
|
88 | 90 | calls = [mock.call( |
|
89 | 91 | self.pull_request, self.pull_request.author, 'create')] |
|
90 | 92 | self.hook_mock.assert_has_calls(calls) |
|
91 | 93 | |
|
92 | 94 | self.workspace_remove_patcher.stop() |
|
93 | 95 | self.merge_patcher.stop() |
|
94 | 96 | self.comment_patcher.stop() |
|
95 | 97 | self.notification_patcher.stop() |
|
96 | 98 | self.helper_patcher.stop() |
|
97 | 99 | self.hook_patcher.stop() |
|
98 | 100 | self.invalidation_patcher.stop() |
|
99 | 101 | |
|
100 | 102 | return self.pull_request |
|
101 | 103 | |
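Aside: the fixture leans on the mock patcher lifecycle: start() installs the replacement and returns the mock, and the finalizer's stop() restores the original attribute. A minimal standalone demonstration of that start/stop contract:

import mock

class Backend(object):
    def merge(self):
        return 'real'

patcher = mock.patch.object(Backend, 'merge', return_value='mocked')
merge_mock = patcher.start()
try:
    assert Backend().merge() == 'mocked'
    assert merge_mock.call_count == 1
finally:
    patcher.stop()  # restores the original method
assert Backend().merge() == 'real'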
|
102 | 104 | def test_get_all(self, pull_request): |
|
103 | 105 | prs = PullRequestModel().get_all(pull_request.target_repo) |
|
104 | 106 | assert isinstance(prs, list) |
|
105 | 107 | assert len(prs) == 1 |
|
106 | 108 | |
|
107 | 109 | def test_count_all(self, pull_request): |
|
108 | 110 | pr_count = PullRequestModel().count_all(pull_request.target_repo) |
|
109 | 111 | assert pr_count == 1 |
|
110 | 112 | |
|
111 | 113 | def test_get_awaiting_review(self, pull_request): |
|
112 | 114 | prs = PullRequestModel().get_awaiting_review(pull_request.target_repo) |
|
113 | 115 | assert isinstance(prs, list) |
|
114 | 116 | assert len(prs) == 1 |
|
115 | 117 | |
|
116 | 118 | def test_count_awaiting_review(self, pull_request): |
|
117 | 119 | pr_count = PullRequestModel().count_awaiting_review( |
|
118 | 120 | pull_request.target_repo) |
|
119 | 121 | assert pr_count == 1 |
|
120 | 122 | |
|
121 | 123 | def test_get_awaiting_my_review(self, pull_request): |
|
122 | 124 | PullRequestModel().update_reviewers( |
|
123 | 125 | pull_request, [(pull_request.author, ['author'], False, [])], |
|
124 | 126 | pull_request.author) |
|
125 | 127 | prs = PullRequestModel().get_awaiting_my_review( |
|
126 | 128 | pull_request.target_repo, user_id=pull_request.author.user_id) |
|
127 | 129 | assert isinstance(prs, list) |
|
128 | 130 | assert len(prs) == 1 |
|
129 | 131 | |
|
130 | 132 | def test_count_awaiting_my_review(self, pull_request): |
|
131 | 133 | PullRequestModel().update_reviewers( |
|
132 | 134 | pull_request, [(pull_request.author, ['author'], False, [])], |
|
133 | 135 | pull_request.author) |
|
134 | 136 | pr_count = PullRequestModel().count_awaiting_my_review( |
|
135 | 137 | pull_request.target_repo, user_id=pull_request.author.user_id) |
|
136 | 138 | assert pr_count == 1 |
|
137 | 139 | |
|
138 | 140 | def test_delete_calls_cleanup_merge(self, pull_request): |
|
139 | 141 | repo_id = pull_request.target_repo.repo_id |
|
140 | 142 | PullRequestModel().delete(pull_request, pull_request.author) |
|
141 | 143 | |
|
142 | 144 | self.workspace_remove_mock.assert_called_once_with( |
|
143 | 145 | repo_id, self.workspace_id) |
|
144 | 146 | |
|
145 | 147 | def test_close_calls_cleanup_and_hook(self, pull_request): |
|
146 | 148 | PullRequestModel().close_pull_request( |
|
147 | 149 | pull_request, pull_request.author) |
|
148 | 150 | repo_id = pull_request.target_repo.repo_id |
|
149 | 151 | |
|
150 | 152 | self.workspace_remove_mock.assert_called_once_with( |
|
151 | 153 | repo_id, self.workspace_id) |
|
152 | 154 | self.hook_mock.assert_called_with( |
|
153 | 155 | self.pull_request, self.pull_request.author, 'close') |
|
154 | 156 | |
|
155 | 157 | def test_merge_status(self, pull_request): |
|
156 | 158 | self.merge_mock.return_value = MergeResponse( |
|
157 | 159 | True, False, None, MergeFailureReason.NONE) |
|
158 | 160 | |
|
159 | 161 | assert pull_request._last_merge_source_rev is None |
|
160 | 162 | assert pull_request._last_merge_target_rev is None |
|
161 | 163 | assert pull_request.last_merge_status is None |
|
162 | 164 | |
|
163 | 165 | status, msg = PullRequestModel().merge_status(pull_request) |
|
164 | 166 | assert status is True |
|
165 | assert msg.eval() == 'This pull request can be automatically merged.' | 

167 | assert msg == 'This pull request can be automatically merged.' | 
|
166 | 168 | self.merge_mock.assert_called_with( |
|
167 | 169 | self.repo_id, self.workspace_id, |
|
168 | 170 | pull_request.target_ref_parts, |
|
169 | 171 | pull_request.source_repo.scm_instance(), |
|
170 | 172 | pull_request.source_ref_parts, dry_run=True, |
|
171 | 173 | use_rebase=False, close_branch=False) |
|
172 | 174 | |
|
173 | 175 | assert pull_request._last_merge_source_rev == self.source_commit |
|
174 | 176 | assert pull_request._last_merge_target_rev == self.target_commit |
|
175 | 177 | assert pull_request.last_merge_status is MergeFailureReason.NONE |
|
176 | 178 | |
|
177 | 179 | self.merge_mock.reset_mock() |
|
178 | 180 | status, msg = PullRequestModel().merge_status(pull_request) |
|
179 | 181 | assert status is True |
|
180 | assert msg.eval() == 'This pull request can be automatically merged.' | 

182 | assert msg == 'This pull request can be automatically merged.' | 
|
181 | 183 | assert self.merge_mock.called is False |
|
182 | 184 | |
|
183 | 185 | def test_merge_status_known_failure(self, pull_request): |
|
184 | 186 | self.merge_mock.return_value = MergeResponse( |
|
185 | 187 | False, False, None, MergeFailureReason.MERGE_FAILED) |
|
186 | 188 | |
|
187 | 189 | assert pull_request._last_merge_source_rev is None |
|
188 | 190 | assert pull_request._last_merge_target_rev is None |
|
189 | 191 | assert pull_request.last_merge_status is None |
|
190 | 192 | |
|
191 | 193 | status, msg = PullRequestModel().merge_status(pull_request) |
|
192 | 194 | assert status is False |
|
193 | assert ( | |
|
194 | msg.eval() == | |
|
195 | 'This pull request cannot be merged because of merge conflicts.') | |
|
195 | assert msg == 'This pull request cannot be merged because of merge conflicts.' | |
|
196 | 196 | self.merge_mock.assert_called_with( |
|
197 | 197 | self.repo_id, self.workspace_id, |
|
198 | 198 | pull_request.target_ref_parts, |
|
199 | 199 | pull_request.source_repo.scm_instance(), |
|
200 | 200 | pull_request.source_ref_parts, dry_run=True, |
|
201 | 201 | use_rebase=False, close_branch=False) |
|
202 | 202 | |
|
203 | 203 | assert pull_request._last_merge_source_rev == self.source_commit |
|
204 | 204 | assert pull_request._last_merge_target_rev == self.target_commit |
|
205 | 205 | assert ( |
|
206 | 206 | pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED) |
|
207 | 207 | |
|
208 | 208 | self.merge_mock.reset_mock() |
|
209 | 209 | status, msg = PullRequestModel().merge_status(pull_request) |
|
210 | 210 | assert status is False |
|
211 | assert ( | |
|
212 | msg.eval() == | |
|
213 | 'This pull request cannot be merged because of merge conflicts.') | |
|
211 | assert msg == 'This pull request cannot be merged because of merge conflicts.' | |
|
214 | 212 | assert self.merge_mock.called is False |
|
215 | 213 | |
|
216 | 214 | def test_merge_status_unknown_failure(self, pull_request): |
|
217 | 215 | self.merge_mock.return_value = MergeResponse( |
|
218 | False, False, None, MergeFailureReason.UNKNOWN) | 

216 | False, False, None, MergeFailureReason.UNKNOWN, | 
|
217 | metadata={'exception': 'MockError'}) | |
|
219 | 218 | |
|
220 | 219 | assert pull_request._last_merge_source_rev is None |
|
221 | 220 | assert pull_request._last_merge_target_rev is None |
|
222 | 221 | assert pull_request.last_merge_status is None |
|
223 | 222 | |
|
224 | 223 | status, msg = PullRequestModel().merge_status(pull_request) |
|
225 | 224 | assert status is False |
|
226 | assert msg.eval() == ( | 

227 | 'This pull request cannot be merged because of an unhandled' | 
|
228 | ' exception.') | 

225 | assert msg == ( | 
|
226 | 'This pull request cannot be merged because of an unhandled exception. ' | |
|
227 | 'MockError') | |
|
229 | 228 | self.merge_mock.assert_called_with( |
|
230 | 229 | self.repo_id, self.workspace_id, |
|
231 | 230 | pull_request.target_ref_parts, |
|
232 | 231 | pull_request.source_repo.scm_instance(), |
|
233 | 232 | pull_request.source_ref_parts, dry_run=True, |
|
234 | 233 | use_rebase=False, close_branch=False) |
|
235 | 234 | |
|
236 | 235 | assert pull_request._last_merge_source_rev is None |
|
237 | 236 | assert pull_request._last_merge_target_rev is None |
|
238 | 237 | assert pull_request.last_merge_status is None |
|
239 | 238 | |
|
240 | 239 | self.merge_mock.reset_mock() |
|
241 | 240 | status, msg = PullRequestModel().merge_status(pull_request) |
|
242 | 241 | assert status is False |
|
243 | assert msg.eval() == ( | 

244 | 'This pull request cannot be merged because of an unhandled' | 
|
245 | ' exception.') | 

242 | assert msg == ( | 
|
243 | 'This pull request cannot be merged because of an unhandled exception. ' | |
|
244 | 'MockError') | |
|
246 | 245 | assert self.merge_mock.called is True |
|
247 | 246 | |
|
248 | 247 | def test_merge_status_when_target_is_locked(self, pull_request): |
|
249 | 248 | pull_request.target_repo.locked = [1, u'12345.50', 'lock_web'] |
|
250 | 249 | status, msg = PullRequestModel().merge_status(pull_request) |
|
251 | 250 | assert status is False |
|
252 | assert msg.eval() == ( | 

253 | 'This pull request cannot be merged because the target repository' | 
|
254 | ' is locked.') | 

251 | assert msg == ( | 
|
252 | 'This pull request cannot be merged because the target repository ' | |
|
253 | 'is locked by user:1.') | |
|
255 | 254 | |
|
256 | 255 | def test_merge_status_requirements_check_target(self, pull_request): |
|
257 | 256 | |
|
258 | 257 | def has_largefiles(self, repo): |
|
259 | 258 | return repo == pull_request.source_repo |
|
260 | 259 | |
|
261 | 260 | patcher = mock.patch.object( |
|
262 | 261 | PullRequestModel, '_has_largefiles', has_largefiles) |
|
263 | 262 | with patcher: |
|
264 | 263 | status, msg = PullRequestModel().merge_status(pull_request) |
|
265 | 264 | |
|
266 | 265 | assert status is False |
|
267 | 266 | assert msg == 'Target repository large files support is disabled.' |
|
268 | 267 | |
|
269 | 268 | def test_merge_status_requirements_check_source(self, pull_request): |
|
270 | 269 | |
|
271 | 270 | def has_largefiles(self, repo): |
|
272 | 271 | return repo == pull_request.target_repo |
|
273 | 272 | |
|
274 | 273 | patcher = mock.patch.object( |
|
275 | 274 | PullRequestModel, '_has_largefiles', has_largefiles) |
|
276 | 275 | with patcher: |
|
277 | 276 | status, msg = PullRequestModel().merge_status(pull_request) |
|
278 | 277 | |
|
279 | 278 | assert status is False |
|
280 | 279 | assert msg == 'Source repository large files support is disabled.' |
|
281 | 280 | |
|
282 | 281 | def test_merge(self, pull_request, merge_extras): |
|
283 | 282 | user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
284 | 283 | merge_ref = Reference( |
|
285 | 284 | 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6') |
|
286 | 285 | self.merge_mock.return_value = MergeResponse( |
|
287 | 286 | True, True, merge_ref, MergeFailureReason.NONE) |
|
288 | 287 | |
|
289 | 288 | merge_extras['repository'] = pull_request.target_repo.repo_name |
|
290 | 289 | PullRequestModel().merge_repo( |
|
291 | 290 | pull_request, pull_request.author, extras=merge_extras) |
|
292 | 291 | |
|
293 | 292 | message = ( |
|
294 | 293 | u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}' |
|
295 | 294 | u'\n\n {pr_title}'.format( |
|
296 | 295 | pr_id=pull_request.pull_request_id, |
|
297 | 296 | source_repo=safe_unicode( |
|
298 | 297 | pull_request.source_repo.scm_instance().name), |
|
299 | 298 | source_ref_name=pull_request.source_ref_parts.name, |
|
300 | 299 | pr_title=safe_unicode(pull_request.title) |
|
301 | 300 | ) |
|
302 | 301 | ) |
|
303 | 302 | self.merge_mock.assert_called_with( |
|
304 | 303 | self.repo_id, self.workspace_id, |
|
305 | 304 | pull_request.target_ref_parts, |
|
306 | 305 | pull_request.source_repo.scm_instance(), |
|
307 | 306 | pull_request.source_ref_parts, |
|
308 | 307 | user_name=user.short_contact, user_email=user.email, message=message, |
|
309 | 308 | use_rebase=False, close_branch=False |
|
310 | 309 | ) |
|
311 | 310 | self.invalidation_mock.assert_called_once_with( |
|
312 | 311 | pull_request.target_repo.repo_name) |
|
313 | 312 | |
|
314 | 313 | self.hook_mock.assert_called_with( |
|
315 | 314 | self.pull_request, self.pull_request.author, 'merge') |
|
316 | 315 | |
|
317 | 316 | pull_request = PullRequest.get(pull_request.pull_request_id) |
|
318 | 317 | assert ( |
|
319 | 318 | pull_request.merge_rev == |
|
320 | 319 | '6126b7bfcc82ad2d3deaee22af926b082ce54cc6') |
|
321 | 320 | |
|
322 | 321 | def test_merge_failed(self, pull_request, merge_extras): |
|
323 | 322 | user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
324 | 323 | merge_ref = Reference( |
|
325 | 324 | 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6') |
|
326 | 325 | self.merge_mock.return_value = MergeResponse( |
|
327 | 326 | False, False, merge_ref, MergeFailureReason.MERGE_FAILED) |
|
328 | 327 | |
|
329 | 328 | merge_extras['repository'] = pull_request.target_repo.repo_name |
|
330 | 329 | PullRequestModel().merge_repo( |
|
331 | 330 | pull_request, pull_request.author, extras=merge_extras) |
|
332 | 331 | |
|
333 | 332 | message = ( |
|
334 | 333 | u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}' |
|
335 | 334 | u'\n\n {pr_title}'.format( |
|
336 | 335 | pr_id=pull_request.pull_request_id, |
|
337 | 336 | source_repo=safe_unicode( |
|
338 | 337 | pull_request.source_repo.scm_instance().name), |
|
339 | 338 | source_ref_name=pull_request.source_ref_parts.name, |
|
340 | 339 | pr_title=safe_unicode(pull_request.title) |
|
341 | 340 | ) |
|
342 | 341 | ) |
|
343 | 342 | self.merge_mock.assert_called_with( |
|
344 | 343 | self.repo_id, self.workspace_id, |
|
345 | 344 | pull_request.target_ref_parts, |
|
346 | 345 | pull_request.source_repo.scm_instance(), |
|
347 | 346 | pull_request.source_ref_parts, |
|
348 | 347 | user_name=user.short_contact, user_email=user.email, message=message, |
|
349 | 348 | use_rebase=False, close_branch=False |
|
350 | 349 | ) |
|
351 | 350 | |
|
352 | 351 | pull_request = PullRequest.get(pull_request.pull_request_id) |
|
353 | 352 | assert self.invalidation_mock.called is False |
|
354 | 353 | assert pull_request.merge_rev is None |
|
355 | 354 | |
|
356 | 355 | def test_get_commit_ids(self, pull_request): |
|
357 | 356 | # The PR has not been merged yet, so expect an exception 
|
358 | 357 | with pytest.raises(ValueError): |
|
359 | 358 | PullRequestModel()._get_commit_ids(pull_request) |
|
360 | 359 | |
|
361 | 360 | # Merge revision is in the revisions list |
|
362 | 361 | pull_request.merge_rev = pull_request.revisions[0] |
|
363 | 362 | commit_ids = PullRequestModel()._get_commit_ids(pull_request) |
|
364 | 363 | assert commit_ids == pull_request.revisions |
|
365 | 364 | |
|
366 | 365 | # Merge revision is not in the revisions list |
|
367 | 366 | pull_request.merge_rev = 'f000' * 10 |
|
368 | 367 | commit_ids = PullRequestModel()._get_commit_ids(pull_request) |
|
369 | 368 | assert commit_ids == pull_request.revisions + [pull_request.merge_rev] |
|
370 | 369 | |
|
371 | 370 | def test_get_diff_from_pr_version(self, pull_request): |
|
372 | 371 | source_repo = pull_request.source_repo |
|
373 | 372 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
374 | 373 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
375 | 374 | diff = PullRequestModel()._get_diff_from_pr_or_version( |
|
376 | 375 | source_repo, source_ref_id, target_ref_id, |
|
377 | 376 | hide_whitespace_changes=False, diff_context=6) |
|
378 | 377 | assert 'file_1' in diff.raw |
|
379 | 378 | |
|
380 | 379 | def test_generate_title_returns_unicode(self): |
|
381 | 380 | title = PullRequestModel().generate_pullrequest_title( |
|
382 | 381 | source='source-dummy', |
|
383 | 382 | source_ref='source-ref-dummy', |
|
384 | 383 | target='target-dummy', |
|
385 | 384 | ) |
|
386 | 385 | assert type(title) == unicode |
|
387 | 386 | |
|
388 | 387 | |
|
389 | 388 | @pytest.mark.usefixtures('config_stub') |
|
390 | 389 | class TestIntegrationMerge(object): |
|
391 | 390 | @pytest.mark.parametrize('extra_config', ( |
|
392 | 391 | {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False}, |
|
393 | 392 | )) |
|
394 | 393 | def test_merge_triggers_push_hooks( |
|
395 | 394 | self, pr_util, user_admin, capture_rcextensions, merge_extras, |
|
396 | 395 | extra_config): |
|
397 | 396 | |
|
398 | 397 | pull_request = pr_util.create_pull_request( |
|
399 | 398 | approved=True, mergeable=True) |
|
400 | 399 | # TODO: johbo: Needed for sqlite, try to find an automatic way for it |
|
401 | 400 | merge_extras['repository'] = pull_request.target_repo.repo_name |
|
402 | 401 | Session().commit() |
|
403 | 402 | |
|
404 | 403 | with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False): |
|
405 | 404 | merge_state = PullRequestModel().merge_repo( |
|
406 | 405 | pull_request, user_admin, extras=merge_extras) |
|
407 | 406 | |
|
408 | 407 | assert merge_state.executed |
|
409 | 408 | assert '_pre_push_hook' in capture_rcextensions |
|
410 | 409 | assert '_push_hook' in capture_rcextensions |
|
411 | 410 | |
|
412 | 411 | def test_merge_can_be_rejected_by_pre_push_hook( |
|
413 | 412 | self, pr_util, user_admin, capture_rcextensions, merge_extras): |
|
414 | 413 | pull_request = pr_util.create_pull_request( |
|
415 | 414 | approved=True, mergeable=True) |
|
416 | 415 | # TODO: johbo: Needed for sqlite, try to find an automatic way for it |
|
417 | 416 | merge_extras['repository'] = pull_request.target_repo.repo_name |
|
418 | 417 | Session().commit() |
|
419 | 418 | |
|
420 | 419 | with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull: |
|
421 | 420 | pre_pull.side_effect = RepositoryError("Disallow push!") |
|
422 | 421 | merge_status = PullRequestModel().merge_repo( |
|
423 | 422 | pull_request, user_admin, extras=merge_extras) |
|
424 | 423 | |
|
425 | 424 | assert not merge_status.executed |
|
426 | 425 | assert 'pre_push' not in capture_rcextensions |
|
427 | 426 | assert 'post_push' not in capture_rcextensions |
|
428 | 427 | |
|
429 | 428 | def test_merge_fails_if_target_is_locked( |
|
430 | 429 | self, pr_util, user_regular, merge_extras): |
|
431 | 430 | pull_request = pr_util.create_pull_request( |
|
432 | 431 | approved=True, mergeable=True) |
|
433 | 432 | locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web'] |
|
434 | 433 | pull_request.target_repo.locked = locked_by |
|
435 | 434 | # TODO: johbo: Check if this can work based on the database, currently |
|
436 | 435 | # all data is pre-computed, that's why just updating the DB is not |
|
437 | 436 | # enough. |
|
438 | 437 | merge_extras['locked_by'] = locked_by |
|
439 | 438 | merge_extras['repository'] = pull_request.target_repo.repo_name |
|
440 | 439 | # TODO: johbo: Needed for sqlite, try to find an automatic way for it |
|
441 | 440 | Session().commit() |
|
442 | 441 | merge_status = PullRequestModel().merge_repo( |
|
443 | 442 | pull_request, user_regular, extras=merge_extras) |
|
444 | 443 | assert not merge_status.executed |
|
445 | 444 | |
|
446 | 445 | |
|
447 | 446 | @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [ |
|
448 | 447 | (False, 1, 0), |
|
449 | 448 | (True, 0, 1), |
|
450 | 449 | ]) |
|
451 | 450 | def test_outdated_comments( |
|
452 | 451 | pr_util, use_outdated, inlines_count, outdated_count, config_stub): |
|
453 | 452 | pull_request = pr_util.create_pull_request() |
|
454 | 453 | pr_util.create_inline_comment(file_path='not_in_updated_diff') |
|
455 | 454 | |
|
456 | 455 | with outdated_comments_patcher(use_outdated) as outdated_comment_mock: |
|
457 | 456 | pr_util.add_one_commit() |
|
458 | 457 | assert_inline_comments( |
|
459 | 458 | pull_request, visible=inlines_count, outdated=outdated_count) |
|
460 | 459 | outdated_comment_mock.assert_called_with(pull_request) |
|
461 | 460 | |
|
462 | 461 | |
|
462 | @pytest.mark.parametrize('mr_type, expected_msg', [ | |
|
463 | (MergeFailureReason.NONE, | |
|
464 | 'This pull request can be automatically merged.'), | |
|
465 | (MergeFailureReason.UNKNOWN, | |
|
466 | 'This pull request cannot be merged because of an unhandled exception. CRASH'), | |
|
467 | (MergeFailureReason.MERGE_FAILED, | |
|
468 | 'This pull request cannot be merged because of merge conflicts.'), | |
|
469 | (MergeFailureReason.PUSH_FAILED, | |
|
470 | 'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'), | |
|
471 | (MergeFailureReason.TARGET_IS_NOT_HEAD, | |
|
472 | 'This pull request cannot be merged because the target `ref_name` is not a head.'), | |
|
473 | (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES, | |
|
474 | 'This pull request cannot be merged because the source contains more branches than the target.'), | |
|
475 | (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS, | |
|
476 | 'This pull request cannot be merged because the target has multiple heads: `a,b,c`.'), | |
|
477 | (MergeFailureReason.TARGET_IS_LOCKED, | |
|
478 | 'This pull request cannot be merged because the target repository is locked by user:123.'), | |
|
479 | (MergeFailureReason.MISSING_TARGET_REF, | |
|
480 | 'This pull request cannot be merged because the target reference `ref_name` is missing.'), | |
|
481 | (MergeFailureReason.MISSING_SOURCE_REF, | |
|
482 | 'This pull request cannot be merged because the source reference `ref_name` is missing.'), | |
|
483 | (MergeFailureReason.SUBREPO_MERGE_FAILED, | |
|
484 | 'This pull request cannot be merged because of conflicts related to sub repositories.'), | |
|
485 | ||
|
486 | ]) | |
|
487 | def test_merge_response_message(mr_type, expected_msg): | |
|
488 | merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6') | |
|
489 | metadata = { | |
|
490 | 'exception': "CRASH", | |
|
491 | 'target': 'some-repo', | |
|
492 | 'merge_commit': 'merge_commit', | |
|
493 | 'target_ref': merge_ref, | |
|
494 | 'source_ref': merge_ref, | |
|
495 | 'heads': ','.join(['a', 'b', 'c']), | |
|
496 | 'locked_by': 'user:123'} | |
|
497 | ||
|
498 | merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata) | |
|
499 | assert merge_response.merge_status_message == expected_msg | |
|
500 | ||
|
501 | ||
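Aside: the test above can feed the same superset metadata dict to every case because str.format() only consumes the keys a given template actually references; unused keys are simply ignored. A quick illustration with abbreviated template shapes:

templates = {
    'PUSH_FAILED': 'push to target:`{target}@{merge_commit}` failed.',
    'TARGET_IS_LOCKED': 'target repository is locked by {locked_by}.',
}
metadata = {'target': 'some-repo', 'merge_commit': 'merge_commit',
            'locked_by': 'user:123', 'exception': 'CRASH'}

for reason, template in sorted(templates.items()):
    print(reason, '->', template.format(**metadata))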
|
463 | 502 | @pytest.fixture |
|
464 | 503 | def merge_extras(user_regular): |
|
465 | 504 | """ |
|
466 | 505 | Context for the vcs operation when running a merge. |
|
467 | 506 | """ |
|
468 | 507 | extras = { |
|
469 | 508 | 'ip': '127.0.0.1', |
|
470 | 509 | 'username': user_regular.username, |
|
471 | 510 | 'user_id': user_regular.user_id, |
|
472 | 511 | 'action': 'push', |
|
473 | 512 | 'repository': 'fake_target_repo_name', |
|
474 | 513 | 'scm': 'git', |
|
475 | 514 | 'config': 'fake_config_ini_path', |
|
476 | 515 | 'repo_store': '', |
|
477 | 516 | 'make_lock': None, |
|
478 | 517 | 'locked_by': [None, None, None], |
|
479 | 518 | 'server_url': 'http://test.example.com:5000', |
|
480 | 519 | 'hooks': ['push', 'pull'], |
|
481 | 520 | 'is_shadow_repo': False, |
|
482 | 521 | } |
|
483 | 522 | return extras |
|
484 | 523 | |
|
485 | 524 | |
|
486 | 525 | @pytest.mark.usefixtures('config_stub') |
|
487 | 526 | class TestUpdateCommentHandling(object): |
|
488 | 527 | |
|
489 | 528 | @pytest.fixture(autouse=True, scope='class') |
|
490 | 529 | def enable_outdated_comments(self, request, baseapp): |
|
491 | 530 | config_patch = mock.patch.dict( |
|
492 | 531 | 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True}) |
|
493 | 532 | config_patch.start() |
|
494 | 533 | |
|
495 | 534 | @request.addfinalizer |
|
496 | 535 | def cleanup(): |
|
497 | 536 | config_patch.stop() |
|
498 | 537 | |
|
499 | 538 | def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util): |
|
500 | 539 | commits = [ |
|
501 | 540 | {'message': 'a'}, |
|
502 | 541 | {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]}, |
|
503 | 542 | {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]}, |
|
504 | 543 | ] |
|
505 | 544 | pull_request = pr_util.create_pull_request( |
|
506 | 545 | commits=commits, target_head='a', source_head='b', revisions=['b']) |
|
507 | 546 | pr_util.create_inline_comment(file_path='file_b') |
|
508 | 547 | pr_util.add_one_commit(head='c') |
|
509 | 548 | |
|
510 | 549 | assert_inline_comments(pull_request, visible=1, outdated=0) |
|
511 | 550 | |
|
512 | 551 | def test_comment_stays_unflagged_on_change_above(self, pr_util): |
|
513 | 552 | original_content = ''.join( |
|
514 | 553 | ['line {}\n'.format(x) for x in range(1, 11)]) |
|
515 | 554 | updated_content = 'new_line_at_top\n' + original_content |
|
516 | 555 | commits = [ |
|
517 | 556 | {'message': 'a'}, |
|
518 | 557 | {'message': 'b', 'added': [FileNode('file_b', original_content)]}, |
|
519 | 558 | {'message': 'c', 'changed': [FileNode('file_b', updated_content)]}, |
|
520 | 559 | ] |
|
521 | 560 | pull_request = pr_util.create_pull_request( |
|
522 | 561 | commits=commits, target_head='a', source_head='b', revisions=['b']) |
|
523 | 562 | |
|
524 | 563 | with outdated_comments_patcher(): |
|
525 | 564 | comment = pr_util.create_inline_comment( |
|
526 | 565 | line_no=u'n8', file_path='file_b') |
|
527 | 566 | pr_util.add_one_commit(head='c') |
|
528 | 567 | |
|
529 | 568 | assert_inline_comments(pull_request, visible=1, outdated=0) |
|
530 | 569 | assert comment.line_no == u'n9' |
|
531 | 570 | |
|
532 | 571 | def test_comment_stays_unflagged_on_change_below(self, pr_util): |
|
533 | 572 | original_content = ''.join(['line {}\n'.format(x) for x in range(10)]) |
|
534 | 573 | updated_content = original_content + 'new_line_at_end\n' |
|
535 | 574 | commits = [ |
|
536 | 575 | {'message': 'a'}, |
|
537 | 576 | {'message': 'b', 'added': [FileNode('file_b', original_content)]}, |
|
538 | 577 | {'message': 'c', 'changed': [FileNode('file_b', updated_content)]}, |
|
539 | 578 | ] |
|
540 | 579 | pull_request = pr_util.create_pull_request( |
|
541 | 580 | commits=commits, target_head='a', source_head='b', revisions=['b']) |
|
542 | 581 | pr_util.create_inline_comment(file_path='file_b') |
|
543 | 582 | pr_util.add_one_commit(head='c') |
|
544 | 583 | |
|
545 | 584 | assert_inline_comments(pull_request, visible=1, outdated=0) |
|
546 | 585 | |
|
547 | 586 | @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9']) |
|
548 | 587 | def test_comment_flagged_on_change_around_context(self, pr_util, line_no): |
|
549 | 588 | base_lines = ['line {}\n'.format(x) for x in range(1, 13)] |
|
550 | 589 | change_lines = list(base_lines) |
|
551 | 590 | change_lines.insert(6, 'line 6a added\n') |
|
552 | 591 | |
|
553 | 592 | # Changes on the last line of sight |
|
554 | 593 | update_lines = list(change_lines) |
|
555 | 594 | update_lines[0] = 'line 1 changed\n' |
|
556 | 595 | update_lines[-1] = 'line 12 changed\n' |
|
557 | 596 | |
|
558 | 597 | def file_b(lines): |
|
559 | 598 | return FileNode('file_b', ''.join(lines)) |
|
560 | 599 | |
|
561 | 600 | commits = [ |
|
562 | 601 | {'message': 'a', 'added': [file_b(base_lines)]}, |
|
563 | 602 | {'message': 'b', 'changed': [file_b(change_lines)]}, |
|
564 | 603 | {'message': 'c', 'changed': [file_b(update_lines)]}, |
|
565 | 604 | ] |
|
566 | 605 | |
|
567 | 606 | pull_request = pr_util.create_pull_request( |
|
568 | 607 | commits=commits, target_head='a', source_head='b', revisions=['b']) |
|
569 | 608 | pr_util.create_inline_comment(line_no=line_no, file_path='file_b') |
|
570 | 609 | |
|
571 | 610 | with outdated_comments_patcher(): |
|
572 | 611 | pr_util.add_one_commit(head='c') |
|
573 | 612 | assert_inline_comments(pull_request, visible=0, outdated=1) |
|
574 | 613 | |
|
575 | 614 | @pytest.mark.parametrize("change, content", [ |
|
576 | 615 | ('changed', 'changed\n'), |
|
577 | 616 | ('removed', ''), |
|
578 | 617 | ], ids=['changed', 'removed']) |
|
579 | 618 | def test_comment_flagged_on_change(self, pr_util, change, content): |
|
580 | 619 | commits = [ |
|
581 | 620 | {'message': 'a'}, |
|
582 | 621 | {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]}, |
|
583 | 622 | {'message': 'c', change: [FileNode('file_b', content)]}, |
|
584 | 623 | ] |
|
585 | 624 | pull_request = pr_util.create_pull_request( |
|
586 | 625 | commits=commits, target_head='a', source_head='b', revisions=['b']) |
|
587 | 626 | pr_util.create_inline_comment(file_path='file_b') |
|
588 | 627 | |
|
589 | 628 | with outdated_comments_patcher(): |
|
590 | 629 | pr_util.add_one_commit(head='c') |
|
591 | 630 | assert_inline_comments(pull_request, visible=0, outdated=1) |
|
592 | 631 | |
|
593 | 632 | |
|
594 | 633 | @pytest.mark.usefixtures('config_stub') |
|
595 | 634 | class TestUpdateChangedFiles(object): |
|
596 | 635 | |
|
597 | 636 | def test_no_changes_on_unchanged_diff(self, pr_util): |
|
598 | 637 | commits = [ |
|
599 | 638 | {'message': 'a'}, |
|
600 | 639 | {'message': 'b', |
|
601 | 640 | 'added': [FileNode('file_b', 'test_content b\n')]}, |
|
602 | 641 | {'message': 'c', |
|
603 | 642 | 'added': [FileNode('file_c', 'test_content c\n')]}, |
|
604 | 643 | ] |
|
605 | 644 | # open a PR from a to b, adding file_b |
|
606 | 645 | pull_request = pr_util.create_pull_request( |
|
607 | 646 | commits=commits, target_head='a', source_head='b', revisions=['b'], |
|
608 | 647 | name_suffix='per-file-review') |
|
609 | 648 | |
|
610 | 649 | # modify PR adding new file file_c |
|
611 | 650 | pr_util.add_one_commit(head='c') |
|
612 | 651 | |
|
613 | 652 | assert_pr_file_changes( |
|
614 | 653 | pull_request, |
|
615 | 654 | added=['file_c'], |
|
616 | 655 | modified=[], |
|
617 | 656 | removed=[]) |
|
618 | 657 | |
|
619 | 658 | def test_modify_and_undo_modification_diff(self, pr_util): |
|
620 | 659 | commits = [ |
|
621 | 660 | {'message': 'a'}, |
|
622 | 661 | {'message': 'b', |
|
623 | 662 | 'added': [FileNode('file_b', 'test_content b\n')]}, |
|
624 | 663 | {'message': 'c', |
|
625 | 664 | 'changed': [FileNode('file_b', 'test_content b modified\n')]}, |
|
626 | 665 | {'message': 'd', |
|
627 | 666 | 'changed': [FileNode('file_b', 'test_content b\n')]}, |
|
628 | 667 | ] |
|
629 | 668 | # open a PR from a to b, adding file_b |
|
630 | 669 | pull_request = pr_util.create_pull_request( |
|
631 | 670 | commits=commits, target_head='a', source_head='b', revisions=['b'], |
|
632 | 671 | name_suffix='per-file-review') |
|
633 | 672 | |
|
634 | 673 | # modify PR modifying file file_b |
|
635 | 674 | pr_util.add_one_commit(head='c') |
|
636 | 675 | |
|
637 | 676 | assert_pr_file_changes( |
|
638 | 677 | pull_request, |
|
639 | 678 | added=[], |
|
640 | 679 | modified=['file_b'], |
|
641 | 680 | removed=[]) |
|
642 | 681 | |
|
643 | 682 | # move the head again to d, which rolls back the change, |
|
644 | 683 | # meaning we should indicate no changes |
|
645 | 684 | pr_util.add_one_commit(head='d') |
|
646 | 685 | |
|
647 | 686 | assert_pr_file_changes( |
|
648 | 687 | pull_request, |
|
649 | 688 | added=[], |
|
650 | 689 | modified=[], |
|
651 | 690 | removed=[]) |
|
652 | 691 | |
|
653 | 692 | def test_updated_all_files_in_pr(self, pr_util): |
|
654 | 693 | commits = [ |
|
655 | 694 | {'message': 'a'}, |
|
656 | 695 | {'message': 'b', 'added': [ |
|
657 | 696 | FileNode('file_a', 'test_content a\n'), |
|
658 | 697 | FileNode('file_b', 'test_content b\n'), |
|
659 | 698 | FileNode('file_c', 'test_content c\n')]}, |
|
660 | 699 | {'message': 'c', 'changed': [ |
|
661 | 700 | FileNode('file_a', 'test_content a changed\n'), |
|
662 | 701 | FileNode('file_b', 'test_content b changed\n'), |
|
663 | 702 | FileNode('file_c', 'test_content c changed\n')]}, |
|
664 | 703 | ] |
|
665 | 704 | # open a PR from a to b, changing 3 files |
|
666 | 705 | pull_request = pr_util.create_pull_request( |
|
667 | 706 | commits=commits, target_head='a', source_head='b', revisions=['b'], |
|
668 | 707 | name_suffix='per-file-review') |
|
669 | 708 | |
|
670 | 709 | pr_util.add_one_commit(head='c') |
|
671 | 710 | |
|
672 | 711 | assert_pr_file_changes( |
|
673 | 712 | pull_request, |
|
674 | 713 | added=[], |
|
675 | 714 | modified=['file_a', 'file_b', 'file_c'], |
|
676 | 715 | removed=[]) |
|
677 | 716 | |
|
678 | 717 | def test_updated_and_removed_all_files_in_pr(self, pr_util): |
|
679 | 718 | commits = [ |
|
680 | 719 | {'message': 'a'}, |
|
681 | 720 | {'message': 'b', 'added': [ |
|
682 | 721 | FileNode('file_a', 'test_content a\n'), |
|
683 | 722 | FileNode('file_b', 'test_content b\n'), |
|
684 | 723 | FileNode('file_c', 'test_content c\n')]}, |
|
685 | 724 | {'message': 'c', 'removed': [ |
|
686 | 725 | FileNode('file_a', 'test_content a changed\n'), |
|
687 | 726 | FileNode('file_b', 'test_content b changed\n'), |
|
688 | 727 | FileNode('file_c', 'test_content c changed\n')]}, |
|
689 | 728 | ] |
|
690 | 729 | # open a PR from a to b, removing 3 files |
|
691 | 730 | pull_request = pr_util.create_pull_request( |
|
692 | 731 | commits=commits, target_head='a', source_head='b', revisions=['b'], |
|
693 | 732 | name_suffix='per-file-review') |
|
694 | 733 | |
|
695 | 734 | pr_util.add_one_commit(head='c') |
|
696 | 735 | |
|
697 | 736 | assert_pr_file_changes( |
|
698 | 737 | pull_request, |
|
699 | 738 | added=[], |
|
700 | 739 | modified=[], |
|
701 | 740 | removed=['file_a', 'file_b', 'file_c']) |
|
702 | 741 | |
|
703 | 742 | |
|
704 | 743 | def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub): |
|
705 | 744 | model = PullRequestModel() |
|
706 | 745 | pull_request = pr_util.create_pull_request() |
|
707 | 746 | pr_util.update_source_repository() |
|
708 | 747 | |
|
709 | 748 | model.update_commits(pull_request) |
|
710 | 749 | |
|
711 | 750 | # Expect that it has a version entry now |
|
712 | 751 | assert len(model.get_versions(pull_request)) == 1 |
|
713 | 752 | |
|
714 | 753 | |
|
715 | 754 | def test_update_skips_new_version_if_unchanged(pr_util, config_stub): |
|
716 | 755 | pull_request = pr_util.create_pull_request() |
|
717 | 756 | model = PullRequestModel() |
|
718 | 757 | model.update_commits(pull_request) |
|
719 | 758 | |
|
720 | 759 | # Expect that it still has no versions |
|
721 | 760 | assert len(model.get_versions(pull_request)) == 0 |
|
722 | 761 | |
|
723 | 762 | |
|
724 | 763 | def test_update_assigns_comments_to_the_new_version(pr_util, config_stub): |
|
725 | 764 | model = PullRequestModel() |
|
726 | 765 | pull_request = pr_util.create_pull_request() |
|
727 | 766 | comment = pr_util.create_comment() |
|
728 | 767 | pr_util.update_source_repository() |
|
729 | 768 | |
|
730 | 769 | model.update_commits(pull_request) |
|
731 | 770 | |
|
732 | 771 | # Expect that the comment is linked to the pr version now |
|
733 | 772 | assert comment.pull_request_version == model.get_versions(pull_request)[0] |
|
734 | 773 | |
|
735 | 774 | |
|
736 | 775 | def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub): |
|
737 | 776 | model = PullRequestModel() |
|
738 | 777 | pull_request = pr_util.create_pull_request() |
|
739 | 778 | pr_util.update_source_repository() |
|
740 | 779 | pr_util.update_source_repository() |
|
741 | 780 | |
|
742 | 781 | model.update_commits(pull_request) |
|
743 | 782 | |
|
744 | 783 | # Expect to find a new comment about the change |
|
745 | 784 | expected_message = textwrap.dedent( |
|
746 | 785 | """\ |
|
747 | 786 | Pull request updated. Auto status change to |under_review| |
|
748 | 787 | |
|
749 | 788 | .. role:: added |
|
750 | 789 | .. role:: removed |
|
751 | 790 | .. parsed-literal:: |
|
752 | 791 | |
|
753 | 792 | Changed commits: |
|
754 | 793 | * :added:`1 added` |
|
755 | 794 | * :removed:`0 removed` |
|
756 | 795 | |
|
757 | 796 | Changed files: |
|
758 | 797 | * `A file_2 <#a_c--92ed3b5f07b4>`_ |
|
759 | 798 | |
|
760 | 799 | .. |under_review| replace:: *"Under Review"*""" |
|
761 | 800 | ) |
|
762 | 801 | pull_request_comments = sorted( |
|
763 | 802 | pull_request.comments, key=lambda c: c.modified_at) |
|
764 | 803 | update_comment = pull_request_comments[-1] |
|
765 | 804 | assert update_comment.text == expected_message |
|
766 | 805 | |
|
767 | 806 | |
|
768 | 807 | def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub): |
|
769 | 808 | pull_request = pr_util.create_pull_request() |
|
770 | 809 | |
|
771 | 810 | # Avoiding default values |
|
772 | 811 | pull_request.status = PullRequest.STATUS_CLOSED |
|
773 | 812 | pull_request._last_merge_source_rev = "0" * 40 |
|
774 | 813 | pull_request._last_merge_target_rev = "1" * 40 |
|
775 | 814 | pull_request.last_merge_status = 1 |
|
776 | 815 | pull_request.merge_rev = "2" * 40 |
|
777 | 816 | |
|
778 | 817 | # Remember automatic values |
|
779 | 818 | created_on = pull_request.created_on |
|
780 | 819 | updated_on = pull_request.updated_on |
|
781 | 820 | |
|
782 | 821 | # Create a new version of the pull request |
|
783 | 822 | version = PullRequestModel()._create_version_from_snapshot(pull_request) |
|
784 | 823 | |
|
785 | 824 | # Check attributes |
|
786 | 825 | assert version.title == pr_util.create_parameters['title'] |
|
787 | 826 | assert version.description == pr_util.create_parameters['description'] |
|
788 | 827 | assert version.status == PullRequest.STATUS_CLOSED |
|
789 | 828 | |
|
790 | 829 | # versions get updated created_on |
|
791 | 830 | assert version.created_on != created_on |
|
792 | 831 | |
|
793 | 832 | assert version.updated_on == updated_on |
|
794 | 833 | assert version.user_id == pull_request.user_id |
|
795 | 834 | assert version.revisions == pr_util.create_parameters['revisions'] |
|
796 | 835 | assert version.source_repo == pr_util.source_repository |
|
797 | 836 | assert version.source_ref == pr_util.create_parameters['source_ref'] |
|
798 | 837 | assert version.target_repo == pr_util.target_repository |
|
799 | 838 | assert version.target_ref == pr_util.create_parameters['target_ref'] |
|
800 | 839 | assert version._last_merge_source_rev == pull_request._last_merge_source_rev |
|
801 | 840 | assert version._last_merge_target_rev == pull_request._last_merge_target_rev |
|
802 | 841 | assert version.last_merge_status == pull_request.last_merge_status |
|
803 | 842 | assert version.merge_rev == pull_request.merge_rev |
|
804 | 843 | assert version.pull_request == pull_request |
|
805 | 844 | |
|
806 | 845 | |
|
807 | 846 | def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub): |
|
808 | 847 | version1 = pr_util.create_version_of_pull_request() |
|
809 | 848 | comment_linked = pr_util.create_comment(linked_to=version1) |
|
810 | 849 | comment_unlinked = pr_util.create_comment() |
|
811 | 850 | version2 = pr_util.create_version_of_pull_request() |
|
812 | 851 | |
|
813 | 852 | PullRequestModel()._link_comments_to_version(version2) |
|
814 | 853 | |
|
815 | 854 | # Expect that only the new comment is linked to version2 |
|
816 | 855 | assert ( |
|
817 | 856 | comment_unlinked.pull_request_version_id == |
|
818 | 857 | version2.pull_request_version_id) |
|
819 | 858 | assert ( |
|
820 | 859 | comment_linked.pull_request_version_id == |
|
821 | 860 | version1.pull_request_version_id) |
|
822 | 861 | assert ( |
|
823 | 862 | comment_unlinked.pull_request_version_id != |
|
824 | 863 | comment_linked.pull_request_version_id) |
|
825 | 864 | |
|
826 | 865 | |
|
827 | 866 | def test_calculate_commits(): |
|
828 | 867 | old_ids = [1, 2, 3] |
|
829 | 868 | new_ids = [1, 3, 4, 5] |
|
830 | 869 | change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids) |
|
831 | 870 | assert change.added == [4, 5] |
|
832 | 871 | assert change.common == [1, 3] |
|
833 | 872 | assert change.removed == [2] |
|
834 | 873 | assert change.total == [1, 3, 4, 5] |
|
835 | 874 | |
|
836 | 875 | |
|
837 | 876 | def assert_inline_comments(pull_request, visible=None, outdated=None): |
|
838 | 877 | if visible is not None: |
|
839 | 878 | inline_comments = CommentsModel().get_inline_comments( |
|
840 | 879 | pull_request.target_repo.repo_id, pull_request=pull_request) |
|
841 | 880 | inline_cnt = CommentsModel().get_inline_comments_count( |
|
842 | 881 | inline_comments) |
|
843 | 882 | assert inline_cnt == visible |
|
844 | 883 | if outdated is not None: |
|
845 | 884 | outdated_comments = CommentsModel().get_outdated_comments( |
|
846 | 885 | pull_request.target_repo.repo_id, pull_request) |
|
847 | 886 | assert len(outdated_comments) == outdated |
|
848 | 887 | |
|
849 | 888 | |
|
850 | 889 | def assert_pr_file_changes( |
|
851 | 890 | pull_request, added=None, modified=None, removed=None): |
|
852 | 891 | pr_versions = PullRequestModel().get_versions(pull_request) |
|
853 | 892 | # always use first version, ie original PR to calculate changes |
|
854 | 893 | pull_request_version = pr_versions[0] |
|
855 | 894 | old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs( |
|
856 | 895 | pull_request, pull_request_version) |
|
857 | 896 | file_changes = PullRequestModel()._calculate_file_changes( |
|
858 | 897 | old_diff_data, new_diff_data) |
|
859 | 898 | |
|
860 | 899 | assert added == file_changes.added, \ |
|
861 | 900 | 'expected added:%s vs value:%s' % (added, file_changes.added) |
|
862 | 901 | assert modified == file_changes.modified, \ |
|
863 | 902 | 'expected modified:%s vs value:%s' % (modified, file_changes.modified) |
|
864 | 903 | assert removed == file_changes.removed, \ |
|
865 | 904 | 'expected removed:%s vs value:%s' % (removed, file_changes.removed) |
|
866 | 905 | |
|
867 | 906 | |
|
868 | 907 | def outdated_comments_patcher(use_outdated=True): |
|
869 | 908 | return mock.patch.object( |
|
870 | 909 | CommentsModel, 'use_outdated_comments', |
|
871 | 910 | return_value=use_outdated) |
@@ -1,1887 +1,1886 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import collections |
|
22 | 22 | import datetime |
|
23 | 23 | import hashlib |
|
24 | 24 | import os |
|
25 | 25 | import re |
|
26 | 26 | import pprint |
|
27 | 27 | import shutil |
|
28 | 28 | import socket |
|
29 | 29 | import subprocess32 |
|
30 | 30 | import time |
|
31 | 31 | import uuid |
|
32 | 32 | import dateutil.tz |
|
33 | 33 | import functools |
|
34 | 34 | |
|
35 | 35 | import mock |
|
36 | 36 | import pyramid.testing |
|
37 | 37 | import pytest |
|
38 | 38 | import colander |
|
39 | 39 | import requests |
|
40 | 40 | import pyramid.paster |
|
41 | 41 | |
|
42 | 42 | import rhodecode |
|
43 | 43 | from rhodecode.lib.utils2 import AttributeDict |
|
44 | 44 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
45 | 45 | from rhodecode.model.comment import CommentsModel |
|
46 | 46 | from rhodecode.model.db import ( |
|
47 | 47 | PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup, |
|
48 | 48 | UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi) |
|
49 | 49 | from rhodecode.model.meta import Session |
|
50 | 50 | from rhodecode.model.pull_request import PullRequestModel |
|
51 | 51 | from rhodecode.model.repo import RepoModel |
|
52 | 52 | from rhodecode.model.repo_group import RepoGroupModel |
|
53 | 53 | from rhodecode.model.user import UserModel |
|
54 | 54 | from rhodecode.model.settings import VcsSettingsModel |
|
55 | 55 | from rhodecode.model.user_group import UserGroupModel |
|
56 | 56 | from rhodecode.model.integration import IntegrationModel |
|
57 | 57 | from rhodecode.integrations import integration_type_registry |
|
58 | 58 | from rhodecode.integrations.types.base import IntegrationTypeBase |
|
59 | 59 | from rhodecode.lib.utils import repo2db_mapper |
|
60 | 60 | from rhodecode.lib.vcs import create_vcsserver_proxy |
|
61 | 61 | from rhodecode.lib.vcs.backends import get_backend |
|
62 | 62 | from rhodecode.lib.vcs.nodes import FileNode |
|
63 | 63 | from rhodecode.tests import ( |
|
64 | 64 | login_user_session, get_new_dir, utils, TESTS_TMP_PATH, |
|
65 | 65 | TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN, |
|
66 | 66 | TEST_USER_REGULAR_PASS) |
|
67 | 67 | from rhodecode.tests.utils import CustomTestApp, set_anonymous_access |
|
68 | 68 | from rhodecode.tests.fixture import Fixture |
|
69 | 69 | from rhodecode.config import utils as config_utils |
|
70 | 70 | |
|
71 | 71 | def _split_comma(value): |
|
72 | 72 | return value.split(',') |
|
73 | 73 | |
|
74 | 74 | |
|
75 | 75 | def pytest_addoption(parser): |
|
76 | 76 | parser.addoption( |
|
77 | 77 | '--keep-tmp-path', action='store_true', |
|
78 | 78 | help="Keep the test temporary directories") |
|
79 | 79 | parser.addoption( |
|
80 | 80 | '--backends', action='store', type=_split_comma, |
|
81 | 81 | default=['git', 'hg', 'svn'], |
|
82 | 82 | help="Select which backends to test for backend specific tests.") |
|
83 | 83 | parser.addoption( |
|
84 | 84 | '--dbs', action='store', type=_split_comma, |
|
85 | 85 | default=['sqlite'], |
|
86 | 86 | help="Select which database to test for database specific tests. " |
|
87 | 87 | "Possible options are sqlite,postgres,mysql") |
|
88 | 88 | parser.addoption( |
|
89 | 89 | '--appenlight', '--ae', action='store_true', |
|
90 | 90 | help="Track statistics in appenlight.") |
|
91 | 91 | parser.addoption( |
|
92 | 92 | '--appenlight-api-key', '--ae-key', |
|
93 | 93 | help="API key for Appenlight.") |
|
94 | 94 | parser.addoption( |
|
95 | 95 | '--appenlight-url', '--ae-url', |
|
96 | 96 | default="https://ae.rhodecode.com", |
|
97 | 97 | help="Appenlight service URL, defaults to https://ae.rhodecode.com") |
|
98 | 98 | parser.addoption( |
|
99 | 99 | '--sqlite-connection-string', action='store', |
|
100 | 100 | default='', help="Connection string for the dbs tests with SQLite") |
|
101 | 101 | parser.addoption( |
|
102 | 102 | '--postgres-connection-string', action='store', |
|
103 | 103 | default='', help="Connection string for the dbs tests with Postgres") |
|
104 | 104 | parser.addoption( |
|
105 | 105 | '--mysql-connection-string', action='store', |
|
106 | 106 | default='', help="Connection string for the dbs tests with MySQL") |
|
107 | 107 | parser.addoption( |
|
108 | 108 | '--repeat', type=int, default=100, |
|
109 | 109 | help="Number of repetitions in performance tests.") |
|
110 | 110 | |
|
111 | 111 | |
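The options registered above are read back through pytest's config API. A
minimal sketch of consuming one of them in a fixture (the fixture name is
illustrative):

    @pytest.fixture
    def repeat_count(request):
        # Number of repetitions for performance tests, as passed via
        # the --repeat option above (default 100).
        return request.config.getoption('--repeat')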
|
112 | 112 | def pytest_configure(config): |
|
113 | 113 | from rhodecode.config import patches |
|
114 | 114 | |
|
115 | 115 | |
|
116 | 116 | def pytest_collection_modifyitems(session, config, items): |
|
117 | 117 | # drop items marked as non-tests (__test__ = False); compare nose's nottest, kept for the nose-to-pytest transition |
|
118 | 118 | remaining = [ |
|
119 | 119 | i for i in items if getattr(i.obj, '__test__', True)] |
|
120 | 120 | items[:] = remaining |
|
121 | 121 | |
|
122 | 122 | |
|
123 | 123 | def pytest_generate_tests(metafunc): |
|
124 | 124 | # Support test generation based on --backend parameter |
|
125 | 125 | if 'backend_alias' in metafunc.fixturenames: |
|
126 | 126 | backends = get_backends_from_metafunc(metafunc) |
|
127 | 127 | scope = None |
|
128 | 128 | if not backends: |
|
129 | 129 | pytest.skip("Not enabled for any of selected backends") |
|
130 | 130 | metafunc.parametrize('backend_alias', backends, scope=scope) |
|
131 | 131 | elif hasattr(metafunc.function, 'backends'): |
|
132 | 132 | backends = get_backends_from_metafunc(metafunc) |
|
133 | 133 | if not backends: |
|
134 | 134 | pytest.skip("Not enabled for any of selected backends") |
|
135 | 135 | |
|
136 | 136 | |
|
137 | 137 | def get_backends_from_metafunc(metafunc): |
|
138 | 138 | requested_backends = set(metafunc.config.getoption('--backends')) |
|
139 | 139 | if hasattr(metafunc.function, 'backends'): |
|
140 | 140 | # Supported backends by this test function, created from |
|
141 | 141 | # pytest.mark.backends |
|
142 | 142 | backends = metafunc.definition.get_closest_marker('backends').args |
|
143 | 143 | elif hasattr(metafunc.cls, 'backend_alias'): |
|
144 | 144 | # Support class attribute "backend_alias", this is mainly |
|
145 | 145 | # for legacy reasons for tests not yet using pytest.mark.backends |
|
146 | 146 | backends = [metafunc.cls.backend_alias] |
|
147 | 147 | else: |
|
148 | 148 | backends = metafunc.config.getoption('--backends') |
|
149 | 149 | return requested_backends.intersection(backends) |
|
150 | 150 | |
|
151 | 151 | |
|
152 | 152 | @pytest.fixture(scope='session', autouse=True) |
|
153 | 153 | def activate_example_rcextensions(request): |
|
154 | 154 | """ |
|
155 | 155 | Patch in an example rcextensions module which verifies passed-in kwargs. |
|
156 | 156 | """ |
|
157 | 157 | from rhodecode.config import rcextensions |
|
158 | 158 | |
|
159 | 159 | old_extensions = rhodecode.EXTENSIONS |
|
160 | 160 | rhodecode.EXTENSIONS = rcextensions |
|
161 | 161 | rhodecode.EXTENSIONS.calls = collections.defaultdict(list) |
|
162 | 162 | |
|
163 | 163 | @request.addfinalizer |
|
164 | 164 | def cleanup(): |
|
165 | 165 | rhodecode.EXTENSIONS = old_extensions |
|
166 | 166 | |
|
167 | 167 | |
|
168 | 168 | @pytest.fixture |
|
169 | 169 | def capture_rcextensions(): |
|
170 | 170 | """ |
|
171 | 171 | Returns the recorded calls to entry points in rcextensions. |
|
172 | 172 | """ |
|
173 | 173 | calls = rhodecode.EXTENSIONS.calls |
|
174 | 174 | calls.clear() |
|
175 | 175 | # Note: at this point it is still an empty dict, but it will be |

176 | 176 | # filled during the test run; since it is a reference, this |

177 | 177 | # is enough to make it work. |
|
178 | 178 | return calls |
|
179 | 179 | |
|
180 | 180 | |
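A hedged usage sketch: the fixture returns the live rhodecode.EXTENSIONS.calls
mapping, a defaultdict(list) keyed by entry point name, so a test can trigger
an operation and then assert on what was recorded (the hook name below is
illustrative):

    def test_operation_fires_rcextensions(capture_rcextensions):
        # ... trigger an operation that invokes an rcextensions
        # entry point here ...
        # afterwards, inspect the recorded calls by hook name; each
        # entry holds the kwargs that were passed to the hook
        recorded = capture_rcextensions['_push_hook']
        assert isinstance(recorded, list)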
|
181 | 181 | @pytest.fixture(scope='session') |
|
182 | 182 | def http_environ_session(): |
|
183 | 183 | """ |
|
184 | 184 | Allow to use "http_environ" in session scope. |
|
185 | 185 | """ |
|
186 | 186 | return plain_http_environ() |
|
187 | 187 | |
|
188 | 188 | |
|
189 | 189 | def plain_http_host_stub(): |
|
190 | 190 | """ |
|
191 | 191 | Value of HTTP_HOST in the test run. |
|
192 | 192 | """ |
|
193 | 193 | return 'example.com:80' |
|
194 | 194 | |
|
195 | 195 | |
|
196 | 196 | @pytest.fixture |
|
197 | 197 | def http_host_stub(): |
|
198 | 198 | """ |
|
199 | 199 | Value of HTTP_HOST in the test run. |
|
200 | 200 | """ |
|
201 | 201 | return plain_http_host_stub() |
|
202 | 202 | |
|
203 | 203 | |
|
204 | 204 | def plain_http_host_only_stub(): |
|
205 | 205 | """ |
|
206 | 206 | Value of HTTP_HOST in the test run. |
|
207 | 207 | """ |
|
208 | 208 | return plain_http_host_stub().split(':')[0] |
|
209 | 209 | |
|
210 | 210 | |
|
211 | 211 | @pytest.fixture |
|
212 | 212 | def http_host_only_stub(): |
|
213 | 213 | """ |
|
214 | 214 | Value of HTTP_HOST in the test run. |
|
215 | 215 | """ |
|
216 | 216 | return plain_http_host_only_stub() |
|
217 | 217 | |
|
218 | 218 | |
|
219 | 219 | def plain_http_environ(): |
|
220 | 220 | """ |
|
221 | 221 | HTTP extra environ keys. |
|
222 | 222 | |
|
223 | 223 | Used by the test application as well as for setting up the pylons |
|
224 | 224 | environment. In the case of the fixture "app" it should be possible |
|
225 | 225 | to override this for a specific test case. |
|
226 | 226 | """ |
|
227 | 227 | return { |
|
228 | 228 | 'SERVER_NAME': plain_http_host_only_stub(), |
|
229 | 229 | 'SERVER_PORT': plain_http_host_stub().split(':')[1], |
|
230 | 230 | 'HTTP_HOST': plain_http_host_stub(), |
|
231 | 231 | 'HTTP_USER_AGENT': 'rc-test-agent', |
|
232 | 232 | 'REQUEST_METHOD': 'GET' |
|
233 | 233 | } |
|
234 | 234 | |
|
235 | 235 | |
|
236 | 236 | @pytest.fixture |
|
237 | 237 | def http_environ(): |
|
238 | 238 | """ |
|
239 | 239 | HTTP extra environ keys. |
|
240 | 240 | |
|
241 | 241 | Used by the test application as well as for setting up the pylons |
|
242 | 242 | environment. In the case of the fixture "app" it should be possible |
|
243 | 243 | to override this for a specific test case. |
|
244 | 244 | """ |
|
245 | 245 | return plain_http_environ() |
|
246 | 246 | |
|
247 | 247 | |
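The override mentioned in the docstring works through a standard pytest
fixture override. A minimal sketch for a test module that needs a different
host (the values are illustrative):

    # Shadow the fixture at module or conftest level so that the "app"
    # fixture builds its requests against this host instead.
    @pytest.fixture
    def http_environ():
        environ = plain_http_environ()
        environ['HTTP_HOST'] = 'override.example.com:8080'
        environ['SERVER_PORT'] = '8080'
        return environ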
|
248 | 248 | @pytest.fixture(scope='session') |
|
249 | 249 | def baseapp(ini_config, vcsserver, http_environ_session): |
|
250 | 250 | from rhodecode.lib.pyramid_utils import get_app_config |
|
251 | 251 | from rhodecode.config.middleware import make_pyramid_app |
|
252 | 252 | |
|
253 | 253 | print("Using the RhodeCode configuration:{}".format(ini_config)) |
|
254 | 254 | pyramid.paster.setup_logging(ini_config) |
|
255 | 255 | |
|
256 | 256 | settings = get_app_config(ini_config) |
|
257 | 257 | app = make_pyramid_app({'__file__': ini_config}, **settings) |
|
258 | 258 | |
|
259 | 259 | return app |
|
260 | 260 | |
|
261 | 261 | |
|
262 | 262 | @pytest.fixture(scope='function') |
|
263 | 263 | def app(request, config_stub, baseapp, http_environ): |
|
264 | 264 | app = CustomTestApp( |
|
265 | 265 | baseapp, |
|
266 | 266 | extra_environ=http_environ) |
|
267 | 267 | if request.cls: |
|
268 | 268 | request.cls.app = app |
|
269 | 269 | return app |
|
270 | 270 | |
|
271 | 271 | |
|
272 | 272 | @pytest.fixture(scope='session') |
|
273 | 273 | def app_settings(baseapp, ini_config): |
|
274 | 274 | """ |
|
275 | 275 | Settings dictionary used to create the app. |
|
276 | 276 | |
|
277 | 277 | Parses the ini file and passes the result through the sanitize and apply |
|
278 | 278 | defaults mechanism in `rhodecode.config.middleware`. |
|
279 | 279 | """ |
|
280 | 280 | return baseapp.config.get_settings() |
|
281 | 281 | |
|
282 | 282 | |
|
283 | 283 | @pytest.fixture(scope='session') |
|
284 | 284 | def db_connection(ini_settings): |
|
285 | 285 | # Initialize the database connection. |
|
286 | 286 | config_utils.initialize_database(ini_settings) |
|
287 | 287 | |
|
288 | 288 | |
|
289 | 289 | LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user')) |
|
290 | 290 | |
|
291 | 291 | |
|
292 | 292 | def _autologin_user(app, *args): |
|
293 | 293 | session = login_user_session(app, *args) |
|
294 | 294 | csrf_token = rhodecode.lib.auth.get_csrf_token(session) |
|
295 | 295 | return LoginData(csrf_token, session['rhodecode_user']) |
|
296 | 296 | |
|
297 | 297 | |
|
298 | 298 | @pytest.fixture |
|
299 | 299 | def autologin_user(app): |
|
300 | 300 | """ |
|
301 | 301 | Utility fixture which makes sure that the admin user is logged in |
|
302 | 302 | """ |
|
303 | 303 | return _autologin_user(app) |
|
304 | 304 | |
|
305 | 305 | |
|
306 | 306 | @pytest.fixture |
|
307 | 307 | def autologin_regular_user(app): |
|
308 | 308 | """ |
|
309 | 309 | Utility fixture which makes sure that the regular user is logged in |
|
310 | 310 | """ |
|
311 | 311 | return _autologin_user( |
|
312 | 312 | app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS) |
|
313 | 313 | |
|
314 | 314 | |
|
315 | 315 | @pytest.fixture(scope='function') |
|
316 | 316 | def csrf_token(request, autologin_user): |
|
317 | 317 | return autologin_user.csrf_token |
|
318 | 318 | |
|
319 | 319 | |
|
320 | 320 | @pytest.fixture(scope='function') |
|
321 | 321 | def xhr_header(request): |
|
322 | 322 | return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'} |
|
323 | 323 | |
|
324 | 324 | |
|
325 | 325 | @pytest.fixture |
|
326 | 326 | def real_crypto_backend(monkeypatch): |
|
327 | 327 | """ |
|
328 | 328 | Switch the production crypto backend on for this test. |
|
329 | 329 | |
|
330 | 330 | During the test run the crypto backend is replaced with a faster |
|
331 | 331 | implementation based on the MD5 algorithm. |
|
332 | 332 | """ |
|
333 | 333 | monkeypatch.setattr(rhodecode, 'is_test', False) |
|
334 | 334 | |
|
335 | 335 | |
|
336 | 336 | @pytest.fixture(scope='class') |
|
337 | 337 | def index_location(request, baseapp): |
|
338 | 338 | index_location = baseapp.config.get_settings()['search.location'] |
|
339 | 339 | if request.cls: |
|
340 | 340 | request.cls.index_location = index_location |
|
341 | 341 | return index_location |
|
342 | 342 | |
|
343 | 343 | |
|
344 | 344 | @pytest.fixture(scope='session', autouse=True) |
|
345 | 345 | def tests_tmp_path(request): |
|
346 | 346 | """ |
|
347 | 347 | Create temporary directory to be used during the test session. |
|
348 | 348 | """ |
|
349 | 349 | if not os.path.exists(TESTS_TMP_PATH): |
|
350 | 350 | os.makedirs(TESTS_TMP_PATH) |
|
351 | 351 | |
|
352 | 352 | if not request.config.getoption('--keep-tmp-path'): |
|
353 | 353 | @request.addfinalizer |
|
354 | 354 | def remove_tmp_path(): |
|
355 | 355 | shutil.rmtree(TESTS_TMP_PATH) |
|
356 | 356 | |
|
357 | 357 | return TESTS_TMP_PATH |
|
358 | 358 | |
|
359 | 359 | |
|
360 | 360 | @pytest.fixture |
|
361 | 361 | def test_repo_group(request): |
|
362 | 362 | """ |
|
363 | 363 | Create a temporary repository group, and destroy it |

364 | 364 | automatically after use |
|
365 | 365 | """ |
|
366 | 366 | fixture = Fixture() |
|
367 | 367 | repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '') |
|
368 | 368 | repo_group = fixture.create_repo_group(repogroupid) |
|
369 | 369 | |
|
370 | 370 | def _cleanup(): |
|
371 | 371 | fixture.destroy_repo_group(repogroupid) |
|
372 | 372 | |
|
373 | 373 | request.addfinalizer(_cleanup) |
|
374 | 374 | return repo_group |
|
375 | 375 | |
|
376 | 376 | |
|
377 | 377 | @pytest.fixture |
|
378 | 378 | def test_user_group(request): |
|
379 | 379 | """ |
|
380 | 380 | Create a temporary user group, and destroy it after |
|
381 | 381 | usage automatically |
|
382 | 382 | """ |
|
383 | 383 | fixture = Fixture() |
|
384 | 384 | usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '') |
|
385 | 385 | user_group = fixture.create_user_group(usergroupid) |
|
386 | 386 | |
|
387 | 387 | def _cleanup(): |
|
388 | 388 | fixture.destroy_user_group(user_group) |
|
389 | 389 | |
|
390 | 390 | request.addfinalizer(_cleanup) |
|
391 | 391 | return user_group |
|
392 | 392 | |
|
393 | 393 | |
|
394 | 394 | @pytest.fixture(scope='session') |
|
395 | 395 | def test_repo(request): |
|
396 | 396 | container = TestRepoContainer() |
|
397 | 397 | request.addfinalizer(container._cleanup) |
|
398 | 398 | return container |
|
399 | 399 | |
|
400 | 400 | |
|
401 | 401 | class TestRepoContainer(object): |
|
402 | 402 | """ |
|
403 | 403 | Container for test repositories which are used read only. |
|
404 | 404 | |
|
405 | 405 | Repositories will be created on demand and re-used during the lifetime |
|
406 | 406 | of this object. |
|
407 | 407 | |
|
408 | 408 | Usage to get the svn test repository "minimal":: |
|
409 | 409 | |
|
410 | 410 | test_repo = TestRepoContainer() |
|
411 | 411 | repo = test_repo('minimal', 'svn') |
|
412 | 412 | |
|
413 | 413 | """ |
|
414 | 414 | |
|
415 | 415 | dump_extractors = { |
|
416 | 416 | 'git': utils.extract_git_repo_from_dump, |
|
417 | 417 | 'hg': utils.extract_hg_repo_from_dump, |
|
418 | 418 | 'svn': utils.extract_svn_repo_from_dump, |
|
419 | 419 | } |
|
420 | 420 | |
|
421 | 421 | def __init__(self): |
|
422 | 422 | self._cleanup_repos = [] |
|
423 | 423 | self._fixture = Fixture() |
|
424 | 424 | self._repos = {} |
|
425 | 425 | |
|
426 | 426 | def __call__(self, dump_name, backend_alias, config=None): |
|
427 | 427 | key = (dump_name, backend_alias) |
|
428 | 428 | if key not in self._repos: |
|
429 | 429 | repo = self._create_repo(dump_name, backend_alias, config) |
|
430 | 430 | self._repos[key] = repo.repo_id |
|
431 | 431 | return Repository.get(self._repos[key]) |
|
432 | 432 | |
|
433 | 433 | def _create_repo(self, dump_name, backend_alias, config): |
|
434 | 434 | repo_name = '%s-%s' % (backend_alias, dump_name) |
|
435 | 435 | backend_class = get_backend(backend_alias) |
|
436 | 436 | dump_extractor = self.dump_extractors[backend_alias] |
|
437 | 437 | repo_path = dump_extractor(dump_name, repo_name) |
|
438 | 438 | |
|
439 | 439 | vcs_repo = backend_class(repo_path, config=config) |
|
440 | 440 | repo2db_mapper({repo_name: vcs_repo}) |
|
441 | 441 | |
|
442 | 442 | repo = RepoModel().get_by_repo_name(repo_name) |
|
443 | 443 | self._cleanup_repos.append(repo_name) |
|
444 | 444 | return repo |
|
445 | 445 | |
|
446 | 446 | def _cleanup(self): |
|
447 | 447 | for repo_name in reversed(self._cleanup_repos): |
|
448 | 448 | self._fixture.destroy_repo(repo_name) |
|
449 | 449 | |
|
450 | 450 | |
|
451 | 451 | def backend_base(request, backend_alias, baseapp, test_repo): |
|
452 | 452 | if backend_alias not in request.config.getoption('--backends'): |
|
453 | 453 | pytest.skip("Backend %s not selected." % (backend_alias, )) |
|
454 | 454 | |
|
455 | 455 | utils.check_xfail_backends(request.node, backend_alias) |
|
456 | 456 | utils.check_skip_backends(request.node, backend_alias) |
|
457 | 457 | |
|
458 | 458 | repo_name = 'vcs_test_%s' % (backend_alias, ) |
|
459 | 459 | backend = Backend( |
|
460 | 460 | alias=backend_alias, |
|
461 | 461 | repo_name=repo_name, |
|
462 | 462 | test_name=request.node.name, |
|
463 | 463 | test_repo_container=test_repo) |
|
464 | 464 | request.addfinalizer(backend.cleanup) |
|
465 | 465 | return backend |
|
466 | 466 | |
|
467 | 467 | |
|
468 | 468 | @pytest.fixture |
|
469 | 469 | def backend(request, backend_alias, baseapp, test_repo): |
|
470 | 470 | """ |
|
471 | 471 | Parametrized fixture which represents a single backend implementation. |
|
472 | 472 | |
|
473 | 473 | It respects the option `--backends` to focus the test run on specific |
|
474 | 474 | backend implementations. |
|
475 | 475 | |
|
476 | 476 | It also supports `pytest.mark.xfail_backends` to mark tests as failing |
|
477 | 477 | for specific backends. This is intended as a utility for incremental |
|
478 | 478 | development of a new backend implementation. |
|
479 | 479 | """ |
|
480 | 480 | return backend_base(request, backend_alias, baseapp, test_repo) |
|
481 | 481 | |
|
482 | 482 | |
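A minimal sketch of the marker mentioned above: pytest.mark.backends limits a
test to the listed backends, and get_backends_from_metafunc intersects those
with the --backends command line selection:

    @pytest.mark.backends('git', 'hg')
    def test_runs_only_for_git_and_hg(backend):
        # parametrized only over the marked backends that are also
        # enabled via --backends
        assert backend.alias in ('git', 'hg')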
|
483 | 483 | @pytest.fixture |
|
484 | 484 | def backend_git(request, baseapp, test_repo): |
|
485 | 485 | return backend_base(request, 'git', baseapp, test_repo) |
|
486 | 486 | |
|
487 | 487 | |
|
488 | 488 | @pytest.fixture |
|
489 | 489 | def backend_hg(request, baseapp, test_repo): |
|
490 | 490 | return backend_base(request, 'hg', baseapp, test_repo) |
|
491 | 491 | |
|
492 | 492 | |
|
493 | 493 | @pytest.fixture |
|
494 | 494 | def backend_svn(request, baseapp, test_repo): |
|
495 | 495 | return backend_base(request, 'svn', baseapp, test_repo) |
|
496 | 496 | |
|
497 | 497 | |
|
498 | 498 | @pytest.fixture |
|
499 | 499 | def backend_random(backend_git): |
|
500 | 500 | """ |
|
501 | 501 | Use this to express that your tests need "a backend. |
|
502 | 502 | |
|
503 | 503 | A few of our tests need a backend, so that we can run the code. This |
|
504 | 504 | fixture is intended to be used for such cases. It will pick one of the |
|
505 | 505 | backends and run the tests. |
|
506 | 506 | |
|
507 | 507 | The fixture `backend` would run the test multiple times for each |
|
508 | 508 | available backend which is a pure waste of time if the test is |
|
509 | 509 | independent of the backend type. |
|
510 | 510 | """ |
|
511 | 511 | # TODO: johbo: Change this to pick a random backend |
|
512 | 512 | return backend_git |
|
513 | 513 | |
|
514 | 514 | |
|
515 | 515 | @pytest.fixture |
|
516 | 516 | def backend_stub(backend_git): |
|
517 | 517 | """ |
|
518 | 518 | Use this to express that your tests need a backend stub |
|
519 | 519 | |
|
520 | 520 | TODO: mikhail: Implement real stub logic instead of returning |
|
521 | 521 | a git backend |
|
522 | 522 | """ |
|
523 | 523 | return backend_git |
|
524 | 524 | |
|
525 | 525 | |
|
526 | 526 | @pytest.fixture |
|
527 | 527 | def repo_stub(backend_stub): |
|
528 | 528 | """ |
|
529 | 529 | Use this to express that your tests need a repository stub |
|
530 | 530 | """ |
|
531 | 531 | return backend_stub.create_repo() |
|
532 | 532 | |
|
533 | 533 | |
|
534 | 534 | class Backend(object): |
|
535 | 535 | """ |
|
536 | 536 | Represents the test configuration for one supported backend |
|
537 | 537 | |
|
538 | 538 | Provides easy access to different test repositories based on |
|
539 | 539 | `__getitem__`. Such repositories will only be created once per test |
|
540 | 540 | session. |
|
541 | 541 | """ |
|
542 | 542 | |
|
543 | 543 | invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+') |
|
544 | 544 | _master_repo = None |
|
545 | 545 | _commit_ids = {} |
|
546 | 546 | |
|
547 | 547 | def __init__(self, alias, repo_name, test_name, test_repo_container): |
|
548 | 548 | self.alias = alias |
|
549 | 549 | self.repo_name = repo_name |
|
550 | 550 | self._cleanup_repos = [] |
|
551 | 551 | self._test_name = test_name |
|
552 | 552 | self._test_repo_container = test_repo_container |
|
553 | 553 | # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or |
|
554 | 554 | # Fixture will survive in the end. |
|
555 | 555 | self._fixture = Fixture() |
|
556 | 556 | |
|
557 | 557 | def __getitem__(self, key): |
|
558 | 558 | return self._test_repo_container(key, self.alias) |
|
559 | 559 | |
|
560 | 560 | def create_test_repo(self, key, config=None): |
|
561 | 561 | return self._test_repo_container(key, self.alias, config) |
|
562 | 562 | |
|
563 | 563 | @property |
|
564 | 564 | def repo(self): |
|
565 | 565 | """ |
|
566 | 566 | Returns the "current" repository. This is the vcs_test repo or the |
|
567 | 567 | last repo which has been created with `create_repo`. |
|
568 | 568 | """ |
|
569 | 569 | from rhodecode.model.db import Repository |
|
570 | 570 | return Repository.get_by_repo_name(self.repo_name) |
|
571 | 571 | |
|
572 | 572 | @property |
|
573 | 573 | def default_branch_name(self): |
|
574 | 574 | VcsRepository = get_backend(self.alias) |
|
575 | 575 | return VcsRepository.DEFAULT_BRANCH_NAME |
|
576 | 576 | |
|
577 | 577 | @property |
|
578 | 578 | def default_head_id(self): |
|
579 | 579 | """ |
|
580 | 580 | Returns the default head id of the underlying backend. |
|
581 | 581 | |
|
582 | 582 | This will be the default branch name in case the backend does have a |
|
583 | 583 | default branch. In the other cases it will point to a valid head |
|
584 | 584 | which can serve as the base to create a new commit on top of it. |
|
585 | 585 | """ |
|
586 | 586 | vcsrepo = self.repo.scm_instance() |
|
587 | 587 | head_id = ( |
|
588 | 588 | vcsrepo.DEFAULT_BRANCH_NAME or |
|
589 | 589 | vcsrepo.commit_ids[-1]) |
|
590 | 590 | return head_id |
|
591 | 591 | |
|
592 | 592 | @property |
|
593 | 593 | def commit_ids(self): |
|
594 | 594 | """ |
|
595 | 595 | Returns the list of commits for the last created repository |
|
596 | 596 | """ |
|
597 | 597 | return self._commit_ids |
|
598 | 598 | |
|
599 | 599 | def create_master_repo(self, commits): |
|
600 | 600 | """ |
|
601 | 601 | Create a repository and remember it as a template. |
|
602 | 602 | |
|
603 | 603 | This allows to easily create derived repositories to construct |
|
604 | 604 | more complex scenarios for diff, compare and pull requests. |
|
605 | 605 | |
|
606 | 606 | Returns a commit map which maps from commit message to raw_id. |
|
607 | 607 | """ |
|
608 | 608 | self._master_repo = self.create_repo(commits=commits) |
|
609 | 609 | return self._commit_ids |
|
610 | 610 | |
|
611 | 611 | def create_repo( |
|
612 | 612 | self, commits=None, number_of_commits=0, heads=None, |
|
613 | 613 | name_suffix=u'', bare=False, **kwargs): |
|
614 | 614 | """ |
|
615 | 615 | Create a repository and record it for later cleanup. |
|
616 | 616 | |
|
617 | 617 | :param commits: Optional. A sequence of dict instances. |
|
618 | 618 | Will add a commit per entry to the new repository. |
|
619 | 619 | :param number_of_commits: Optional. If set to a number, this number of |
|
620 | 620 | commits will be added to the new repository. |
|
621 | 621 | :param heads: Optional. Can be set to a sequence of commit |
|
622 | 622 | names which shall be pulled in from the master repository. |
|
623 | 623 | :param name_suffix: adds special suffix to generated repo name |
|
624 | 624 | :param bare: set a repo as bare (no checkout) |
|
625 | 625 | """ |
|
626 | 626 | self.repo_name = self._next_repo_name() + name_suffix |
|
627 | 627 | repo = self._fixture.create_repo( |
|
628 | 628 | self.repo_name, repo_type=self.alias, bare=bare, **kwargs) |
|
629 | 629 | self._cleanup_repos.append(repo.repo_name) |
|
630 | 630 | |
|
631 | 631 | commits = commits or [ |
|
632 | 632 | {'message': 'Commit %s of %s' % (x, self.repo_name)} |
|
633 | 633 | for x in range(number_of_commits)] |
|
634 | 634 | self._add_commits_to_repo(repo.scm_instance(), commits) |
|
635 | 635 | if heads: |
|
636 | 636 | self.pull_heads(repo, heads) |
|
637 | 637 | |
|
638 | 638 | return repo |
|
639 | 639 | |
|
640 | 640 | def pull_heads(self, repo, heads): |
|
641 | 641 | """ |
|
642 | 642 | Make sure that repo contains all commits mentioned in `heads` |
|
643 | 643 | """ |
|
644 | 644 | vcsmaster = self._master_repo.scm_instance() |
|
645 | 645 | vcsrepo = repo.scm_instance() |
|
646 | 646 | vcsrepo.config.clear_section('hooks') |
|
647 | 647 | commit_ids = [self._commit_ids[h] for h in heads] |
|
648 | 648 | vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids) |
|
649 | 649 | |
|
650 | 650 | def create_fork(self): |
|
651 | 651 | repo_to_fork = self.repo_name |
|
652 | 652 | self.repo_name = self._next_repo_name() |
|
653 | 653 | repo = self._fixture.create_fork(repo_to_fork, self.repo_name) |
|
654 | 654 | self._cleanup_repos.append(self.repo_name) |
|
655 | 655 | return repo |
|
656 | 656 | |
|
657 | 657 | def new_repo_name(self, suffix=u''): |
|
658 | 658 | self.repo_name = self._next_repo_name() + suffix |
|
659 | 659 | self._cleanup_repos.append(self.repo_name) |
|
660 | 660 | return self.repo_name |
|
661 | 661 | |
|
662 | 662 | def _next_repo_name(self): |
|
663 | 663 | return u"%s_%s" % ( |
|
664 | self.invalid_repo_name.sub(u'_', self._test_name), | |
|
665 | len(self._cleanup_repos)) | |
|
664 | self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos)) | |
|
666 | 665 | |
|
667 | 666 | def ensure_file(self, filename, content='Test content\n'): |
|
668 | 667 | assert self._cleanup_repos, "Avoid writing into vcs_test repos" |
|
669 | 668 | commits = [ |
|
670 | 669 | {'added': [ |
|
671 | 670 | FileNode(filename, content=content), |
|
672 | 671 | ]}, |
|
673 | 672 | ] |
|
674 | 673 | self._add_commits_to_repo(self.repo.scm_instance(), commits) |
|
675 | 674 | |
|
676 | 675 | def enable_downloads(self): |
|
677 | 676 | repo = self.repo |
|
678 | 677 | repo.enable_downloads = True |
|
679 | 678 | Session().add(repo) |
|
680 | 679 | Session().commit() |
|
681 | 680 | |
|
682 | 681 | def cleanup(self): |
|
683 | 682 | for repo_name in reversed(self._cleanup_repos): |
|
684 | 683 | self._fixture.destroy_repo(repo_name) |
|
685 | 684 | |
|
686 | 685 | def _add_commits_to_repo(self, repo, commits): |
|
687 | 686 | commit_ids = _add_commits_to_repo(repo, commits) |
|
688 | 687 | if not commit_ids: |
|
689 | 688 | return |
|
690 | 689 | self._commit_ids = commit_ids |
|
691 | 690 | |
|
692 | 691 | # Creating refs for Git to allow fetching them from remote repository |
|
693 | 692 | if self.alias == 'git': |
|
694 | 693 | refs = {} |
|
695 | 694 | for message in self._commit_ids: |
|
696 | 695 | # TODO: mikhail: do more special chars replacements |
|
697 | 696 | ref_name = 'refs/test-refs/{}'.format( |
|
698 | 697 | message.replace(' ', '')) |
|
699 | 698 | refs[ref_name] = self._commit_ids[message] |
|
700 | 699 | self._create_refs(repo, refs) |
|
701 | 700 | |
|
702 | 701 | def _create_refs(self, repo, refs): |
|
703 | 702 | for ref_name in refs: |
|
704 | 703 | repo.set_refs(ref_name, refs[ref_name]) |
|
705 | 704 | |
|
706 | 705 | |
|
707 | 706 | def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo): |
|
708 | 707 | if backend_alias not in request.config.getoption('--backends'): |
|
709 | 708 | pytest.skip("Backend %s not selected." % (backend_alias, )) |
|
710 | 709 | |
|
711 | 710 | utils.check_xfail_backends(request.node, backend_alias) |
|
712 | 711 | utils.check_skip_backends(request.node, backend_alias) |
|
713 | 712 | |
|
714 | 713 | repo_name = 'vcs_test_%s' % (backend_alias, ) |
|
715 | 714 | repo_path = os.path.join(tests_tmp_path, repo_name) |
|
716 | 715 | backend = VcsBackend( |
|
717 | 716 | alias=backend_alias, |
|
718 | 717 | repo_path=repo_path, |
|
719 | 718 | test_name=request.node.name, |
|
720 | 719 | test_repo_container=test_repo) |
|
721 | 720 | request.addfinalizer(backend.cleanup) |
|
722 | 721 | return backend |
|
723 | 722 | |
|
724 | 723 | |
|
725 | 724 | @pytest.fixture |
|
726 | 725 | def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo): |
|
727 | 726 | """ |
|
728 | 727 | Parametrized fixture which represents a single vcs backend implementation. |
|
729 | 728 | |
|
730 | 729 | See the fixture `backend` for more details. This one implements the same |
|
731 | 730 | concept, but on vcs level. So it does not provide model instances etc. |
|
732 | 731 | |
|
733 | 732 | Parameters are generated dynamically, see :func:`pytest_generate_tests` |
|
734 | 733 | for how this works. |
|
735 | 734 | """ |
|
736 | 735 | return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo) |
|
737 | 736 | |
|
738 | 737 | |
|
739 | 738 | @pytest.fixture |
|
740 | 739 | def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo): |
|
741 | 740 | return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo) |
|
742 | 741 | |
|
743 | 742 | |
|
744 | 743 | @pytest.fixture |
|
745 | 744 | def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo): |
|
746 | 745 | return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo) |
|
747 | 746 | |
|
748 | 747 | |
|
749 | 748 | @pytest.fixture |
|
750 | 749 | def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo): |
|
751 | 750 | return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo) |
|
752 | 751 | |
|
753 | 752 | |
|
754 | 753 | @pytest.fixture |
|
755 | 754 | def vcsbackend_stub(vcsbackend_git): |
|
756 | 755 | """ |
|
757 | 756 | Use this to express that your test just needs a stub of a vcsbackend. |
|
758 | 757 | |
|
759 | 758 | Plan is to eventually implement an in-memory stub to speed tests up. |
|
760 | 759 | """ |
|
761 | 760 | return vcsbackend_git |
|
762 | 761 | |
|
763 | 762 | |
|
764 | 763 | class VcsBackend(object): |
|
765 | 764 | """ |
|
766 | 765 | Represents the test configuration for one supported vcs backend. |
|
767 | 766 | """ |
|
768 | 767 | |
|
769 | 768 | invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+') |
|
770 | 769 | |
|
771 | 770 | def __init__(self, alias, repo_path, test_name, test_repo_container): |
|
772 | 771 | self.alias = alias |
|
773 | 772 | self._repo_path = repo_path |
|
774 | 773 | self._cleanup_repos = [] |
|
775 | 774 | self._test_name = test_name |
|
776 | 775 | self._test_repo_container = test_repo_container |
|
777 | 776 | |
|
778 | 777 | def __getitem__(self, key): |
|
779 | 778 | return self._test_repo_container(key, self.alias).scm_instance() |
|
780 | 779 | |
|
781 | 780 | @property |
|
782 | 781 | def repo(self): |
|
783 | 782 | """ |
|
784 | 783 | Returns the "current" repository. This is the vcs_test repo of the last |
|
785 | 784 | repo which has been created. |
|
786 | 785 | """ |
|
787 | 786 | Repository = get_backend(self.alias) |
|
788 | 787 | return Repository(self._repo_path) |
|
789 | 788 | |
|
790 | 789 | @property |
|
791 | 790 | def backend(self): |
|
792 | 791 | """ |
|
793 | 792 | Returns the backend implementation class. |
|
794 | 793 | """ |
|
795 | 794 | return get_backend(self.alias) |
|
796 | 795 | |
|
797 | 796 | def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None, |
|
798 | 797 | bare=False): |
|
799 | 798 | repo_name = self._next_repo_name() |
|
800 | 799 | self._repo_path = get_new_dir(repo_name) |
|
801 | 800 | repo_class = get_backend(self.alias) |
|
802 | 801 | src_url = None |
|
803 | 802 | if _clone_repo: |
|
804 | 803 | src_url = _clone_repo.path |
|
805 | 804 | repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare) |
|
806 | 805 | self._cleanup_repos.append(repo) |
|
807 | 806 | |
|
808 | 807 | commits = commits or [ |
|
809 | 808 | {'message': 'Commit %s of %s' % (x, repo_name)} |
|
810 | 809 | for x in xrange(number_of_commits)] |
|
811 | 810 | _add_commits_to_repo(repo, commits) |
|
812 | 811 | return repo |
|
813 | 812 | |
|
814 | 813 | def clone_repo(self, repo): |
|
815 | 814 | return self.create_repo(_clone_repo=repo) |
|
816 | 815 | |
|
817 | 816 | def cleanup(self): |
|
818 | 817 | for repo in self._cleanup_repos: |
|
819 | 818 | shutil.rmtree(repo.path) |
|
820 | 819 | |
|
821 | 820 | def new_repo_path(self): |
|
822 | 821 | repo_name = self._next_repo_name() |
|
823 | 822 | self._repo_path = get_new_dir(repo_name) |
|
824 | 823 | return self._repo_path |
|
825 | 824 | |
|
826 | 825 | def _next_repo_name(self): |
|
827 | 826 | return "%s_%s" % ( |
|
828 | 827 | self.invalid_repo_name.sub('_', self._test_name), |
|
829 | 828 | len(self._cleanup_repos)) |
|
830 | 829 | |
|
831 | 830 | def add_file(self, repo, filename, content='Test content\n'): |
|
832 | 831 | imc = repo.in_memory_commit |
|
833 | 832 | imc.add(FileNode(filename, content=content)) |
|
834 | 833 | imc.commit( |
|
835 | 834 | message=u'Automatic commit from vcsbackend fixture', |
|
836 | 835 | author=u'Automatic') |
|
837 | 836 | |
|
838 | 837 | def ensure_file(self, filename, content='Test content\n'): |
|
839 | 838 | assert self._cleanup_repos, "Avoid writing into vcs_test repos" |
|
840 | 839 | self.add_file(self.repo, filename, content) |
|
841 | 840 | |
|
842 | 841 | |
|
843 | 842 | def _add_commits_to_repo(vcs_repo, commits): |
|
844 | 843 | commit_ids = {} |
|
845 | 844 | if not commits: |
|
846 | 845 | return commit_ids |
|
847 | 846 | |
|
848 | 847 | imc = vcs_repo.in_memory_commit |
|
849 | 848 | commit = None |
|
850 | 849 | |
|
851 | 850 | for idx, commit in enumerate(commits): |
|
852 | 851 | message = unicode(commit.get('message', 'Commit %s' % idx)) |
|
853 | 852 | |
|
854 | 853 | for node in commit.get('added', []): |
|
855 | 854 | imc.add(FileNode(node.path, content=node.content)) |
|
856 | 855 | for node in commit.get('changed', []): |
|
857 | 856 | imc.change(FileNode(node.path, content=node.content)) |
|
858 | 857 | for node in commit.get('removed', []): |
|
859 | 858 | imc.remove(FileNode(node.path)) |
|
860 | 859 | |
|
861 | 860 | parents = [ |
|
862 | 861 | vcs_repo.get_commit(commit_id=commit_ids[p]) |
|
863 | 862 | for p in commit.get('parents', [])] |
|
864 | 863 | |
|
865 | 864 | operations = ('added', 'changed', 'removed') |
|
866 | 865 | if not any((commit.get(o) for o in operations)): |
|
867 | 866 | imc.add(FileNode('file_%s' % idx, content=message)) |
|
868 | 867 | |
|
869 | 868 | commit = imc.commit( |
|
870 | 869 | message=message, |
|
871 | 870 | author=unicode(commit.get('author', 'Automatic')), |
|
872 | 871 | date=commit.get('date'), |
|
873 | 872 | branch=commit.get('branch'), |
|
874 | 873 | parents=parents) |
|
875 | 874 | |
|
876 | 875 | commit_ids[commit.message] = commit.raw_id |
|
877 | 876 | |
|
878 | 877 | return commit_ids |
|
879 | 878 | |
|
880 | 879 | |
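For reference, the commit specification consumed here is a list of dicts in
which every key is optional; when a commit touches no nodes, a filler
file_<idx> is added so the commit is never empty. A sketch of the accepted
shape (the values are illustrative):

    commits = [
        {'message': 'root'},  # no nodes: filler file_0 gets added
        {'message': 'feature',
         'added': [FileNode('docs/readme', content='content\n')],
         'changed': [],            # FileNode instances to update
         'removed': [],            # FileNode instances to delete
         'parents': ['root'],      # parents referenced by message
         'author': 'Automatic',    # defaults match the code above
         'date': None,
         'branch': None},
    ]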
|
881 | 880 | @pytest.fixture |
|
882 | 881 | def reposerver(request): |
|
883 | 882 | """ |
|
884 | 883 | Allows serving a backend repository |
|
885 | 884 | """ |
|
886 | 885 | |
|
887 | 886 | repo_server = RepoServer() |
|
888 | 887 | request.addfinalizer(repo_server.cleanup) |
|
889 | 888 | return repo_server |
|
890 | 889 | |
|
891 | 890 | |
|
892 | 891 | class RepoServer(object): |
|
893 | 892 | """ |
|
894 | 893 | Utility to serve a local repository for the duration of a test case. |
|
895 | 894 | |
|
896 | 895 | Supports only Subversion so far. |
|
897 | 896 | """ |
|
898 | 897 | |
|
899 | 898 | url = None |
|
900 | 899 | |
|
901 | 900 | def __init__(self): |
|
902 | 901 | self._cleanup_servers = [] |
|
903 | 902 | |
|
904 | 903 | def serve(self, vcsrepo): |
|
905 | 904 | if vcsrepo.alias != 'svn': |
|
906 | 905 | raise TypeError("Backend %s not supported" % vcsrepo.alias) |
|
907 | 906 | |
|
908 | 907 | proc = subprocess32.Popen( |
|
909 | 908 | ['svnserve', '-d', '--foreground', '--listen-host', 'localhost', |
|
910 | 909 | '--root', vcsrepo.path]) |
|
911 | 910 | self._cleanup_servers.append(proc) |
|
912 | 911 | self.url = 'svn://localhost' |
|
913 | 912 | |
|
914 | 913 | def cleanup(self): |
|
915 | 914 | for proc in self._cleanup_servers: |
|
916 | 915 | proc.terminate() |
|
917 | 916 | |
|
918 | 917 | |
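A usage sketch for the `reposerver` fixture above (hypothetical test code; `backend_svn` is assumed to be one of the parametrized backend fixtures used elsewhere in this test suite):

    def test_svn_checkout(reposerver, backend_svn):
        # serve() only accepts Subversion repositories
        reposerver.serve(backend_svn.repo.scm_instance())
        assert reposerver.url == 'svn://localhost'
        # the spawned svnserve process is terminated by the cleanup finalizer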
|
919 | 918 | @pytest.fixture |
|
920 | 919 | def pr_util(backend, request, config_stub): |
|
921 | 920 | """ |
|
922 | 921 | Utility for tests of models and for functional tests around pull requests. |
|
923 | 922 | |
|
924 | 923 | It gives an instance of :class:`PRTestUtility` which provides various |
|
925 | 924 | utility methods around one pull request. |
|
926 | 925 | |
|
927 | 926 | This fixture uses `backend` and inherits its parameterization. |
|
928 | 927 | """ |
|
929 | 928 | |
|
930 | 929 | util = PRTestUtility(backend) |
|
931 | 930 | request.addfinalizer(util.cleanup) |
|
932 | 931 | |
|
933 | 932 | return util |
|
934 | 933 | |
|
935 | 934 | |
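A usage sketch for `pr_util` (hypothetical test code; the defaults match `create_pull_request` in the class below):

    def test_pull_request_lifecycle(pr_util):
        pull_request = pr_util.create_pull_request(mergeable=True)
        assert pull_request.pull_request_id == pr_util.pull_request_id
        pr_util.approve()  # casts approval votes for all reviewers
        pr_util.close()
        # repositories and the pull request are removed by pr_util.cleanup()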
|
936 | 935 | class PRTestUtility(object): |
|
937 | 936 | |
|
938 | 937 | pull_request = None |
|
939 | 938 | pull_request_id = None |
|
940 | 939 | mergeable_patcher = None |
|
941 | 940 | mergeable_mock = None |
|
942 | 941 | notification_patcher = None |
|
943 | 942 | |
|
944 | 943 | def __init__(self, backend): |
|
945 | 944 | self.backend = backend |
|
946 | 945 | |
|
947 | 946 | def create_pull_request( |
|
948 | 947 | self, commits=None, target_head=None, source_head=None, |
|
949 | 948 | revisions=None, approved=False, author=None, mergeable=False, |
|
950 | 949 | enable_notifications=True, name_suffix=u'', reviewers=None, |
|
951 | 950 | title=u"Test", description=u"Description"): |
|
952 | 951 | self.set_mergeable(mergeable) |
|
953 | 952 | if not enable_notifications: |
|
954 | 953 | # mock notification side effect |
|
955 | 954 | self.notification_patcher = mock.patch( |
|
956 | 955 | 'rhodecode.model.notification.NotificationModel.create') |
|
957 | 956 | self.notification_patcher.start() |
|
958 | 957 | |
|
959 | 958 | if not self.pull_request: |
|
960 | 959 | if not commits: |
|
961 | 960 | commits = [ |
|
962 | 961 | {'message': 'c1'}, |
|
963 | 962 | {'message': 'c2'}, |
|
964 | 963 | {'message': 'c3'}, |
|
965 | 964 | ] |
|
966 | 965 | target_head = 'c1' |
|
967 | 966 | source_head = 'c2' |
|
968 | 967 | revisions = ['c2'] |
|
969 | 968 | |
|
970 | 969 | self.commit_ids = self.backend.create_master_repo(commits) |
|
971 | 970 | self.target_repository = self.backend.create_repo( |
|
972 | 971 | heads=[target_head], name_suffix=name_suffix) |
|
973 | 972 | self.source_repository = self.backend.create_repo( |
|
974 | 973 | heads=[source_head], name_suffix=name_suffix) |
|
975 | 974 | self.author = author or UserModel().get_by_username( |
|
976 | 975 | TEST_USER_ADMIN_LOGIN) |
|
977 | 976 | |
|
978 | 977 | model = PullRequestModel() |
|
979 | 978 | self.create_parameters = { |
|
980 | 979 | 'created_by': self.author, |
|
981 | 980 | 'source_repo': self.source_repository.repo_name, |
|
982 | 981 | 'source_ref': self._default_branch_reference(source_head), |
|
983 | 982 | 'target_repo': self.target_repository.repo_name, |
|
984 | 983 | 'target_ref': self._default_branch_reference(target_head), |
|
985 | 984 | 'revisions': [self.commit_ids[r] for r in revisions], |
|
986 | 985 | 'reviewers': reviewers or self._get_reviewers(), |
|
987 | 986 | 'title': title, |
|
988 | 987 | 'description': description, |
|
989 | 988 | } |
|
990 | 989 | self.pull_request = model.create(**self.create_parameters) |
|
991 | 990 | assert model.get_versions(self.pull_request) == [] |
|
992 | 991 | |
|
993 | 992 | self.pull_request_id = self.pull_request.pull_request_id |
|
994 | 993 | |
|
995 | 994 | if approved: |
|
996 | 995 | self.approve() |
|
997 | 996 | |
|
998 | 997 | Session().add(self.pull_request) |
|
999 | 998 | Session().commit() |
|
1000 | 999 | |
|
1001 | 1000 | return self.pull_request |
|
1002 | 1001 | |
|
1003 | 1002 | def approve(self): |
|
1004 | 1003 | self.create_status_votes( |
|
1005 | 1004 | ChangesetStatus.STATUS_APPROVED, |
|
1006 | 1005 | *self.pull_request.reviewers) |
|
1007 | 1006 | |
|
1008 | 1007 | def close(self): |
|
1009 | 1008 | PullRequestModel().close_pull_request(self.pull_request, self.author) |
|
1010 | 1009 | |
|
1011 | 1010 | def _default_branch_reference(self, commit_message): |
|
1012 | 1011 | reference = '%s:%s:%s' % ( |
|
1013 | 1012 | 'branch', |
|
1014 | 1013 | self.backend.default_branch_name, |
|
1015 | 1014 | self.commit_ids[commit_message]) |
|
1016 | 1015 | return reference |
|
1017 | 1016 | |
|
1018 | 1017 | def _get_reviewers(self): |
|
1019 | 1018 | return [ |
|
1020 | 1019 | (TEST_USER_REGULAR_LOGIN, ['default1'], False, []), |
|
1021 | 1020 | (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []), |
|
1022 | 1021 | ] |
|
1023 | 1022 | |
|
1024 | 1023 | def update_source_repository(self, head=None): |
|
1025 | 1024 | heads = [head or 'c3'] |
|
1026 | 1025 | self.backend.pull_heads(self.source_repository, heads=heads) |
|
1027 | 1026 | |
|
1028 | 1027 | def add_one_commit(self, head=None): |
|
1029 | 1028 | self.update_source_repository(head=head) |
|
1030 | 1029 | old_commit_ids = set(self.pull_request.revisions) |
|
1031 | 1030 | PullRequestModel().update_commits(self.pull_request) |
|
1032 | 1031 | commit_ids = set(self.pull_request.revisions) |
|
1033 | 1032 | new_commit_ids = commit_ids - old_commit_ids |
|
1034 | 1033 | assert len(new_commit_ids) == 1 |
|
1035 | 1034 | return new_commit_ids.pop() |
|
1036 | 1035 | |
|
1037 | 1036 | def remove_one_commit(self): |
|
1038 | 1037 | assert len(self.pull_request.revisions) == 2 |
|
1039 | 1038 | source_vcs = self.source_repository.scm_instance() |
|
1040 | 1039 | removed_commit_id = source_vcs.commit_ids[-1] |
|
1041 | 1040 | |
|
1042 | 1041 | # TODO: johbo: Git and Mercurial have an inconsistent vcs api here, |
|
1043 | 1042 | # remove the if once that's sorted out. |
|
1044 | 1043 | if self.backend.alias == "git": |
|
1045 | 1044 | kwargs = {'branch_name': self.backend.default_branch_name} |
|
1046 | 1045 | else: |
|
1047 | 1046 | kwargs = {} |
|
1048 | 1047 | source_vcs.strip(removed_commit_id, **kwargs) |
|
1049 | 1048 | |
|
1050 | 1049 | PullRequestModel().update_commits(self.pull_request) |
|
1051 | 1050 | assert len(self.pull_request.revisions) == 1 |
|
1052 | 1051 | return removed_commit_id |
|
1053 | 1052 | |
|
1054 | 1053 | def create_comment(self, linked_to=None): |
|
1055 | 1054 | comment = CommentsModel().create( |
|
1056 | 1055 | text=u"Test comment", |
|
1057 | 1056 | repo=self.target_repository.repo_name, |
|
1058 | 1057 | user=self.author, |
|
1059 | 1058 | pull_request=self.pull_request) |
|
1060 | 1059 | assert comment.pull_request_version_id is None |
|
1061 | 1060 | |
|
1062 | 1061 | if linked_to: |
|
1063 | 1062 | PullRequestModel()._link_comments_to_version(linked_to) |
|
1064 | 1063 | |
|
1065 | 1064 | return comment |
|
1066 | 1065 | |
|
1067 | 1066 | def create_inline_comment( |
|
1068 | 1067 | self, linked_to=None, line_no=u'n1', file_path='file_1'): |
|
1069 | 1068 | comment = CommentsModel().create( |
|
1070 | 1069 | text=u"Test comment", |
|
1071 | 1070 | repo=self.target_repository.repo_name, |
|
1072 | 1071 | user=self.author, |
|
1073 | 1072 | line_no=line_no, |
|
1074 | 1073 | f_path=file_path, |
|
1075 | 1074 | pull_request=self.pull_request) |
|
1076 | 1075 | assert comment.pull_request_version_id is None |
|
1077 | 1076 | |
|
1078 | 1077 | if linked_to: |
|
1079 | 1078 | PullRequestModel()._link_comments_to_version(linked_to) |
|
1080 | 1079 | |
|
1081 | 1080 | return comment |
|
1082 | 1081 | |
|
1083 | 1082 | def create_version_of_pull_request(self): |
|
1084 | 1083 | pull_request = self.create_pull_request() |
|
1085 | 1084 | version = PullRequestModel()._create_version_from_snapshot( |
|
1086 | 1085 | pull_request) |
|
1087 | 1086 | return version |
|
1088 | 1087 | |
|
1089 | 1088 | def create_status_votes(self, status, *reviewers): |
|
1090 | 1089 | for reviewer in reviewers: |
|
1091 | 1090 | ChangesetStatusModel().set_status( |
|
1092 | 1091 | repo=self.pull_request.target_repo, |
|
1093 | 1092 | status=status, |
|
1094 | 1093 | user=reviewer.user_id, |
|
1095 | 1094 | pull_request=self.pull_request) |
|
1096 | 1095 | |
|
1097 | 1096 | def set_mergeable(self, value): |
|
1098 | 1097 | if not self.mergeable_patcher: |
|
1099 | 1098 | self.mergeable_patcher = mock.patch.object( |
|
1100 | 1099 | VcsSettingsModel, 'get_general_settings') |
|
1101 | 1100 | self.mergeable_mock = self.mergeable_patcher.start() |
|
1102 | 1101 | self.mergeable_mock.return_value = { |
|
1103 | 1102 | 'rhodecode_pr_merge_enabled': value} |
|
1104 | 1103 | |
|
1105 | 1104 | def cleanup(self): |
|
1106 | 1105 | # In case the source repository is already cleaned up, the pull |
|
1107 | 1106 | # request will already be deleted. |
|
1108 | 1107 | pull_request = PullRequest().get(self.pull_request_id) |
|
1109 | 1108 | if pull_request: |
|
1110 | 1109 | PullRequestModel().delete(pull_request, pull_request.author) |
|
1111 | 1110 | Session().commit() |
|
1112 | 1111 | |
|
1113 | 1112 | if self.notification_patcher: |
|
1114 | 1113 | self.notification_patcher.stop() |
|
1115 | 1114 | |
|
1116 | 1115 | if self.mergeable_patcher: |
|
1117 | 1116 | self.mergeable_patcher.stop() |
|
1118 | 1117 | |
|
1119 | 1118 | |
|
1120 | 1119 | @pytest.fixture |
|
1121 | 1120 | def user_admin(baseapp): |
|
1122 | 1121 | """ |
|
1123 | 1122 | Provides the default admin test user as an instance of `db.User`. |
|
1124 | 1123 | """ |
|
1125 | 1124 | user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
1126 | 1125 | return user |
|
1127 | 1126 | |
|
1128 | 1127 | |
|
1129 | 1128 | @pytest.fixture |
|
1130 | 1129 | def user_regular(baseapp): |
|
1131 | 1130 | """ |
|
1132 | 1131 | Provides the default regular test user as an instance of `db.User`. |
|
1133 | 1132 | """ |
|
1134 | 1133 | user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN) |
|
1135 | 1134 | return user |
|
1136 | 1135 | |
|
1137 | 1136 | |
|
1138 | 1137 | @pytest.fixture |
|
1139 | 1138 | def user_util(request, db_connection): |
|
1140 | 1139 | """ |
|
1141 | 1140 | Provides a wired instance of `UserUtility` with integrated cleanup. |
|
1142 | 1141 | """ |
|
1143 | 1142 | utility = UserUtility(test_name=request.node.name) |
|
1144 | 1143 | request.addfinalizer(utility.cleanup) |
|
1145 | 1144 | return utility |
|
1146 | 1145 | |
|
1147 | 1146 | |
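A usage sketch for `user_util` (hypothetical test code; 'repository.write' is assumed to be a valid RhodeCode permission name):

    def test_write_access(user_util):
        user = user_util.create_user()
        repo = user_util.create_repo(repo_type='git')
        user_util.grant_user_permission_to_repo(repo, user, 'repository.write')
        # every object and grant above is reverted by user_util.cleanup()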
|
1148 | 1147 | # TODO: johbo: Split this up into utilities per domain or something similar |
|
1149 | 1148 | class UserUtility(object): |
|
1150 | 1149 | |
|
1151 | 1150 | def __init__(self, test_name="test"): |
|
1152 | 1151 | self._test_name = self._sanitize_name(test_name) |
|
1153 | 1152 | self.fixture = Fixture() |
|
1154 | 1153 | self.repo_group_ids = [] |
|
1155 | 1154 | self.repos_ids = [] |
|
1156 | 1155 | self.user_ids = [] |
|
1157 | 1156 | self.user_group_ids = [] |
|
1158 | 1157 | self.user_repo_permission_ids = [] |
|
1159 | 1158 | self.user_group_repo_permission_ids = [] |
|
1160 | 1159 | self.user_repo_group_permission_ids = [] |
|
1161 | 1160 | self.user_group_repo_group_permission_ids = [] |
|
1162 | 1161 | self.user_user_group_permission_ids = [] |
|
1163 | 1162 | self.user_group_user_group_permission_ids = [] |
|
1164 | 1163 | self.user_permissions = [] |
|
1165 | 1164 | |
|
1166 | 1165 | def _sanitize_name(self, name): |
|
1167 | 1166 | for char in ['[', ']']: |
|
1168 | 1167 | name = name.replace(char, '_') |
|
1169 | 1168 | return name |
|
1170 | 1169 | |
|
1171 | 1170 | def create_repo_group( |
|
1172 | 1171 | self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True): |
|
1173 | 1172 | group_name = "{prefix}_repogroup_{count}".format( |
|
1174 | 1173 | prefix=self._test_name, |
|
1175 | 1174 | count=len(self.repo_group_ids)) |
|
1176 | 1175 | repo_group = self.fixture.create_repo_group( |
|
1177 | 1176 | group_name, cur_user=owner) |
|
1178 | 1177 | if auto_cleanup: |
|
1179 | 1178 | self.repo_group_ids.append(repo_group.group_id) |
|
1180 | 1179 | return repo_group |
|
1181 | 1180 | |
|
1182 | 1181 | def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None, |
|
1183 | 1182 | auto_cleanup=True, repo_type='hg', bare=False): |
|
1184 | 1183 | repo_name = "{prefix}_repository_{count}".format( |
|
1185 | 1184 | prefix=self._test_name, |
|
1186 | 1185 | count=len(self.repos_ids)) |
|
1187 | 1186 | |
|
1188 | 1187 | repository = self.fixture.create_repo( |
|
1189 | 1188 | repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare) |
|
1190 | 1189 | if auto_cleanup: |
|
1191 | 1190 | self.repos_ids.append(repository.repo_id) |
|
1192 | 1191 | return repository |
|
1193 | 1192 | |
|
1194 | 1193 | def create_user(self, auto_cleanup=True, **kwargs): |
|
1195 | 1194 | user_name = "{prefix}_user_{count}".format( |
|
1196 | 1195 | prefix=self._test_name, |
|
1197 | 1196 | count=len(self.user_ids)) |
|
1198 | 1197 | user = self.fixture.create_user(user_name, **kwargs) |
|
1199 | 1198 | if auto_cleanup: |
|
1200 | 1199 | self.user_ids.append(user.user_id) |
|
1201 | 1200 | return user |
|
1202 | 1201 | |
|
1203 | 1202 | def create_additional_user_email(self, user, email): |
|
1204 | 1203 | uem = self.fixture.create_additional_user_email(user=user, email=email) |
|
1205 | 1204 | return uem |
|
1206 | 1205 | |
|
1207 | 1206 | def create_user_with_group(self): |
|
1208 | 1207 | user = self.create_user() |
|
1209 | 1208 | user_group = self.create_user_group(members=[user]) |
|
1210 | 1209 | return user, user_group |
|
1211 | 1210 | |
|
1212 | 1211 | def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None, |
|
1213 | 1212 | auto_cleanup=True, **kwargs): |
|
1214 | 1213 | group_name = "{prefix}_usergroup_{count}".format( |
|
1215 | 1214 | prefix=self._test_name, |
|
1216 | 1215 | count=len(self.user_group_ids)) |
|
1217 | 1216 | user_group = self.fixture.create_user_group( |
|
1218 | 1217 | group_name, cur_user=owner, **kwargs) |
|
1219 | 1218 | |
|
1220 | 1219 | if auto_cleanup: |
|
1221 | 1220 | self.user_group_ids.append(user_group.users_group_id) |
|
1222 | 1221 | if members: |
|
1223 | 1222 | for user in members: |
|
1224 | 1223 | UserGroupModel().add_user_to_group(user_group, user) |
|
1225 | 1224 | return user_group |
|
1226 | 1225 | |
|
1227 | 1226 | def grant_user_permission(self, user_name, permission_name): |
|
1228 | 1227 | self._inherit_default_user_permissions(user_name, False) |
|
1229 | 1228 | self.user_permissions.append((user_name, permission_name)) |
|
1230 | 1229 | |
|
1231 | 1230 | def grant_user_permission_to_repo_group( |
|
1232 | 1231 | self, repo_group, user, permission_name): |
|
1233 | 1232 | permission = RepoGroupModel().grant_user_permission( |
|
1234 | 1233 | repo_group, user, permission_name) |
|
1235 | 1234 | self.user_repo_group_permission_ids.append( |
|
1236 | 1235 | (repo_group.group_id, user.user_id)) |
|
1237 | 1236 | return permission |
|
1238 | 1237 | |
|
1239 | 1238 | def grant_user_group_permission_to_repo_group( |
|
1240 | 1239 | self, repo_group, user_group, permission_name): |
|
1241 | 1240 | permission = RepoGroupModel().grant_user_group_permission( |
|
1242 | 1241 | repo_group, user_group, permission_name) |
|
1243 | 1242 | self.user_group_repo_group_permission_ids.append( |
|
1244 | 1243 | (repo_group.group_id, user_group.users_group_id)) |
|
1245 | 1244 | return permission |
|
1246 | 1245 | |
|
1247 | 1246 | def grant_user_permission_to_repo( |
|
1248 | 1247 | self, repo, user, permission_name): |
|
1249 | 1248 | permission = RepoModel().grant_user_permission( |
|
1250 | 1249 | repo, user, permission_name) |
|
1251 | 1250 | self.user_repo_permission_ids.append( |
|
1252 | 1251 | (repo.repo_id, user.user_id)) |
|
1253 | 1252 | return permission |
|
1254 | 1253 | |
|
1255 | 1254 | def grant_user_group_permission_to_repo( |
|
1256 | 1255 | self, repo, user_group, permission_name): |
|
1257 | 1256 | permission = RepoModel().grant_user_group_permission( |
|
1258 | 1257 | repo, user_group, permission_name) |
|
1259 | 1258 | self.user_group_repo_permission_ids.append( |
|
1260 | 1259 | (repo.repo_id, user_group.users_group_id)) |
|
1261 | 1260 | return permission |
|
1262 | 1261 | |
|
1263 | 1262 | def grant_user_permission_to_user_group( |
|
1264 | 1263 | self, target_user_group, user, permission_name): |
|
1265 | 1264 | permission = UserGroupModel().grant_user_permission( |
|
1266 | 1265 | target_user_group, user, permission_name) |
|
1267 | 1266 | self.user_user_group_permission_ids.append( |
|
1268 | 1267 | (target_user_group.users_group_id, user.user_id)) |
|
1269 | 1268 | return permission |
|
1270 | 1269 | |
|
1271 | 1270 | def grant_user_group_permission_to_user_group( |
|
1272 | 1271 | self, target_user_group, user_group, permission_name): |
|
1273 | 1272 | permission = UserGroupModel().grant_user_group_permission( |
|
1274 | 1273 | target_user_group, user_group, permission_name) |
|
1275 | 1274 | self.user_group_user_group_permission_ids.append( |
|
1276 | 1275 | (target_user_group.users_group_id, user_group.users_group_id)) |
|
1277 | 1276 | return permission |
|
1278 | 1277 | |
|
1279 | 1278 | def revoke_user_permission(self, user_name, permission_name): |
|
1280 | 1279 | self._inherit_default_user_permissions(user_name, True) |
|
1281 | 1280 | UserModel().revoke_perm(user_name, permission_name) |
|
1282 | 1281 | |
|
1283 | 1282 | def _inherit_default_user_permissions(self, user_name, value): |
|
1284 | 1283 | user = UserModel().get_by_username(user_name) |
|
1285 | 1284 | user.inherit_default_permissions = value |
|
1286 | 1285 | Session().add(user) |
|
1287 | 1286 | Session().commit() |
|
1288 | 1287 | |
|
1289 | 1288 | def cleanup(self): |
|
1290 | 1289 | self._cleanup_permissions() |
|
1291 | 1290 | self._cleanup_repos() |
|
1292 | 1291 | self._cleanup_repo_groups() |
|
1293 | 1292 | self._cleanup_user_groups() |
|
1294 | 1293 | self._cleanup_users() |
|
1295 | 1294 | |
|
1296 | 1295 | def _cleanup_permissions(self): |
|
1297 | 1296 | if self.user_permissions: |
|
1298 | 1297 | for user_name, permission_name in self.user_permissions: |
|
1299 | 1298 | self.revoke_user_permission(user_name, permission_name) |
|
1300 | 1299 | |
|
1301 | 1300 | for permission in self.user_repo_permission_ids: |
|
1302 | 1301 | RepoModel().revoke_user_permission(*permission) |
|
1303 | 1302 | |
|
1304 | 1303 | for permission in self.user_group_repo_permission_ids: |
|
1305 | 1304 | RepoModel().revoke_user_group_permission(*permission) |
|
1306 | 1305 | |
|
1307 | 1306 | for permission in self.user_repo_group_permission_ids: |
|
1308 | 1307 | RepoGroupModel().revoke_user_permission(*permission) |
|
1309 | 1308 | |
|
1310 | 1309 | for permission in self.user_group_repo_group_permission_ids: |
|
1311 | 1310 | RepoGroupModel().revoke_user_group_permission(*permission) |
|
1312 | 1311 | |
|
1313 | 1312 | for permission in self.user_user_group_permission_ids: |
|
1314 | 1313 | UserGroupModel().revoke_user_permission(*permission) |
|
1315 | 1314 | |
|
1316 | 1315 | for permission in self.user_group_user_group_permission_ids: |
|
1317 | 1316 | UserGroupModel().revoke_user_group_permission(*permission) |
|
1318 | 1317 | |
|
1319 | 1318 | def _cleanup_repo_groups(self): |
|
1320 | 1319 | def _repo_group_compare(first_group_id, second_group_id): |
|
1321 | 1320 | """ |
|
1322 | 1321 | Gives higher priority to the groups with the most complex paths |
|
1323 | 1322 | """ |
|
1324 | 1323 | first_group = RepoGroup.get(first_group_id) |
|
1325 | 1324 | second_group = RepoGroup.get(second_group_id) |
|
1326 | 1325 | first_group_parts = ( |
|
1327 | 1326 | len(first_group.group_name.split('/')) if first_group else 0) |
|
1328 | 1327 | second_group_parts = ( |
|
1329 | 1328 | len(second_group.group_name.split('/')) if second_group else 0) |
|
1330 | 1329 | return cmp(second_group_parts, first_group_parts) |
|
1331 | 1330 | |
|
1332 | 1331 | sorted_repo_group_ids = sorted( |
|
1333 | 1332 | self.repo_group_ids, cmp=_repo_group_compare) |
|
1334 | 1333 | for repo_group_id in sorted_repo_group_ids: |
|
1335 | 1334 | self.fixture.destroy_repo_group(repo_group_id) |
|
1336 | 1335 | |
|
1337 | 1336 | def _cleanup_repos(self): |
|
1338 | 1337 | sorted_repos_ids = sorted(self.repos_ids) |
|
1339 | 1338 | for repo_id in sorted_repos_ids: |
|
1340 | 1339 | self.fixture.destroy_repo(repo_id) |
|
1341 | 1340 | |
|
1342 | 1341 | def _cleanup_user_groups(self): |
|
1343 | 1342 | def _user_group_compare(first_group_id, second_group_id): |
|
1344 | 1343 | """ |
|
1345 | 1344 | Gives higher priority to the groups with the most complex paths |
|
1346 | 1345 | """ |
|
1347 | 1346 | first_group = UserGroup.get(first_group_id) |
|
1348 | 1347 | second_group = UserGroup.get(second_group_id) |
|
1349 | 1348 | first_group_parts = ( |
|
1350 | 1349 | len(first_group.users_group_name.split('/')) |
|
1351 | 1350 | if first_group else 0) |
|
1352 | 1351 | second_group_parts = ( |
|
1353 | 1352 | len(second_group.users_group_name.split('/')) |
|
1354 | 1353 | if second_group else 0) |
|
1355 | 1354 | return cmp(second_group_parts, first_group_parts) |
|
1356 | 1355 | |
|
1357 | 1356 | sorted_user_group_ids = sorted( |
|
1358 | 1357 | self.user_group_ids, cmp=_user_group_compare) |
|
1359 | 1358 | for user_group_id in sorted_user_group_ids: |
|
1360 | 1359 | self.fixture.destroy_user_group(user_group_id) |
|
1361 | 1360 | |
|
1362 | 1361 | def _cleanup_users(self): |
|
1363 | 1362 | for user_id in self.user_ids: |
|
1364 | 1363 | self.fixture.destroy_user(user_id) |
|
1365 | 1364 | |
|
1366 | 1365 | |
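The `cmp=`-based sorting in `_cleanup_repo_groups` and `_cleanup_user_groups` above is Python 2 only. Under Python 3 the same "deepest path first" ordering could be expressed with a key function (a sketch, not part of this change):

    def _group_depth(group_id):
        group = RepoGroup.get(group_id)
        return len(group.group_name.split('/')) if group else 0

    # reverse=True puts the most deeply nested groups first, as the cmp() did
    sorted_repo_group_ids = sorted(
        self.repo_group_ids, key=_group_depth, reverse=True)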
|
1367 | 1366 | # TODO: Think about moving this into a pytest-pyro package and make it a |
|
1368 | 1367 | # pytest plugin |
|
1369 | 1368 | @pytest.hookimpl(tryfirst=True, hookwrapper=True) |
|
1370 | 1369 | def pytest_runtest_makereport(item, call): |
|
1371 | 1370 | """ |
|
1372 | 1371 | Adds the remote traceback if the exception carries this information.
|
1373 | 1372 | |
|
1374 | 1373 | VCSServer attaches this information as the attribute `_vcs_server_traceback` |
|
1375 | 1374 | to the exception instance. |
|
1376 | 1375 | """ |
|
1377 | 1376 | outcome = yield |
|
1378 | 1377 | report = outcome.get_result() |
|
1379 | 1378 | if call.excinfo: |
|
1380 | 1379 | _add_vcsserver_remote_traceback(report, call.excinfo.value) |
|
1381 | 1380 | |
|
1382 | 1381 | |
|
1383 | 1382 | def _add_vcsserver_remote_traceback(report, exc): |
|
1384 | 1383 | vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None) |
|
1385 | 1384 | |
|
1386 | 1385 | if vcsserver_traceback: |
|
1387 | 1386 | section = 'VCSServer remote traceback ' + report.when |
|
1388 | 1387 | report.sections.append((section, vcsserver_traceback)) |
|
1389 | 1388 | |
|
1390 | 1389 | |
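A sketch of the contract the hook above relies on: VCSServer-side code attaches the remote traceback to the exception before it propagates into the test (the attribute name is as documented, the surrounding code is hypothetical):

    try:
        remote_call()
    except Exception as exc:
        exc._vcs_server_traceback = remote_traceback_text
        raise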
|
1391 | 1390 | @pytest.fixture(scope='session') |
|
1392 | 1391 | def testrun(): |
|
1393 | 1392 | return { |
|
1394 | 1393 | 'uuid': uuid.uuid4(), |
|
1395 | 1394 | 'start': datetime.datetime.utcnow().isoformat(), |
|
1396 | 1395 | 'timestamp': int(time.time()), |
|
1397 | 1396 | } |
|
1398 | 1397 | |
|
1399 | 1398 | |
|
1400 | 1399 | @pytest.fixture(autouse=True) |
|
1401 | 1400 | def collect_appenlight_stats(request, testrun): |
|
1402 | 1401 | """ |
|
1403 | 1402 | This fixture reports memory consumption of individual tests.

1404 | 1403 |

1405 | 1404 | It gathers data based on `psutil` and sends it to Appenlight. The option

1406 | 1405 | ``--appenlight`` has to be used to enable this fixture, and the API key for

1407 | 1406 | your application has to be provided via ``--appenlight-api-key``.
|
1408 | 1407 | """ |
|
1409 | 1408 | try: |
|
1410 | 1409 | # cygwin does not yet have psutil support.
|
1411 | 1410 | import psutil |
|
1412 | 1411 | except ImportError: |
|
1413 | 1412 | return |
|
1414 | 1413 | |
|
1415 | 1414 | if not request.config.getoption('--appenlight'): |
|
1416 | 1415 | return |
|
1417 | 1416 | else: |
|
1418 | 1417 | # Only request the baseapp fixture if appenlight tracking is |
|
1419 | 1418 | # enabled. This will speed up a test run of unit tests by 2 to 3 |
|
1420 | 1419 | # seconds if appenlight is not enabled. |
|
1421 | 1420 | baseapp = request.getfuncargvalue("baseapp") |
|
1422 | 1421 | url = '{}/api/logs'.format(request.config.getoption('--appenlight-url')) |
|
1423 | 1422 | client = AppenlightClient( |
|
1424 | 1423 | url=url, |
|
1425 | 1424 | api_key=request.config.getoption('--appenlight-api-key'), |
|
1426 | 1425 | namespace=request.node.nodeid, |
|
1427 | 1426 | request=str(testrun['uuid']), |
|
1428 | 1427 | testrun=testrun) |
|
1429 | 1428 | |
|
1430 | 1429 | client.collect({ |
|
1431 | 1430 | 'message': "Starting", |
|
1432 | 1431 | }) |
|
1433 | 1432 | |
|
1434 | 1433 | server_and_port = baseapp.config.get_settings()['vcs.server'] |
|
1435 | 1434 | protocol = baseapp.config.get_settings()['vcs.server.protocol'] |
|
1436 | 1435 | server = create_vcsserver_proxy(server_and_port, protocol) |
|
1437 | 1436 | with server: |
|
1438 | 1437 | vcs_pid = server.get_pid() |
|
1439 | 1438 | server.run_gc() |
|
1440 | 1439 | vcs_process = psutil.Process(vcs_pid) |
|
1441 | 1440 | mem = vcs_process.memory_info() |
|
1442 | 1441 | client.tag_before('vcsserver.rss', mem.rss) |
|
1443 | 1442 | client.tag_before('vcsserver.vms', mem.vms) |
|
1444 | 1443 | |
|
1445 | 1444 | test_process = psutil.Process() |
|
1446 | 1445 | mem = test_process.memory_info() |
|
1447 | 1446 | client.tag_before('test.rss', mem.rss) |
|
1448 | 1447 | client.tag_before('test.vms', mem.vms) |
|
1449 | 1448 | |
|
1450 | 1449 | client.tag_before('time', time.time()) |
|
1451 | 1450 | |
|
1452 | 1451 | @request.addfinalizer |
|
1453 | 1452 | def send_stats(): |
|
1454 | 1453 | client.tag_after('time', time.time()) |
|
1455 | 1454 | with server: |
|
1456 | 1455 | gc_stats = server.run_gc() |
|
1457 | 1456 | for tag, value in gc_stats.items(): |
|
1458 | 1457 | client.tag_after(tag, value) |
|
1459 | 1458 | mem = vcs_process.memory_info() |
|
1460 | 1459 | client.tag_after('vcsserver.rss', mem.rss) |
|
1461 | 1460 | client.tag_after('vcsserver.vms', mem.vms) |
|
1462 | 1461 | |
|
1463 | 1462 | mem = test_process.memory_info() |
|
1464 | 1463 | client.tag_after('test.rss', mem.rss) |
|
1465 | 1464 | client.tag_after('test.vms', mem.vms) |
|
1466 | 1465 | |
|
1467 | 1466 | client.collect({ |
|
1468 | 1467 | 'message': "Finished", |
|
1469 | 1468 | }) |
|
1470 | 1469 | client.send_stats() |
|
1471 | 1470 | |
|
1472 | 1471 | return client |
|
1473 | 1472 | |
|
1474 | 1473 | |
|
1475 | 1474 | class AppenlightClient(object):
|
1476 | 1475 | |
|
1477 | 1476 | url_template = '{url}?protocol_version=0.5' |
|
1478 | 1477 | |
|
1479 | 1478 | def __init__( |
|
1480 | 1479 | self, url, api_key, add_server=True, add_timestamp=True, |
|
1481 | 1480 | namespace=None, request=None, testrun=None): |
|
1482 | 1481 | self.url = self.url_template.format(url=url) |
|
1483 | 1482 | self.api_key = api_key |
|
1484 | 1483 | self.add_server = add_server |
|
1485 | 1484 | self.add_timestamp = add_timestamp |
|
1486 | 1485 | self.namespace = namespace |
|
1487 | 1486 | self.request = request |
|
1488 | 1487 | self.server = socket.getfqdn(socket.gethostname()) |
|
1489 | 1488 | self.tags_before = {} |
|
1490 | 1489 | self.tags_after = {} |
|
1491 | 1490 | self.stats = [] |
|
1492 | 1491 | self.testrun = testrun or {} |
|
1493 | 1492 | |
|
1494 | 1493 | def tag_before(self, tag, value): |
|
1495 | 1494 | self.tags_before[tag] = value |
|
1496 | 1495 | |
|
1497 | 1496 | def tag_after(self, tag, value): |
|
1498 | 1497 | self.tags_after[tag] = value |
|
1499 | 1498 | |
|
1500 | 1499 | def collect(self, data): |
|
1501 | 1500 | if self.add_server: |
|
1502 | 1501 | data.setdefault('server', self.server) |
|
1503 | 1502 | if self.add_timestamp: |
|
1504 | 1503 | data.setdefault('date', datetime.datetime.utcnow().isoformat()) |
|
1505 | 1504 | if self.namespace: |
|
1506 | 1505 | data.setdefault('namespace', self.namespace) |
|
1507 | 1506 | if self.request: |
|
1508 | 1507 | data.setdefault('request', self.request) |
|
1509 | 1508 | self.stats.append(data) |
|
1510 | 1509 | |
|
1511 | 1510 | def send_stats(self): |
|
1512 | 1511 | tags = [ |
|
1513 | 1512 | ('testrun', self.request), |
|
1514 | 1513 | ('testrun.start', self.testrun['start']), |
|
1515 | 1514 | ('testrun.timestamp', self.testrun['timestamp']), |
|
1516 | 1515 | ('test', self.namespace), |
|
1517 | 1516 | ] |
|
1518 | 1517 | for key, value in self.tags_before.items(): |
|
1519 | 1518 | tags.append((key + '.before', value)) |
|
1520 | 1519 | try: |
|
1521 | 1520 | delta = self.tags_after[key] - value |
|
1522 | 1521 | tags.append((key + '.delta', delta)) |
|
1523 | 1522 | except Exception: |
|
1524 | 1523 | pass |
|
1525 | 1524 | for key, value in self.tags_after.items(): |
|
1526 | 1525 | tags.append((key + '.after', value)) |
|
1527 | 1526 | self.collect({ |
|
1528 | 1527 | 'message': "Collected tags", |
|
1529 | 1528 | 'tags': tags, |
|
1530 | 1529 | }) |
|
1531 | 1530 | |
|
1532 | 1531 | response = requests.post( |
|
1533 | 1532 | self.url, |
|
1534 | 1533 | headers={ |
|
1535 | 1534 | 'X-appenlight-api-key': self.api_key}, |
|
1536 | 1535 | json=self.stats, |
|
1537 | 1536 | ) |
|
1538 | 1537 | |
|
1539 | 1538 | if response.status_code != 200:
|
1540 | 1539 | pprint.pprint(self.stats) |
|
1541 | 1540 | print(response.headers) |
|
1542 | 1541 | print(response.text) |
|
1543 | 1542 | raise Exception('Sending to appenlight failed') |
|
1544 | 1543 | |
|
1545 | 1544 | |
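A minimal usage sketch for `AppenlightClient` (the endpoint and API key are placeholders; `testrun` must carry 'start' and 'timestamp' keys, matching the `testrun` fixture above, since `send_stats` reads both):

    client = AppenlightClient(
        url='https://appenlight.example.com/api/logs',  # expanded via url_template
        api_key='SOME-API-KEY',
        namespace='tests/test_example.py::test_something',
        testrun={'start': datetime.datetime.utcnow().isoformat(),
                 'timestamp': int(time.time())})
    client.tag_before('time', time.time())
    # ... exercise the code under measurement ...
    client.tag_after('time', time.time())
    client.send_stats()  # posts the collected stats; raises on a non-200 response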
|
1546 | 1545 | @pytest.fixture |
|
1547 | 1546 | def gist_util(request, db_connection): |
|
1548 | 1547 | """ |
|
1549 | 1548 | Provides a wired instance of `GistUtility` with integrated cleanup. |
|
1550 | 1549 | """ |
|
1551 | 1550 | utility = GistUtility() |
|
1552 | 1551 | request.addfinalizer(utility.cleanup) |
|
1553 | 1552 | return utility |
|
1554 | 1553 | |
|
1555 | 1554 | |
|
1556 | 1555 | class GistUtility(object): |
|
1557 | 1556 | def __init__(self): |
|
1558 | 1557 | self.fixture = Fixture() |
|
1559 | 1558 | self.gist_ids = [] |
|
1560 | 1559 | |
|
1561 | 1560 | def create_gist(self, **kwargs): |
|
1562 | 1561 | gist = self.fixture.create_gist(**kwargs) |
|
1563 | 1562 | self.gist_ids.append(gist.gist_id) |
|
1564 | 1563 | return gist |
|
1565 | 1564 | |
|
1566 | 1565 | def cleanup(self): |
|
1567 | 1566 | for id_ in self.gist_ids: |
|
1568 | 1567 | self.fixture.destroy_gists(str(id_)) |
|
1569 | 1568 | |
|
1570 | 1569 | |
|
1571 | 1570 | @pytest.fixture |
|
1572 | 1571 | def enabled_backends(request): |
|
1573 | 1572 | backends = request.config.option.backends |
|
1574 | 1573 | return backends[:] |
|
1575 | 1574 | |
|
1576 | 1575 | |
|
1577 | 1576 | @pytest.fixture |
|
1578 | 1577 | def settings_util(request, db_connection): |
|
1579 | 1578 | """ |
|
1580 | 1579 | Provides a wired instance of `SettingsUtility` with integrated cleanup. |
|
1581 | 1580 | """ |
|
1582 | 1581 | utility = SettingsUtility() |
|
1583 | 1582 | request.addfinalizer(utility.cleanup) |
|
1584 | 1583 | return utility |
|
1585 | 1584 | |
|
1586 | 1585 | |
|
1587 | 1586 | class SettingsUtility(object): |
|
1588 | 1587 | def __init__(self): |
|
1589 | 1588 | self.rhodecode_ui_ids = [] |
|
1590 | 1589 | self.rhodecode_setting_ids = [] |
|
1591 | 1590 | self.repo_rhodecode_ui_ids = [] |
|
1592 | 1591 | self.repo_rhodecode_setting_ids = [] |
|
1593 | 1592 | |
|
1594 | 1593 | def create_repo_rhodecode_ui( |
|
1595 | 1594 | self, repo, section, value, key=None, active=True, cleanup=True): |
|
1596 | 1595 | key = key or hashlib.sha1( |
|
1597 | 1596 | '{}{}{}'.format(section, value, repo.repo_id)).hexdigest() |
|
1598 | 1597 | |
|
1599 | 1598 | setting = RepoRhodeCodeUi() |
|
1600 | 1599 | setting.repository_id = repo.repo_id |
|
1601 | 1600 | setting.ui_section = section |
|
1602 | 1601 | setting.ui_value = value |
|
1603 | 1602 | setting.ui_key = key |
|
1604 | 1603 | setting.ui_active = active |
|
1605 | 1604 | Session().add(setting) |
|
1606 | 1605 | Session().commit() |
|
1607 | 1606 | |
|
1608 | 1607 | if cleanup: |
|
1609 | 1608 | self.repo_rhodecode_ui_ids.append(setting.ui_id) |
|
1610 | 1609 | return setting |
|
1611 | 1610 | |
|
1612 | 1611 | def create_rhodecode_ui( |
|
1613 | 1612 | self, section, value, key=None, active=True, cleanup=True): |
|
1614 | 1613 | key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest() |
|
1615 | 1614 | |
|
1616 | 1615 | setting = RhodeCodeUi() |
|
1617 | 1616 | setting.ui_section = section |
|
1618 | 1617 | setting.ui_value = value |
|
1619 | 1618 | setting.ui_key = key |
|
1620 | 1619 | setting.ui_active = active |
|
1621 | 1620 | Session().add(setting) |
|
1622 | 1621 | Session().commit() |
|
1623 | 1622 | |
|
1624 | 1623 | if cleanup: |
|
1625 | 1624 | self.rhodecode_ui_ids.append(setting.ui_id) |
|
1626 | 1625 | return setting |
|
1627 | 1626 | |
|
1628 | 1627 | def create_repo_rhodecode_setting( |
|
1629 | 1628 | self, repo, name, value, type_, cleanup=True): |
|
1630 | 1629 | setting = RepoRhodeCodeSetting( |
|
1631 | 1630 | repo.repo_id, key=name, val=value, type=type_) |
|
1632 | 1631 | Session().add(setting) |
|
1633 | 1632 | Session().commit() |
|
1634 | 1633 | |
|
1635 | 1634 | if cleanup: |
|
1636 | 1635 | self.repo_rhodecode_setting_ids.append(setting.app_settings_id) |
|
1637 | 1636 | return setting |
|
1638 | 1637 | |
|
1639 | 1638 | def create_rhodecode_setting(self, name, value, type_, cleanup=True): |
|
1640 | 1639 | setting = RhodeCodeSetting(key=name, val=value, type=type_) |
|
1641 | 1640 | Session().add(setting) |
|
1642 | 1641 | Session().commit() |
|
1643 | 1642 | |
|
1644 | 1643 | if cleanup: |
|
1645 | 1644 | self.rhodecode_setting_ids.append(setting.app_settings_id) |
|
1646 | 1645 | |
|
1647 | 1646 | return setting |
|
1648 | 1647 | |
|
1649 | 1648 | def cleanup(self): |
|
1650 | 1649 | for id_ in self.rhodecode_ui_ids: |
|
1651 | 1650 | setting = RhodeCodeUi.get(id_) |
|
1652 | 1651 | Session().delete(setting) |
|
1653 | 1652 | |
|
1654 | 1653 | for id_ in self.rhodecode_setting_ids: |
|
1655 | 1654 | setting = RhodeCodeSetting.get(id_) |
|
1656 | 1655 | Session().delete(setting) |
|
1657 | 1656 | |
|
1658 | 1657 | for id_ in self.repo_rhodecode_ui_ids: |
|
1659 | 1658 | setting = RepoRhodeCodeUi.get(id_) |
|
1660 | 1659 | Session().delete(setting) |
|
1661 | 1660 | |
|
1662 | 1661 | for id_ in self.repo_rhodecode_setting_ids: |
|
1663 | 1662 | setting = RepoRhodeCodeSetting.get(id_) |
|
1664 | 1663 | Session().delete(setting) |
|
1665 | 1664 | |
|
1666 | 1665 | Session().commit() |
|
1667 | 1666 | |
|
1668 | 1667 | |
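A usage sketch for `settings_util` (hypothetical test code; `repo_stub` is the repository fixture used by other stubs in this module, and the hook value is a placeholder):

    def test_custom_ui_setting(settings_util, repo_stub):
        setting = settings_util.create_repo_rhodecode_ui(
            repo_stub, section='hooks', value='python:my_module.my_hook')
        assert setting.ui_active
        # the created rows are deleted again in settings_util.cleanup()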
|
1669 | 1668 | @pytest.fixture |
|
1670 | 1669 | def no_notifications(request): |
|
1671 | 1670 | notification_patcher = mock.patch( |
|
1672 | 1671 | 'rhodecode.model.notification.NotificationModel.create') |
|
1673 | 1672 | notification_patcher.start() |
|
1674 | 1673 | request.addfinalizer(notification_patcher.stop) |
|
1675 | 1674 | |
|
1676 | 1675 | |
|
1677 | 1676 | @pytest.fixture(scope='session') |
|
1678 | 1677 | def repeat(request): |
|
1679 | 1678 | """ |
|
1680 | 1679 | The number of repetitions is based on this fixture. |
|
1681 | 1680 | |
|
1682 | 1681 | Slower calls may divide it by 10 or 100. It is chosen so that the tests

1683 | 1682 | are not too slow in our default test suite.
|
1684 | 1683 | """ |
|
1685 | 1684 | return request.config.getoption('--repeat') |
|
1686 | 1685 | |
|
1687 | 1686 | |
|
1688 | 1687 | @pytest.fixture |
|
1689 | 1688 | def rhodecode_fixtures(): |
|
1690 | 1689 | return Fixture() |
|
1691 | 1690 | |
|
1692 | 1691 | |
|
1693 | 1692 | @pytest.fixture |
|
1694 | 1693 | def context_stub(): |
|
1695 | 1694 | """ |
|
1696 | 1695 | Stub context object. |
|
1697 | 1696 | """ |
|
1698 | 1697 | context = pyramid.testing.DummyResource() |
|
1699 | 1698 | return context |
|
1700 | 1699 | |
|
1701 | 1700 | |
|
1702 | 1701 | @pytest.fixture |
|
1703 | 1702 | def request_stub(): |
|
1704 | 1703 | """ |
|
1705 | 1704 | Stub request object. |
|
1706 | 1705 | """ |
|
1707 | 1706 | from rhodecode.lib.base import bootstrap_request |
|
1708 | 1707 | request = bootstrap_request(scheme='https') |
|
1709 | 1708 | return request |
|
1710 | 1709 | |
|
1711 | 1710 | |
|
1712 | 1711 | @pytest.fixture |
|
1713 | 1712 | def config_stub(request, request_stub): |
|
1714 | 1713 | """ |
|
1715 | 1714 | Set up pyramid.testing and return the Configurator. |
|
1716 | 1715 | """ |
|
1717 | 1716 | from rhodecode.lib.base import bootstrap_config |
|
1718 | 1717 | config = bootstrap_config(request=request_stub) |
|
1719 | 1718 | |
|
1720 | 1719 | @request.addfinalizer |
|
1721 | 1720 | def cleanup(): |
|
1722 | 1721 | pyramid.testing.tearDown() |
|
1723 | 1722 | |
|
1724 | 1723 | return config |
|
1725 | 1724 | |
|
1726 | 1725 | |
|
1727 | 1726 | @pytest.fixture |
|
1728 | 1727 | def StubIntegrationType(): |
|
1729 | 1728 | class _StubIntegrationType(IntegrationTypeBase): |
|
1730 | 1729 | """ Test integration type class """ |
|
1731 | 1730 | |
|
1732 | 1731 | key = 'test' |
|
1733 | 1732 | display_name = 'Test integration type' |
|
1734 | 1733 | description = 'A test integration type for testing' |
|
1735 | 1734 | |
|
1736 | 1735 | @classmethod |
|
1737 | 1736 | def icon(cls): |
|
1738 | 1737 | return 'test_icon_html_image' |
|
1739 | 1738 | |
|
1740 | 1739 | def __init__(self, settings): |
|
1741 | 1740 | super(_StubIntegrationType, self).__init__(settings) |
|
1742 | 1741 | self.sent_events = [] # for testing |
|
1743 | 1742 | |
|
1744 | 1743 | def send_event(self, event): |
|
1745 | 1744 | self.sent_events.append(event) |
|
1746 | 1745 | |
|
1747 | 1746 | def settings_schema(self): |
|
1748 | 1747 | class SettingsSchema(colander.Schema): |
|
1749 | 1748 | test_string_field = colander.SchemaNode( |
|
1750 | 1749 | colander.String(), |
|
1751 | 1750 | missing=colander.required, |
|
1752 | 1751 | title='test string field', |
|
1753 | 1752 | ) |
|
1754 | 1753 | test_int_field = colander.SchemaNode( |
|
1755 | 1754 | colander.Int(), |
|
1756 | 1755 | title='some integer setting', |
|
1757 | 1756 | ) |
|
1758 | 1757 | return SettingsSchema() |
|
1759 | 1758 | |
|
1760 | 1759 | |
|
1761 | 1760 | integration_type_registry.register_integration_type(_StubIntegrationType) |
|
1762 | 1761 | return _StubIntegrationType |
|
1763 | 1762 | |
|
1764 | 1763 | @pytest.fixture |
|
1765 | 1764 | def stub_integration_settings(): |
|
1766 | 1765 | return { |
|
1767 | 1766 | 'test_string_field': 'some data', |
|
1768 | 1767 | 'test_int_field': 100, |
|
1769 | 1768 | } |
|
1770 | 1769 | |
|
1771 | 1770 | |
|
1772 | 1771 | @pytest.fixture |
|
1773 | 1772 | def repo_integration_stub(request, repo_stub, StubIntegrationType, |
|
1774 | 1773 | stub_integration_settings): |
|
1775 | 1774 | integration = IntegrationModel().create( |
|
1776 | 1775 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1777 | 1776 | name='test repo integration', |
|
1778 | 1777 | repo=repo_stub, repo_group=None, child_repos_only=None) |
|
1779 | 1778 | |
|
1780 | 1779 | @request.addfinalizer |
|
1781 | 1780 | def cleanup(): |
|
1782 | 1781 | IntegrationModel().delete(integration) |
|
1783 | 1782 | |
|
1784 | 1783 | return integration |
|
1785 | 1784 | |
|
1786 | 1785 | |
|
1787 | 1786 | @pytest.fixture |
|
1788 | 1787 | def repogroup_integration_stub(request, test_repo_group, StubIntegrationType, |
|
1789 | 1788 | stub_integration_settings): |
|
1790 | 1789 | integration = IntegrationModel().create( |
|
1791 | 1790 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1792 | 1791 | name='test repogroup integration', |
|
1793 | 1792 | repo=None, repo_group=test_repo_group, child_repos_only=True) |
|
1794 | 1793 | |
|
1795 | 1794 | @request.addfinalizer |
|
1796 | 1795 | def cleanup(): |
|
1797 | 1796 | IntegrationModel().delete(integration) |
|
1798 | 1797 | |
|
1799 | 1798 | return integration |
|
1800 | 1799 | |
|
1801 | 1800 | |
|
1802 | 1801 | @pytest.fixture |
|
1803 | 1802 | def repogroup_recursive_integration_stub(request, test_repo_group, |
|
1804 | 1803 | StubIntegrationType, stub_integration_settings): |
|
1805 | 1804 | integration = IntegrationModel().create( |
|
1806 | 1805 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1807 | 1806 | name='test recursive repogroup integration', |
|
1808 | 1807 | repo=None, repo_group=test_repo_group, child_repos_only=False) |
|
1809 | 1808 | |
|
1810 | 1809 | @request.addfinalizer |
|
1811 | 1810 | def cleanup(): |
|
1812 | 1811 | IntegrationModel().delete(integration) |
|
1813 | 1812 | |
|
1814 | 1813 | return integration |
|
1815 | 1814 | |
|
1816 | 1815 | |
|
1817 | 1816 | @pytest.fixture |
|
1818 | 1817 | def global_integration_stub(request, StubIntegrationType, |
|
1819 | 1818 | stub_integration_settings): |
|
1820 | 1819 | integration = IntegrationModel().create( |
|
1821 | 1820 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1822 | 1821 | name='test global integration', |
|
1823 | 1822 | repo=None, repo_group=None, child_repos_only=None) |
|
1824 | 1823 | |
|
1825 | 1824 | @request.addfinalizer |
|
1826 | 1825 | def cleanup(): |
|
1827 | 1826 | IntegrationModel().delete(integration) |
|
1828 | 1827 | |
|
1829 | 1828 | return integration |
|
1830 | 1829 | |
|
1831 | 1830 | |
|
1832 | 1831 | @pytest.fixture |
|
1833 | 1832 | def root_repos_integration_stub(request, StubIntegrationType, |
|
1834 | 1833 | stub_integration_settings): |
|
1835 | 1834 | integration = IntegrationModel().create( |
|
1836 | 1835 | StubIntegrationType, settings=stub_integration_settings, enabled=True, |
|
1837 | 1836 | name='test root repos integration',
|
1838 | 1837 | repo=None, repo_group=None, child_repos_only=True) |
|
1839 | 1838 | |
|
1840 | 1839 | @request.addfinalizer |
|
1841 | 1840 | def cleanup(): |
|
1842 | 1841 | IntegrationModel().delete(integration) |
|
1843 | 1842 | |
|
1844 | 1843 | return integration |
|
1845 | 1844 | |
|
1846 | 1845 | |
|
1847 | 1846 | @pytest.fixture |
|
1848 | 1847 | def local_dt_to_utc(): |
|
1849 | 1848 | def _factory(dt): |
|
1850 | 1849 | return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone( |
|
1851 | 1850 | dateutil.tz.tzutc()).replace(tzinfo=None) |
|
1852 | 1851 | return _factory |
|
1853 | 1852 | |
|
1854 | 1853 | |
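A usage sketch for the converter above: it re-expresses a naive local datetime as a naive UTC datetime (the example values are arbitrary):

    naive_local = datetime.datetime(2018, 6, 1, 12, 0, 0)
    naive_utc = local_dt_to_utc(naive_local)
    # same instant in time, shifted by the local UTC offset, tzinfo stripped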
|
1855 | 1854 | @pytest.fixture |
|
1856 | 1855 | def disable_anonymous_user(request, baseapp): |
|
1857 | 1856 | set_anonymous_access(False) |
|
1858 | 1857 | |
|
1859 | 1858 | @request.addfinalizer |
|
1860 | 1859 | def cleanup(): |
|
1861 | 1860 | set_anonymous_access(True) |
|
1862 | 1861 | |
|
1863 | 1862 | |
|
1864 | 1863 | @pytest.fixture(scope='module') |
|
1865 | 1864 | def rc_fixture(request): |
|
1866 | 1865 | return Fixture() |
|
1867 | 1866 | |
|
1868 | 1867 | |
|
1869 | 1868 | @pytest.fixture |
|
1870 | 1869 | def repo_groups(request): |
|
1871 | 1870 | fixture = Fixture() |
|
1872 | 1871 | |
|
1873 | 1872 | session = Session() |
|
1874 | 1873 | zombie_group = fixture.create_repo_group('zombie') |
|
1875 | 1874 | parent_group = fixture.create_repo_group('parent') |
|
1876 | 1875 | child_group = fixture.create_repo_group('parent/child') |
|
1877 | 1876 | groups_in_db = session.query(RepoGroup).all() |
|
1878 | 1877 | assert len(groups_in_db) == 3 |
|
1879 | 1878 | assert child_group.group_parent_id == parent_group.group_id |
|
1880 | 1879 | |
|
1881 | 1880 | @request.addfinalizer |
|
1882 | 1881 | def cleanup(): |
|
1883 | 1882 | fixture.destroy_repo_group(zombie_group) |
|
1884 | 1883 | fixture.destroy_repo_group(child_group) |
|
1885 | 1884 | fixture.destroy_repo_group(parent_group) |
|
1886 | 1885 | |
|
1887 | 1886 | return zombie_group, parent_group, child_group |
|