shadow-repos: use numeric repo id for creation of shadow repos....
marcink
r2810:a15bd3a8 default
@@ -1,905 +1,905 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import logging
23 23
24 24 from rhodecode import events
25 25 from rhodecode.api import jsonrpc_method, JSONRPCError, JSONRPCValidationError
26 26 from rhodecode.api.utils import (
27 27 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
28 28 get_pull_request_or_error, get_commit_or_error, get_user_or_error,
29 29 validate_repo_permissions, resolve_ref_or_error)
30 30 from rhodecode.lib.auth import (HasRepoPermissionAnyApi)
31 31 from rhodecode.lib.base import vcs_operation_context
32 32 from rhodecode.lib.utils2 import str2bool
33 33 from rhodecode.model.changeset_status import ChangesetStatusModel
34 34 from rhodecode.model.comment import CommentsModel
35 35 from rhodecode.model.db import Session, ChangesetStatus, ChangesetComment
36 36 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
37 37 from rhodecode.model.settings import SettingsModel
38 38 from rhodecode.model.validation_schema import Invalid
39 39 from rhodecode.model.validation_schema.schemas.reviewer_schema import (
40 40 ReviewerListSchema)
41 41
42 42 log = logging.getLogger(__name__)
43 43
44 44
45 45 @jsonrpc_method()
46 46 def get_pull_request(request, apiuser, pullrequestid, repoid=Optional(None)):
47 47 """
48 48 Get a pull request based on the given ID.
49 49
50 50 :param apiuser: This is filled automatically from the |authtoken|.
51 51 :type apiuser: AuthUser
52 52 :param repoid: Optional, repository name or repository ID from where
53 53 the pull request was opened.
54 54 :type repoid: str or int
55 55 :param pullrequestid: ID of the requested pull request.
56 56 :type pullrequestid: int
57 57
58 58 Example output:
59 59
60 60 .. code-block:: bash
61 61
62 62 "id": <id_given_in_input>,
63 63 "result":
64 64 {
65 65 "pull_request_id": "<pull_request_id>",
66 66 "url": "<url>",
67 67 "title": "<title>",
68 68 "description": "<description>",
69 69 "status" : "<status>",
70 70 "created_on": "<date_time_created>",
71 71 "updated_on": "<date_time_updated>",
72 72 "commit_ids": [
73 73 ...
74 74 "<commit_id>",
75 75 "<commit_id>",
76 76 ...
77 77 ],
78 78 "review_status": "<review_status>",
79 79 "mergeable": {
80 80 "status": "<bool>",
81 81 "message": "<message>",
82 82 },
83 83 "source": {
84 84 "clone_url": "<clone_url>",
85 85 "repository": "<repository_name>",
86 86 "reference":
87 87 {
88 88 "name": "<name>",
89 89 "type": "<type>",
90 90 "commit_id": "<commit_id>",
91 91 }
92 92 },
93 93 "target": {
94 94 "clone_url": "<clone_url>",
95 95 "repository": "<repository_name>",
96 96 "reference":
97 97 {
98 98 "name": "<name>",
99 99 "type": "<type>",
100 100 "commit_id": "<commit_id>",
101 101 }
102 102 },
103 103 "merge": {
104 104 "clone_url": "<clone_url>",
105 105 "reference":
106 106 {
107 107 "name": "<name>",
108 108 "type": "<type>",
109 109 "commit_id": "<commit_id>",
110 110 }
111 111 },
112 112 "author": <user_obj>,
113 113 "reviewers": [
114 114 ...
115 115 {
116 116 "user": "<user_obj>",
117 117 "review_status": "<review_status>",
118 118 }
119 119 ...
120 120 ]
121 121 },
122 122 "error": null
123 123 """
124 124
125 125 pull_request = get_pull_request_or_error(pullrequestid)
126 126 if Optional.extract(repoid):
127 127 repo = get_repo_or_error(repoid)
128 128 else:
129 129 repo = pull_request.target_repo
130 130
131 131 if not PullRequestModel().check_user_read(
132 132 pull_request, apiuser, api=True):
133 133 raise JSONRPCError('repository `%s` or pull request `%s` '
134 134 'does not exist' % (repoid, pullrequestid))
135 135 data = pull_request.get_api_data()
136 136 return data
137 137
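A hedged client-side sketch of calling this method over JSON-RPC (the endpoint path, token, and ids below are placeholders; the ``id``/``auth_token``/``method``/``args`` envelope is assumed):

.. code-block:: python

    import requests

    payload = {
        'id': 1,                        # echoed back as "id" in the response
        'auth_token': 'SECRET_TOKEN',   # placeholder; resolves to `apiuser` server-side
        'method': 'get_pull_request',
        'args': {'pullrequestid': 42},  # repoid omitted: the target repo is used
    }
    response = requests.post('https://code.example.com/_admin/api', json=payload)
    print(response.json()['result']['review_status'])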
138 138
139 139 @jsonrpc_method()
140 140 def get_pull_requests(request, apiuser, repoid, status=Optional('new')):
141 141 """
142 142 Get all pull requests from the repository specified in `repoid`.
143 143
144 144 :param apiuser: This is filled automatically from the |authtoken|.
145 145 :type apiuser: AuthUser
146 146 :param repoid: The repository name or repository ID.
147 147 :type repoid: str or int
148 148 :param status: Only return pull requests with the specified status.
149 149 Valid options are:
150 150 * ``new`` (default)
151 151 * ``open``
152 152 * ``closed``
153 153 :type status: str
154 154
155 155 Example output:
156 156
157 157 .. code-block:: bash
158 158
159 159 "id": <id_given_in_input>,
160 160 "result":
161 161 [
162 162 ...
163 163 {
164 164 "pull_request_id": "<pull_request_id>",
165 165 "url": "<url>",
166 166 "title" : "<title>",
167 167 "description": "<description>",
168 168 "status": "<status>",
169 169 "created_on": "<date_time_created>",
170 170 "updated_on": "<date_time_updated>",
171 171 "commit_ids": [
172 172 ...
173 173 "<commit_id>",
174 174 "<commit_id>",
175 175 ...
176 176 ],
177 177 "review_status": "<review_status>",
178 178 "mergeable": {
179 179 "status": "<bool>",
180 180 "message: "<message>",
181 181 },
182 182 "source": {
183 183 "clone_url": "<clone_url>",
184 184 "reference":
185 185 {
186 186 "name": "<name>",
187 187 "type": "<type>",
188 188 "commit_id": "<commit_id>",
189 189 }
190 190 },
191 191 "target": {
192 192 "clone_url": "<clone_url>",
193 193 "reference":
194 194 {
195 195 "name": "<name>",
196 196 "type": "<type>",
197 197 "commit_id": "<commit_id>",
198 198 }
199 199 },
200 200 "merge": {
201 201 "clone_url": "<clone_url>",
202 202 "reference":
203 203 {
204 204 "name": "<name>",
205 205 "type": "<type>",
206 206 "commit_id": "<commit_id>",
207 207 }
208 208 },
209 209 "author": <user_obj>,
210 210 "reviewers": [
211 211 ...
212 212 {
213 213 "user": "<user_obj>",
214 214 "review_status": "<review_status>",
215 215 }
216 216 ...
217 217 ]
218 218 }
219 219 ...
220 220 ],
221 221 "error": null
222 222
223 223 """
224 224 repo = get_repo_or_error(repoid)
225 225 if not has_superadmin_permission(apiuser):
226 226 _perms = (
227 227 'repository.admin', 'repository.write', 'repository.read',)
228 228 validate_repo_permissions(apiuser, repoid, repo, _perms)
229 229
230 230 status = Optional.extract(status)
231 231 pull_requests = PullRequestModel().get_all(repo, statuses=[status])
232 232 data = [pr.get_api_data() for pr in pull_requests]
233 233 return data
234 234
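The ``Optional``/``Optional.extract`` pattern above drives every default in this module; a simplified stand-in for illustration (this is not the actual ``rhodecode.api.utils`` implementation):

.. code-block:: python

    class Optional(object):
        """Marker for an argument the caller did not supply."""
        def __init__(self, default):
            self.default = default

        @classmethod
        def extract(cls, val):
            # unwrap the default if the marker survived, else pass through
            return val.default if isinstance(val, cls) else val

    status = Optional('new')
    assert Optional.extract(status) == 'new'        # caller omitted `status`
    assert Optional.extract('closed') == 'closed'   # caller passed a value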
235 235
236 236 @jsonrpc_method()
237 237 def merge_pull_request(
238 238 request, apiuser, pullrequestid, repoid=Optional(None),
239 239 userid=Optional(OAttr('apiuser'))):
240 240 """
241 241 Merge the pull request specified by `pullrequestid` into its target
242 242 repository.
243 243
244 244 :param apiuser: This is filled automatically from the |authtoken|.
245 245 :type apiuser: AuthUser
246 246 :param repoid: Optional, repository name or repository ID of the
247 247 target repository to which the |pr| is to be merged.
248 248 :type repoid: str or int
249 249 :param pullrequestid: ID of the pull request which shall be merged.
250 250 :type pullrequestid: int
251 251 :param userid: Merge the pull request as this user.
252 252 :type userid: Optional(str or int)
253 253
254 254 Example output:
255 255
256 256 .. code-block:: bash
257 257
258 258 "id": <id_given_in_input>,
259 259 "result": {
260 260 "executed": "<bool>",
261 261 "failure_reason": "<int>",
262 262 "merge_commit_id": "<merge_commit_id>",
263 263 "possible": "<bool>",
264 264 "merge_ref": {
265 265 "commit_id": "<commit_id>",
266 266 "type": "<type>",
267 267 "name": "<name>"
268 268 }
269 269 },
270 270 "error": null
271 271 """
272 272 pull_request = get_pull_request_or_error(pullrequestid)
273 273 if Optional.extract(repoid):
274 274 repo = get_repo_or_error(repoid)
275 275 else:
276 276 repo = pull_request.target_repo
277 277
278 278 if not isinstance(userid, Optional):
279 279 if (has_superadmin_permission(apiuser) or
280 280 HasRepoPermissionAnyApi('repository.admin')(
281 281 user=apiuser, repo_name=repo.repo_name)):
282 282 apiuser = get_user_or_error(userid)
283 283 else:
284 284 raise JSONRPCError('userid is not the same as your user')
285 285
286 286 check = MergeCheck.validate(
287 287 pull_request, user=apiuser, translator=request.translate)
288 288 merge_possible = not check.failed
289 289
290 290 if not merge_possible:
291 291 error_messages = []
292 292 for err_type, error_msg in check.errors:
293 293 error_msg = request.translate(error_msg)
294 294 error_messages.append(error_msg)
295 295
296 296 reasons = ','.join(error_messages)
297 297 raise JSONRPCError(
298 298 'merge not possible for following reasons: {}'.format(reasons))
299 299
300 300 target_repo = pull_request.target_repo
301 301 extras = vcs_operation_context(
302 302 request.environ, repo_name=target_repo.repo_name,
303 303 username=apiuser.username, action='push',
304 304 scm=target_repo.repo_type)
305 merge_response = PullRequestModel().merge(
305 merge_response = PullRequestModel().merge_repo(
306 306 pull_request, apiuser, extras=extras)
307 307 if merge_response.executed:
308 308 PullRequestModel().close_pull_request(
309 309 pull_request.pull_request_id, apiuser)
310 310
311 311 Session().commit()
312 312
313 313 # In previous versions the merge response directly contained the merge
314 314 # commit id. It is now contained in the merge reference object. To be
315 315 # backwards compatible we have to extract it again.
316 316 merge_response = merge_response._asdict()
317 317 merge_response['merge_commit_id'] = merge_response['merge_ref'].commit_id
318 318
319 319 return merge_response
320 320
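The backwards-compatibility shim above leans on namedtuple semantics; a minimal sketch (field names mirror how ``MergeResponse`` and ``merge_ref`` are used here, values hypothetical):

.. code-block:: python

    from collections import namedtuple

    MergeRef = namedtuple('MergeRef', ['commit_id', 'type', 'name'])
    MergeResponse = namedtuple(
        'MergeResponse', ['possible', 'executed', 'merge_ref', 'failure_reason'])

    resp = MergeResponse(True, True, MergeRef('deadbeef', 'branch', 'merge'), None)
    data = resp._asdict()                    # OrderedDict keyed by field name
    data['merge_commit_id'] = data['merge_ref'].commit_id   # restore the old key
    assert data['merge_commit_id'] == 'deadbeef'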
321 321
322 322 @jsonrpc_method()
323 323 def get_pull_request_comments(
324 324 request, apiuser, pullrequestid, repoid=Optional(None)):
325 325 """
326 326 Get all comments of the pull request specified by `pullrequestid`.
327 327
328 328 :param apiuser: This is filled automatically from the |authtoken|.
329 329 :type apiuser: AuthUser
330 330 :param repoid: Optional repository name or repository ID.
331 331 :type repoid: str or int
332 332 :param pullrequestid: The pull request ID.
333 333 :type pullrequestid: int
334 334
335 335 Example output:
336 336
337 337 .. code-block:: bash
338 338
339 339 id : <id_given_in_input>
340 340 result : [
341 341 {
342 342 "comment_author": {
343 343 "active": true,
344 344 "full_name_or_username": "Tom Gore",
345 345 "username": "admin"
346 346 },
347 347 "comment_created_on": "2017-01-02T18:43:45.533",
348 348 "comment_f_path": null,
349 349 "comment_id": 25,
350 350 "comment_lineno": null,
351 351 "comment_status": {
352 352 "status": "under_review",
353 353 "status_lbl": "Under Review"
354 354 },
355 355 "comment_text": "Example text",
356 356 "comment_type": null,
357 357 "pull_request_version": null
358 358 }
359 359 ],
360 360 error : null
361 361 """
362 362
363 363 pull_request = get_pull_request_or_error(pullrequestid)
364 364 if Optional.extract(repoid):
365 365 repo = get_repo_or_error(repoid)
366 366 else:
367 367 repo = pull_request.target_repo
368 368
369 369 if not PullRequestModel().check_user_read(
370 370 pull_request, apiuser, api=True):
371 371 raise JSONRPCError('repository `%s` or pull request `%s` '
372 372 'does not exist' % (repoid, pullrequestid))
373 373
374 374 (pull_request_latest,
375 375 pull_request_at_ver,
376 376 pull_request_display_obj,
377 377 at_version) = PullRequestModel().get_pr_version(
378 378 pull_request.pull_request_id, version=None)
379 379
380 380 versions = pull_request_display_obj.versions()
381 381 ver_map = {
382 382 ver.pull_request_version_id: cnt
383 383 for cnt, ver in enumerate(versions, 1)
384 384 }
385 385
386 386 # GENERAL COMMENTS with versions #
387 387 q = CommentsModel()._all_general_comments_of_pull_request(pull_request)
388 388 q = q.order_by(ChangesetComment.comment_id.asc())
389 389 general_comments = q.all()
390 390
391 391 # INLINE COMMENTS with versions #
392 392 q = CommentsModel()._all_inline_comments_of_pull_request(pull_request)
393 393 q = q.order_by(ChangesetComment.comment_id.asc())
394 394 inline_comments = q.all()
395 395
396 396 data = []
397 397 for comment in inline_comments + general_comments:
398 398 full_data = comment.get_api_data()
399 399 pr_version_id = None
400 400 if comment.pull_request_version_id:
401 401 pr_version_id = 'v{}'.format(
402 402 ver_map[comment.pull_request_version_id])
403 403
404 404 # sanitize some entries
405 405
406 406 full_data['pull_request_version'] = pr_version_id
407 407 full_data['comment_author'] = {
408 408 'username': full_data['comment_author'].username,
409 409 'full_name_or_username': full_data['comment_author'].full_name_or_username,
410 410 'active': full_data['comment_author'].active,
411 411 }
412 412
413 413 if full_data['comment_status']:
414 414 full_data['comment_status'] = {
415 415 'status': full_data['comment_status'][0].status,
416 416 'status_lbl': full_data['comment_status'][0].status_lbl,
417 417 }
418 418 else:
419 419 full_data['comment_status'] = {}
420 420
421 421 data.append(full_data)
422 422 return data
423 423
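``ver_map`` above numbers pull request versions from 1 so comments can be labelled ``v1``, ``v2``, and so on; a small illustration of that enumerate-based mapping (version ids hypothetical):

.. code-block:: python

    versions = [101, 205, 309]   # pull_request_version_id values, oldest first
    ver_map = {ver: cnt for cnt, ver in enumerate(versions, 1)}
    assert ver_map == {101: 1, 205: 2, 309: 3}
    assert 'v{}'.format(ver_map[205]) == 'v2'   # label attached to a comment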
424 424
425 425 @jsonrpc_method()
426 426 def comment_pull_request(
427 427 request, apiuser, pullrequestid, repoid=Optional(None),
428 428 message=Optional(None), commit_id=Optional(None), status=Optional(None),
429 429 comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
430 430 resolves_comment_id=Optional(None),
431 431 userid=Optional(OAttr('apiuser'))):
432 432 """
433 433 Comment on the pull request specified by the `pullrequestid`,
434 434 in the |repo| specified by the `repoid`, and optionally change the
435 435 review status.
436 436
437 437 :param apiuser: This is filled automatically from the |authtoken|.
438 438 :type apiuser: AuthUser
439 439 :param repoid: Optional repository name or repository ID.
440 440 :type repoid: str or int
441 441 :param pullrequestid: The pull request ID.
442 442 :type pullrequestid: int
443 443 :param commit_id: Specify the commit_id for which to set a comment. If
444 444 the given commit_id differs from the latest commit in the PR, the
445 445 status change won't be performed.
446 446 :type commit_id: str
447 447 :param message: The text content of the comment.
448 448 :type message: str
449 449 :param status: (**Optional**) Set the approval status of the pull
450 450 request. One of: 'not_reviewed', 'approved', 'rejected',
451 451 'under_review'
452 452 :type status: str
453 453 :param comment_type: Comment type, one of: 'note', 'todo'
454 454 :type comment_type: Optional(str), default: 'note'
455 455 :param userid: Comment on the pull request as this user.
456 456 :type userid: Optional(str or int)
457 457
458 458 Example output:
459 459
460 460 .. code-block:: bash
461 461
462 462 id : <id_given_in_input>
463 463 result : {
464 464 "pull_request_id": "<Integer>",
465 465 "comment_id": "<Integer>",
466 466 "status": {"given": <given_status>,
467 467 "was_changed": <bool status_was_actually_changed> },
468 468 },
469 469 error : null
470 470 """
471 471 pull_request = get_pull_request_or_error(pullrequestid)
472 472 if Optional.extract(repoid):
473 473 repo = get_repo_or_error(repoid)
474 474 else:
475 475 repo = pull_request.target_repo
476 476
477 477 if not isinstance(userid, Optional):
478 478 if (has_superadmin_permission(apiuser) or
479 479 HasRepoPermissionAnyApi('repository.admin')(
480 480 user=apiuser, repo_name=repo.repo_name)):
481 481 apiuser = get_user_or_error(userid)
482 482 else:
483 483 raise JSONRPCError('userid is not the same as your user')
484 484
485 485 if not PullRequestModel().check_user_read(
486 486 pull_request, apiuser, api=True):
487 487 raise JSONRPCError('repository `%s` does not exist' % (repoid,))
488 488 message = Optional.extract(message)
489 489 status = Optional.extract(status)
490 490 commit_id = Optional.extract(commit_id)
491 491 comment_type = Optional.extract(comment_type)
492 492 resolves_comment_id = Optional.extract(resolves_comment_id)
493 493
494 494 if not message and not status:
495 495 raise JSONRPCError(
496 496 'Both message and status parameters are missing. '
497 497 'At least one is required.')
498 498
499 499 if (status not in (st[0] for st in ChangesetStatus.STATUSES) and
500 500 status is not None):
501 501 raise JSONRPCError('Unknown comment status: `%s`' % status)
502 502
503 503 if commit_id and commit_id not in pull_request.revisions:
504 504 raise JSONRPCError(
505 505 'Invalid commit_id `%s` for this pull request.' % commit_id)
506 506
507 507 allowed_to_change_status = PullRequestModel().check_user_change_status(
508 508 pull_request, apiuser)
509 509
510 510 # if commit_id is passed, re-validate whether the user is allowed to change
511 511 # status based on the latest commit_id from the PR
512 512 if commit_id:
513 513 commit_idx = pull_request.revisions.index(commit_id)
514 514 if commit_idx != 0:
515 515 allowed_to_change_status = False
516 516
517 517 if resolves_comment_id:
518 518 comment = ChangesetComment.get(resolves_comment_id)
519 519 if not comment:
520 520 raise JSONRPCError(
521 521 'Invalid resolves_comment_id `%s` for this pull request.'
522 522 % resolves_comment_id)
523 523 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
524 524 raise JSONRPCError(
525 525 'Comment `%s` is wrong type for setting status to resolved.'
526 526 % resolves_comment_id)
527 527
528 528 text = message
529 529 status_label = ChangesetStatus.get_status_lbl(status)
530 530 if status and allowed_to_change_status:
531 531 st_message = ('Status change %(transition_icon)s %(status)s'
532 532 % {'transition_icon': '>', 'status': status_label})
533 533 text = message or st_message
534 534
535 535 rc_config = SettingsModel().get_all_settings()
536 536 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
537 537
538 538 status_change = status and allowed_to_change_status
539 539 comment = CommentsModel().create(
540 540 text=text,
541 541 repo=pull_request.target_repo.repo_id,
542 542 user=apiuser.user_id,
543 543 pull_request=pull_request.pull_request_id,
544 544 f_path=None,
545 545 line_no=None,
546 546 status_change=(status_label if status_change else None),
547 547 status_change_type=(status if status_change else None),
548 548 closing_pr=False,
549 549 renderer=renderer,
550 550 comment_type=comment_type,
551 551 resolves_comment_id=resolves_comment_id,
552 552 auth_user=apiuser
553 553 )
554 554
555 555 if allowed_to_change_status and status:
556 556 ChangesetStatusModel().set_status(
557 557 pull_request.target_repo.repo_id,
558 558 status,
559 559 apiuser.user_id,
560 560 comment,
561 561 pull_request=pull_request.pull_request_id
562 562 )
563 563 Session().flush()
564 564
565 565 Session().commit()
566 566 data = {
567 567 'pull_request_id': pull_request.pull_request_id,
568 568 'comment_id': comment.comment_id if comment else None,
569 569 'status': {'given': status, 'was_changed': status_change},
570 570 }
571 571 return data
572 572
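The ``commit_id`` guard above assumes ``pull_request.revisions`` is ordered newest first, so only a comment on the newest commit may change the status; a compact illustration (commit ids hypothetical):

.. code-block:: python

    revisions = ['c3', 'c2', 'c1']   # newest first, as stored on the PR
    commit_id = 'c2'

    allowed_to_change_status = True
    if commit_id and revisions.index(commit_id) != 0:
        # commenting on anything but the newest commit blocks status changes
        allowed_to_change_status = False

    assert allowed_to_change_status is False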
573 573
574 574 @jsonrpc_method()
575 575 def create_pull_request(
576 576 request, apiuser, source_repo, target_repo, source_ref, target_ref,
577 577 title, description=Optional(''), reviewers=Optional(None)):
578 578 """
579 579 Creates a new pull request.
580 580
581 581 Accepts refs in the following formats:
582 582
583 583 * branch:<branch_name>:<sha>
584 584 * branch:<branch_name>
585 585 * bookmark:<bookmark_name>:<sha> (Mercurial only)
586 586 * bookmark:<bookmark_name> (Mercurial only)
587 587
588 588 :param apiuser: This is filled automatically from the |authtoken|.
589 589 :type apiuser: AuthUser
590 590 :param source_repo: Set the source repository name.
591 591 :type source_repo: str
592 592 :param target_repo: Set the target repository name.
593 593 :type target_repo: str
594 594 :param source_ref: Set the source ref name.
595 595 :type source_ref: str
596 596 :param target_ref: Set the target ref name.
597 597 :type target_ref: str
598 598 :param title: Set the pull request title.
599 599 :type title: str
600 600 :param description: Set the pull request description.
601 601 :type description: Optional(str)
602 602 :param reviewers: Set the new pull request reviewers list.
603 603 Reviewers defined by review rules will be added automatically to the
604 604 given list.
605 605 :type reviewers: Optional(list)
606 606 Accepts username strings or objects of the format:
607 607
608 608 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
609 609 """
610 610
611 611 source_db_repo = get_repo_or_error(source_repo)
612 612 target_db_repo = get_repo_or_error(target_repo)
613 613 if not has_superadmin_permission(apiuser):
614 614 _perms = ('repository.admin', 'repository.write', 'repository.read',)
615 615 validate_repo_permissions(apiuser, source_repo, source_db_repo, _perms)
616 616
617 617 full_source_ref = resolve_ref_or_error(source_ref, source_db_repo)
618 618 full_target_ref = resolve_ref_or_error(target_ref, target_db_repo)
619 619 source_commit = get_commit_or_error(full_source_ref, source_db_repo)
620 620 target_commit = get_commit_or_error(full_target_ref, target_db_repo)
621 621 source_scm = source_db_repo.scm_instance()
622 622 target_scm = target_db_repo.scm_instance()
623 623
624 624 commit_ranges = target_scm.compare(
625 625 target_commit.raw_id, source_commit.raw_id, source_scm,
626 626 merge=True, pre_load=[])
627 627
628 628 ancestor = target_scm.get_common_ancestor(
629 629 target_commit.raw_id, source_commit.raw_id, source_scm)
630 630
631 631 if not commit_ranges:
632 632 raise JSONRPCError('no commits found')
633 633
634 634 if not ancestor:
635 635 raise JSONRPCError('no common ancestor found')
636 636
637 637 reviewer_objects = Optional.extract(reviewers) or []
638 638
639 639 if reviewer_objects:
640 640 schema = ReviewerListSchema()
641 641 try:
642 642 reviewer_objects = schema.deserialize(reviewer_objects)
643 643 except Invalid as err:
644 644 raise JSONRPCValidationError(colander_exc=err)
645 645
646 646 # validate users
647 647 for reviewer_object in reviewer_objects:
648 648 user = get_user_or_error(reviewer_object['username'])
649 649 reviewer_object['user_id'] = user.user_id
650 650
651 651 get_default_reviewers_data, get_validated_reviewers = \
652 652 PullRequestModel().get_reviewer_functions()
653 653
654 654 reviewer_rules = get_default_reviewers_data(
655 655 apiuser.get_instance(), source_db_repo,
656 656 source_commit, target_db_repo, target_commit)
657 657
658 658 # specified rules are later re-validated, thus we can assume users will
659 659 # eventually provide those that meet the reviewer criteria.
660 660 if not reviewer_objects:
661 661 reviewer_objects = reviewer_rules['reviewers']
662 662
663 663 try:
664 664 reviewers = get_validated_reviewers(
665 665 reviewer_objects, reviewer_rules)
666 666 except ValueError as e:
667 667 raise JSONRPCError('Reviewers Validation: {}'.format(e))
668 668
669 669 pull_request_model = PullRequestModel()
670 670 pull_request = pull_request_model.create(
671 671 created_by=apiuser.user_id,
672 672 source_repo=source_repo,
673 673 source_ref=full_source_ref,
674 674 target_repo=target_repo,
675 675 target_ref=full_target_ref,
676 676 revisions=reversed(
677 677 [commit.raw_id for commit in reversed(commit_ranges)]),
678 678 reviewers=reviewers,
679 679 title=title,
680 680 description=Optional.extract(description),
681 681 auth_user=apiuser
682 682 )
683 683
684 684 Session().commit()
685 685 data = {
686 686 'msg': 'Created new pull request `{}`'.format(title),
687 687 'pull_request_id': pull_request.pull_request_id,
688 688 }
689 689 return data
690 690
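A hedged sketch of the JSON-RPC ``args`` for this method (repository and ref names are placeholders; the reviewer entry shape follows the docstring above):

.. code-block:: python

    args = {
        'source_repo': 'group/backend',
        'target_repo': 'group/backend',
        'source_ref': 'branch:feature-x',   # the :<sha> suffix is optional
        'target_ref': 'branch:default',
        'title': 'Feature X',
        'description': 'Optional longer text',
        'reviewers': [
            {'username': 'nick', 'reasons': ['original author'], 'mandatory': False},
        ],
    }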
691 691
692 692 @jsonrpc_method()
693 693 def update_pull_request(
694 694 request, apiuser, pullrequestid, repoid=Optional(None),
695 695 title=Optional(''), description=Optional(''), reviewers=Optional(None),
696 696 update_commits=Optional(None)):
697 697 """
698 698 Updates a pull request.
699 699
700 700 :param apiuser: This is filled automatically from the |authtoken|.
701 701 :type apiuser: AuthUser
702 702 :param repoid: Optional repository name or repository ID.
703 703 :type repoid: str or int
704 704 :param pullrequestid: The pull request ID.
705 705 :type pullrequestid: int
706 706 :param title: Set the pull request title.
707 707 :type title: str
708 708 :param description: Update pull request description.
709 709 :type description: Optional(str)
710 710 :param reviewers: Update pull request reviewers list with new value.
711 711 :type reviewers: Optional(list)
712 712 Accepts username strings or objects of the format:
713 713
714 714 [{'username': 'nick', 'reasons': ['original author'], 'mandatory': <bool>}]
715 715
716 716 :param update_commits: Trigger an update of the commits for this pull request.
717 717 :type update_commits: Optional(bool)
718 718
719 719 Example output:
720 720
721 721 .. code-block:: bash
722 722
723 723 id : <id_given_in_input>
724 724 result : {
725 725 "msg": "Updated pull request `63`",
726 726 "pull_request": <pull_request_object>,
727 727 "updated_reviewers": {
728 728 "added": [
729 729 "username"
730 730 ],
731 731 "removed": []
732 732 },
733 733 "updated_commits": {
734 734 "added": [
735 735 "<sha1_hash>"
736 736 ],
737 737 "common": [
738 738 "<sha1_hash>",
739 739 "<sha1_hash>",
740 740 ],
741 741 "removed": []
742 742 }
743 743 }
744 744 error : null
745 745 """
746 746
747 747 pull_request = get_pull_request_or_error(pullrequestid)
748 748 if Optional.extract(repoid):
749 749 repo = get_repo_or_error(repoid)
750 750 else:
751 751 repo = pull_request.target_repo
752 752
753 753 if not PullRequestModel().check_user_update(
754 754 pull_request, apiuser, api=True):
755 755 raise JSONRPCError(
756 756 'pull request `%s` update failed, no permission to update.' % (
757 757 pullrequestid,))
758 758 if pull_request.is_closed():
759 759 raise JSONRPCError(
760 760 'pull request `%s` update failed, pull request is closed' % (
761 761 pullrequestid,))
762 762
763 763 reviewer_objects = Optional.extract(reviewers) or []
764 764
765 765 if reviewer_objects:
766 766 schema = ReviewerListSchema()
767 767 try:
768 768 reviewer_objects = schema.deserialize(reviewer_objects)
769 769 except Invalid as err:
770 770 raise JSONRPCValidationError(colander_exc=err)
771 771
772 772 # validate users
773 773 for reviewer_object in reviewer_objects:
774 774 user = get_user_or_error(reviewer_object['username'])
775 775 reviewer_object['user_id'] = user.user_id
776 776
777 777 get_default_reviewers_data, get_validated_reviewers = \
778 778 PullRequestModel().get_reviewer_functions()
779 779
780 780 # re-use stored rules
781 781 reviewer_rules = pull_request.reviewer_data
782 782 try:
783 783 reviewers = get_validated_reviewers(
784 784 reviewer_objects, reviewer_rules)
785 785 except ValueError as e:
786 786 raise JSONRPCError('Reviewers Validation: {}'.format(e))
787 787 else:
788 788 reviewers = []
789 789
790 790 title = Optional.extract(title)
791 791 description = Optional.extract(description)
792 792 if title or description:
793 793 PullRequestModel().edit(
794 794 pull_request, title or pull_request.title,
795 795 description or pull_request.description, apiuser)
796 796 Session().commit()
797 797
798 798 commit_changes = {"added": [], "common": [], "removed": []}
799 799 if str2bool(Optional.extract(update_commits)):
800 800 if PullRequestModel().has_valid_update_type(pull_request):
801 801 update_response = PullRequestModel().update_commits(
802 802 pull_request)
803 803 commit_changes = update_response.changes or commit_changes
804 804 Session().commit()
805 805
806 806 reviewers_changes = {"added": [], "removed": []}
807 807 if reviewers:
808 808 added_reviewers, removed_reviewers = \
809 809 PullRequestModel().update_reviewers(pull_request, reviewers, apiuser)
810 810
811 811 reviewers_changes['added'] = sorted(
812 812 [get_user_or_error(n).username for n in added_reviewers])
813 813 reviewers_changes['removed'] = sorted(
814 814 [get_user_or_error(n).username for n in removed_reviewers])
815 815 Session().commit()
816 816
817 817 data = {
818 818 'msg': 'Updated pull request `{}`'.format(
819 819 pull_request.pull_request_id),
820 820 'pull_request': pull_request.get_api_data(),
821 821 'updated_commits': commit_changes,
822 822 'updated_reviewers': reviewers_changes
823 823 }
824 824
825 825 return data
826 826
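``update_commits`` is run through ``str2bool``, so string values arriving over JSON also work; a simplified stand-in for illustration (the exact accepted token set lives in ``rhodecode.lib.utils2`` and is an assumption here):

.. code-block:: python

    def str2bool(value):
        # simplified stand-in, not the rhodecode.lib.utils2 implementation
        if isinstance(value, bool):
            return value
        return str(value).strip().lower() in ('true', 'yes', 'on', 'y', 't', '1')

    assert str2bool('true') is True    # string form over JSON-RPC
    assert str2bool(None) is False     # Optional default extracts to None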
827 827
828 828 @jsonrpc_method()
829 829 def close_pull_request(
830 830 request, apiuser, pullrequestid, repoid=Optional(None),
831 831 userid=Optional(OAttr('apiuser')), message=Optional('')):
832 832 """
833 833 Close the pull request specified by `pullrequestid`.
834 834
835 835 :param apiuser: This is filled automatically from the |authtoken|.
836 836 :type apiuser: AuthUser
837 837 :param repoid: Repository name or repository ID to which the pull
838 838 request belongs.
839 839 :type repoid: str or int
840 840 :param pullrequestid: ID of the pull request to be closed.
841 841 :type pullrequestid: int
842 842 :param userid: Close the pull request as this user.
843 843 :type userid: Optional(str or int)
844 844 :param message: Optional message to close the Pull Request with. If not
845 845 specified, it will be generated automatically.
846 846 :type message: Optional(str)
847 847
848 848 Example output:
849 849
850 850 .. code-block:: bash
851 851
852 852 "id": <id_given_in_input>,
853 853 "result": {
854 854 "pull_request_id": "<int>",
855 855 "close_status": "<str:status_lbl>,
856 856 "closed": "<bool>"
857 857 },
858 858 "error": null
859 859
860 860 """
861 861 _ = request.translate
862 862
863 863 pull_request = get_pull_request_or_error(pullrequestid)
864 864 if Optional.extract(repoid):
865 865 repo = get_repo_or_error(repoid)
866 866 else:
867 867 repo = pull_request.target_repo
868 868
869 869 if not isinstance(userid, Optional):
870 870 if (has_superadmin_permission(apiuser) or
871 871 HasRepoPermissionAnyApi('repository.admin')(
872 872 user=apiuser, repo_name=repo.repo_name)):
873 873 apiuser = get_user_or_error(userid)
874 874 else:
875 875 raise JSONRPCError('userid is not the same as your user')
876 876
877 877 if pull_request.is_closed():
878 878 raise JSONRPCError(
879 879 'pull request `%s` is already closed' % (pullrequestid,))
880 880
881 881 # only owner or admin or person with write permissions
882 882 allowed_to_close = PullRequestModel().check_user_update(
883 883 pull_request, apiuser, api=True)
884 884
885 885 if not allowed_to_close:
886 886 raise JSONRPCError(
887 887 'pull request `%s` close failed, no permission to close.' % (
888 888 pullrequestid,))
889 889
890 890 # the message used to close the PR; if empty, one is generated automatically
891 891 message = Optional.extract(message)
892 892
893 893 # finally close the PR, with proper message comment
894 894 comment, status = PullRequestModel().close_pull_request_with_comment(
895 895 pull_request, apiuser, repo, message=message)
896 896 status_lbl = ChangesetStatus.get_status_lbl(status)
897 897
898 898 Session().commit()
899 899
900 900 data = {
901 901 'pull_request_id': pull_request.pull_request_id,
902 902 'close_status': status_lbl,
903 903 'closed': True,
904 904 }
905 905 return data
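
A hedged JSON-RPC ``args`` sketch for this method (``userid`` defaults to the token's own user; the id and message are hypothetical):

.. code-block:: python

    args = {
        'pullrequestid': 42,
        'message': 'Superseded by a newer pull request.',
        # 'userid': 'other-user',   # requires admin rights on the repo
    }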
@@ -1,1203 +1,1203 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import mock
21 21 import pytest
22 22
23 23 import rhodecode
24 24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 25 from rhodecode.lib.vcs.nodes import FileNode
26 26 from rhodecode.lib import helpers as h
27 27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 28 from rhodecode.model.db import (
29 29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment)
30 30 from rhodecode.model.meta import Session
31 31 from rhodecode.model.pull_request import PullRequestModel
32 32 from rhodecode.model.user import UserModel
33 33 from rhodecode.tests import (
34 34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35 35 from rhodecode.tests.utils import AssertResponse
36 36
37 37
38 38 def route_path(name, params=None, **kwargs):
39 39 import urllib
40 40
41 41 base_url = {
42 42 'repo_changelog': '/{repo_name}/changelog',
43 43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
44 44 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
45 45 'pullrequest_show_all': '/{repo_name}/pull-request',
46 46 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
47 47 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
48 48 'pullrequest_repo_destinations': '/{repo_name}/pull-request/repo-destinations',
49 49 'pullrequest_new': '/{repo_name}/pull-request/new',
50 50 'pullrequest_create': '/{repo_name}/pull-request/create',
51 51 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
52 52 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
53 53 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
54 54 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
55 55 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
56 56 }[name].format(**kwargs)
57 57
58 58 if params:
59 59 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
60 60 return base_url
61 61
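``route_path`` resolves the test URLs from the map above; a quick usage sketch (repository name and id hypothetical):

.. code-block:: python

    url = route_path('pullrequest_show', repo_name='repo1', pull_request_id=7)
    assert url == '/repo1/pull-request/7'

    url = route_path('pullrequest_show_all', params={'closed': 1}, repo_name='repo1')
    assert url == '/repo1/pull-request?closed=1'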
62 62
63 63 @pytest.mark.usefixtures('app', 'autologin_user')
64 64 @pytest.mark.backends("git", "hg")
65 65 class TestPullrequestsView(object):
66 66
67 67 def test_index(self, backend):
68 68 self.app.get(route_path(
69 69 'pullrequest_new',
70 70 repo_name=backend.repo_name))
71 71
72 72 def test_option_menu_create_pull_request_exists(self, backend):
73 73 repo_name = backend.repo_name
74 74 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
75 75
76 76 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
77 77 'pullrequest_new', repo_name=repo_name)
78 78 response.mustcontain(create_pr_link)
79 79
80 80 def test_create_pr_form_with_raw_commit_id(self, backend):
81 81 repo = backend.repo
82 82
83 83 self.app.get(
84 84 route_path('pullrequest_new',
85 85 repo_name=repo.repo_name,
86 86 commit=repo.get_commit().raw_id),
87 87 status=200)
88 88
89 89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
90 90 def test_show(self, pr_util, pr_merge_enabled):
91 91 pull_request = pr_util.create_pull_request(
92 92 mergeable=pr_merge_enabled, enable_notifications=False)
93 93
94 94 response = self.app.get(route_path(
95 95 'pullrequest_show',
96 96 repo_name=pull_request.target_repo.scm_instance().name,
97 97 pull_request_id=pull_request.pull_request_id))
98 98
99 99 for commit_id in pull_request.revisions:
100 100 response.mustcontain(commit_id)
101 101
102 102 assert pull_request.target_ref_parts.type in response
103 103 assert pull_request.target_ref_parts.name in response
104 104 target_clone_url = pull_request.target_repo.clone_url()
105 105 assert target_clone_url in response
106 106
107 107 assert 'class="pull-request-merge"' in response
108 108 assert (
109 109 'Server-side pull request merging is disabled.'
110 110 in response) != pr_merge_enabled
111 111
112 112 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
113 113 # Logout
114 114 response = self.app.post(
115 115 h.route_path('logout'),
116 116 params={'csrf_token': csrf_token})
117 117 # Login as regular user
118 118 response = self.app.post(h.route_path('login'),
119 119 {'username': TEST_USER_REGULAR_LOGIN,
120 120 'password': 'test12'})
121 121
122 122 pull_request = pr_util.create_pull_request(
123 123 author=TEST_USER_REGULAR_LOGIN)
124 124
125 125 response = self.app.get(route_path(
126 126 'pullrequest_show',
127 127 repo_name=pull_request.target_repo.scm_instance().name,
128 128 pull_request_id=pull_request.pull_request_id))
129 129
130 130 response.mustcontain('Server-side pull request merging is disabled.')
131 131
132 132 assert_response = response.assert_response()
133 133 # for a regular user without merge permissions, we don't see it
134 134 assert_response.no_element_exists('#close-pull-request-action')
135 135
136 136 user_util.grant_user_permission_to_repo(
137 137 pull_request.target_repo,
138 138 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
139 139 'repository.write')
140 140 response = self.app.get(route_path(
141 141 'pullrequest_show',
142 142 repo_name=pull_request.target_repo.scm_instance().name,
143 143 pull_request_id=pull_request.pull_request_id))
144 144
145 145 response.mustcontain('Server-side pull request merging is disabled.')
146 146
147 147 assert_response = response.assert_response()
148 148 # now the regular user has merge permissions, so we have the CLOSE button
149 149 assert_response.one_element_exists('#close-pull-request-action')
150 150
151 151 def test_show_invalid_commit_id(self, pr_util):
152 152 # Simulating invalid revisions which will cause a lookup error
153 153 pull_request = pr_util.create_pull_request()
154 154 pull_request.revisions = ['invalid']
155 155 Session().add(pull_request)
156 156 Session().commit()
157 157
158 158 response = self.app.get(route_path(
159 159 'pullrequest_show',
160 160 repo_name=pull_request.target_repo.scm_instance().name,
161 161 pull_request_id=pull_request.pull_request_id))
162 162
163 163 for commit_id in pull_request.revisions:
164 164 response.mustcontain(commit_id)
165 165
166 166 def test_show_invalid_source_reference(self, pr_util):
167 167 pull_request = pr_util.create_pull_request()
168 168 pull_request.source_ref = 'branch:b:invalid'
169 169 Session().add(pull_request)
170 170 Session().commit()
171 171
172 172 self.app.get(route_path(
173 173 'pullrequest_show',
174 174 repo_name=pull_request.target_repo.scm_instance().name,
175 175 pull_request_id=pull_request.pull_request_id))
176 176
177 177 def test_edit_title_description(self, pr_util, csrf_token):
178 178 pull_request = pr_util.create_pull_request()
179 179 pull_request_id = pull_request.pull_request_id
180 180
181 181 response = self.app.post(
182 182 route_path('pullrequest_update',
183 183 repo_name=pull_request.target_repo.repo_name,
184 184 pull_request_id=pull_request_id),
185 185 params={
186 186 'edit_pull_request': 'true',
187 187 'title': 'New title',
188 188 'description': 'New description',
189 189 'csrf_token': csrf_token})
190 190
191 191 assert_session_flash(
192 192 response, u'Pull request title & description updated.',
193 193 category='success')
194 194
195 195 pull_request = PullRequest.get(pull_request_id)
196 196 assert pull_request.title == 'New title'
197 197 assert pull_request.description == 'New description'
198 198
199 199 def test_edit_title_description_closed(self, pr_util, csrf_token):
200 200 pull_request = pr_util.create_pull_request()
201 201 pull_request_id = pull_request.pull_request_id
202 202 repo_name = pull_request.target_repo.repo_name
203 203 pr_util.close()
204 204
205 205 response = self.app.post(
206 206 route_path('pullrequest_update',
207 207 repo_name=repo_name, pull_request_id=pull_request_id),
208 208 params={
209 209 'edit_pull_request': 'true',
210 210 'title': 'New title',
211 211 'description': 'New description',
212 212 'csrf_token': csrf_token}, status=200)
213 213 assert_session_flash(
214 214 response, u'Cannot update closed pull requests.',
215 215 category='error')
216 216
217 217 def test_update_invalid_source_reference(self, pr_util, csrf_token):
218 218 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
219 219
220 220 pull_request = pr_util.create_pull_request()
221 221 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
222 222 Session().add(pull_request)
223 223 Session().commit()
224 224
225 225 pull_request_id = pull_request.pull_request_id
226 226
227 227 response = self.app.post(
228 228 route_path('pullrequest_update',
229 229 repo_name=pull_request.target_repo.repo_name,
230 230 pull_request_id=pull_request_id),
231 231 params={'update_commits': 'true',
232 232 'csrf_token': csrf_token})
233 233
234 234 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
235 235 UpdateFailureReason.MISSING_SOURCE_REF])
236 236 assert_session_flash(response, expected_msg, category='error')
237 237
238 238 def test_missing_target_reference(self, pr_util, csrf_token):
239 239 from rhodecode.lib.vcs.backends.base import MergeFailureReason
240 240 pull_request = pr_util.create_pull_request(
241 241 approved=True, mergeable=True)
242 242 pull_request.target_ref = 'branch:invalid-branch:invalid-commit-id'
243 243 Session().add(pull_request)
244 244 Session().commit()
245 245
246 246 pull_request_id = pull_request.pull_request_id
247 247 pull_request_url = route_path(
248 248 'pullrequest_show',
249 249 repo_name=pull_request.target_repo.repo_name,
250 250 pull_request_id=pull_request_id)
251 251
252 252 response = self.app.get(pull_request_url)
253 253
254 254 assertr = AssertResponse(response)
255 255 expected_msg = PullRequestModel.MERGE_STATUS_MESSAGES[
256 256 MergeFailureReason.MISSING_TARGET_REF]
257 257 assertr.element_contains(
258 258 'span[data-role="merge-message"]', str(expected_msg))
259 259
260 260 def test_comment_and_close_pull_request_custom_message_approved(
261 261 self, pr_util, csrf_token, xhr_header):
262 262
263 263 pull_request = pr_util.create_pull_request(approved=True)
264 264 pull_request_id = pull_request.pull_request_id
265 265 author = pull_request.user_id
266 266 repo = pull_request.target_repo.repo_id
267 267
268 268 self.app.post(
269 269 route_path('pullrequest_comment_create',
270 270 repo_name=pull_request.target_repo.scm_instance().name,
271 271 pull_request_id=pull_request_id),
272 272 params={
273 273 'close_pull_request': '1',
274 274 'text': 'Closing a PR',
275 275 'csrf_token': csrf_token},
276 276 extra_environ=xhr_header,)
277 277
278 278 journal = UserLog.query()\
279 279 .filter(UserLog.user_id == author)\
280 280 .filter(UserLog.repository_id == repo) \
281 281 .order_by('user_log_id') \
282 282 .all()
283 283 assert journal[-1].action == 'repo.pull_request.close'
284 284
285 285 pull_request = PullRequest.get(pull_request_id)
286 286 assert pull_request.is_closed()
287 287
288 288 status = ChangesetStatusModel().get_status(
289 289 pull_request.source_repo, pull_request=pull_request)
290 290 assert status == ChangesetStatus.STATUS_APPROVED
291 291 comments = ChangesetComment().query() \
292 292 .filter(ChangesetComment.pull_request == pull_request) \
293 293 .order_by(ChangesetComment.comment_id.asc())\
294 294 .all()
295 295 assert comments[-1].text == 'Closing a PR'
296 296
297 297 def test_comment_force_close_pull_request_rejected(
298 298 self, pr_util, csrf_token, xhr_header):
299 299 pull_request = pr_util.create_pull_request()
300 300 pull_request_id = pull_request.pull_request_id
301 301 PullRequestModel().update_reviewers(
302 302 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
303 303 pull_request.author)
304 304 author = pull_request.user_id
305 305 repo = pull_request.target_repo.repo_id
306 306
307 307 self.app.post(
308 308 route_path('pullrequest_comment_create',
309 309 repo_name=pull_request.target_repo.scm_instance().name,
310 310 pull_request_id=pull_request_id),
311 311 params={
312 312 'close_pull_request': '1',
313 313 'csrf_token': csrf_token},
314 314 extra_environ=xhr_header)
315 315
316 316 pull_request = PullRequest.get(pull_request_id)
317 317
318 318 journal = UserLog.query()\
319 319 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
320 320 .order_by('user_log_id') \
321 321 .all()
322 322 assert journal[-1].action == 'repo.pull_request.close'
323 323
324 324 # check only the latest status, not the review status
325 325 status = ChangesetStatusModel().get_status(
326 326 pull_request.source_repo, pull_request=pull_request)
327 327 assert status == ChangesetStatus.STATUS_REJECTED
328 328
329 329 def test_comment_and_close_pull_request(
330 330 self, pr_util, csrf_token, xhr_header):
331 331 pull_request = pr_util.create_pull_request()
332 332 pull_request_id = pull_request.pull_request_id
333 333
334 334 response = self.app.post(
335 335 route_path('pullrequest_comment_create',
336 336 repo_name=pull_request.target_repo.scm_instance().name,
337 337 pull_request_id=pull_request.pull_request_id),
338 338 params={
339 339 'close_pull_request': 'true',
340 340 'csrf_token': csrf_token},
341 341 extra_environ=xhr_header)
342 342
343 343 assert response.json
344 344
345 345 pull_request = PullRequest.get(pull_request_id)
346 346 assert pull_request.is_closed()
347 347
348 348 # check only the latest status, not the review status
349 349 status = ChangesetStatusModel().get_status(
350 350 pull_request.source_repo, pull_request=pull_request)
351 351 assert status == ChangesetStatus.STATUS_REJECTED
352 352
353 353 def test_create_pull_request(self, backend, csrf_token):
354 354 commits = [
355 355 {'message': 'ancestor'},
356 356 {'message': 'change'},
357 357 {'message': 'change2'},
358 358 ]
359 359 commit_ids = backend.create_master_repo(commits)
360 360 target = backend.create_repo(heads=['ancestor'])
361 361 source = backend.create_repo(heads=['change2'])
362 362
363 363 response = self.app.post(
364 364 route_path('pullrequest_create', repo_name=source.repo_name),
365 365 [
366 366 ('source_repo', source.repo_name),
367 367 ('source_ref', 'branch:default:' + commit_ids['change2']),
368 368 ('target_repo', target.repo_name),
369 369 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
370 370 ('common_ancestor', commit_ids['ancestor']),
371 371 ('pullrequest_desc', 'Description'),
372 372 ('pullrequest_title', 'Title'),
373 373 ('__start__', 'review_members:sequence'),
374 374 ('__start__', 'reviewer:mapping'),
375 375 ('user_id', '1'),
376 376 ('__start__', 'reasons:sequence'),
377 377 ('reason', 'Some reason'),
378 378 ('__end__', 'reasons:sequence'),
379 379 ('__start__', 'rules:sequence'),
380 380 ('__end__', 'rules:sequence'),
381 381 ('mandatory', 'False'),
382 382 ('__end__', 'reviewer:mapping'),
383 383 ('__end__', 'review_members:sequence'),
384 384 ('__start__', 'revisions:sequence'),
385 385 ('revisions', commit_ids['change']),
386 386 ('revisions', commit_ids['change2']),
387 387 ('__end__', 'revisions:sequence'),
388 388 ('user', ''),
389 389 ('csrf_token', csrf_token),
390 390 ],
391 391 status=302)
392 392
393 393 location = response.headers['Location']
394 394 pull_request_id = location.rsplit('/', 1)[1]
395 395 assert pull_request_id != 'new'
396 396 pull_request = PullRequest.get(int(pull_request_id))
397 397
398 398 # check that we have now both revisions
399 399 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
400 400 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
401 401 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
402 402 assert pull_request.target_ref == expected_target_ref
403 403
404 404 def test_reviewer_notifications(self, backend, csrf_token):
405 405 # We have to use the app.post for this test so it will create the
406 406 # notifications properly with the new PR
407 407 commits = [
408 408 {'message': 'ancestor',
409 409 'added': [FileNode('file_A', content='content_of_ancestor')]},
410 410 {'message': 'change',
411 411 'added': [FileNode('file_a', content='content_of_change')]},
412 412 {'message': 'change-child'},
413 413 {'message': 'ancestor-child', 'parents': ['ancestor'],
414 414 'added': [
415 415 FileNode('file_B', content='content_of_ancestor_child')]},
416 416 {'message': 'ancestor-child-2'},
417 417 ]
418 418 commit_ids = backend.create_master_repo(commits)
419 419 target = backend.create_repo(heads=['ancestor-child'])
420 420 source = backend.create_repo(heads=['change'])
421 421
422 422 response = self.app.post(
423 423 route_path('pullrequest_create', repo_name=source.repo_name),
424 424 [
425 425 ('source_repo', source.repo_name),
426 426 ('source_ref', 'branch:default:' + commit_ids['change']),
427 427 ('target_repo', target.repo_name),
428 428 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
429 429 ('common_ancestor', commit_ids['ancestor']),
430 430 ('pullrequest_desc', 'Description'),
431 431 ('pullrequest_title', 'Title'),
432 432 ('__start__', 'review_members:sequence'),
433 433 ('__start__', 'reviewer:mapping'),
434 434 ('user_id', '2'),
435 435 ('__start__', 'reasons:sequence'),
436 436 ('reason', 'Some reason'),
437 437 ('__end__', 'reasons:sequence'),
438 438 ('__start__', 'rules:sequence'),
439 439 ('__end__', 'rules:sequence'),
440 440 ('mandatory', 'False'),
441 441 ('__end__', 'reviewer:mapping'),
442 442 ('__end__', 'review_members:sequence'),
443 443 ('__start__', 'revisions:sequence'),
444 444 ('revisions', commit_ids['change']),
445 445 ('__end__', 'revisions:sequence'),
446 446 ('user', ''),
447 447 ('csrf_token', csrf_token),
448 448 ],
449 449 status=302)
450 450
451 451 location = response.headers['Location']
452 452
453 453 pull_request_id = location.rsplit('/', 1)[1]
454 454 assert pull_request_id != 'new'
455 455 pull_request = PullRequest.get(int(pull_request_id))
456 456
457 457 # Check that a notification was made
458 458 notifications = Notification.query()\
459 459 .filter(Notification.created_by == pull_request.author.user_id,
460 460 Notification.type_ == Notification.TYPE_PULL_REQUEST,
461 461 Notification.subject.contains(
462 462 "wants you to review pull request #%s" % pull_request_id))
463 463 assert len(notifications.all()) == 1
464 464
465 465 # Change reviewers and check that a notification was made
466 466 PullRequestModel().update_reviewers(
467 467 pull_request.pull_request_id, [(1, [], False, [])],
468 468 pull_request.author)
469 469 assert len(notifications.all()) == 2
470 470
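The reviewer tuples passed to ``update_reviewers`` in these tests follow a ``(user_id, reasons, mandatory, rules)`` shape (inferred from the calls above, not from a documented signature):

.. code-block:: python

    reviewers = [
        # (user_id, reasons,    mandatory, rules)
        (1,         ['reason'],  False,    []),
        (2,         ['reason2'], False,    []),
    ]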
471 471 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
472 472 csrf_token):
473 473 commits = [
474 474 {'message': 'ancestor',
475 475 'added': [FileNode('file_A', content='content_of_ancestor')]},
476 476 {'message': 'change',
477 477 'added': [FileNode('file_a', content='content_of_change')]},
478 478 {'message': 'change-child'},
479 479 {'message': 'ancestor-child', 'parents': ['ancestor'],
480 480 'added': [
481 481 FileNode('file_B', content='content_of_ancestor_child')]},
482 482 {'message': 'ancestor-child-2'},
483 483 ]
484 484 commit_ids = backend.create_master_repo(commits)
485 485 target = backend.create_repo(heads=['ancestor-child'])
486 486 source = backend.create_repo(heads=['change'])
487 487
488 488 response = self.app.post(
489 489 route_path('pullrequest_create', repo_name=source.repo_name),
490 490 [
491 491 ('source_repo', source.repo_name),
492 492 ('source_ref', 'branch:default:' + commit_ids['change']),
493 493 ('target_repo', target.repo_name),
494 494 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
495 495 ('common_ancestor', commit_ids['ancestor']),
496 496 ('pullrequest_desc', 'Description'),
497 497 ('pullrequest_title', 'Title'),
498 498 ('__start__', 'review_members:sequence'),
499 499 ('__start__', 'reviewer:mapping'),
500 500 ('user_id', '1'),
501 501 ('__start__', 'reasons:sequence'),
502 502 ('reason', 'Some reason'),
503 503 ('__end__', 'reasons:sequence'),
504 504 ('__start__', 'rules:sequence'),
505 505 ('__end__', 'rules:sequence'),
506 506 ('mandatory', 'False'),
507 507 ('__end__', 'reviewer:mapping'),
508 508 ('__end__', 'review_members:sequence'),
509 509 ('__start__', 'revisions:sequence'),
510 510 ('revisions', commit_ids['change']),
511 511 ('__end__', 'revisions:sequence'),
512 512 ('user', ''),
513 513 ('csrf_token', csrf_token),
514 514 ],
515 515 status=302)
516 516
517 517 location = response.headers['Location']
518 518
519 519 pull_request_id = location.rsplit('/', 1)[1]
520 520 assert pull_request_id != 'new'
521 521 pull_request = PullRequest.get(int(pull_request_id))
522 522
523 523 # target_ref has to point to the ancestor's commit_id in order to
524 524 # show the correct diff
525 525 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
526 526 assert pull_request.target_ref == expected_target_ref
527 527
528 528 # Check generated diff contents
529 529 response = response.follow()
530 530 assert 'content_of_ancestor' not in response.body
531 531 assert 'content_of_ancestor-child' not in response.body
532 532 assert 'content_of_change' in response.body
533 533
534 534 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
535 535 # Clear any previous calls to rcextensions
536 536 rhodecode.EXTENSIONS.calls.clear()
537 537
538 538 pull_request = pr_util.create_pull_request(
539 539 approved=True, mergeable=True)
540 540 pull_request_id = pull_request.pull_request_id
541 541 repo_name = pull_request.target_repo.scm_instance().name
542 542
543 543 response = self.app.post(
544 544 route_path('pullrequest_merge',
545 545 repo_name=repo_name,
546 546 pull_request_id=pull_request_id),
547 547 params={'csrf_token': csrf_token}).follow()
548 548
549 549 pull_request = PullRequest.get(pull_request_id)
550 550
551 551 assert response.status_int == 200
552 552 assert pull_request.is_closed()
553 553 assert_pull_request_status(
554 554 pull_request, ChangesetStatus.STATUS_APPROVED)
555 555
556 556 # Check the relevant log entries were added
557 557 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
558 558 actions = [log.action for log in user_logs]
559 559 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
560 560 expected_actions = [
561 561 u'repo.pull_request.close',
562 562 u'repo.pull_request.merge',
563 563 u'repo.pull_request.comment.create'
564 564 ]
565 565 assert actions == expected_actions
566 566
567 567 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
568 568 actions = [log for log in user_logs]
569 569 assert actions[-1].action == 'user.push'
570 570 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
571 571
572 572 # Check post_push rcextension was really executed
573 573 push_calls = rhodecode.EXTENSIONS.calls['post_push']
574 574 assert len(push_calls) == 1
575 575 unused_last_call_args, last_call_kwargs = push_calls[0]
576 576 assert last_call_kwargs['action'] == 'push'
577 577 assert last_call_kwargs['pushed_revs'] == pr_commit_ids
578 578
579 579 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
580 580 pull_request = pr_util.create_pull_request(mergeable=False)
581 581 pull_request_id = pull_request.pull_request_id
582 582 pull_request = PullRequest.get(pull_request_id)
583 583
584 584 response = self.app.post(
585 585 route_path('pullrequest_merge',
586 586 repo_name=pull_request.target_repo.scm_instance().name,
587 587 pull_request_id=pull_request.pull_request_id),
588 588 params={'csrf_token': csrf_token}).follow()
589 589
590 590 assert response.status_int == 200
591 591 response.mustcontain(
592 592 'Merge is not currently possible because of below failed checks.')
593 593 response.mustcontain('Server-side pull request merging is disabled.')
594 594
595 595 @pytest.mark.skip_backends('svn')
596 596 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
597 597 pull_request = pr_util.create_pull_request(mergeable=True)
598 598 pull_request_id = pull_request.pull_request_id
599 599 repo_name = pull_request.target_repo.scm_instance().name
600 600
601 601 response = self.app.post(
602 602 route_path('pullrequest_merge',
603 603 repo_name=repo_name,
604 604 pull_request_id=pull_request_id),
605 605 params={'csrf_token': csrf_token}).follow()
606 606
607 607 assert response.status_int == 200
608 608
609 609 response.mustcontain(
610 610 'Merge is not currently possible because of below failed checks.')
611 611 response.mustcontain('Pull request reviewer approval is pending.')
612 612
613 613 def test_merge_pull_request_renders_failure_reason(
614 614 self, user_regular, csrf_token, pr_util):
615 615 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
616 616 pull_request_id = pull_request.pull_request_id
617 617 repo_name = pull_request.target_repo.scm_instance().name
618 618
619 619 model_patcher = mock.patch.multiple(
620 620 PullRequestModel,
621 merge=mock.Mock(return_value=MergeResponse(
621 merge_repo=mock.Mock(return_value=MergeResponse(
622 622 True, False, 'STUB_COMMIT_ID', MergeFailureReason.PUSH_FAILED)),
623 623 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
624 624
625 625 with model_patcher:
626 626 response = self.app.post(
627 627 route_path('pullrequest_merge',
628 628 repo_name=repo_name,
629 629 pull_request_id=pull_request_id),
630 630 params={'csrf_token': csrf_token}, status=302)
631 631
632 632 assert_session_flash(response, PullRequestModel.MERGE_STATUS_MESSAGES[
633 633 MergeFailureReason.PUSH_FAILED])
634 634
635 635 def test_update_source_revision(self, backend, csrf_token):
636 636 commits = [
637 637 {'message': 'ancestor'},
638 638 {'message': 'change'},
639 639 {'message': 'change-2'},
640 640 ]
641 641 commit_ids = backend.create_master_repo(commits)
642 642 target = backend.create_repo(heads=['ancestor'])
643 643 source = backend.create_repo(heads=['change'])
644 644
645 645 # create a PR from the source repo's head into the target repo
646 646 pull_request = PullRequest()
647 647 pull_request.source_repo = source
648 648 # TODO: johbo: Make sure that we write the source ref this way!
649 649 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
650 650 branch=backend.default_branch_name, commit_id=commit_ids['change'])
651 651 pull_request.target_repo = target
652 652
653 653 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
654 654 branch=backend.default_branch_name,
655 655 commit_id=commit_ids['ancestor'])
656 656 pull_request.revisions = [commit_ids['change']]
657 657 pull_request.title = u"Test"
658 658 pull_request.description = u"Description"
659 659 pull_request.author = UserModel().get_by_username(
660 660 TEST_USER_ADMIN_LOGIN)
661 661 Session().add(pull_request)
662 662 Session().commit()
663 663 pull_request_id = pull_request.pull_request_id
664 664
665 665 # source has ancestor - change - change-2
666 666 backend.pull_heads(source, heads=['change-2'])
667 667
668 668 # update PR
669 669 self.app.post(
670 670 route_path('pullrequest_update',
671 671 repo_name=target.repo_name,
672 672 pull_request_id=pull_request_id),
673 673 params={'update_commits': 'true',
674 674 'csrf_token': csrf_token})
675 675
676 676 # check that we now have both revisions
677 677 pull_request = PullRequest.get(pull_request_id)
678 678 assert pull_request.revisions == [
679 679 commit_ids['change-2'], commit_ids['change']]
680 680
681 681 # TODO: johbo: this should be a test on its own
682 682 response = self.app.get(route_path(
683 683 'pullrequest_new',
684 684 repo_name=target.repo_name))
685 685 assert response.status_int == 200
686 686 assert 'Pull request updated to' in response.body
687 687 assert 'with 1 added, 0 removed commits.' in response.body
688 688
689 689 def test_update_target_revision(self, backend, csrf_token):
690 690 commits = [
691 691 {'message': 'ancestor'},
692 692 {'message': 'change'},
693 693 {'message': 'ancestor-new', 'parents': ['ancestor']},
694 694 {'message': 'change-rebased'},
695 695 ]
696 696 commit_ids = backend.create_master_repo(commits)
697 697 target = backend.create_repo(heads=['ancestor'])
698 698 source = backend.create_repo(heads=['change'])
699 699
700 700 # create a PR from the source repo's head into the target repo
701 701 pull_request = PullRequest()
702 702 pull_request.source_repo = source
703 703 # TODO: johbo: Make sure that we write the source ref this way!
704 704 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
705 705 branch=backend.default_branch_name, commit_id=commit_ids['change'])
706 706 pull_request.target_repo = target
707 707 # TODO: johbo: Target ref should be branch based, since tip can jump
708 708 # from branch to branch
709 709 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
710 710 branch=backend.default_branch_name,
711 711 commit_id=commit_ids['ancestor'])
712 712 pull_request.revisions = [commit_ids['change']]
713 713 pull_request.title = u"Test"
714 714 pull_request.description = u"Description"
715 715 pull_request.author = UserModel().get_by_username(
716 716 TEST_USER_ADMIN_LOGIN)
717 717 Session().add(pull_request)
718 718 Session().commit()
719 719 pull_request_id = pull_request.pull_request_id
720 720
721 721 # target has ancestor - ancestor-new
722 722 # source has ancestor - ancestor-new - change-rebased
723 723 backend.pull_heads(target, heads=['ancestor-new'])
724 724 backend.pull_heads(source, heads=['change-rebased'])
725 725
726 726 # update PR
727 727 self.app.post(
728 728 route_path('pullrequest_update',
729 729 repo_name=target.repo_name,
730 730 pull_request_id=pull_request_id),
731 731 params={'update_commits': 'true',
732 732 'csrf_token': csrf_token},
733 733 status=200)
734 734
735 735 # check that the PR now tracks only the rebased revision
736 736 pull_request = PullRequest.get(pull_request_id)
737 737 assert pull_request.revisions == [commit_ids['change-rebased']]
738 738 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
739 739 branch=backend.default_branch_name,
740 740 commit_id=commit_ids['ancestor-new'])
741 741
742 742 # TODO: johbo: This should be a test on its own
743 743 response = self.app.get(route_path(
744 744 'pullrequest_new',
745 745 repo_name=target.repo_name))
746 746 assert response.status_int == 200
747 747 assert 'Pull request updated to' in response.body
748 748 assert 'with 1 added, 1 removed commits.' in response.body
749 749
750 750 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
751 751 backend = backend_git
752 752 commits = [
753 753 {'message': 'master-commit-1'},
754 754 {'message': 'master-commit-2-change-1'},
755 755 {'message': 'master-commit-3-change-2'},
756 756
757 757 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
758 758 {'message': 'feat-commit-2'},
759 759 ]
760 760 commit_ids = backend.create_master_repo(commits)
761 761 target = backend.create_repo(heads=['master-commit-3-change-2'])
762 762 source = backend.create_repo(heads=['feat-commit-2'])
763 763
764 764 # create a PR from the source repo's head into the target repo
765 765 pull_request = PullRequest()
766 766 pull_request.source_repo = source
767 767 # TODO: johbo: Make sure that we write the source ref this way!
768 768 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
769 769 branch=backend.default_branch_name,
770 770 commit_id=commit_ids['master-commit-3-change-2'])
771 771
772 772 pull_request.target_repo = target
773 773 # TODO: johbo: Target ref should be branch based, since tip can jump
774 774 # from branch to branch
775 775 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
776 776 branch=backend.default_branch_name,
777 777 commit_id=commit_ids['feat-commit-2'])
778 778
779 779 pull_request.revisions = [
780 780 commit_ids['feat-commit-1'],
781 781 commit_ids['feat-commit-2']
782 782 ]
783 783 pull_request.title = u"Test"
784 784 pull_request.description = u"Description"
785 785 pull_request.author = UserModel().get_by_username(
786 786 TEST_USER_ADMIN_LOGIN)
787 787 Session().add(pull_request)
788 788 Session().commit()
789 789 pull_request_id = pull_request.pull_request_id
790 790
791 791 # PR is created; now we simulate a force-push into target
792 792 # that drops the last 2 commits
793 793 vcsrepo = target.scm_instance()
794 794 vcsrepo.config.clear_section('hooks')
795 795 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
796 796
797 797 # update PR
798 798 self.app.post(
799 799 route_path('pullrequest_update',
800 800 repo_name=target.repo_name,
801 801 pull_request_id=pull_request_id),
802 802 params={'update_commits': 'true',
803 803 'csrf_token': csrf_token},
804 804 status=200)
805 805
806 806 response = self.app.get(route_path(
807 807 'pullrequest_new',
808 808 repo_name=target.repo_name))
809 809 assert response.status_int == 200
810 810 response.mustcontain('Pull request updated to')
811 811 response.mustcontain('with 0 added, 0 removed commits.')
812 812
813 813 def test_update_of_ancestor_reference(self, backend, csrf_token):
814 814 commits = [
815 815 {'message': 'ancestor'},
816 816 {'message': 'change'},
817 817 {'message': 'change-2'},
818 818 {'message': 'ancestor-new', 'parents': ['ancestor']},
819 819 {'message': 'change-rebased'},
820 820 ]
821 821 commit_ids = backend.create_master_repo(commits)
822 822 target = backend.create_repo(heads=['ancestor'])
823 823 source = backend.create_repo(heads=['change'])
824 824
825 825 # create a PR from the source repo's head into the target repo
826 826 pull_request = PullRequest()
827 827 pull_request.source_repo = source
828 828 # TODO: johbo: Make sure that we write the source ref this way!
829 829 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
830 830 branch=backend.default_branch_name,
831 831 commit_id=commit_ids['change'])
832 832 pull_request.target_repo = target
833 833 # TODO: johbo: Target ref should be branch based, since tip can jump
834 834 # from branch to branch
835 835 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
836 836 branch=backend.default_branch_name,
837 837 commit_id=commit_ids['ancestor'])
838 838 pull_request.revisions = [commit_ids['change']]
839 839 pull_request.title = u"Test"
840 840 pull_request.description = u"Description"
841 841 pull_request.author = UserModel().get_by_username(
842 842 TEST_USER_ADMIN_LOGIN)
843 843 Session().add(pull_request)
844 844 Session().commit()
845 845 pull_request_id = pull_request.pull_request_id
846 846
847 847 # target has ancestor - ancestor-new
848 848 # source has ancestor - ancestor-new - change-rebased
849 849 backend.pull_heads(target, heads=['ancestor-new'])
850 850 backend.pull_heads(source, heads=['change-rebased'])
851 851
852 852 # update PR
853 853 self.app.post(
854 854 route_path('pullrequest_update',
855 855 repo_name=target.repo_name,
856 856 pull_request_id=pull_request_id),
857 857 params={'update_commits': 'true',
858 858 'csrf_token': csrf_token},
859 859 status=200)
860 860
861 861 # Expect the target reference to be updated correctly
862 862 pull_request = PullRequest.get(pull_request_id)
863 863 assert pull_request.revisions == [commit_ids['change-rebased']]
864 864 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
865 865 branch=backend.default_branch_name,
866 866 commit_id=commit_ids['ancestor-new'])
867 867 assert pull_request.target_ref == expected_target_ref
868 868
869 869 def test_remove_pull_request_branch(self, backend_git, csrf_token):
870 870 branch_name = 'development'
871 871 commits = [
872 872 {'message': 'initial-commit'},
873 873 {'message': 'old-feature'},
874 874 {'message': 'new-feature', 'branch': branch_name},
875 875 ]
876 876 repo = backend_git.create_repo(commits)
877 877 commit_ids = backend_git.commit_ids
878 878
879 879 pull_request = PullRequest()
880 880 pull_request.source_repo = repo
881 881 pull_request.target_repo = repo
882 882 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
883 883 branch=branch_name, commit_id=commit_ids['new-feature'])
884 884 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
885 885 branch=backend_git.default_branch_name,
886 886 commit_id=commit_ids['old-feature'])
887 887 pull_request.revisions = [commit_ids['new-feature']]
888 888 pull_request.title = u"Test"
889 889 pull_request.description = u"Description"
890 890 pull_request.author = UserModel().get_by_username(
891 891 TEST_USER_ADMIN_LOGIN)
892 892 Session().add(pull_request)
893 893 Session().commit()
894 894
895 895 vcs = repo.scm_instance()
896 896 vcs.remove_ref('refs/heads/{}'.format(branch_name))
897 897
898 898 response = self.app.get(route_path(
899 899 'pullrequest_show',
900 900 repo_name=repo.repo_name,
901 901 pull_request_id=pull_request.pull_request_id))
902 902
903 903 assert response.status_int == 200
904 904 assert_response = AssertResponse(response)
905 905 assert_response.element_contains(
906 906 '#changeset_compare_view_content .alert strong',
907 907 'Missing commits')
908 908 assert_response.element_contains(
909 909 '#changeset_compare_view_content .alert',
910 910 'This pull request cannot be displayed, because one or more'
911 911 ' commits no longer exist in the source repository.')
912 912
913 913 def test_strip_commits_from_pull_request(
914 914 self, backend, pr_util, csrf_token):
915 915 commits = [
916 916 {'message': 'initial-commit'},
917 917 {'message': 'old-feature'},
918 918 {'message': 'new-feature', 'parents': ['initial-commit']},
919 919 ]
920 920 pull_request = pr_util.create_pull_request(
921 921 commits, target_head='initial-commit', source_head='new-feature',
922 922 revisions=['new-feature'])
923 923
924 924 vcs = pr_util.source_repository.scm_instance()
925 925 if backend.alias == 'git':
926 926 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
927 927 else:
928 928 vcs.strip(pr_util.commit_ids['new-feature'])
929 929
930 930 response = self.app.get(route_path(
931 931 'pullrequest_show',
932 932 repo_name=pr_util.target_repository.repo_name,
933 933 pull_request_id=pull_request.pull_request_id))
934 934
935 935 assert response.status_int == 200
936 936 assert_response = AssertResponse(response)
937 937 assert_response.element_contains(
938 938 '#changeset_compare_view_content .alert strong',
939 939 'Missing commits')
940 940 assert_response.element_contains(
941 941 '#changeset_compare_view_content .alert',
942 942 'This pull request cannot be displayed, because one or more'
943 943 ' commits no longer exist in the source repository.')
944 944 assert_response.element_contains(
945 945 '#update_commits',
946 946 'Update commits')
947 947
948 948 def test_strip_commits_and_update(
949 949 self, backend, pr_util, csrf_token):
950 950 commits = [
951 951 {'message': 'initial-commit'},
952 952 {'message': 'old-feature'},
953 953 {'message': 'new-feature', 'parents': ['old-feature']},
954 954 ]
955 955 pull_request = pr_util.create_pull_request(
956 956 commits, target_head='old-feature', source_head='new-feature',
957 957 revisions=['new-feature'], mergeable=True)
958 958
959 959 vcs = pr_util.source_repository.scm_instance()
960 960 if backend.alias == 'git':
961 961 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
962 962 else:
963 963 vcs.strip(pr_util.commit_ids['new-feature'])
964 964
965 965 response = self.app.post(
966 966 route_path('pullrequest_update',
967 967 repo_name=pull_request.target_repo.repo_name,
968 968 pull_request_id=pull_request.pull_request_id),
969 969 params={'update_commits': 'true',
970 970 'csrf_token': csrf_token})
971 971
972 972 assert response.status_int == 200
973 973 assert response.body == 'true'
974 974
975 975 # Make sure that after update, it won't raise 500 errors
976 976 response = self.app.get(route_path(
977 977 'pullrequest_show',
978 978 repo_name=pr_util.target_repository.repo_name,
979 979 pull_request_id=pull_request.pull_request_id))
980 980
981 981 assert response.status_int == 200
982 982 assert_response = AssertResponse(response)
983 983 assert_response.element_contains(
984 984 '#changeset_compare_view_content .alert strong',
985 985 'Missing commits')
986 986
987 987 def test_branch_is_a_link(self, pr_util):
988 988 pull_request = pr_util.create_pull_request()
989 989 pull_request.source_ref = 'branch:origin:1234567890abcdef'
990 990 pull_request.target_ref = 'branch:target:abcdef1234567890'
991 991 Session().add(pull_request)
992 992 Session().commit()
993 993
994 994 response = self.app.get(route_path(
995 995 'pullrequest_show',
996 996 repo_name=pull_request.target_repo.scm_instance().name,
997 997 pull_request_id=pull_request.pull_request_id))
998 998 assert response.status_int == 200
999 999 assert_response = AssertResponse(response)
1000 1000
1001 1001 origin = assert_response.get_element('.pr-origininfo .tag')
1002 1002 origin_children = origin.getchildren()
1003 1003 assert len(origin_children) == 1
1004 1004 target = assert_response.get_element('.pr-targetinfo .tag')
1005 1005 target_children = target.getchildren()
1006 1006 assert len(target_children) == 1
1007 1007
1008 1008 expected_origin_link = route_path(
1009 1009 'repo_changelog',
1010 1010 repo_name=pull_request.source_repo.scm_instance().name,
1011 1011 params=dict(branch='origin'))
1012 1012 expected_target_link = route_path(
1013 1013 'repo_changelog',
1014 1014 repo_name=pull_request.target_repo.scm_instance().name,
1015 1015 params=dict(branch='target'))
1016 1016 assert origin_children[0].attrib['href'] == expected_origin_link
1017 1017 assert origin_children[0].text == 'branch: origin'
1018 1018 assert target_children[0].attrib['href'] == expected_target_link
1019 1019 assert target_children[0].text == 'branch: target'
1020 1020
1021 1021 def test_bookmark_is_not_a_link(self, pr_util):
1022 1022 pull_request = pr_util.create_pull_request()
1023 1023 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1024 1024 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1025 1025 Session().add(pull_request)
1026 1026 Session().commit()
1027 1027
1028 1028 response = self.app.get(route_path(
1029 1029 'pullrequest_show',
1030 1030 repo_name=pull_request.target_repo.scm_instance().name,
1031 1031 pull_request_id=pull_request.pull_request_id))
1032 1032 assert response.status_int == 200
1033 1033 assert_response = AssertResponse(response)
1034 1034
1035 1035 origin = assert_response.get_element('.pr-origininfo .tag')
1036 1036 assert origin.text.strip() == 'bookmark: origin'
1037 1037 assert origin.getchildren() == []
1038 1038
1039 1039 target = assert_response.get_element('.pr-targetinfo .tag')
1040 1040 assert target.text.strip() == 'bookmark: target'
1041 1041 assert target.getchildren() == []
1042 1042
1043 1043 def test_tag_is_not_a_link(self, pr_util):
1044 1044 pull_request = pr_util.create_pull_request()
1045 1045 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1046 1046 pull_request.target_ref = 'tag:target:abcdef1234567890'
1047 1047 Session().add(pull_request)
1048 1048 Session().commit()
1049 1049
1050 1050 response = self.app.get(route_path(
1051 1051 'pullrequest_show',
1052 1052 repo_name=pull_request.target_repo.scm_instance().name,
1053 1053 pull_request_id=pull_request.pull_request_id))
1054 1054 assert response.status_int == 200
1055 1055 assert_response = AssertResponse(response)
1056 1056
1057 1057 origin = assert_response.get_element('.pr-origininfo .tag')
1058 1058 assert origin.text.strip() == 'tag: origin'
1059 1059 assert origin.getchildren() == []
1060 1060
1061 1061 target = assert_response.get_element('.pr-targetinfo .tag')
1062 1062 assert target.text.strip() == 'tag: target'
1063 1063 assert target.getchildren() == []
1064 1064
1065 1065 @pytest.mark.parametrize('mergeable', [True, False])
1066 1066 def test_shadow_repository_link(
1067 1067 self, mergeable, pr_util, http_host_only_stub):
1068 1068 """
1069 1069 Check that the pull request summary page displays a link to the shadow
1070 1070 repository if the pull request is mergeable. If it is not mergeable
1071 1071 the link should not be displayed.
1072 1072 """
1073 1073 pull_request = pr_util.create_pull_request(
1074 1074 mergeable=mergeable, enable_notifications=False)
1075 1075 target_repo = pull_request.target_repo.scm_instance()
1076 1076 pr_id = pull_request.pull_request_id
1077 1077 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1078 1078 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1079 1079
1080 1080 response = self.app.get(route_path(
1081 1081 'pullrequest_show',
1082 1082 repo_name=target_repo.name,
1083 1083 pull_request_id=pr_id))
1084 1084
1085 1085 assertr = AssertResponse(response)
1086 1086 if mergeable:
1087 1087 assertr.element_value_contains('input.pr-mergeinfo', shadow_url)
1088 1088 assertr.element_value_contains('input.pr-mergeinfo', 'pr-merge')
1089 1089 else:
1090 1090 assertr.no_element_exists('.pr-mergeinfo')
1091 1091
1092 1092
1093 1093 @pytest.mark.usefixtures('app')
1094 1094 @pytest.mark.backends("git", "hg")
1095 1095 class TestPullrequestsControllerDelete(object):
1096 1096 def test_pull_request_delete_button_permissions_admin(
1097 1097 self, autologin_user, user_admin, pr_util):
1098 1098 pull_request = pr_util.create_pull_request(
1099 1099 author=user_admin.username, enable_notifications=False)
1100 1100
1101 1101 response = self.app.get(route_path(
1102 1102 'pullrequest_show',
1103 1103 repo_name=pull_request.target_repo.scm_instance().name,
1104 1104 pull_request_id=pull_request.pull_request_id))
1105 1105
1106 1106 response.mustcontain('id="delete_pullrequest"')
1107 1107 response.mustcontain('Confirm to delete this pull request')
1108 1108
1109 1109 def test_pull_request_delete_button_permissions_owner(
1110 1110 self, autologin_regular_user, user_regular, pr_util):
1111 1111 pull_request = pr_util.create_pull_request(
1112 1112 author=user_regular.username, enable_notifications=False)
1113 1113
1114 1114 response = self.app.get(route_path(
1115 1115 'pullrequest_show',
1116 1116 repo_name=pull_request.target_repo.scm_instance().name,
1117 1117 pull_request_id=pull_request.pull_request_id))
1118 1118
1119 1119 response.mustcontain('id="delete_pullrequest"')
1120 1120 response.mustcontain('Confirm to delete this pull request')
1121 1121
1122 1122 def test_pull_request_delete_button_permissions_forbidden(
1123 1123 self, autologin_regular_user, user_regular, user_admin, pr_util):
1124 1124 pull_request = pr_util.create_pull_request(
1125 1125 author=user_admin.username, enable_notifications=False)
1126 1126
1127 1127 response = self.app.get(route_path(
1128 1128 'pullrequest_show',
1129 1129 repo_name=pull_request.target_repo.scm_instance().name,
1130 1130 pull_request_id=pull_request.pull_request_id))
1131 1131 response.mustcontain(no=['id="delete_pullrequest"'])
1132 1132 response.mustcontain(no=['Confirm to delete this pull request'])
1133 1133
1134 1134 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1135 1135 self, autologin_regular_user, user_regular, user_admin, pr_util,
1136 1136 user_util):
1137 1137
1138 1138 pull_request = pr_util.create_pull_request(
1139 1139 author=user_admin.username, enable_notifications=False)
1140 1140
1141 1141 user_util.grant_user_permission_to_repo(
1142 1142 pull_request.target_repo, user_regular,
1143 1143 'repository.write')
1144 1144
1145 1145 response = self.app.get(route_path(
1146 1146 'pullrequest_show',
1147 1147 repo_name=pull_request.target_repo.scm_instance().name,
1148 1148 pull_request_id=pull_request.pull_request_id))
1149 1149
1150 1150 response.mustcontain('id="open_edit_pullrequest"')
1151 1151 response.mustcontain('id="delete_pullrequest"')
1152 1152 response.mustcontain(no=['Confirm to delete this pull request'])
1153 1153
1154 1154 def test_delete_comment_returns_404_if_comment_does_not_exist(
1155 1155 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1156 1156
1157 1157 pull_request = pr_util.create_pull_request(
1158 1158 author=user_admin.username, enable_notifications=False)
1159 1159
1160 1160 self.app.post(
1161 1161 route_path(
1162 1162 'pullrequest_comment_delete',
1163 1163 repo_name=pull_request.target_repo.scm_instance().name,
1164 1164 pull_request_id=pull_request.pull_request_id,
1165 1165 comment_id=1024404),
1166 1166 extra_environ=xhr_header,
1167 1167 params={'csrf_token': csrf_token},
1168 1168 status=404
1169 1169 )
1170 1170
1171 1171 def test_delete_comment(
1172 1172 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1173 1173
1174 1174 pull_request = pr_util.create_pull_request(
1175 1175 author=user_admin.username, enable_notifications=False)
1176 1176 comment = pr_util.create_comment()
1177 1177 comment_id = comment.comment_id
1178 1178
1179 1179 response = self.app.post(
1180 1180 route_path(
1181 1181 'pullrequest_comment_delete',
1182 1182 repo_name=pull_request.target_repo.scm_instance().name,
1183 1183 pull_request_id=pull_request.pull_request_id,
1184 1184 comment_id=comment_id),
1185 1185 extra_environ=xhr_header,
1186 1186 params={'csrf_token': csrf_token},
1187 1187 status=200
1188 1188 )
1189 1189 assert response.body == 'true'
1190 1190
1191 1191
1192 1192 def assert_pull_request_status(pull_request, expected_status):
1193 1193 status = ChangesetStatusModel().calculated_review_status(
1194 1194 pull_request=pull_request)
1195 1195 assert status == expected_status
1196 1196
1197 1197
1198 1198 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1199 1199 @pytest.mark.usefixtures("autologin_user")
1200 1200 def test_forbidden_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1201 1201 response = app.get(
1202 1202 route_path(route, repo_name=backend_svn.repo_name), status=404)
1203 1203
@@ -1,1306 +1,1307 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import collections
23 23
24 24 import formencode
25 25 import formencode.htmlfill
26 26 import peppercorn
27 27 from pyramid.httpexceptions import (
28 28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest)
29 29 from pyramid.view import view_config
30 30 from pyramid.renderers import render
31 31
32 32 from rhodecode import events
33 33 from rhodecode.apps._base import RepoAppView, DataGridAppView
34 34
35 35 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
36 36 from rhodecode.lib.base import vcs_operation_context
37 37 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
38 38 from rhodecode.lib.ext_json import json
39 39 from rhodecode.lib.auth import (
40 40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
41 41 NotAnonymous, CSRFRequired)
42 42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode
43 43 from rhodecode.lib.vcs.backends.base import EmptyCommit, UpdateFailureReason
44 44 from rhodecode.lib.vcs.exceptions import (CommitDoesNotExistError,
45 45 RepositoryRequirementError, EmptyRepositoryError)
46 46 from rhodecode.model.changeset_status import ChangesetStatusModel
47 47 from rhodecode.model.comment import CommentsModel
48 48 from rhodecode.model.db import (func, or_, PullRequest, PullRequestVersion,
49 49 ChangesetComment, ChangesetStatus, Repository)
50 50 from rhodecode.model.forms import PullRequestForm
51 51 from rhodecode.model.meta import Session
52 52 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
53 53 from rhodecode.model.scm import ScmModel
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
58 58 class RepoPullRequestsView(RepoAppView, DataGridAppView):
59 59
60 60 def load_default_context(self):
61 61 c = self._get_local_tmpl_context(include_app_defaults=True)
62 62 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
63 63 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
64 64
65 65 return c
66 66
67 67 def _get_pull_requests_list(
68 68 self, repo_name, source, filter_type, opened_by, statuses):
69 69
70 70 draw, start, limit = self._extract_chunk(self.request)
71 71 search_q, order_by, order_dir = self._extract_ordering(self.request)
72 72 _render = self.request.get_partial_renderer(
73 73 'rhodecode:templates/data_table/_dt_elements.mako')
74 74
75 75 # pagination
76 76
77 77 if filter_type == 'awaiting_review':
78 78 pull_requests = PullRequestModel().get_awaiting_review(
79 79 repo_name, source=source, opened_by=opened_by,
80 80 statuses=statuses, offset=start, length=limit,
81 81 order_by=order_by, order_dir=order_dir)
82 82 pull_requests_total_count = PullRequestModel().count_awaiting_review(
83 83 repo_name, source=source, statuses=statuses,
84 84 opened_by=opened_by)
85 85 elif filter_type == 'awaiting_my_review':
86 86 pull_requests = PullRequestModel().get_awaiting_my_review(
87 87 repo_name, source=source, opened_by=opened_by,
88 88 user_id=self._rhodecode_user.user_id, statuses=statuses,
89 89 offset=start, length=limit, order_by=order_by,
90 90 order_dir=order_dir)
91 91 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
92 92 repo_name, source=source, user_id=self._rhodecode_user.user_id,
93 93 statuses=statuses, opened_by=opened_by)
94 94 else:
95 95 pull_requests = PullRequestModel().get_all(
96 96 repo_name, source=source, opened_by=opened_by,
97 97 statuses=statuses, offset=start, length=limit,
98 98 order_by=order_by, order_dir=order_dir)
99 99 pull_requests_total_count = PullRequestModel().count_all(
100 100 repo_name, source=source, statuses=statuses,
101 101 opened_by=opened_by)
102 102
103 103 data = []
104 104 comments_model = CommentsModel()
105 105 for pr in pull_requests:
106 106 comments = comments_model.get_all_comments(
107 107 self.db_repo.repo_id, pull_request=pr)
108 108
109 109 data.append({
110 110 'name': _render('pullrequest_name',
111 111 pr.pull_request_id, pr.target_repo.repo_name),
112 112 'name_raw': pr.pull_request_id,
113 113 'status': _render('pullrequest_status',
114 114 pr.calculated_review_status()),
115 115 'title': _render(
116 116 'pullrequest_title', pr.title, pr.description),
117 117 'description': h.escape(pr.description),
118 118 'updated_on': _render('pullrequest_updated_on',
119 119 h.datetime_to_time(pr.updated_on)),
120 120 'updated_on_raw': h.datetime_to_time(pr.updated_on),
121 121 'created_on': _render('pullrequest_updated_on',
122 122 h.datetime_to_time(pr.created_on)),
123 123 'created_on_raw': h.datetime_to_time(pr.created_on),
124 124 'author': _render('pullrequest_author',
125 125 pr.author.full_contact, ),
126 126 'author_raw': pr.author.full_name,
127 127 'comments': _render('pullrequest_comments', len(comments)),
128 128 'comments_raw': len(comments),
129 129 'closed': pr.is_closed(),
130 130 })
131 131
132 132 data = ({
133 133 'draw': draw,
134 134 'data': data,
135 135 'recordsTotal': pull_requests_total_count,
136 136 'recordsFiltered': pull_requests_total_count,
137 137 })
138 138 return data
139 139
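    # Editor's note: the payload built above follows the DataTables
    # server-side protocol. An illustrative (hypothetical) response:
    #
    #   {'draw': 1,
    #    'recordsTotal': 42,
    #    'recordsFiltered': 42,
    #    'data': [{'name': '...', 'status': '...', 'title': '...',
    #              'comments': '...', 'closed': False}]}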
140 140 @LoginRequired()
141 141 @HasRepoPermissionAnyDecorator(
142 142 'repository.read', 'repository.write', 'repository.admin')
143 143 @view_config(
144 144 route_name='pullrequest_show_all', request_method='GET',
145 145 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
146 146 def pull_request_list(self):
147 147 c = self.load_default_context()
148 148
149 149 req_get = self.request.GET
150 150 c.source = str2bool(req_get.get('source'))
151 151 c.closed = str2bool(req_get.get('closed'))
152 152 c.my = str2bool(req_get.get('my'))
153 153 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
154 154 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
155 155
156 156 c.active = 'open'
157 157 if c.my:
158 158 c.active = 'my'
159 159 if c.closed:
160 160 c.active = 'closed'
161 161 if c.awaiting_review and not c.source:
162 162 c.active = 'awaiting'
163 163 if c.source and not c.awaiting_review:
164 164 c.active = 'source'
165 165 if c.awaiting_my_review:
166 166 c.active = 'awaiting_my'
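        # Editor's note: the cascade above is order-sensitive; with e.g.
        # ?my=1&closed=1 the later branch wins and c.active ends up 'closed',
        # and awaiting_my_review always overrides the earlier choices.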
167 167
168 168 return self._get_template_context(c)
169 169
170 170 @LoginRequired()
171 171 @HasRepoPermissionAnyDecorator(
172 172 'repository.read', 'repository.write', 'repository.admin')
173 173 @view_config(
174 174 route_name='pullrequest_show_all_data', request_method='GET',
175 175 renderer='json_ext', xhr=True)
176 176 def pull_request_list_data(self):
177 177 self.load_default_context()
178 178
179 179 # additional filters
180 180 req_get = self.request.GET
181 181 source = str2bool(req_get.get('source'))
182 182 closed = str2bool(req_get.get('closed'))
183 183 my = str2bool(req_get.get('my'))
184 184 awaiting_review = str2bool(req_get.get('awaiting_review'))
185 185 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
186 186
187 187 filter_type = 'awaiting_review' if awaiting_review \
188 188 else 'awaiting_my_review' if awaiting_my_review \
189 189 else None
190 190
191 191 opened_by = None
192 192 if my:
193 193 opened_by = [self._rhodecode_user.user_id]
194 194
195 195 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
196 196 if closed:
197 197 statuses = [PullRequest.STATUS_CLOSED]
198 198
199 199 data = self._get_pull_requests_list(
200 200 repo_name=self.db_repo_name, source=source,
201 201 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
202 202
203 203 return data
204 204
205 205 def _is_diff_cache_enabled(self, target_repo):
206 206 caching_enabled = self._get_general_setting(
207 207 target_repo, 'rhodecode_diff_cache')
208 208 log.debug('Diff caching enabled: %s', caching_enabled)
209 209 return caching_enabled
210 210
211 211 def _get_diffset(self, source_repo_name, source_repo,
212 212 source_ref_id, target_ref_id,
213 213 target_commit, source_commit, diff_limit, file_limit,
214 214 fulldiff):
215 215
216 216 vcs_diff = PullRequestModel().get_diff(
217 217 source_repo, source_ref_id, target_ref_id)
218 218
219 219 diff_processor = diffs.DiffProcessor(
220 220 vcs_diff, format='newdiff', diff_limit=diff_limit,
221 221 file_limit=file_limit, show_full_diff=fulldiff)
222 222
223 223 _parsed = diff_processor.prepare()
224 224
225 225 diffset = codeblocks.DiffSet(
226 226 repo_name=self.db_repo_name,
227 227 source_repo_name=source_repo_name,
228 228 source_node_getter=codeblocks.diffset_node_getter(target_commit),
229 229 target_node_getter=codeblocks.diffset_node_getter(source_commit),
230 230 )
231 231 diffset = self.path_filter.render_patchset_filtered(
232 232 diffset, _parsed, target_commit.raw_id, source_commit.raw_id)
233 233
234 234 return diffset
235 235
236 236 @LoginRequired()
237 237 @HasRepoPermissionAnyDecorator(
238 238 'repository.read', 'repository.write', 'repository.admin')
239 239 @view_config(
240 240 route_name='pullrequest_show', request_method='GET',
241 241 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
242 242 def pull_request_show(self):
243 243 pull_request_id = self.request.matchdict['pull_request_id']
244 244
245 245 c = self.load_default_context()
246 246
247 247 version = self.request.GET.get('version')
248 248 from_version = self.request.GET.get('from_version') or version
249 249 merge_checks = self.request.GET.get('merge_checks')
250 250 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
251 251 force_refresh = str2bool(self.request.GET.get('force_refresh'))
252 252
253 253 (pull_request_latest,
254 254 pull_request_at_ver,
255 255 pull_request_display_obj,
256 256 at_version) = PullRequestModel().get_pr_version(
257 257 pull_request_id, version=version)
258 258 pr_closed = pull_request_latest.is_closed()
259 259
260 260 if pr_closed and (version or from_version):
261 261 # don't allow browsing versions
262 262 raise HTTPFound(h.route_path(
263 263 'pullrequest_show', repo_name=self.db_repo_name,
264 264 pull_request_id=pull_request_id))
265 265
266 266 versions = pull_request_display_obj.versions()
267 267
268 268 c.at_version = at_version
269 269 c.at_version_num = (at_version
270 270 if at_version and at_version != 'latest'
271 271 else None)
272 272 c.at_version_pos = ChangesetComment.get_index_from_version(
273 273 c.at_version_num, versions)
274 274
275 275 (prev_pull_request_latest,
276 276 prev_pull_request_at_ver,
277 277 prev_pull_request_display_obj,
278 278 prev_at_version) = PullRequestModel().get_pr_version(
279 279 pull_request_id, version=from_version)
280 280
281 281 c.from_version = prev_at_version
282 282 c.from_version_num = (prev_at_version
283 283 if prev_at_version and prev_at_version != 'latest'
284 284 else None)
285 285 c.from_version_pos = ChangesetComment.get_index_from_version(
286 286 c.from_version_num, versions)
287 287
288 288 # define if we're in COMPARE mode or VIEW at version mode
289 289 compare = at_version != prev_at_version
290 290
291 291 # the repo_name the pull request was opened against,
292 292 # i.e. the target_repo must match
293 293 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
294 294 raise HTTPNotFound()
295 295
296 296 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(
297 297 pull_request_at_ver)
298 298
299 299 c.pull_request = pull_request_display_obj
300 300 c.pull_request_latest = pull_request_latest
301 301
302 302 if compare or (at_version and at_version != 'latest'):
303 303 c.allowed_to_change_status = False
304 304 c.allowed_to_update = False
305 305 c.allowed_to_merge = False
306 306 c.allowed_to_delete = False
307 307 c.allowed_to_comment = False
308 308 c.allowed_to_close = False
309 309 else:
310 310 can_change_status = PullRequestModel().check_user_change_status(
311 311 pull_request_at_ver, self._rhodecode_user)
312 312 c.allowed_to_change_status = can_change_status and not pr_closed
313 313
314 314 c.allowed_to_update = PullRequestModel().check_user_update(
315 315 pull_request_latest, self._rhodecode_user) and not pr_closed
316 316 c.allowed_to_merge = PullRequestModel().check_user_merge(
317 317 pull_request_latest, self._rhodecode_user) and not pr_closed
318 318 c.allowed_to_delete = PullRequestModel().check_user_delete(
319 319 pull_request_latest, self._rhodecode_user) and not pr_closed
320 320 c.allowed_to_comment = not pr_closed
321 321 c.allowed_to_close = c.allowed_to_merge and not pr_closed
322 322
323 323 c.forbid_adding_reviewers = False
324 324 c.forbid_author_to_review = False
325 325 c.forbid_commit_author_to_review = False
326 326
327 327 if pull_request_latest.reviewer_data and \
328 328 'rules' in pull_request_latest.reviewer_data:
329 329 rules = pull_request_latest.reviewer_data['rules'] or {}
330 330 try:
331 331 c.forbid_adding_reviewers = rules.get(
332 332 'forbid_adding_reviewers')
333 333 c.forbid_author_to_review = rules.get(
334 334 'forbid_author_to_review')
335 335 c.forbid_commit_author_to_review = rules.get(
336 336 'forbid_commit_author_to_review')
337 337 except Exception:
338 338 pass
339 339
340 340 # check merge capabilities
341 341 _merge_check = MergeCheck.validate(
342 342 pull_request_latest, user=self._rhodecode_user,
343 translator=self.request.translate, force_shadow_repo_refresh=force_refresh)
343 translator=self.request.translate,
344 force_shadow_repo_refresh=force_refresh)
344 345 c.pr_merge_errors = _merge_check.error_details
345 346 c.pr_merge_possible = not _merge_check.failed
346 347 c.pr_merge_message = _merge_check.merge_msg
347 348
348 349 c.pr_merge_info = MergeCheck.get_merge_conditions(
349 350 pull_request_latest, translator=self.request.translate)
350 351
351 352 c.pull_request_review_status = _merge_check.review_status
352 353 if merge_checks:
353 354 self.request.override_renderer = \
354 355 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
355 356 return self._get_template_context(c)
356 357
357 358 comments_model = CommentsModel()
358 359
359 360 # reviewers and statuses
360 361 c.pull_request_reviewers = pull_request_at_ver.reviewers_statuses()
361 362 allowed_reviewers = [x[0].user_id for x in c.pull_request_reviewers]
362 363
363 364 # GENERAL COMMENTS with versions #
364 365 q = comments_model._all_general_comments_of_pull_request(pull_request_latest)
365 366 q = q.order_by(ChangesetComment.comment_id.asc())
366 367 general_comments = q
367 368
368 369 # pick comments we want to render at current version
369 370 c.comment_versions = comments_model.aggregate_comments(
370 371 general_comments, versions, c.at_version_num)
371 372 c.comments = c.comment_versions[c.at_version_num]['until']
372 373
373 374 # INLINE COMMENTS with versions #
374 375 q = comments_model._all_inline_comments_of_pull_request(pull_request_latest)
375 376 q = q.order_by(ChangesetComment.comment_id.asc())
376 377 inline_comments = q
377 378
378 379 c.inline_versions = comments_model.aggregate_comments(
379 380 inline_comments, versions, c.at_version_num, inline=True)
380 381
381 382 # inject latest version
382 383 latest_ver = PullRequest.get_pr_display_object(
383 384 pull_request_latest, pull_request_latest)
384 385
385 386 c.versions = versions + [latest_ver]
386 387
387 388 # if we use version, then do not show later comments
388 389 # than current version
389 390 display_inline_comments = collections.defaultdict(
390 391 lambda: collections.defaultdict(list))
391 392 for co in inline_comments:
392 393 if c.at_version_num:
393 394 # pick comments up to the given version, so we
394 395 # don't render comments from a higher version
395 396 should_render = co.pull_request_version_id and \
396 397 co.pull_request_version_id <= c.at_version_num
397 398 else:
398 399 # showing all, for 'latest'
399 400 should_render = True
400 401
401 402 if should_render:
402 403 display_inline_comments[co.f_path][co.line_no].append(co)
403 404
404 405 # load diff data into template context, if we use compare mode then
405 406 # diff is calculated based on changes between versions of PR
406 407
407 408 source_repo = pull_request_at_ver.source_repo
408 409 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
409 410
410 411 target_repo = pull_request_at_ver.target_repo
411 412 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
412 413
413 414 if compare:
414 415 # in compare switch the diff base to latest commit from prev version
415 416 target_ref_id = prev_pull_request_display_obj.revisions[0]
416 417
417 418 # even when a PR was opened against a bookmark/branch/tag, we always
418 419 # pin to a rev so the diff is stable if the bookmark or branch moves
419 420 c.source_ref_type = 'rev'
420 421 c.source_ref = source_ref_id
421 422
422 423 c.target_ref_type = 'rev'
423 424 c.target_ref = target_ref_id
424 425
425 426 c.source_repo = source_repo
426 427 c.target_repo = target_repo
427 428
428 429 c.commit_ranges = []
429 430 source_commit = EmptyCommit()
430 431 target_commit = EmptyCommit()
431 432 c.missing_requirements = False
432 433
433 434 source_scm = source_repo.scm_instance()
434 435 target_scm = target_repo.scm_instance()
435 436
436 437 shadow_scm = None
437 438 try:
438 439 shadow_scm = pull_request_latest.get_shadow_repo()
439 440 except Exception:
440 441 log.debug('Failed to get shadow repo', exc_info=True)
441 442 # try the existing source_repo first, then fall back to the shadow
442 443 # repo if we could obtain one
443 444 commits_source_repo = source_scm or shadow_scm
444 445
445 446 c.commits_source_repo = commits_source_repo
446 447 c.ancestor = None # set it to None, to hide it from PR view
447 448
448 449 # an empty version means 'latest'; normalize it to avoid
449 450 # caching the same diff twice
450 451 version_normalized = version or 'latest'
451 452 from_version_normalized = from_version or 'latest'
452 453
453 454 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
454 455 target_repo)
455 456 cache_file_path = diff_cache_exist(
456 457 cache_path, 'pull_request', pull_request_id, version_normalized,
457 458 from_version_normalized, source_ref_id, target_ref_id, c.fulldiff)
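        # Editor's note: the cache key above deliberately folds in the PR id,
        # both normalized versions, both ref ids and the fulldiff flag, so
        # switching versions or toggling full diff can never reuse a stale
        # cache entry.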
458 459
459 460 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
460 461 force_recache = str2bool(self.request.GET.get('force_recache'))
461 462
462 463 cached_diff = None
463 464 if caching_enabled:
464 465 cached_diff = load_cached_diff(cache_file_path)
465 466
466 467 has_proper_commit_cache = (
467 468 cached_diff and cached_diff.get('commits')
468 469 and len(cached_diff.get('commits', [])) == 5
469 470 and cached_diff.get('commits')[0]
470 471 and cached_diff.get('commits')[3])
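        # Editor's note: cached_diff['commits'] is the 5-tuple
        # (ancestor_commit, commit_cache, missing_requirements,
        #  source_commit, target_commit); indices 0 and 3 are checked above
        # to ensure the ancestor and source commits were actually cached.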
471 472 if not force_recache and has_proper_commit_cache:
472 473 diff_commit_cache = \
473 474 (ancestor_commit, commit_cache, missing_requirements,
474 475 source_commit, target_commit) = cached_diff['commits']
475 476 else:
476 477 diff_commit_cache = \
477 478 (ancestor_commit, commit_cache, missing_requirements,
478 479 source_commit, target_commit) = self.get_commits(
479 480 commits_source_repo,
480 481 pull_request_at_ver,
481 482 source_commit,
482 483 source_ref_id,
483 484 source_scm,
484 485 target_commit,
485 486 target_ref_id,
486 487 target_scm)
487 488
488 489 # register our commit range
489 490 for comm in commit_cache.values():
490 491 c.commit_ranges.append(comm)
491 492
492 493 c.missing_requirements = missing_requirements
493 494 c.ancestor_commit = ancestor_commit
494 495 c.statuses = source_repo.statuses(
495 496 [x.raw_id for x in c.commit_ranges])
496 497
497 498 # auto-collapse if we have more commits than the limit
498 499 collapse_limit = diffs.DiffProcessor._collapse_commits_over
499 500 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
500 501 c.compare_mode = compare
501 502
502 503 # diff_limit keeps the old behavior: it cuts off the whole diff
503 504 # when exceeded, while file_limit merely hides the
504 505 # big files from the front-end
505 506 diff_limit = c.visual.cut_off_limit_diff
506 507 file_limit = c.visual.cut_off_limit_file
507 508
508 509 c.missing_commits = False
509 510 if (c.missing_requirements
510 511 or isinstance(source_commit, EmptyCommit)
511 512 or source_commit == target_commit):
512 513
513 514 c.missing_commits = True
514 515 else:
515 516 c.inline_comments = display_inline_comments
516 517
517 518 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
518 519 if not force_recache and has_proper_diff_cache:
519 520 c.diffset = cached_diff['diff']
520 521 (ancestor_commit, commit_cache, missing_requirements,
521 522 source_commit, target_commit) = cached_diff['commits']
522 523 else:
523 524 c.diffset = self._get_diffset(
524 525 c.source_repo.repo_name, commits_source_repo,
525 526 source_ref_id, target_ref_id,
526 527 target_commit, source_commit,
527 528 diff_limit, file_limit, c.fulldiff)
528 529
529 530 # save cached diff
530 531 if caching_enabled:
531 532 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
532 533
533 534 c.limited_diff = c.diffset.limited_diff
534 535
535 536 # calculate removed files that are bound to comments
536 537 comment_deleted_files = [
537 538 fname for fname in display_inline_comments
538 539 if fname not in c.diffset.file_stats]
539 540
540 541 c.deleted_files_comments = collections.defaultdict(dict)
541 542 for fname, per_line_comments in display_inline_comments.items():
542 543 if fname in comment_deleted_files:
543 544 c.deleted_files_comments[fname]['stats'] = 0
544 545 c.deleted_files_comments[fname]['comments'] = list()
545 546 for lno, comments in per_line_comments.items():
546 547 c.deleted_files_comments[fname]['comments'].extend(
547 548 comments)
548 549
549 550 # this is a hack to display links properly: when creating a PR, the
550 551 # compare view and others use a different notation, and
551 552 # compare_commits.mako renders links based on the target_repo.
552 553 # We need to swap that here to generate them properly on the HTML side
553 554 c.target_repo = c.source_repo
554 555
555 556 c.commit_statuses = ChangesetStatus.STATUSES
556 557
557 558 c.show_version_changes = not pr_closed
558 559 if c.show_version_changes:
559 560 cur_obj = pull_request_at_ver
560 561 prev_obj = prev_pull_request_at_ver
561 562
562 563 old_commit_ids = prev_obj.revisions
563 564 new_commit_ids = cur_obj.revisions
564 565 commit_changes = PullRequestModel()._calculate_commit_id_changes(
565 566 old_commit_ids, new_commit_ids)
566 567 c.commit_changes_summary = commit_changes
567 568
568 569 # calculate the diff for commits between versions
569 570 c.commit_changes = []
570 571 mark = lambda cs, fw: list(
571 572 h.itertools.izip_longest([], cs, fillvalue=fw))
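            # Editor's note: mark(cs, fw) pairs every commit id in `cs` with
            # the flag `fw` ('a' added / 'r' removed / 'c' common);
            # izip_longest over an empty first sequence yields
            # [(fw, commit_id), ...].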
572 573 for c_type, raw_id in mark(commit_changes.added, 'a') \
573 574 + mark(commit_changes.removed, 'r') \
574 575 + mark(commit_changes.common, 'c'):
575 576
576 577 if raw_id in commit_cache:
577 578 commit = commit_cache[raw_id]
578 579 else:
579 580 try:
580 581 commit = commits_source_repo.get_commit(raw_id)
581 582 except CommitDoesNotExistError:
582 583 # if extraction fails, still use a "dummy" commit
583 584 # for display in the commit diff
584 585 commit = h.AttributeDict(
585 586 {'raw_id': raw_id,
586 587 'message': 'EMPTY or MISSING COMMIT'})
587 588 c.commit_changes.append([c_type, commit])
588 589
589 590 # current user review statuses for each version
590 591 c.review_versions = {}
591 592 if self._rhodecode_user.user_id in allowed_reviewers:
592 593 for co in general_comments:
593 594 if co.author.user_id == self._rhodecode_user.user_id:
594 595 status = co.status_change
595 596 if status:
596 597 _ver_pr = status[0].comment.pull_request_version_id
597 598 c.review_versions[_ver_pr] = status[0]
598 599
599 600 return self._get_template_context(c)
600 601
601 602 def get_commits(
602 603 self, commits_source_repo, pull_request_at_ver, source_commit,
603 604 source_ref_id, source_scm, target_commit, target_ref_id, target_scm):
604 605 commit_cache = collections.OrderedDict()
605 606 missing_requirements = False
606 607 try:
607 608 pre_load = ["author", "branch", "date", "message"]
608 609 show_revs = pull_request_at_ver.revisions
609 610 for rev in show_revs:
610 611 comm = commits_source_repo.get_commit(
611 612 commit_id=rev, pre_load=pre_load)
612 613 commit_cache[comm.raw_id] = comm
613 614
614 615 # Order here matters, we first need to get target, and then
615 616 # the source
616 617 target_commit = commits_source_repo.get_commit(
617 618 commit_id=safe_str(target_ref_id))
618 619
619 620 source_commit = commits_source_repo.get_commit(
620 621 commit_id=safe_str(source_ref_id))
621 622 except CommitDoesNotExistError:
622 623 log.warning(
623 624 'Failed to get commit from `{}` repo'.format(
624 625 commits_source_repo), exc_info=True)
625 626 except RepositoryRequirementError:
626 627 log.warning(
627 628 'Failed to get all required data from repo', exc_info=True)
628 629 missing_requirements = True
629 630 ancestor_commit = None
630 631 try:
631 632 ancestor_id = source_scm.get_common_ancestor(
632 633 source_commit.raw_id, target_commit.raw_id, target_scm)
633 634 ancestor_commit = source_scm.get_commit(ancestor_id)
634 635 except Exception:
635 636 ancestor_commit = None
636 637 return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
637 638
638 639 def assure_not_empty_repo(self):
639 640 _ = self.request.translate
640 641
641 642 try:
642 643 self.db_repo.scm_instance().get_commit()
643 644 except EmptyRepositoryError:
644 645 h.flash(h.literal(_('There are no commits yet')),
645 646 category='warning')
646 647 raise HTTPFound(
647 648 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
648 649
649 650 @LoginRequired()
650 651 @NotAnonymous()
651 652 @HasRepoPermissionAnyDecorator(
652 653 'repository.read', 'repository.write', 'repository.admin')
653 654 @view_config(
654 655 route_name='pullrequest_new', request_method='GET',
655 656 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
656 657 def pull_request_new(self):
657 658 _ = self.request.translate
658 659 c = self.load_default_context()
659 660
660 661 self.assure_not_empty_repo()
661 662 source_repo = self.db_repo
662 663
663 664 commit_id = self.request.GET.get('commit')
664 665 branch_ref = self.request.GET.get('branch')
665 666 bookmark_ref = self.request.GET.get('bookmark')
666 667
667 668 try:
668 669 source_repo_data = PullRequestModel().generate_repo_data(
669 670 source_repo, commit_id=commit_id,
670 671 branch=branch_ref, bookmark=bookmark_ref,
671 672 translator=self.request.translate)
672 673 except CommitDoesNotExistError as e:
673 674 log.exception(e)
674 675 h.flash(_('Commit does not exist'), 'error')
675 676 raise HTTPFound(
676 677 h.route_path('pullrequest_new', repo_name=source_repo.repo_name))
677 678
678 679 default_target_repo = source_repo
679 680
680 681 if source_repo.parent:
681 682 parent_vcs_obj = source_repo.parent.scm_instance()
682 683 if parent_vcs_obj and not parent_vcs_obj.is_empty():
683 684 # change default if we have a parent repo
684 685 default_target_repo = source_repo.parent
685 686
686 687 target_repo_data = PullRequestModel().generate_repo_data(
687 688 default_target_repo, translator=self.request.translate)
688 689
689 690 selected_source_ref = source_repo_data['refs']['selected_ref']
690 691 title_source_ref = ''
691 692 if selected_source_ref:
692 693 title_source_ref = selected_source_ref.split(':', 2)[1]
693 694 c.default_title = PullRequestModel().generate_pullrequest_title(
694 695 source=source_repo.repo_name,
695 696 source_ref=title_source_ref,
696 697 target=default_target_repo.repo_name
697 698 )
698 699
699 700 c.default_repo_data = {
700 701 'source_repo_name': source_repo.repo_name,
701 702 'source_refs_json': json.dumps(source_repo_data),
702 703 'target_repo_name': default_target_repo.repo_name,
703 704 'target_refs_json': json.dumps(target_repo_data),
704 705 }
705 706 c.default_source_ref = selected_source_ref
706 707
707 708 return self._get_template_context(c)
708 709
709 710 @LoginRequired()
710 711 @NotAnonymous()
711 712 @HasRepoPermissionAnyDecorator(
712 713 'repository.read', 'repository.write', 'repository.admin')
713 714 @view_config(
714 715 route_name='pullrequest_repo_refs', request_method='GET',
715 716 renderer='json_ext', xhr=True)
716 717 def pull_request_repo_refs(self):
717 718 self.load_default_context()
718 719 target_repo_name = self.request.matchdict['target_repo_name']
719 720 repo = Repository.get_by_repo_name(target_repo_name)
720 721 if not repo:
721 722 raise HTTPNotFound()
722 723
723 724 target_perm = HasRepoPermissionAny(
724 725 'repository.read', 'repository.write', 'repository.admin')(
725 726 target_repo_name)
726 727 if not target_perm:
727 728 raise HTTPNotFound()
728 729
729 730 return PullRequestModel().generate_repo_data(
730 731 repo, translator=self.request.translate)
731 732
732 733 @LoginRequired()
733 734 @NotAnonymous()
734 735 @HasRepoPermissionAnyDecorator(
735 736 'repository.read', 'repository.write', 'repository.admin')
736 737 @view_config(
737 738 route_name='pullrequest_repo_destinations', request_method='GET',
738 739 renderer='json_ext', xhr=True)
739 740 def pull_request_repo_destinations(self):
740 741 _ = self.request.translate
741 742 filter_query = self.request.GET.get('query')
742 743
743 744 query = Repository.query() \
744 745 .order_by(func.length(Repository.repo_name)) \
745 746 .filter(
746 747 or_(Repository.repo_name == self.db_repo.repo_name,
747 748 Repository.fork_id == self.db_repo.repo_id))
748 749
749 750 if filter_query:
750 751 ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
751 752 query = query.filter(
752 753 Repository.repo_name.ilike(ilike_expression))
753 754
754 755 add_parent = False
755 756 if self.db_repo.parent:
756 757 if (filter_query or '') in self.db_repo.parent.repo_name:
757 758 parent_vcs_obj = self.db_repo.parent.scm_instance()
758 759 if parent_vcs_obj and not parent_vcs_obj.is_empty():
759 760 add_parent = True
760 761
761 762 limit = 20 - 1 if add_parent else 20
762 763 all_repos = query.limit(limit).all()
763 764 if add_parent:
764 765 all_repos += [self.db_repo.parent]
765 766
766 767 repos = []
767 768 for obj in ScmModel().get_repos(all_repos):
768 769 repos.append({
769 770 'id': obj['name'],
770 771 'text': obj['name'],
771 772 'type': 'repo',
772 773 'repo_id': obj['dbrepo']['repo_id'],
773 774 'repo_type': obj['dbrepo']['repo_type'],
774 775 'private': obj['dbrepo']['private'],
775 776
776 777 })
777 778
778 779 data = {
779 780 'more': False,
780 781 'results': [{
781 782 'text': _('Repositories'),
782 783 'children': repos
783 784 }] if repos else []
784 785 }
785 786 return data
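# Editor's sketch (hypothetical values): the grouped payload returned
# above would look roughly like
#
# {'more': False,
#  'results': [{'text': 'Repositories',
#               'children': [{'id': 'RepoGroup/MyRepo',
#                             'text': 'RepoGroup/MyRepo',
#                             'type': 'repo', 'repo_id': 42,
#                             'repo_type': 'hg', 'private': False}]}]}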
786 787
787 788 @LoginRequired()
788 789 @NotAnonymous()
789 790 @HasRepoPermissionAnyDecorator(
790 791 'repository.read', 'repository.write', 'repository.admin')
791 792 @CSRFRequired()
792 793 @view_config(
793 794 route_name='pullrequest_create', request_method='POST',
794 795 renderer=None)
795 796 def pull_request_create(self):
796 797 _ = self.request.translate
797 798 self.assure_not_empty_repo()
798 799 self.load_default_context()
799 800
800 801 controls = peppercorn.parse(self.request.POST.items())
801 802
802 803 try:
803 804 form = PullRequestForm(
804 805 self.request.translate, self.db_repo.repo_id)()
805 806 _form = form.to_python(controls)
806 807 except formencode.Invalid as errors:
807 808 if errors.error_dict.get('revisions'):
808 809 msg = 'Revisions: %s' % errors.error_dict['revisions']
809 810 elif errors.error_dict.get('pullrequest_title'):
810 811 msg = errors.error_dict.get('pullrequest_title')
811 812 else:
812 813 msg = _('Error creating pull request: {}').format(errors)
813 814 log.exception(msg)
814 815 h.flash(msg, 'error')
815 816
816 817 # would rather just go back to form ...
817 818 raise HTTPFound(
818 819 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
819 820
820 821 source_repo = _form['source_repo']
821 822 source_ref = _form['source_ref']
822 823 target_repo = _form['target_repo']
823 824 target_ref = _form['target_ref']
824 825 commit_ids = _form['revisions'][::-1]
825 826
826 827 # find the ancestor for this pr
827 828 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
828 829 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
829 830
830 831 # re-check permissions here;
831 832 # on source_repo we must have read permissions
832 833
833 834 source_perm = HasRepoPermissionAny(
834 835 'repository.read',
835 836 'repository.write', 'repository.admin')(source_db_repo.repo_name)
836 837 if not source_perm:
837 838 msg = _('Not enough permissions to source repo `{}`.').format(
838 839 source_db_repo.repo_name)
839 840 h.flash(msg, category='error')
840 841 # copy the args back to redirect
841 842 org_query = self.request.GET.mixed()
842 843 raise HTTPFound(
843 844 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
844 845 _query=org_query))
845 846
846 847 # on the target repo we must have read permissions as well; later on
847 848 # we also want to check branch permissions here
848 849 target_perm = HasRepoPermissionAny(
849 850 'repository.read',
850 851 'repository.write', 'repository.admin')(target_db_repo.repo_name)
851 852 if not target_perm:
852 853 msg = _('Not enough permissions to target repo `{}`.').format(
853 854 target_db_repo.repo_name)
854 855 h.flash(msg, category='error')
855 856 # copy the args back to redirect
856 857 org_query = self.request.GET.mixed()
857 858 raise HTTPFound(
858 859 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
859 860 _query=org_query))
860 861
861 862 source_scm = source_db_repo.scm_instance()
862 863 target_scm = target_db_repo.scm_instance()
863 864
864 865 source_commit = source_scm.get_commit(source_ref.split(':')[-1])
865 866 target_commit = target_scm.get_commit(target_ref.split(':')[-1])
866 867
867 868 ancestor = source_scm.get_common_ancestor(
868 869 source_commit.raw_id, target_commit.raw_id, target_scm)
869 870
870 871 target_ref_type, target_ref_name, __ = _form['target_ref'].split(':')
871 872 target_ref = ':'.join((target_ref_type, target_ref_name, ancestor))
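# Editor's note: the join above re-pins the target ref to the common
# ancestor, e.g. (hypothetical ids) "branch:default:<head_id>" becomes
# "branch:default:<ancestor_id>", presumably so the merge base of the
# pull request stays fixed even if the target branch advances later.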
872 873
873 874 pullrequest_title = _form['pullrequest_title']
874 875 title_source_ref = source_ref.split(':', 2)[1]
875 876 if not pullrequest_title:
876 877 pullrequest_title = PullRequestModel().generate_pullrequest_title(
877 878 source=source_repo,
878 879 source_ref=title_source_ref,
879 880 target=target_repo
880 881 )
881 882
882 883 description = _form['pullrequest_desc']
883 884
884 885 get_default_reviewers_data, validate_default_reviewers = \
885 886 PullRequestModel().get_reviewer_functions()
886 887
887 888 # recalculate reviewers logic, to make sure we can validate this
888 889 reviewer_rules = get_default_reviewers_data(
889 890 self._rhodecode_db_user, source_db_repo,
890 891 source_commit, target_db_repo, target_commit)
891 892
892 893 given_reviewers = _form['review_members']
893 894 reviewers = validate_default_reviewers(given_reviewers, reviewer_rules)
894 895
895 896 try:
896 897 pull_request = PullRequestModel().create(
897 898 self._rhodecode_user.user_id, source_repo, source_ref,
898 899 target_repo, target_ref, commit_ids, reviewers,
899 900 pullrequest_title, description, reviewer_rules,
900 901 auth_user=self._rhodecode_user
901 902 )
902 903 Session().commit()
903 904
904 905 h.flash(_('Successfully opened new pull request'),
905 906 category='success')
906 907 except Exception:
907 908 msg = _('Error occurred during creation of this pull request.')
908 909 log.exception(msg)
909 910 h.flash(msg, category='error')
910 911
911 912 # copy the args back to redirect
912 913 org_query = self.request.GET.mixed()
913 914 raise HTTPFound(
914 915 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
915 916 _query=org_query))
916 917
917 918 raise HTTPFound(
918 919 h.route_path('pullrequest_show', repo_name=target_repo,
919 920 pull_request_id=pull_request.pull_request_id))
920 921
921 922 @LoginRequired()
922 923 @NotAnonymous()
923 924 @HasRepoPermissionAnyDecorator(
924 925 'repository.read', 'repository.write', 'repository.admin')
925 926 @CSRFRequired()
926 927 @view_config(
927 928 route_name='pullrequest_update', request_method='POST',
928 929 renderer='json_ext')
929 930 def pull_request_update(self):
930 931 pull_request = PullRequest.get_or_404(
931 932 self.request.matchdict['pull_request_id'])
932 933 _ = self.request.translate
933 934
934 935 self.load_default_context()
935 936
936 937 if pull_request.is_closed():
937 938 log.debug('update: forbidden because pull request is closed')
938 939 msg = _(u'Cannot update closed pull requests.')
939 940 h.flash(msg, category='error')
940 941 return True
941 942
942 943 # only owner or admin can update it
943 944 allowed_to_update = PullRequestModel().check_user_update(
944 945 pull_request, self._rhodecode_user)
945 946 if allowed_to_update:
946 947 controls = peppercorn.parse(self.request.POST.items())
947 948
948 949 if 'review_members' in controls:
949 950 self._update_reviewers(
950 951 pull_request, controls['review_members'],
951 952 pull_request.reviewer_data)
952 953 elif str2bool(self.request.POST.get('update_commits', 'false')):
953 954 self._update_commits(pull_request)
954 955 elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
955 956 self._edit_pull_request(pull_request)
956 957 else:
957 958 raise HTTPBadRequest()
958 959 return True
959 960 raise HTTPForbidden()
960 961
961 962 def _edit_pull_request(self, pull_request):
962 963 _ = self.request.translate
963 964 try:
964 965 PullRequestModel().edit(
965 966 pull_request, self.request.POST.get('title'),
966 967 self.request.POST.get('description'), self._rhodecode_user)
967 968 except ValueError:
968 969 msg = _(u'Cannot update closed pull requests.')
969 970 h.flash(msg, category='error')
970 971 return
971 972 else:
972 973 Session().commit()
973 974
974 975 msg = _(u'Pull request title & description updated.')
975 976 h.flash(msg, category='success')
976 977 return
977 978
978 979 def _update_commits(self, pull_request):
979 980 _ = self.request.translate
980 981 resp = PullRequestModel().update_commits(pull_request)
981 982
982 983 if resp.executed:
983 984
984 985 if resp.target_changed and resp.source_changed:
985 986 changed = 'target and source repositories'
986 987 elif resp.target_changed and not resp.source_changed:
987 988 changed = 'target repository'
988 989 elif not resp.target_changed and resp.source_changed:
989 990 changed = 'source repository'
990 991 else:
991 992 changed = 'nothing'
992 993
993 994 msg = _(
994 995 u'Pull request updated to "{source_commit_id}" with '
995 996 u'{count_added} added, {count_removed} removed commits. '
996 997 u'Source of changes: {change_source}')
997 998 msg = msg.format(
998 999 source_commit_id=pull_request.source_ref_parts.commit_id,
999 1000 count_added=len(resp.changes.added),
1000 1001 count_removed=len(resp.changes.removed),
1001 1002 change_source=changed)
1002 1003 h.flash(msg, category='success')
1003 1004
1004 1005 channel = '/repo${}$/pr/{}'.format(
1005 1006 pull_request.target_repo.repo_name,
1006 1007 pull_request.pull_request_id)
1007 1008 message = msg + (
1008 1009 ' - <a onclick="window.location.reload()">'
1009 1010 '<strong>{}</strong></a>'.format(_('Reload page')))
1010 1011 channelstream.post_message(
1011 1012 channel, message, self._rhodecode_user.username,
1012 1013 registry=self.request.registry)
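# Editor's note (hypothetical values): the channel template above
# expands to something like '/repo$RepoGroup/MyRepo$/pr/3', i.e. one
# channelstream channel per pull request page.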
1013 1014 else:
1014 1015 msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
1015 1016 warning_reasons = [
1016 1017 UpdateFailureReason.NO_CHANGE,
1017 1018 UpdateFailureReason.WRONG_REF_TYPE,
1018 1019 ]
1019 1020 category = 'warning' if resp.reason in warning_reasons else 'error'
1020 1021 h.flash(msg, category=category)
1021 1022
1022 1023 @LoginRequired()
1023 1024 @NotAnonymous()
1024 1025 @HasRepoPermissionAnyDecorator(
1025 1026 'repository.read', 'repository.write', 'repository.admin')
1026 1027 @CSRFRequired()
1027 1028 @view_config(
1028 1029 route_name='pullrequest_merge', request_method='POST',
1029 1030 renderer='json_ext')
1030 1031 def pull_request_merge(self):
1031 1032 """
1032 1033 Merge will perform a server-side merge of the specified
1033 1034 pull request, if the pull request is approved and mergeable.
1034 1035 After successful merging, the pull request is automatically
1035 1036 closed, with a relevant comment.
1036 1037 """
1037 1038 pull_request = PullRequest.get_or_404(
1038 1039 self.request.matchdict['pull_request_id'])
1039 1040
1040 1041 self.load_default_context()
1041 1042 check = MergeCheck.validate(pull_request, self._rhodecode_db_user,
1042 1043 translator=self.request.translate)
1043 1044 merge_possible = not check.failed
1044 1045
1045 1046 for err_type, error_msg in check.errors:
1046 1047 h.flash(error_msg, category=err_type)
1047 1048
1048 1049 if merge_possible:
1049 1050 log.debug("Pre-conditions checked, trying to merge.")
1050 1051 extras = vcs_operation_context(
1051 1052 self.request.environ, repo_name=pull_request.target_repo.repo_name,
1052 1053 username=self._rhodecode_db_user.username, action='push',
1053 1054 scm=pull_request.target_repo.repo_type)
1054 1055 self._merge_pull_request(
1055 1056 pull_request, self._rhodecode_db_user, extras)
1056 1057 else:
1057 1058 log.debug("Pre-conditions failed, NOT merging.")
1058 1059
1059 1060 raise HTTPFound(
1060 1061 h.route_path('pullrequest_show',
1061 1062 repo_name=pull_request.target_repo.repo_name,
1062 1063 pull_request_id=pull_request.pull_request_id))
1063 1064
1064 1065 def _merge_pull_request(self, pull_request, user, extras):
1065 1066 _ = self.request.translate
1066 merge_resp = PullRequestModel().merge(pull_request, user, extras=extras)
1067 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1067 1068
1068 1069 if merge_resp.executed:
1069 1070 log.debug("The merge was successful, closing the pull request.")
1070 1071 PullRequestModel().close_pull_request(
1071 1072 pull_request.pull_request_id, user)
1072 1073 Session().commit()
1073 1074 msg = _('Pull request was successfully merged and closed.')
1074 1075 h.flash(msg, category='success')
1075 1076 else:
1076 1077 log.debug(
1077 1078 "The merge was not successful. Merge response: %s",
1078 1079 merge_resp)
1079 1080 msg = PullRequestModel().merge_status_message(
1080 1081 merge_resp.failure_reason)
1081 1082 h.flash(msg, category='error')
1082 1083
1083 1084 def _update_reviewers(self, pull_request, review_members, reviewer_rules):
1084 1085 _ = self.request.translate
1085 1086 get_default_reviewers_data, validate_default_reviewers = \
1086 1087 PullRequestModel().get_reviewer_functions()
1087 1088
1088 1089 try:
1089 1090 reviewers = validate_default_reviewers(review_members, reviewer_rules)
1090 1091 except ValueError as e:
1091 1092 log.error('Reviewers Validation: {}'.format(e))
1092 1093 h.flash(e, category='error')
1093 1094 return
1094 1095
1095 1096 PullRequestModel().update_reviewers(
1096 1097 pull_request, reviewers, self._rhodecode_user)
1097 1098 h.flash(_('Pull request reviewers updated.'), category='success')
1098 1099 Session().commit()
1099 1100
1100 1101 @LoginRequired()
1101 1102 @NotAnonymous()
1102 1103 @HasRepoPermissionAnyDecorator(
1103 1104 'repository.read', 'repository.write', 'repository.admin')
1104 1105 @CSRFRequired()
1105 1106 @view_config(
1106 1107 route_name='pullrequest_delete', request_method='POST',
1107 1108 renderer='json_ext')
1108 1109 def pull_request_delete(self):
1109 1110 _ = self.request.translate
1110 1111
1111 1112 pull_request = PullRequest.get_or_404(
1112 1113 self.request.matchdict['pull_request_id'])
1113 1114 self.load_default_context()
1114 1115
1115 1116 pr_closed = pull_request.is_closed()
1116 1117 allowed_to_delete = PullRequestModel().check_user_delete(
1117 1118 pull_request, self._rhodecode_user) and not pr_closed
1118 1119
1119 1120 # only owner can delete it !
1120 1121 if allowed_to_delete:
1121 1122 PullRequestModel().delete(pull_request, self._rhodecode_user)
1122 1123 Session().commit()
1123 1124 h.flash(_('Successfully deleted pull request'),
1124 1125 category='success')
1125 1126 raise HTTPFound(h.route_path('pullrequest_show_all',
1126 1127 repo_name=self.db_repo_name))
1127 1128
1128 1129 log.warning('user %s tried to delete pull request without access',
1129 1130 self._rhodecode_user)
1130 1131 raise HTTPNotFound()
1131 1132
1132 1133 @LoginRequired()
1133 1134 @NotAnonymous()
1134 1135 @HasRepoPermissionAnyDecorator(
1135 1136 'repository.read', 'repository.write', 'repository.admin')
1136 1137 @CSRFRequired()
1137 1138 @view_config(
1138 1139 route_name='pullrequest_comment_create', request_method='POST',
1139 1140 renderer='json_ext')
1140 1141 def pull_request_comment_create(self):
1141 1142 _ = self.request.translate
1142 1143
1143 1144 pull_request = PullRequest.get_or_404(
1144 1145 self.request.matchdict['pull_request_id'])
1145 1146 pull_request_id = pull_request.pull_request_id
1146 1147
1147 1148 if pull_request.is_closed():
1148 1149 log.debug('comment: forbidden because pull request is closed')
1149 1150 raise HTTPForbidden()
1150 1151
1151 1152 allowed_to_comment = PullRequestModel().check_user_comment(
1152 1153 pull_request, self._rhodecode_user)
1153 1154 if not allowed_to_comment:
1154 1155 log.debug(
1155 1156 'comment: forbidden because pull request is from forbidden repo')
1156 1157 raise HTTPForbidden()
1157 1158
1158 1159 c = self.load_default_context()
1159 1160
1160 1161 status = self.request.POST.get('changeset_status', None)
1161 1162 text = self.request.POST.get('text')
1162 1163 comment_type = self.request.POST.get('comment_type')
1163 1164 resolves_comment_id = self.request.POST.get('resolves_comment_id', None)
1164 1165 close_pull_request = self.request.POST.get('close_pull_request')
1165 1166
1166 1167 # the logic here should work like the following: if we submit a
1167 1168 # close-pr comment, use the `close_pull_request_with_comment`
1168 1169 # function, else handle regular comment logic
1169 1170
1170 1171 if close_pull_request:
1171 1172 # only owner or admin or person with write permissions
1172 1173 allowed_to_close = PullRequestModel().check_user_update(
1173 1174 pull_request, self._rhodecode_user)
1174 1175 if not allowed_to_close:
1175 1176 log.debug('comment: forbidden because not allowed to close '
1176 1177 'pull request %s', pull_request_id)
1177 1178 raise HTTPForbidden()
1178 1179 comment, status = PullRequestModel().close_pull_request_with_comment(
1179 1180 pull_request, self._rhodecode_user, self.db_repo, message=text)
1180 1181 Session().flush()
1181 1182 events.trigger(
1182 1183 events.PullRequestCommentEvent(pull_request, comment))
1183 1184
1184 1185 else:
1185 1186 # regular comment case, could be inline, or one with status.
1186 1187 # for that one we also check permissions
1187 1188
1188 1189 allowed_to_change_status = PullRequestModel().check_user_change_status(
1189 1190 pull_request, self._rhodecode_user)
1190 1191
1191 1192 if status and allowed_to_change_status:
1192 1193 message = (_('Status change %(transition_icon)s %(status)s')
1193 1194 % {'transition_icon': '>',
1194 1195 'status': ChangesetStatus.get_status_lbl(status)})
1195 1196 text = text or message
1196 1197
1197 1198 comment = CommentsModel().create(
1198 1199 text=text,
1199 1200 repo=self.db_repo.repo_id,
1200 1201 user=self._rhodecode_user.user_id,
1201 1202 pull_request=pull_request,
1202 1203 f_path=self.request.POST.get('f_path'),
1203 1204 line_no=self.request.POST.get('line'),
1204 1205 status_change=(ChangesetStatus.get_status_lbl(status)
1205 1206 if status and allowed_to_change_status else None),
1206 1207 status_change_type=(status
1207 1208 if status and allowed_to_change_status else None),
1208 1209 comment_type=comment_type,
1209 1210 resolves_comment_id=resolves_comment_id,
1210 1211 auth_user=self._rhodecode_user
1211 1212 )
1212 1213
1213 1214 if allowed_to_change_status:
1214 1215 # calculate old status before we change it
1215 1216 old_calculated_status = pull_request.calculated_review_status()
1216 1217
1217 1218 # get status if set !
1218 1219 if status:
1219 1220 ChangesetStatusModel().set_status(
1220 1221 self.db_repo.repo_id,
1221 1222 status,
1222 1223 self._rhodecode_user.user_id,
1223 1224 comment,
1224 1225 pull_request=pull_request
1225 1226 )
1226 1227
1227 1228 Session().flush()
1228 1229 # this refresh is somehow required to get access to relationships
1229 1230 # lazy-loaded on the comment
1230 1231 Session().refresh(comment)
1231 1232
1232 1233 events.trigger(
1233 1234 events.PullRequestCommentEvent(pull_request, comment))
1234 1235
1235 1236 # we now calculate the status of the pull request, and based on that
1236 1237 # calculation we set the commit status
1237 1238 calculated_status = pull_request.calculated_review_status()
1238 1239 if old_calculated_status != calculated_status:
1239 1240 PullRequestModel()._trigger_pull_request_hook(
1240 1241 pull_request, self._rhodecode_user, 'review_status_change')
1241 1242
1242 1243 Session().commit()
1243 1244
1244 1245 data = {
1245 1246 'target_id': h.safeid(h.safe_unicode(
1246 1247 self.request.POST.get('f_path'))),
1247 1248 }
1248 1249 if comment:
1249 1250 c.co = comment
1250 1251 rendered_comment = render(
1251 1252 'rhodecode:templates/changeset/changeset_comment_block.mako',
1252 1253 self._get_template_context(c), self.request)
1253 1254
1254 1255 data.update(comment.get_dict())
1255 1256 data.update({'rendered_text': rendered_comment})
1256 1257
1257 1258 return data
1258 1259
1259 1260 @LoginRequired()
1260 1261 @NotAnonymous()
1261 1262 @HasRepoPermissionAnyDecorator(
1262 1263 'repository.read', 'repository.write', 'repository.admin')
1263 1264 @CSRFRequired()
1264 1265 @view_config(
1265 1266 route_name='pullrequest_comment_delete', request_method='POST',
1266 1267 renderer='json_ext')
1267 1268 def pull_request_comment_delete(self):
1268 1269 pull_request = PullRequest.get_or_404(
1269 1270 self.request.matchdict['pull_request_id'])
1270 1271
1271 1272 comment = ChangesetComment.get_or_404(
1272 1273 self.request.matchdict['comment_id'])
1273 1274 comment_id = comment.comment_id
1274 1275
1275 1276 if pull_request.is_closed():
1276 1277 log.debug('comment: forbidden because pull request is closed')
1277 1278 raise HTTPForbidden()
1278 1279
1279 1280 if not comment:
1280 1281 log.debug('Comment with id:%s not found, skipping', comment_id)
1281 1282 # the comment was probably already deleted in another call
1282 1283 return True
1283 1284
1284 1285 if comment.pull_request.is_closed():
1285 1286 # don't allow deleting comments on closed pull request
1286 1287 raise HTTPForbidden()
1287 1288
1288 1289 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1289 1290 super_admin = h.HasPermissionAny('hg.admin')()
1290 1291 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1291 1292 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1292 1293 comment_repo_admin = is_repo_admin and is_repo_comment
1293 1294
1294 1295 if super_admin or comment_owner or comment_repo_admin:
1295 1296 old_calculated_status = comment.pull_request.calculated_review_status()
1296 1297 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1297 1298 Session().commit()
1298 1299 calculated_status = comment.pull_request.calculated_review_status()
1299 1300 if old_calculated_status != calculated_status:
1300 1301 PullRequestModel()._trigger_pull_request_hook(
1301 1302 comment.pull_request, self._rhodecode_user, 'review_status_change')
1302 1303 return True
1303 1304 else:
1304 1305 log.warning('No permissions for user %s to delete comment_id: %s',
1305 1306 self._rhodecode_db_user, comment_id)
1306 1307 raise HTTPNotFound()
@@ -1,673 +1,674 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 SimpleVCS middleware for handling protocol requests (push/clone etc.).
23 23 It's implemented with a basic auth function.
24 24 """
25 25
26 26 import os
27 27 import re
28 28 import logging
29 29 import importlib
30 30 from functools import wraps
31 31 from StringIO import StringIO
32 32 from lxml import etree
33 33
34 34 import time
35 35 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
36 36
37 37 from pyramid.httpexceptions import (
38 38 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
39 39 from zope.cachedescriptors.property import Lazy as LazyProperty
40 40
41 41 import rhodecode
42 42 from rhodecode.authentication.base import (
43 43 authenticate, get_perms_cache_manager, VCS_TYPE, loadplugin)
44 44 from rhodecode.lib import caches
45 45 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
46 46 from rhodecode.lib.base import (
47 47 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
48 48 from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
49 49 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
50 50 from rhodecode.lib.middleware import appenlight
51 51 from rhodecode.lib.middleware.utils import scm_app_http
52 52 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
53 53 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool, safe_unicode
54 54 from rhodecode.lib.vcs.conf import settings as vcs_settings
55 55 from rhodecode.lib.vcs.backends import base
56 56
57 57 from rhodecode.model import meta
58 58 from rhodecode.model.db import User, Repository, PullRequest
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.pull_request import PullRequestModel
61 61 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
66 66 def extract_svn_txn_id(acl_repo_name, data):
67 67 """
68 68 Helper method for extraction of svn txn_id from submitted XML data during
69 69 POST operations
70 70 """
71 71 try:
72 72 root = etree.fromstring(data)
73 73 pat = re.compile(r'/txn/(?P<txn_id>.*)')
74 74 for el in root:
75 75 if el.tag == '{DAV:}source':
76 76 for sub_el in el:
77 77 if sub_el.tag == '{DAV:}href':
78 78 match = pat.search(sub_el.text)
79 79 if match:
80 80 svn_tx_id = match.groupdict()['txn_id']
81 81 txn_id = caches.compute_key_from_params(
82 82 acl_repo_name, svn_tx_id)
83 83 return txn_id
84 84 except Exception:
85 85 log.exception('Failed to extract txn_id')
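# Editor's sketch (hypothetical request body): for an SVN MERGE request
# carrying a DAV payload of roughly this shape
#
# <?xml version="1.0" encoding="utf-8"?>
# <D:merge xmlns:D="DAV:">
#   <D:source>
#     <D:href>/svn/myrepo/!svn/txn/123-abc</D:href>
#   </D:source>
# </D:merge>
#
# the pattern above extracts '123-abc':
#
# re.compile(r'/txn/(?P<txn_id>.*)').search(
#     '/svn/myrepo/!svn/txn/123-abc').groupdict()['txn_id']  # '123-abc'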
86 86
87 87
88 88 def initialize_generator(factory):
89 89 """
90 90 Initializes the returned generator by draining its first element.
91 91
92 92 This can be used to give a generator an initializer, which is the code
93 93 up to the first yield statement. This decorator enforces that the first
94 94 produced element has the value ``"__init__"`` to make its special
95 95 purpose very explicit in the calling code.
96 96 """
97 97
98 98 @wraps(factory)
99 99 def wrapper(*args, **kwargs):
100 100 gen = factory(*args, **kwargs)
101 101 try:
102 102 init = gen.next()
103 103 except StopIteration:
104 104 raise ValueError('Generator must yield at least one element.')
105 105 if init != "__init__":
106 106 raise ValueError('First yielded element must be "__init__".')
107 107 return gen
108 108 return wrapper
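# Editor's sketch (not part of this changeset): how the decorator above
# is meant to be used. Code before the first yield runs eagerly when the
# wrapped factory is called; the "__init__" marker is drained.
#
# @initialize_generator
# def responder():
#     prepare_things()   # hypothetical setup; runs on responder() call
#     yield "__init__"   # drained by initialize_generator
#     yield "chunk-1"    # first element the consumer actually sees
#
# gen = responder()      # setup has already executed here
# gen.next()             # -> "chunk-1" (Python 2 generator API, as above)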
109 109
110 110
111 111 class SimpleVCS(object):
112 112 """Common functionality for SCM HTTP handlers."""
113 113
114 114 SCM = 'unknown'
115 115
116 116 acl_repo_name = None
117 117 url_repo_name = None
118 118 vcs_repo_name = None
119 119 rc_extras = {}
120 120
121 121 # We have to handle requests to shadow repositories differently than requests
122 122 # to normal repositories. Therefore we have to distinguish them. To do this
123 123 # we use this regex which will match only on URLs pointing to shadow
124 124 # repositories.
125 125 shadow_repo_re = re.compile(
126 126 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
127 127 '(?P<target>{slug_pat})/' # target repo
128 128 'pull-request/(?P<pr_id>\d+)/' # pull request
129 129 'repository$' # shadow repo
130 130 .format(slug_pat=SLUG_RE.pattern))
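# Editor's note (illustrative): a URL path such as
# 'RepoGroup/Sub/MyRepo/pull-request/3/repository' matches with
# groups='RepoGroup/Sub/', target='MyRepo', pr_id='3'.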
131 131
132 132 def __init__(self, config, registry):
133 133 self.registry = registry
134 134 self.config = config
135 135 # re-populated by specialized middleware
136 136 self.repo_vcs_config = base.Config()
137 137 self.rhodecode_settings = SettingsModel().get_all_settings(cache=True)
138 138
139 139 registry.rhodecode_settings = self.rhodecode_settings
140 140 # authenticate this VCS request using authfunc
141 141 auth_ret_code_detection = \
142 142 str2bool(self.config.get('auth_ret_code_detection', False))
143 143 self.authenticate = BasicAuth(
144 144 '', authenticate, registry, config.get('auth_ret_code'),
145 145 auth_ret_code_detection)
146 146 self.ip_addr = '0.0.0.0'
147 147
148 148 @LazyProperty
149 149 def global_vcs_config(self):
150 150 try:
151 151 return VcsSettingsModel().get_ui_settings_as_config_obj()
152 152 except Exception:
153 153 return base.Config()
154 154
155 155 @property
156 156 def base_path(self):
157 157 settings_path = self.repo_vcs_config.get(
158 158 *VcsSettingsModel.PATH_SETTING)
159 159
160 160 if not settings_path:
161 161 settings_path = self.global_vcs_config.get(
162 162 *VcsSettingsModel.PATH_SETTING)
163 163
164 164 if not settings_path:
165 165 # try, maybe it was passed in explicitly as a config option
166 166 settings_path = self.config.get('base_path')
167 167
168 168 if not settings_path:
169 169 raise ValueError('FATAL: base_path is empty')
170 170 return settings_path
171 171
172 172 def set_repo_names(self, environ):
173 173 """
174 174 This will populate the attributes acl_repo_name, url_repo_name,
175 175 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
176 176 shadow) repositories all names are equal. In case of requests to a
177 177 shadow repository the acl-name points to the target repo of the pull
178 178 request and the vcs-name points to the shadow repo file system path.
179 179 The url-name is always the URL used by the vcs client program.
180 180
181 181 Example in case of a shadow repo:
182 182 acl_repo_name = RepoGroup/MyRepo
183 183 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
184 184 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3
185 185 """
186 186 # First we set the repo name from URL for all attributes. This is the
187 187 # default if handling normal (non shadow) repo requests.
188 188 self.url_repo_name = self._get_repository_name(environ)
189 189 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
190 190 self.is_shadow_repo = False
191 191
192 192 # Check if this is a request to a shadow repository.
193 193 match = self.shadow_repo_re.match(self.url_repo_name)
194 194 if match:
195 195 match_dict = match.groupdict()
196 196
197 197 # Build acl repo name from regex match.
198 198 acl_repo_name = safe_unicode('{groups}{target}'.format(
199 199 groups=match_dict['groups'] or '',
200 200 target=match_dict['target']))
201 201
202 202 # Retrieve pull request instance by ID from regex match.
203 203 pull_request = PullRequest.get(match_dict['pr_id'])
204 204
205 205 # Only proceed if we got a pull request and if acl repo name from
206 206 # URL equals the target repo name of the pull request.
207 if pull_request and (acl_repo_name ==
208 pull_request.target_repo.repo_name):
207 if pull_request and \
208 (acl_repo_name == pull_request.target_repo.repo_name):
209 repo_id = pull_request.target_repo.repo_id
209 210 # Get file system path to shadow repository.
210 211 workspace_id = PullRequestModel()._workspace_id(pull_request)
211 212 target_vcs = pull_request.target_repo.scm_instance()
212 213 vcs_repo_name = target_vcs._get_shadow_repository_path(
213 workspace_id)
214 repo_id, workspace_id)
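# Editor's note: per the lines above, the shadow repository path is now
# derived from the immutable numeric repo_id (together with the
# workspace id) rather than from the repository name alone.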
214 215
215 216 # Store names for later usage.
216 217 self.vcs_repo_name = vcs_repo_name
217 218 self.acl_repo_name = acl_repo_name
218 219 self.is_shadow_repo = True
219 220
220 221 log.debug('Setting all VCS repository names: %s', {
221 222 'acl_repo_name': self.acl_repo_name,
222 223 'url_repo_name': self.url_repo_name,
223 224 'vcs_repo_name': self.vcs_repo_name,
224 225 })
225 226
226 227 @property
227 228 def scm_app(self):
228 229 custom_implementation = self.config['vcs.scm_app_implementation']
229 230 if custom_implementation == 'http':
230 231 log.info('Using HTTP implementation of scm app.')
231 232 scm_app_impl = scm_app_http
232 233 else:
233 234 log.info('Using custom implementation of scm_app: "{}"'.format(
234 235 custom_implementation))
235 236 scm_app_impl = importlib.import_module(custom_implementation)
236 237 return scm_app_impl
237 238
238 239 def _get_by_id(self, repo_name):
239 240 """
240 241 Gets a special pattern _<ID> from the clone url and tries to replace it
241 242 with a repository name, to support non-changeable _<ID> urls
242 243 """
243 244
244 245 data = repo_name.split('/')
245 246 if len(data) >= 2:
246 247 from rhodecode.model.repo import RepoModel
247 248 by_id_match = RepoModel().get_repo_by_id(repo_name)
248 249 if by_id_match:
249 250 data[1] = by_id_match.repo_name
250 251
251 252 return safe_str('/'.join(data))
252 253
253 254 def _invalidate_cache(self, repo_name):
254 255 """
255 256 Sets cache for this repository for invalidation on next access
256 257
257 258 :param repo_name: full repo name, also a cache key
258 259 """
259 260 ScmModel().mark_for_invalidation(repo_name)
260 261
261 262 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
262 263 db_repo = Repository.get_by_repo_name(repo_name)
263 264 if not db_repo:
264 265 log.debug('Repository `%s` not found inside the database.',
265 266 repo_name)
266 267 return False
267 268
268 269 if db_repo.repo_type != scm_type:
269 270 log.warning(
270 271 'Repository `%s` has incorrect scm_type, expected %s got %s',
271 272 repo_name, db_repo.repo_type, scm_type)
272 273 return False
273 274
274 275 config = db_repo._config
275 276 config.set('extensions', 'largefiles', '')
276 277 return is_valid_repo(
277 278 repo_name, base_path,
278 279 explicit_scm=scm_type, expect_scm=scm_type, config=config)
279 280
280 281 def valid_and_active_user(self, user):
281 282 """
282 283 Checks that the user is not empty and, if it's actually a user object,
283 284 checks whether it is active.
284 285
285 286 :param user: user object or None
286 287 :return: boolean
287 288 """
288 289 if user is None:
289 290 return False
290 291
291 292 elif user.active:
292 293 return True
293 294
294 295 return False
295 296
296 297 @property
297 298 def is_shadow_repo_dir(self):
298 299 return os.path.isdir(self.vcs_repo_name)
299 300
300 301 def _check_permission(self, action, user, repo_name, ip_addr=None,
301 302 plugin_id='', plugin_cache_active=False, cache_ttl=0):
302 303 """
303 304 Checks permissions using the action (push/pull), user and repository
304 305 name. If plugin_cache_active and cache_ttl are set, it will use the
305 306 plugin which authenticated the user to store the cached permissions
306 307 result for cache_ttl seconds
307 308
308 309 :param action: push or pull action
309 310 :param user: user instance
310 311 :param repo_name: repository name
311 312 """
312 313
313 314 # get instance of cache manager configured for a namespace
314 315 cache_manager = get_perms_cache_manager(
315 316 custom_ttl=cache_ttl, suffix=user.user_id)
316 317 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
317 318 plugin_id, plugin_cache_active, cache_ttl)
318 319
319 320 # for environ-based auth the password can be empty; the validation is then
320 321 # done by the server that fills in the env data needed for authentication
321 322 _perm_calc_hash = caches.compute_key_from_params(
322 323 plugin_id, action, user.user_id, repo_name, ip_addr)
323 324
324 325 # _authenticate is a wrapper for .auth() method of plugin.
325 326 # it checks if .auth() sends proper data.
326 327 # For RhodeCodeExternalAuthPlugin it also maps users to
327 328 # Database and maps the attributes returned from .auth()
328 329 # to RhodeCode database. If this function returns data
329 330 # then auth is correct.
330 331 start = time.time()
331 332 log.debug('Running plugin `%s` permissions check', plugin_id)
332 333
333 334 def perm_func():
334 335 """
335 336 This function is used internally by the Beaker cache to calculate
336 337 the result
337 338 """
338 339 log.debug('auth: calculating permission access now...')
339 340 # check IP
340 341 inherit = user.inherit_default_permissions
341 342 ip_allowed = AuthUser.check_ip_allowed(
342 343 user.user_id, ip_addr, inherit_from_default=inherit)
343 344 if ip_allowed:
344 345 log.info('Access for IP:%s allowed', ip_addr)
345 346 else:
346 347 return False
347 348
348 349 if action == 'push':
349 350 perms = ('repository.write', 'repository.admin')
350 351 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
351 352 return False
352 353
353 354 else:
354 355 # any other action needs at least read permission
355 356 perms = (
356 357 'repository.read', 'repository.write', 'repository.admin')
357 358 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
358 359 return False
359 360
360 361 return True
361 362
362 363 if plugin_cache_active:
363 364 log.debug('Trying to fetch cached perms by %s', _perm_calc_hash[:6])
364 365 perm_result = cache_manager.get(
365 366 _perm_calc_hash, createfunc=perm_func)
366 367 else:
367 368 perm_result = perm_func()
368 369
369 370 auth_time = time.time() - start
370 371 log.debug('Permissions for plugin `%s` completed in %.3fs, '
371 372 'expiration time of fetched cache %.1fs.',
372 373 plugin_id, auth_time, cache_ttl)
373 374
374 375 return perm_result
375 376
376 377 def _check_ssl(self, environ, start_response):
377 378 """
378 379 Checks the SSL flag and returns False if SSL is required but not
379 380 present, True otherwise
380 381 """
381 382 org_proto = environ['wsgi._org_proto']
382 383 # check if we have SSL required! if not, it's a bad request!
383 384 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
384 385 if require_ssl and org_proto == 'http':
385 386 log.debug(
386 387 'Bad request: detected protocol is `%s` and '
387 388 'SSL/HTTPS is required.', org_proto)
388 389 return False
389 390 return True
390 391
391 392 def _get_default_cache_ttl(self):
392 393 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
393 394 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
394 395 plugin_settings = plugin.get_settings()
395 396 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
396 397 plugin_settings) or (False, 0)
397 398 return plugin_cache_active, cache_ttl
398 399
399 400 def __call__(self, environ, start_response):
400 401 try:
401 402 return self._handle_request(environ, start_response)
402 403 except Exception:
403 404 log.exception("Exception while handling request")
404 405 appenlight.track_exception(environ)
405 406 return HTTPInternalServerError()(environ, start_response)
406 407 finally:
407 408 meta.Session.remove()
408 409
409 410 def _handle_request(self, environ, start_response):
410 411
411 412 if not self._check_ssl(environ, start_response):
412 413 reason = ('SSL required, while RhodeCode was unable '
413 414 'to detect this as an SSL request')
414 415 log.debug('User not allowed to proceed, %s', reason)
415 416 return HTTPNotAcceptable(reason)(environ, start_response)
416 417
417 418 if not self.url_repo_name:
418 419 log.warning('Repository name is empty: %s', self.url_repo_name)
419 420 # failed to get repo name, we fail now
420 421 return HTTPNotFound()(environ, start_response)
421 422 log.debug('Extracted repo name is %s', self.url_repo_name)
422 423
423 424 ip_addr = get_ip_addr(environ)
424 425 user_agent = get_user_agent(environ)
425 426 username = None
426 427
427 428 # skip passing error to error controller
428 429 environ['pylons.status_code_redirect'] = True
429 430
430 431 # ======================================================================
431 432 # GET ACTION PULL or PUSH
432 433 # ======================================================================
433 434 action = self._get_action(environ)
434 435
435 436 # ======================================================================
436 437 # Check if this is a request to a shadow repository of a pull request.
437 438 # In this case only pull action is allowed.
438 439 # ======================================================================
439 440 if self.is_shadow_repo and action != 'pull':
440 441 reason = 'Only pull action is allowed for shadow repositories.'
441 442 log.debug('User not allowed to proceed, %s', reason)
442 443 return HTTPNotAcceptable(reason)(environ, start_response)
443 444
444 445 # Check if the shadow repo actually exists, in case someone refers
445 446 # to it after it has been deleted because of a successful merge.
446 447 if self.is_shadow_repo and not self.is_shadow_repo_dir:
447 448 log.debug(
448 449 'Shadow repo detected, and shadow repo dir `%s` is missing',
449 450 self.vcs_repo_name)
450 451 return HTTPNotFound()(environ, start_response)
451 452
452 453 # ======================================================================
453 454 # CHECK ANONYMOUS PERMISSION
454 455 # ======================================================================
455 456 if action in ['pull', 'push']:
456 457 anonymous_user = User.get_default_user()
457 458 username = anonymous_user.username
458 459 if anonymous_user.active:
459 460 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
460 461 # ONLY check permissions if the user is activated
461 462 anonymous_perm = self._check_permission(
462 463 action, anonymous_user, self.acl_repo_name, ip_addr,
463 464 plugin_id='anonymous_access',
464 465 plugin_cache_active=plugin_cache_active,
465 466 cache_ttl=cache_ttl,
466 467 )
467 468 else:
468 469 anonymous_perm = False
469 470
470 471 if not anonymous_user.active or not anonymous_perm:
471 472 if not anonymous_user.active:
472 473 log.debug('Anonymous access is disabled, running '
473 474 'authentication')
474 475
475 476 if not anonymous_perm:
476 477 log.debug('Not enough credentials to access this '
477 478 'repository as anonymous user')
478 479
479 480 username = None
480 481 # ==============================================================
481 482 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
482 483 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
483 484 # ==============================================================
484 485
485 486 # try to auth based on environ, container auth methods
486 487 log.debug('Running PRE-AUTH for container based authentication')
487 488 pre_auth = authenticate(
488 489 '', '', environ, VCS_TYPE, registry=self.registry,
489 490 acl_repo_name=self.acl_repo_name)
490 491 if pre_auth and pre_auth.get('username'):
491 492 username = pre_auth['username']
492 493 log.debug('PRE-AUTH got %s as username', username)
493 494 if pre_auth:
494 495 log.debug('PRE-AUTH successful from %s',
495 496 pre_auth.get('auth_data', {}).get('_plugin'))
496 497
497 498 # If not authenticated by the container, run basic auth;
498 499 # before that, inject the calling repo_name for special scope checks
499 500 self.authenticate.acl_repo_name = self.acl_repo_name
500 501
501 502 plugin_cache_active, cache_ttl = False, 0
502 503 plugin = None
503 504 if not username:
504 505 self.authenticate.realm = self.authenticate.get_rc_realm()
505 506
506 507 try:
507 508 auth_result = self.authenticate(environ)
508 509 except (UserCreationError, NotAllowedToCreateUserError) as e:
509 510 log.error(e)
510 511 reason = safe_str(e)
511 512 return HTTPNotAcceptable(reason)(environ, start_response)
512 513
513 514 if isinstance(auth_result, dict):
514 515 AUTH_TYPE.update(environ, 'basic')
515 516 REMOTE_USER.update(environ, auth_result['username'])
516 517 username = auth_result['username']
517 518 plugin = auth_result.get('auth_data', {}).get('_plugin')
518 519 log.info(
519 520 'MAIN-AUTH successful for user `%s` from %s plugin',
520 521 username, plugin)
521 522
522 523 plugin_cache_active, cache_ttl = auth_result.get(
523 524 'auth_data', {}).get('_ttl_cache') or (False, 0)
524 525 else:
525 526 return auth_result.wsgi_application(
526 527 environ, start_response)
527 528
528 529
529 530 # ==============================================================
530 531 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
531 532 # ==============================================================
532 533 user = User.get_by_username(username)
533 534 if not self.valid_and_active_user(user):
534 535 return HTTPForbidden()(environ, start_response)
535 536 username = user.username
536 537 user.update_lastactivity()
537 538 meta.Session().commit()
538 539
539 540 # check user attributes for password change flag
540 541 user_obj = user
541 542 if user_obj and user_obj.username != User.DEFAULT_USER and \
542 543 user_obj.user_data.get('force_password_change'):
543 544 reason = 'password change required'
544 545 log.debug('User not allowed to authenticate, %s', reason)
545 546 return HTTPNotAcceptable(reason)(environ, start_response)
546 547
547 548 # check permissions for this repository
548 549 perm = self._check_permission(
549 550 action, user, self.acl_repo_name, ip_addr,
550 551 plugin, plugin_cache_active, cache_ttl)
551 552 if not perm:
552 553 return HTTPForbidden()(environ, start_response)
553 554
554 555 # extras are injected into UI object and later available
555 556 # in hooks executed by RhodeCode
556 557 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
557 558 extras = vcs_operation_context(
558 559 environ, repo_name=self.acl_repo_name, username=username,
559 560 action=action, scm=self.SCM, check_locking=check_locking,
560 561 is_shadow_repo=self.is_shadow_repo
561 562 )
562 563
563 564 # ======================================================================
564 565 # REQUEST HANDLING
565 566 # ======================================================================
566 567 repo_path = os.path.join(
567 568 safe_str(self.base_path), safe_str(self.vcs_repo_name))
568 569 log.debug('Repository path is %s', repo_path)
569 570
570 571 fix_PATH()
571 572
572 573 log.info(
573 574 '%s action on %s repo "%s" by "%s" from %s %s',
574 575 action, self.SCM, safe_str(self.url_repo_name),
575 576 safe_str(username), ip_addr, user_agent)
576 577
577 578 return self._generate_vcs_response(
578 579 environ, start_response, repo_path, extras, action)
579 580
580 581 @initialize_generator
581 582 def _generate_vcs_response(
582 583 self, environ, start_response, repo_path, extras, action):
583 584 """
584 585 Returns a generator for the response content.
585 586
586 587 This method is implemented as a generator, so that it can trigger
587 588 the cache validation after all content sent back to the client. It
588 589 also handles the locking exceptions which will be triggered when
589 590 the first chunk is produced by the underlying WSGI application.
590 591 """
591 592 txn_id = ''
592 593 if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE':
593 594 # case for SVN, we want to re-use the callback daemon port
594 595 # so we use the txn_id, for this we peek the body, and still save
595 596 # it as wsgi.input
596 597 data = environ['wsgi.input'].read()
597 598 environ['wsgi.input'] = StringIO(data)
598 599 txn_id = extract_svn_txn_id(self.acl_repo_name, data)
599 600
600 601 callback_daemon, extras = self._prepare_callback_daemon(
601 602 extras, environ, action, txn_id=txn_id)
602 603 log.debug('HOOKS extras is %s', extras)
603 604
604 605 config = self._create_config(extras, self.acl_repo_name)
605 606 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
606 607 with callback_daemon:
607 608 app.rc_extras = extras
608 609
609 610 try:
610 611 response = app(environ, start_response)
611 612 finally:
612 613 # This statement works together with the decorator
613 614 # "initialize_generator" above. The decorator ensures that
614 615 # we hit the first yield statement before the generator is
615 616 # returned back to the WSGI server. This is needed to
616 617 # ensure that the call to "app" above triggers the
617 618 # needed callback to "start_response" before the
618 619 # generator is actually used.
619 620 yield "__init__"
620 621
621 622 # iter content
622 623 for chunk in response:
623 624 yield chunk
624 625
625 626 try:
626 627 # invalidate cache on push
627 628 if action == 'push':
628 629 self._invalidate_cache(self.url_repo_name)
629 630 finally:
630 631 meta.Session.remove()
631 632
632 633 def _get_repository_name(self, environ):
633 634 """Get repository name out of the environmnent
634 635
635 636 :param environ: WSGI environment
636 637 """
637 638 raise NotImplementedError()
638 639
639 640 def _get_action(self, environ):
640 641 """Map request commands into a pull or push command.
641 642
642 643 :param environ: WSGI environment
643 644 """
644 645 raise NotImplementedError()
645 646
646 647 def _create_wsgi_app(self, repo_path, repo_name, config):
647 648 """Return the WSGI app that will finally handle the request."""
648 649 raise NotImplementedError()
649 650
650 651 def _create_config(self, extras, repo_name):
651 652 """Create a safe config representation."""
652 653 raise NotImplementedError()
653 654
654 655 def _should_use_callback_daemon(self, extras, environ, action):
655 656 return True
656 657
657 658 def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
658 659 direct_calls = vcs_settings.HOOKS_DIRECT_CALLS
659 660 if not self._should_use_callback_daemon(extras, environ, action):
660 661 # disable callback daemon for actions that don't require it
661 662 direct_calls = True
662 663
663 664 return prepare_callback_daemon(
664 665 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
665 666 use_direct_calls=direct_calls, txn_id=txn_id)
666 667
667 668
668 669 def _should_check_locking(query_string):
669 670 # this is kind of hacky, but due to how mercurial handles client-server
670 671 # communication (the server sees commit, bookmark, phase and
671 672 # obsolescence marker operations in different transactions), we don't
672 673 # want to check locking on those
673 674 return query_string not in ['cmd=listkeys']
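# Editor's note (illustrative): a mercurial push arrives with a query
# string like 'cmd=unbundle', so locking is checked there, while the
# separate 'cmd=listkeys' calls (bookmarks, phases) are exempted above.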
@@ -1,1730 +1,1746 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24
25 25 import collections
26 26 import datetime
27 27 import fnmatch
28 28 import itertools
29 29 import logging
30 30 import os
31 31 import re
32 32 import time
33 33 import warnings
34 34 import shutil
35 35
36 36 from zope.cachedescriptors.property import Lazy as LazyProperty
37 37
38 38 from rhodecode.lib.utils2 import safe_str, safe_unicode
39 39 from rhodecode.lib.vcs import connection
40 40 from rhodecode.lib.vcs.utils import author_name, author_email
41 41 from rhodecode.lib.vcs.conf import settings
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
44 44 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
45 45 NodeDoesNotExistError, NodeNotChangedError, VCSError,
46 46 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
47 47 RepositoryError)
48 48
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 FILEMODE_DEFAULT = 0100644
54 54 FILEMODE_EXECUTABLE = 0100755
55 55
56 56 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
57 57 MergeResponse = collections.namedtuple(
58 58 'MergeResponse',
59 59 ('possible', 'executed', 'merge_ref', 'failure_reason'))
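# Editor's sketch (hypothetical ids): a successful server-side merge
# could be reported as
#
# MergeResponse(possible=True, executed=True,
#               merge_ref=Reference('branch', 'default', '<merge_commit>'),
#               failure_reason=MergeFailureReason.NONE)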
60 60
61 61
62 62 class MergeFailureReason(object):
63 63 """
64 64 Enumeration with all the reasons why the server side merge could fail.
65 65
66 66 DO NOT change the number of the reasons, as they may be stored in the
67 67 database.
68 68
69 69 Changing the name of a reason is acceptable and encouraged to deprecate old
70 70 reasons.
71 71 """
72 72
73 73 # Everything went well.
74 74 NONE = 0
75 75
76 76 # An unexpected exception was raised. Check the logs for more details.
77 77 UNKNOWN = 1
78 78
79 79 # The merge was not successful, there are conflicts.
80 80 MERGE_FAILED = 2
81 81
82 82 # The merge succeeded but we could not push it to the target repository.
83 83 PUSH_FAILED = 3
84 84
85 85 # The specified target is not a head in the target repository.
86 86 TARGET_IS_NOT_HEAD = 4
87 87
88 88 # The source repository contains more branches than the target. Pushing
89 89 # the merge will create additional branches in the target.
90 90 HG_SOURCE_HAS_MORE_BRANCHES = 5
91 91
92 92 # The target reference has multiple heads, which makes it impossible to
93 93 # correctly identify the target location. This can only happen for mercurial
94 94 # branches.
95 95 HG_TARGET_HAS_MULTIPLE_HEADS = 6
96 96
97 97 # The target repository is locked
98 98 TARGET_IS_LOCKED = 7
99 99
100 100 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
101 101 # An involved commit could not be found.
102 102 _DEPRECATED_MISSING_COMMIT = 8
103 103
104 104 # The target repo reference is missing.
105 105 MISSING_TARGET_REF = 9
106 106
107 107 # The source repo reference is missing.
108 108 MISSING_SOURCE_REF = 10
109 109
110 110 # The merge was not successful, there are conflicts related to sub
111 111 # repositories.
112 112 SUBREPO_MERGE_FAILED = 11
113 113
114 114
115 115 class UpdateFailureReason(object):
116 116 """
117 117 Enumeration with all the reasons why the pull request update could fail.
118 118
119 119 DO NOT change the number of the reasons, as they may be stored in the
120 120 database.
121 121
122 122 Changing the name of a reason is acceptable and encouraged to deprecate old
123 123 reasons.
124 124 """
125 125
126 126 # Everything went well.
127 127 NONE = 0
128 128
129 129 # An unexpected exception was raised. Check the logs for more details.
130 130 UNKNOWN = 1
131 131
132 132 # The pull request is up to date.
133 133 NO_CHANGE = 2
134 134
135 135 # The pull request has a reference type that is not supported for update.
136 136 WRONG_REF_TYPE = 3
137 137
138 138 # Update failed because the target reference is missing.
139 139 MISSING_TARGET_REF = 4
140 140
141 141 # Update failed because the source reference is missing.
142 142 MISSING_SOURCE_REF = 5
143 143
144 144
145 145 class BaseRepository(object):
146 146 """
147 147 Base Repository for final backends
148 148
149 149 .. attribute:: DEFAULT_BRANCH_NAME
150 150
151 151 name of the default branch (e.g. "trunk" for svn, "master" for git, etc.)
152 152
153 153 .. attribute:: commit_ids
154 154
155 155 list of all available commit ids, in ascending order
156 156
157 157 .. attribute:: path
158 158
159 159 absolute path to the repository
160 160
161 161 .. attribute:: bookmarks
162 162
163 163 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
164 164 there are no bookmarks or the backend implementation does not support
165 165 bookmarks.
166 166
167 167 .. attribute:: tags
168 168
169 169 Mapping from name to :term:`Commit ID` of the tag.
170 170
171 171 """
172 172
173 173 DEFAULT_BRANCH_NAME = None
174 174 DEFAULT_CONTACT = u"Unknown"
175 175 DEFAULT_DESCRIPTION = u"unknown"
176 176 EMPTY_COMMIT_ID = '0' * 40
177 177
178 178 path = None
179 179 _remote = None
180 180
181 181 def __init__(self, repo_path, config=None, create=False, **kwargs):
182 182 """
183 183 Initializes repository. Raises RepositoryError if repository could
184 184 not be found at the given ``repo_path`` or directory at ``repo_path``
185 185 exists and ``create`` is set to True.
186 186
187 187 :param repo_path: local path of the repository
188 188 :param config: repository configuration
189 189 :param create=False: if set to True, would try to create repository.
190 190 :param src_url=None: if set, should be proper url from which repository
191 191 would be cloned; requires ``create`` parameter to be set to True -
192 192 raises RepositoryError if src_url is set and create evaluates to
193 193 False
194 194 """
195 195 raise NotImplementedError
196 196
197 197 def __repr__(self):
198 198 return '<%s at %s>' % (self.__class__.__name__, self.path)
199 199
200 200 def __len__(self):
201 201 return self.count()
202 202
203 203 def __eq__(self, other):
204 204 same_instance = isinstance(other, self.__class__)
205 205 return same_instance and other.path == self.path
206 206
207 207 def __ne__(self, other):
208 208 return not self.__eq__(other)
209 209
210 210 def get_create_shadow_cache_pr_path(self, db_repo):
211 211 path = db_repo.cached_diffs_dir
212 212 if not os.path.exists(path):
213 213 os.makedirs(path, 0755)
214 214 return path
215 215
216 216 @classmethod
217 217 def get_default_config(cls, default=None):
218 218 config = Config()
219 219 if default and isinstance(default, list):
220 220 for section, key, val in default:
221 221 config.set(section, key, val)
222 222 return config
223 223
224 224 @LazyProperty
225 225 def EMPTY_COMMIT(self):
226 226 return EmptyCommit(self.EMPTY_COMMIT_ID)
227 227
228 228 @LazyProperty
229 229 def alias(self):
230 230 for k, v in settings.BACKENDS.items():
231 231 if v.split('.')[-1] == str(self.__class__.__name__):
232 232 return k
233 233
234 234 @LazyProperty
235 235 def name(self):
236 236 return safe_unicode(os.path.basename(self.path))
237 237
238 238 @LazyProperty
239 239 def description(self):
240 240 raise NotImplementedError
241 241
242 242 def refs(self):
243 243 """
244 244 returns a `dict` with branches, bookmarks, tags, and closed_branches
245 245 for this repository
246 246 """
247 247 return dict(
248 248 branches=self.branches,
249 249 branches_closed=self.branches_closed,
250 250 tags=self.tags,
251 251 bookmarks=self.bookmarks
252 252 )
253 253
254 254 @LazyProperty
255 255 def branches(self):
256 256 """
257 257 A `dict` which maps branch names to commit ids.
258 258 """
259 259 raise NotImplementedError
260 260
261 261 @LazyProperty
262 262 def branches_closed(self):
263 263 """
264 264 A `dict` which maps closed branch names to commit ids.
265 265 """
266 266 raise NotImplementedError
267 267
268 268 @LazyProperty
269 269 def bookmarks(self):
270 270 """
271 271 A `dict` which maps bookmark names to commit ids.
272 272 """
273 273 raise NotImplementedError
274 274
275 275 @LazyProperty
276 276 def tags(self):
277 277 """
278 278 A `dict` which maps tag names to commit ids.
279 279 """
280 280 raise NotImplementedError
281 281
282 282 @LazyProperty
283 283 def size(self):
284 284 """
285 285 Returns combined size in bytes for all repository files
286 286 """
287 287 tip = self.get_commit()
288 288 return tip.size
289 289
290 290 def size_at_commit(self, commit_id):
291 291 commit = self.get_commit(commit_id)
292 292 return commit.size
293 293
294 294 def is_empty(self):
295 295 return not bool(self.commit_ids)
296 296
297 297 @staticmethod
298 298 def check_url(url, config):
299 299 """
300 300 Function will check the given url and try to verify that it's a
301 301 valid link.
302 302 """
303 303 raise NotImplementedError
304 304
305 305 @staticmethod
306 306 def is_valid_repository(path):
307 307 """
308 308 Check if given `path` contains a valid repository of this backend
309 309 """
310 310 raise NotImplementedError
311 311
312 312 # ==========================================================================
313 313 # COMMITS
314 314 # ==========================================================================
315 315
316 316 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
317 317 """
318 318 Returns an instance of the `BaseCommit` class. If `commit_id` and
319 319 `commit_idx` are both None, the most recent commit is returned.
320 320
321 321 :param pre_load: Optional. List of commit attributes to load.
322 322
323 323 :raises ``EmptyRepositoryError``: if there are no commits
324 324 """
325 325 raise NotImplementedError
326 326
327 327 def __iter__(self):
328 328 for commit_id in self.commit_ids:
329 329 yield self.get_commit(commit_id=commit_id)
330 330
331 331 def get_commits(
332 332 self, start_id=None, end_id=None, start_date=None, end_date=None,
333 333 branch_name=None, show_hidden=False, pre_load=None):
334 334 """
335 335 Returns an iterator of `BaseCommit` objects from start to end.
336 336 This should behave just like a list slice, i.e. the end is not
337 337 inclusive.
338 338
339 339 :param start_id: None or str, must be a valid commit id
340 340 :param end_id: None or str, must be a valid commit id
341 341 :param start_date:
342 342 :param end_date:
343 343 :param branch_name:
344 344 :param show_hidden:
345 345 :param pre_load:
346 346 """
347 347 raise NotImplementedError
348 348
349 349 def __getitem__(self, key):
350 350 """
351 351 Allows index based access to the commit objects of this repository.
352 352 """
353 353 pre_load = ["author", "branch", "date", "message", "parents"]
354 354 if isinstance(key, slice):
355 355 return self._get_range(key, pre_load)
356 356 return self.get_commit(commit_idx=key, pre_load=pre_load)
357 357
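# A minimal usage sketch of the index/slice access above, assuming
# ``repo`` is an instance of a concrete backend:
#
#     oldest = repo[0]               # same as repo.get_commit(commit_idx=0)
#     first_three = list(repo[0:3])  # _get_range yields commits lazily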
358 358 def _get_range(self, slice_obj, pre_load):
359 359 for commit_id in self.commit_ids.__getitem__(slice_obj):
360 360 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
361 361
362 362 def count(self):
363 363 return len(self.commit_ids)
364 364
365 365 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
366 366 """
367 367 Creates and returns a tag for the given ``commit_id``.
368 368
369 369 :param name: name for new tag
370 370 :param user: full username, e.g.: "Joe Doe <joe.doe@example.com>"
371 371 :param commit_id: commit id for which new tag would be created
372 372 :param message: message of the tag's commit
373 373 :param date: date of tag's commit
374 374
375 375 :raises TagAlreadyExistError: if tag with same name already exists
376 376 """
377 377 raise NotImplementedError
378 378
379 379 def remove_tag(self, name, user, message=None, date=None):
380 380 """
381 381 Removes tag with the given ``name``.
382 382
383 383 :param name: name of the tag to be removed
384 384 :param user: full username, e.g.: "Joe Doe <joe.doe@example.com>"
385 385 :param message: message of the tag's removal commit
386 386 :param date: date of tag's removal commit
387 387
388 388 :raises TagDoesNotExistError: if tag with given name does not exist
389 389 """
390 390 raise NotImplementedError
391 391
392 392 def get_diff(
393 393 self, commit1, commit2, path=None, ignore_whitespace=False,
394 394 context=3, path1=None):
395 395 """
396 396 Returns (git like) *diff*, as plain text. Shows changes introduced by
397 397 `commit2` since `commit1`.
398 398
399 399 :param commit1: Entry point from which diff is shown. Can be
400 400 ``self.EMPTY_COMMIT`` - in this case the patch shows all
401 401 the changes from the empty state of the repository up to `commit2`
402 402 :param commit2: The commit up to which changes should be shown.
403 403 :param path: Can be set to a path of a file to create a diff of that
404 404 file. If `path1` is also set, this value is only associated to
405 405 `commit2`.
406 406 :param ignore_whitespace: If set to ``True``, would not show whitespace
407 407 changes. Defaults to ``False``.
408 408 :param context: How many lines before/after changed lines should be
409 409 shown. Defaults to ``3``.
410 410 :param path1: Can be set to a path to associate with `commit1`. This
411 411 parameter works only for backends which support diff generation for
412 412 different paths. Other backends will raise a `ValueError` if `path1`
413 413 is set and has a different value than `path`.
415 415 """
416 416 raise NotImplementedError
417 417
418 418 def strip(self, commit_id, branch=None):
419 419 """
420 420 Strip given commit_id from the repository
421 421 """
422 422 raise NotImplementedError
423 423
424 424 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
425 425 """
426 426 Returns the latest common ancestor commit, if one exists, between
427 427 `commit_id1` from this repository and `commit_id2` from `repo2`.
428 428
429 429 :param commit_id1: Commit id from this repository to use as a
430 430 target for the comparison.
431 431 :param commit_id2: Source commit id to use for comparison.
432 432 :param repo2: Source repository to use for comparison.
433 433 """
434 434 raise NotImplementedError
435 435
436 436 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
437 437 """
438 438 Compare this repository's revision `commit_id1` with `commit_id2`.
439 439
440 440 Returns a tuple ``(commits, ancestor)`` of the commits that would be
441 441 merged from `commit_id2`. For a normal compare (``merge=False``),
442 442 ``None`` will be returned as the ancestor.
443 443
444 444 :param commit_id1: Commit id from this repository to use as a
445 445 target for the comparison.
446 446 :param commit_id2: Source commit id to use for comparison.
447 447 :param repo2: Source repository to use for comparison.
448 448 :param merge: If set to ``True`` will do a merge compare which also
449 449 returns the common ancestor.
450 450 :param pre_load: Optional. List of commit attributes to load.
451 451 """
452 452 raise NotImplementedError
453 453
454 def merge(self, target_ref, source_repo, source_ref, workspace_id,
454 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
455 455 user_name='', user_email='', message='', dry_run=False,
456 456 use_rebase=False, close_branch=False):
457 457 """
458 458 Merge the revisions specified in `source_ref` from `source_repo`
459 459 onto the `target_ref` of this repository.
460 460
461 461 `source_ref` and `target_ref` are named tuples with the following
462 462 fields `type`, `name` and `commit_id`.
463 463
464 464 Returns a MergeResponse named tuple with the following fields
465 465 'possible', 'executed', 'source_commit', 'target_commit',
466 466 'merge_commit'.
467 467
468 :param repo_id: `repo_id` target repo id.
469 :param workspace_id: `workspace_id` unique identifier.
468 470 :param target_ref: `target_ref` points to the commit on top of which
469 471 the `source_ref` should be merged.
470 472 :param source_repo: The repository that contains the commits to be
471 473 merged.
472 474 :param source_ref: `source_ref` points to the topmost commit from
473 475 the `source_repo` which should be merged.
474 :param workspace_id: `workspace_id` unique identifier.
475 476 :param user_name: Merge commit `user_name`.
476 477 :param user_email: Merge commit `user_email`.
477 478 :param message: Merge commit `message`.
478 479 :param dry_run: If `True` the merge will not take place.
479 480 :param use_rebase: If `True` commits from the source will be rebased
480 481 on top of the target instead of being merged.
481 482 :param close_branch: If `True` the branch will be closed before merging it
482 483 """
483 484 if dry_run:
484 485 message = message or 'dry_run_merge_message'
485 486 user_email = user_email or 'dry-run-merge@rhodecode.com'
486 487 user_name = user_name or 'Dry-Run User'
487 488 else:
488 489 if not user_name:
489 490 raise ValueError('user_name cannot be empty')
490 491 if not user_email:
491 492 raise ValueError('user_email cannot be empty')
492 493 if not message:
493 494 raise ValueError('message cannot be empty')
494 495
495 shadow_repository_path = self._maybe_prepare_merge_workspace(
496 workspace_id, target_ref, source_ref)
497
498 496 try:
499 497 return self._merge_repo(
500 shadow_repository_path, target_ref, source_repo,
498 repo_id, workspace_id, target_ref, source_repo,
501 499 source_ref, message, user_name, user_email, dry_run=dry_run,
502 500 use_rebase=use_rebase, close_branch=close_branch)
503 501 except RepositoryError:
504 502 log.exception(
505 503 'Unexpected failure when running merge, dry-run=%s',
506 504 dry_run)
507 505 return MergeResponse(
508 506 False, False, None, MergeFailureReason.UNKNOWN)
509 507
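# A minimal usage sketch of the merge API above, after the switch to
# ``repo_id``. ``Reference`` is the (type, name, commit_id) named tuple
# used for refs; the repository objects, ids and shas below are
# hypothetical:
#
#     target = Reference('branch', 'master', target_sha)
#     source = Reference('branch', 'feature', source_sha)
#     response = target_vcs_repo.merge(
#         db_repo.repo_id, 'pr-42', target, source_vcs_repo, source,
#         dry_run=True)
#     if response.possible:
#         ...  # safe to attempt the real merge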
510 def _merge_repo(self, shadow_repository_path, target_ref,
508 def _merge_repo(self, repo_id, workspace_id, target_ref,
511 509 source_repo, source_ref, merge_message,
512 510 merger_name, merger_email, dry_run=False,
513 511 use_rebase=False, close_branch=False):
514 512 """Internal implementation of merge."""
515 513 raise NotImplementedError
516 514
517 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref, source_ref):
515 def _maybe_prepare_merge_workspace(
516 self, repo_id, workspace_id, target_ref, source_ref):
518 517 """
519 518 Create the merge workspace.
520 519
521 520 :param workspace_id: `workspace_id` unique identifier.
522 521 """
523 522 raise NotImplementedError
524 523
525 def _get_shadow_repository_path(self, workspace_id):
526 raise NotImplementedError
524 def _get_legacy_shadow_repository_path(self, workspace_id):
525 """
526 Legacy shadow repository path, as used by earlier versions. Kept
527 for backward compatibility.
528 """
529 return os.path.join(
530 os.path.dirname(self.path),
531 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
527 532
528 def cleanup_merge_workspace(self, workspace_id):
533 def _get_shadow_repository_path(self, repo_id, workspace_id):
534 # The name of the shadow repository must start with '.', so it is
535 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
536 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
537 if os.path.exists(legacy_repository_path):
538 return legacy_repository_path
539 else:
540 return os.path.join(
541 os.path.dirname(self.path),
542 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
543
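# A worked example of the resolution above, with hypothetical values:
# for a repository at ``/repos/project`` with repo_id=77 and
# workspace_id='pr-1', the new scheme yields
# ``/repos/.__shadow_repo_77_pr-1``. If a legacy
# ``/repos/.__shadow_project_pr-1`` directory already exists, that path
# is returned instead, so shadow repos created before this change keep
# being reused.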
544 def cleanup_merge_workspace(self, repo_id, workspace_id):
529 545 """
530 546 Remove merge workspace.
531 547
532 548 This function MUST not fail in case there is no workspace associated
533 549 with the given `workspace_id`.
534 550
535 551 :param workspace_id: `workspace_id` unique identifier.
536 552 """
537 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
553 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
538 554 shadow_repository_path_del = '{}.{}.delete'.format(
539 555 shadow_repository_path, time.time())
540 556
541 557 # move the shadow repo, so it never conflicts with the one used.
542 558 # we use this method because shutil.rmtree had some edge case problems
543 559 # removing symlinked repositories
544 560 if not os.path.isdir(shadow_repository_path):
545 561 return
546 562
547 563 shutil.move(shadow_repository_path, shadow_repository_path_del)
548 564 try:
549 565 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
550 566 except Exception:
551 567 log.exception('Failed to gracefully remove shadow repo under %s',
552 568 shadow_repository_path_del)
553 569 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
554 570
555 571 # ========== #
556 572 # COMMIT API #
557 573 # ========== #
558 574
559 575 @LazyProperty
560 576 def in_memory_commit(self):
561 577 """
562 578 Returns :class:`InMemoryCommit` object for this repository.
563 579 """
564 580 raise NotImplementedError
565 581
566 582 # ======================== #
567 583 # UTILITIES FOR SUBCLASSES #
568 584 # ======================== #
569 585
570 586 def _validate_diff_commits(self, commit1, commit2):
571 587 """
572 588 Validates that the given commits are related to this repository.
573 589
574 590 Intended as a utility for sub classes to have a consistent validation
575 591 of input parameters in methods like :meth:`get_diff`.
576 592 """
577 593 self._validate_commit(commit1)
578 594 self._validate_commit(commit2)
579 595 if (isinstance(commit1, EmptyCommit) and
580 596 isinstance(commit2, EmptyCommit)):
581 597 raise ValueError("Cannot compare two empty commits")
582 598
583 599 def _validate_commit(self, commit):
584 600 if not isinstance(commit, BaseCommit):
585 601 raise TypeError(
586 602 "%s is not of type BaseCommit" % repr(commit))
587 603 if commit.repository != self and not isinstance(commit, EmptyCommit):
588 604 raise ValueError(
589 605 "Commit %s must be a valid commit from this repository %s, "
590 606 "related to this repository instead %s." %
591 607 (commit, self, commit.repository))
592 608
593 609 def _validate_commit_id(self, commit_id):
594 610 if not isinstance(commit_id, basestring):
595 611 raise TypeError("commit_id must be a string value")
596 612
597 613 def _validate_commit_idx(self, commit_idx):
598 614 if not isinstance(commit_idx, (int, long)):
599 615 raise TypeError("commit_idx must be a numeric value")
600 616
601 617 def _validate_branch_name(self, branch_name):
602 618 if branch_name and branch_name not in self.branches_all:
603 619 msg = ("Branch %s not found in %s" % (branch_name, self))
604 620 raise BranchDoesNotExistError(msg)
605 621
606 622 #
607 623 # Supporting deprecated API parts
608 624 # TODO: johbo: consider to move this into a mixin
609 625 #
610 626
611 627 @property
612 628 def EMPTY_CHANGESET(self):
613 629 warnings.warn(
614 630 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
615 631 return self.EMPTY_COMMIT_ID
616 632
617 633 @property
618 634 def revisions(self):
619 635 warnings.warn("Use commits attribute instead", DeprecationWarning)
620 636 return self.commit_ids
621 637
622 638 @revisions.setter
623 639 def revisions(self, value):
624 640 warnings.warn("Use commits attribute instead", DeprecationWarning)
625 641 self.commit_ids = value
626 642
627 643 def get_changeset(self, revision=None, pre_load=None):
628 644 warnings.warn("Use get_commit instead", DeprecationWarning)
629 645 commit_id = None
630 646 commit_idx = None
631 647 if isinstance(revision, basestring):
632 648 commit_id = revision
633 649 else:
634 650 commit_idx = revision
635 651 return self.get_commit(
636 652 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
637 653
638 654 def get_changesets(
639 655 self, start=None, end=None, start_date=None, end_date=None,
640 656 branch_name=None, pre_load=None):
641 657 warnings.warn("Use get_commits instead", DeprecationWarning)
642 658 start_id = self._revision_to_commit(start)
643 659 end_id = self._revision_to_commit(end)
644 660 return self.get_commits(
645 661 start_id=start_id, end_id=end_id, start_date=start_date,
646 662 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
647 663
648 664 def _revision_to_commit(self, revision):
649 665 """
650 666 Translates a revision to a commit_id
651 667
652 668 Helps to support the old changeset based API which allows to use
653 669 commit ids and commit indices interchangeable.
654 670 """
655 671 if revision is None:
656 672 return revision
657 673
658 674 if isinstance(revision, basestring):
659 675 commit_id = revision
660 676 else:
661 677 commit_id = self.commit_ids[revision]
662 678 return commit_id
663 679
664 680 @property
665 681 def in_memory_changeset(self):
666 682 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
667 683 return self.in_memory_commit
668 684
669 685 def get_path_permissions(self, username):
670 686 """
671 687 Returns a path permission checker or None if not supported
672 688
673 689 :param username: session user name
674 690 :return: an instance of BasePathPermissionChecker or None
675 691 """
676 692 return None
677 693
678 694 def install_hooks(self, force=False):
679 695 return self._remote.install_hooks(force)
680 696
681 697
682 698 class BaseCommit(object):
683 699 """
684 700 Each backend should implement its commit representation.
685 701
686 702 **Attributes**
687 703
688 704 ``repository``
689 705 repository object within which commit exists
690 706
691 707 ``id``
692 708 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
693 709 just ``tip``.
694 710
695 711 ``raw_id``
696 712 raw commit representation (i.e. full 40 length sha for git
697 713 backend)
698 714
699 715 ``short_id``
700 716 shortened (if applicable) version of ``raw_id``; it would be a simple
701 717 shortcut for ``raw_id[:12]`` for git/mercurial backends, or the same
702 718 as ``raw_id`` for subversion
703 719
704 720 ``idx``
705 721 commit index
706 722
707 723 ``files``
708 724 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
709 725
710 726 ``dirs``
711 727 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
712 728
713 729 ``nodes``
714 730 combined list of ``Node`` objects
715 731
716 732 ``author``
717 733 author of the commit, as unicode
718 734
719 735 ``message``
720 736 message of the commit, as unicode
721 737
722 738 ``parents``
723 739 list of parent commits
724 740
725 741 """
726 742
727 743 branch = None
728 744 """
729 745 Depending on the backend this should be set to the branch name of the
730 746 commit. Backends not supporting branches on commits should leave this
731 747 value as ``None``.
732 748 """
733 749
734 750 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
735 751 """
736 752 This template is used to generate a default prefix for repository archives
737 753 if no prefix has been specified.
738 754 """
739 755
740 756 def __str__(self):
741 757 return '<%s at %s:%s>' % (
742 758 self.__class__.__name__, self.idx, self.short_id)
743 759
744 760 def __repr__(self):
745 761 return self.__str__()
746 762
747 763 def __unicode__(self):
748 764 return u'%s:%s' % (self.idx, self.short_id)
749 765
750 766 def __eq__(self, other):
751 767 same_instance = isinstance(other, self.__class__)
752 768 return same_instance and self.raw_id == other.raw_id
753 769
754 770 def __json__(self):
755 771 parents = []
756 772 try:
757 773 for parent in self.parents:
758 774 parents.append({'raw_id': parent.raw_id})
759 775 except NotImplementedError:
760 776 # empty commit doesn't have parents implemented
761 777 pass
762 778
763 779 return {
764 780 'short_id': self.short_id,
765 781 'raw_id': self.raw_id,
766 782 'revision': self.idx,
767 783 'message': self.message,
768 784 'date': self.date,
769 785 'author': self.author,
770 786 'parents': parents,
771 787 'branch': self.branch
772 788 }
773 789
774 790 def __getstate__(self):
775 791 d = self.__dict__.copy()
776 792 d.pop('_remote', None)
777 793 d.pop('repository', None)
778 794 return d
779 795
780 796 def _get_refs(self):
781 797 return {
782 798 'branches': [self.branch] if self.branch else [],
783 799 'bookmarks': getattr(self, 'bookmarks', []),
784 800 'tags': self.tags
785 801 }
786 802
787 803 @LazyProperty
788 804 def last(self):
789 805 """
790 806 ``True`` if this is last commit in repository, ``False``
791 807 otherwise; trying to access this attribute while there are no
792 808 commits would raise `EmptyRepositoryError`
793 809 """
794 810 if self.repository is None:
795 811 raise CommitError("Cannot check if it's most recent commit")
796 812 return self.raw_id == self.repository.commit_ids[-1]
797 813
798 814 @LazyProperty
799 815 def parents(self):
800 816 """
801 817 Returns list of parent commits.
802 818 """
803 819 raise NotImplementedError
804 820
805 821 @property
806 822 def merge(self):
807 823 """
808 824 Returns boolean if commit is a merge.
809 825 """
810 826 return len(self.parents) > 1
811 827
812 828 @LazyProperty
813 829 def children(self):
814 830 """
815 831 Returns list of child commits.
816 832 """
817 833 raise NotImplementedError
818 834
819 835 @LazyProperty
820 836 def id(self):
821 837 """
822 838 Returns string identifying this commit.
823 839 """
824 840 raise NotImplementedError
825 841
826 842 @LazyProperty
827 843 def raw_id(self):
828 844 """
829 845 Returns raw string identifying this commit.
830 846 """
831 847 raise NotImplementedError
832 848
833 849 @LazyProperty
834 850 def short_id(self):
835 851 """
836 852 Returns shortened version of ``raw_id`` attribute, as string,
837 853 identifying this commit, useful for presentation to users.
838 854 """
839 855 raise NotImplementedError
840 856
841 857 @LazyProperty
842 858 def idx(self):
843 859 """
844 860 Returns integer identifying this commit.
845 861 """
846 862 raise NotImplementedError
847 863
848 864 @LazyProperty
849 865 def committer(self):
850 866 """
851 867 Returns committer for this commit
852 868 """
853 869 raise NotImplementedError
854 870
855 871 @LazyProperty
856 872 def committer_name(self):
857 873 """
858 874 Returns committer name for this commit
859 875 """
860 876
861 877 return author_name(self.committer)
862 878
863 879 @LazyProperty
864 880 def committer_email(self):
865 881 """
866 882 Returns committer email address for this commit
867 883 """
868 884
869 885 return author_email(self.committer)
870 886
871 887 @LazyProperty
872 888 def author(self):
873 889 """
874 890 Returns author for this commit
875 891 """
876 892
877 893 raise NotImplementedError
878 894
879 895 @LazyProperty
880 896 def author_name(self):
881 897 """
882 898 Returns author name for this commit
883 899 """
884 900
885 901 return author_name(self.author)
886 902
887 903 @LazyProperty
888 904 def author_email(self):
889 905 """
890 906 Returns author email address for this commit
891 907 """
892 908
893 909 return author_email(self.author)
894 910
895 911 def get_file_mode(self, path):
896 912 """
897 913 Returns stat mode of the file at `path`.
898 914 """
899 915 raise NotImplementedError
900 916
901 917 def is_link(self, path):
902 918 """
903 919 Returns ``True`` if given `path` is a symlink
904 920 """
905 921 raise NotImplementedError
906 922
907 923 def get_file_content(self, path):
908 924 """
909 925 Returns content of the file at the given `path`.
910 926 """
911 927 raise NotImplementedError
912 928
913 929 def get_file_size(self, path):
914 930 """
915 931 Returns size of the file at the given `path`.
916 932 """
917 933 raise NotImplementedError
918 934
919 935 def get_file_commit(self, path, pre_load=None):
920 936 """
921 937 Returns last commit of the file at the given `path`.
922 938
923 939 :param pre_load: Optional. List of commit attributes to load.
924 940 """
925 941 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
926 942 if not commits:
927 943 raise RepositoryError(
928 944 'Failed to fetch history for path {}. '
929 945 'Please check if such path exists in your repository'.format(
930 946 path))
931 947 return commits[0]
932 948
933 949 def get_file_history(self, path, limit=None, pre_load=None):
934 950 """
935 951 Returns history of file as reversed list of :class:`BaseCommit`
936 952 objects for which file at given `path` has been modified.
937 953
938 954 :param limit: Optional. Allows to limit the size of the returned
939 955 history. This is intended as a hint to the underlying backend, so
940 956 that it can apply optimizations depending on the limit.
941 957 :param pre_load: Optional. List of commit attributes to load.
942 958 """
943 959 raise NotImplementedError
944 960
945 961 def get_file_annotate(self, path, pre_load=None):
946 962 """
947 963 Returns a generator of four-element tuples with
948 964 lineno, sha, commit lazy loader and line
949 965
950 966 :param pre_load: Optional. List of commit attributes to load.
951 967 """
952 968 raise NotImplementedError
953 969
954 970 def get_nodes(self, path):
955 971 """
956 972 Returns combined ``DirNode`` and ``FileNode`` objects list representing
957 973 state of commit at the given ``path``.
958 974
959 975 :raises ``CommitError``: if node at the given ``path`` is not
960 976 instance of ``DirNode``
961 977 """
962 978 raise NotImplementedError
963 979
964 980 def get_node(self, path):
965 981 """
966 982 Returns ``Node`` object from the given ``path``.
967 983
968 984 :raises ``NodeDoesNotExistError``: if there is no node at the given
969 985 ``path``
970 986 """
971 987 raise NotImplementedError
972 988
973 989 def get_largefile_node(self, path):
974 990 """
975 991 Returns the path to the largefile from Mercurial/Git-lfs storage,
976 992 or None if it's not a largefile node
977 993 """
978 994 return None
979 995
980 996 def archive_repo(self, file_path, kind='tgz', subrepos=None,
981 997 prefix=None, write_metadata=False, mtime=None):
982 998 """
983 999 Creates an archive containing the contents of the repository.
984 1000
985 1001 :param file_path: path to the file into which the archive is written.
986 1002 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
987 1003 :param prefix: name of root directory in archive.
988 1004 Default is repository name and commit's short_id joined with dash:
989 1005 ``"{repo_name}-{short_id}"``.
990 1006 :param write_metadata: write a metadata file into archive.
991 1007 :param mtime: custom modification time for archive creation, defaults
992 1008 to time.time() if not given.
993 1009
994 1010 :raise VCSError: If prefix has a problem.
995 1011 """
996 1012 allowed_kinds = settings.ARCHIVE_SPECS.keys()
997 1013 if kind not in allowed_kinds:
998 1014 raise ImproperArchiveTypeError(
999 1015 'Archive kind (%s) not supported, use one of %s' %
1000 1016 (kind, allowed_kinds))
1001 1017
1002 1018 prefix = self._validate_archive_prefix(prefix)
1003 1019
1004 1020 mtime = mtime or time.mktime(self.date.timetuple())
1005 1021
1006 1022 file_info = []
1007 1023 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1008 1024 for _r, _d, files in cur_rev.walk('/'):
1009 1025 for f in files:
1010 1026 f_path = os.path.join(prefix, f.path)
1011 1027 file_info.append(
1012 1028 (f_path, f.mode, f.is_link(), f.raw_bytes))
1013 1029
1014 1030 if write_metadata:
1015 1031 metadata = [
1016 1032 ('repo_name', self.repository.name),
1017 1033 ('rev', self.raw_id),
1018 1034 ('create_time', mtime),
1019 1035 ('branch', self.branch),
1020 1036 ('tags', ','.join(self.tags)),
1021 1037 ]
1022 1038 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1023 1039 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
1024 1040
1025 1041 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
1026 1042
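# A minimal usage sketch of ``archive_repo``; the output path is
# hypothetical:
#
#     commit.archive_repo('/tmp/project-archive.tgz', kind='tgz',
#                         write_metadata=True)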
1027 1043 def _validate_archive_prefix(self, prefix):
1028 1044 if prefix is None:
1029 1045 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1030 1046 repo_name=safe_str(self.repository.name),
1031 1047 short_id=self.short_id)
1032 1048 elif not isinstance(prefix, str):
1033 1049 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1034 1050 elif prefix.startswith('/'):
1035 1051 raise VCSError("Prefix cannot start with leading slash")
1036 1052 elif prefix.strip() == '':
1037 1053 raise VCSError("Prefix cannot be empty")
1038 1054 return prefix
1039 1055
1040 1056 @LazyProperty
1041 1057 def root(self):
1042 1058 """
1043 1059 Returns ``RootNode`` object for this commit.
1044 1060 """
1045 1061 return self.get_node('')
1046 1062
1047 1063 def next(self, branch=None):
1048 1064 """
1049 1065 Returns the next commit from the current one; if branch is given it
1050 1066 will return the next commit belonging to this branch
1051 1067
1052 1068 :param branch: show commits within the given named branch
1053 1069 """
1054 1070 indexes = xrange(self.idx + 1, self.repository.count())
1055 1071 return self._find_next(indexes, branch)
1056 1072
1057 1073 def prev(self, branch=None):
1058 1074 """
1059 1075 Returns the previous commit from the current one; if branch is given
1060 1076 it will return the previous commit belonging to this branch
1061 1077
1062 1078 :param branch: show commit within the given named branch
1063 1079 """
1064 1080 indexes = xrange(self.idx - 1, -1, -1)
1065 1081 return self._find_next(indexes, branch)
1066 1082
1067 1083 def _find_next(self, indexes, branch=None):
1068 1084 if branch and self.branch != branch:
1069 1085 raise VCSError('Branch option used on commit not belonging '
1070 1086 'to that branch')
1071 1087
1072 1088 for next_idx in indexes:
1073 1089 commit = self.repository.get_commit(commit_idx=next_idx)
1074 1090 if branch and branch != commit.branch:
1075 1091 continue
1076 1092 return commit
1077 1093 raise CommitDoesNotExistError
1078 1094
1079 1095 def diff(self, ignore_whitespace=True, context=3):
1080 1096 """
1081 1097 Returns a `Diff` object representing the change made by this commit.
1082 1098 """
1083 1099 parent = (
1084 1100 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1085 1101 diff = self.repository.get_diff(
1086 1102 parent, self,
1087 1103 ignore_whitespace=ignore_whitespace,
1088 1104 context=context)
1089 1105 return diff
1090 1106
1091 1107 @LazyProperty
1092 1108 def added(self):
1093 1109 """
1094 1110 Returns list of added ``FileNode`` objects.
1095 1111 """
1096 1112 raise NotImplementedError
1097 1113
1098 1114 @LazyProperty
1099 1115 def changed(self):
1100 1116 """
1101 1117 Returns list of modified ``FileNode`` objects.
1102 1118 """
1103 1119 raise NotImplementedError
1104 1120
1105 1121 @LazyProperty
1106 1122 def removed(self):
1107 1123 """
1108 1124 Returns list of removed ``FileNode`` objects.
1109 1125 """
1110 1126 raise NotImplementedError
1111 1127
1112 1128 @LazyProperty
1113 1129 def size(self):
1114 1130 """
1115 1131 Returns total number of bytes from contents of all filenodes.
1116 1132 """
1117 1133 return sum((node.size for node in self.get_filenodes_generator()))
1118 1134
1119 1135 def walk(self, topurl=''):
1120 1136 """
1121 1137 Similar to the os.walk method. Instead of the filesystem, it walks
1122 1138 through the commit starting at the given ``topurl``. Returns a
1123 1139 generator of tuples (topnode, dirnodes, filenodes).
1124 1140 """
1125 1141 topnode = self.get_node(topurl)
1126 1142 if not topnode.is_dir():
1127 1143 return
1128 1144 yield (topnode, topnode.dirs, topnode.files)
1129 1145 for dirnode in topnode.dirs:
1130 1146 for tup in self.walk(dirnode.path):
1131 1147 yield tup
1132 1148
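# A minimal usage sketch of ``walk``, collecting every file path
# reachable from the root of a commit:
#
#     all_paths = [f.path
#                  for _top, _dirs, files in commit.walk('')
#                  for f in files]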
1133 1149 def get_filenodes_generator(self):
1134 1150 """
1135 1151 Returns generator that yields *all* file nodes.
1136 1152 """
1137 1153 for topnode, dirs, files in self.walk():
1138 1154 for node in files:
1139 1155 yield node
1140 1156
1141 1157 #
1142 1158 # Utilities for sub classes to support consistent behavior
1143 1159 #
1144 1160
1145 1161 def no_node_at_path(self, path):
1146 1162 return NodeDoesNotExistError(
1147 1163 u"There is no file nor directory at the given path: "
1148 1164 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1149 1165
1150 1166 def _fix_path(self, path):
1151 1167 """
1152 1168 Paths are stored without a trailing slash, so we need to get rid of
1153 1169 it if needed.
1154 1170 """
1155 1171 return path.rstrip('/')
1156 1172
1157 1173 #
1158 1174 # Deprecated API based on changesets
1159 1175 #
1160 1176
1161 1177 @property
1162 1178 def revision(self):
1163 1179 warnings.warn("Use idx instead", DeprecationWarning)
1164 1180 return self.idx
1165 1181
1166 1182 @revision.setter
1167 1183 def revision(self, value):
1168 1184 warnings.warn("Use idx instead", DeprecationWarning)
1169 1185 self.idx = value
1170 1186
1171 1187 def get_file_changeset(self, path):
1172 1188 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1173 1189 return self.get_file_commit(path)
1174 1190
1175 1191
1176 1192 class BaseChangesetClass(type):
1177 1193
1178 1194 def __instancecheck__(self, instance):
1179 1195 return isinstance(instance, BaseCommit)
1180 1196
1181 1197
1182 1198 class BaseChangeset(BaseCommit):
1183 1199
1184 1200 __metaclass__ = BaseChangesetClass
1185 1201
1186 1202 def __new__(cls, *args, **kwargs):
1187 1203 warnings.warn(
1188 1204 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1189 1205 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1190 1206
1191 1207
1192 1208 class BaseInMemoryCommit(object):
1193 1209 """
1194 1210 Represents differences between repository's state (most recent head) and
1195 1211 changes made *in place*.
1196 1212
1197 1213 **Attributes**
1198 1214
1199 1215 ``repository``
1200 1216 repository object for this in-memory-commit
1201 1217
1202 1218 ``added``
1203 1219 list of ``FileNode`` objects marked as *added*
1204 1220
1205 1221 ``changed``
1206 1222 list of ``FileNode`` objects marked as *changed*
1207 1223
1208 1224 ``removed``
1209 1225 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1210 1226 *removed*
1211 1227
1212 1228 ``parents``
1213 1229 list of :class:`BaseCommit` instances representing parents of
1214 1230 in-memory commit. Should always be 2-element sequence.
1215 1231
1216 1232 """
1217 1233
1218 1234 def __init__(self, repository):
1219 1235 self.repository = repository
1220 1236 self.added = []
1221 1237 self.changed = []
1222 1238 self.removed = []
1223 1239 self.parents = []
1224 1240
1225 1241 def add(self, *filenodes):
1226 1242 """
1227 1243 Marks given ``FileNode`` objects as *to be committed*.
1228 1244
1229 1245 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1230 1246 latest commit
1231 1247 :raises ``NodeAlreadyAddedError``: if node with same path is already
1232 1248 marked as *added*
1233 1249 """
1234 1250 # Check if not already marked as *added* first
1235 1251 for node in filenodes:
1236 1252 if node.path in (n.path for n in self.added):
1237 1253 raise NodeAlreadyAddedError(
1238 1254 "Such FileNode %s is already marked for addition"
1239 1255 % node.path)
1240 1256 for node in filenodes:
1241 1257 self.added.append(node)
1242 1258
1243 1259 def change(self, *filenodes):
1244 1260 """
1245 1261 Marks given ``FileNode`` objects to be *changed* in next commit.
1246 1262
1247 1263 :raises ``EmptyRepositoryError``: if there are no commits yet
1248 1264 :raises ``NodeAlreadyExistsError``: if node with same path is already
1249 1265 marked to be *changed*
1250 1266 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1251 1267 marked to be *removed*
1252 1268 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1253 1269 commit
1254 1270 :raises ``NodeNotChangedError``: if node hasn't really been changed
1255 1271 """
1256 1272 for node in filenodes:
1257 1273 if node.path in (n.path for n in self.removed):
1258 1274 raise NodeAlreadyRemovedError(
1259 1275 "Node at %s is already marked as removed" % node.path)
1260 1276 try:
1261 1277 self.repository.get_commit()
1262 1278 except EmptyRepositoryError:
1263 1279 raise EmptyRepositoryError(
1264 1280 "Nothing to change - try to *add* new nodes rather than "
1265 1281 "changing them")
1266 1282 for node in filenodes:
1267 1283 if node.path in (n.path for n in self.changed):
1268 1284 raise NodeAlreadyChangedError(
1269 1285 "Node at '%s' is already marked as changed" % node.path)
1270 1286 self.changed.append(node)
1271 1287
1272 1288 def remove(self, *filenodes):
1273 1289 """
1274 1290 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1275 1291 *removed* in next commit.
1276 1292
1277 1293 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1278 1294 be *removed*
1279 1295 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1280 1296 be *changed*
1281 1297 """
1282 1298 for node in filenodes:
1283 1299 if node.path in (n.path for n in self.removed):
1284 1300 raise NodeAlreadyRemovedError(
1285 1301 "Node is already marked to for removal at %s" % node.path)
1286 1302 if node.path in (n.path for n in self.changed):
1287 1303 raise NodeAlreadyChangedError(
1288 1304 "Node is already marked to be changed at %s" % node.path)
1289 1305 # We only mark node as *removed* - real removal is done by
1290 1306 # commit method
1291 1307 self.removed.append(node)
1292 1308
1293 1309 def reset(self):
1294 1310 """
1295 1311 Resets this instance to initial state (cleans ``added``, ``changed``
1296 1312 and ``removed`` lists).
1297 1313 """
1298 1314 self.added = []
1299 1315 self.changed = []
1300 1316 self.removed = []
1301 1317 self.parents = []
1302 1318
1303 1319 def get_ipaths(self):
1304 1320 """
1305 1321 Returns generator of paths from nodes marked as added, changed or
1306 1322 removed.
1307 1323 """
1308 1324 for node in itertools.chain(self.added, self.changed, self.removed):
1309 1325 yield node.path
1310 1326
1311 1327 def get_paths(self):
1312 1328 """
1313 1329 Returns list of paths from nodes marked as added, changed or removed.
1314 1330 """
1315 1331 return list(self.get_ipaths())
1316 1332
1317 1333 def check_integrity(self, parents=None):
1318 1334 """
1319 1335 Checks in-memory commit's integrity. Also, sets parents if not
1320 1336 already set.
1321 1337
1322 1338 :raises CommitError: if any error occurs (i.e.
1323 1339 ``NodeDoesNotExistError``).
1324 1340 """
1325 1341 if not self.parents:
1326 1342 parents = parents or []
1327 1343 if len(parents) == 0:
1328 1344 try:
1329 1345 parents = [self.repository.get_commit(), None]
1330 1346 except EmptyRepositoryError:
1331 1347 parents = [None, None]
1332 1348 elif len(parents) == 1:
1333 1349 parents += [None]
1334 1350 self.parents = parents
1335 1351
1336 1352 # Local parents, only if not None
1337 1353 parents = [p for p in self.parents if p]
1338 1354
1339 1355 # Check nodes marked as added
1340 1356 for p in parents:
1341 1357 for node in self.added:
1342 1358 try:
1343 1359 p.get_node(node.path)
1344 1360 except NodeDoesNotExistError:
1345 1361 pass
1346 1362 else:
1347 1363 raise NodeAlreadyExistsError(
1348 1364 "Node `%s` already exists at %s" % (node.path, p))
1349 1365
1350 1366 # Check nodes marked as changed
1351 1367 missing = set(self.changed)
1352 1368 not_changed = set(self.changed)
1353 1369 if self.changed and not parents:
1354 1370 raise NodeDoesNotExistError(str(self.changed[0].path))
1355 1371 for p in parents:
1356 1372 for node in self.changed:
1357 1373 try:
1358 1374 old = p.get_node(node.path)
1359 1375 missing.remove(node)
1360 1376 # if content actually changed, remove node from not_changed
1361 1377 if old.content != node.content:
1362 1378 not_changed.remove(node)
1363 1379 except NodeDoesNotExistError:
1364 1380 pass
1365 1381 if self.changed and missing:
1366 1382 raise NodeDoesNotExistError(
1367 1383 "Node `%s` marked as modified but missing in parents: %s"
1368 1384 % (node.path, parents))
1369 1385
1370 1386 if self.changed and not_changed:
1371 1387 raise NodeNotChangedError(
1372 1388 "Node `%s` wasn't actually changed (parents: %s)"
1373 1389 % (not_changed.pop().path, parents))
1374 1390
1375 1391 # Check nodes marked as removed
1376 1392 if self.removed and not parents:
1377 1393 raise NodeDoesNotExistError(
1378 1394 "Cannot remove node at %s as there "
1379 1395 "were no parents specified" % self.removed[0].path)
1380 1396 really_removed = set()
1381 1397 for p in parents:
1382 1398 for node in self.removed:
1383 1399 try:
1384 1400 p.get_node(node.path)
1385 1401 really_removed.add(node)
1386 1402 except CommitError:
1387 1403 pass
1388 1404 not_removed = set(self.removed) - really_removed
1389 1405 if not_removed:
1390 1406 # TODO: johbo: This code branch does not seem to be covered
1391 1407 raise NodeDoesNotExistError(
1392 1408 "Cannot remove node at %s from "
1393 1409 "following parents: %s" % (not_removed, parents))
1394 1410
1395 1411 def commit(
1396 1412 self, message, author, parents=None, branch=None, date=None,
1397 1413 **kwargs):
1398 1414 """
1399 1415 Performs in-memory commit (doesn't check workdir in any way) and
1400 1416 returns newly created :class:`BaseCommit`. Updates repository's
1401 1417 attribute `commits`.
1402 1418
1403 1419 .. note::
1404 1420
1405 1421 While overriding this method, each backend should call
1406 1422 ``self.check_integrity(parents)`` first.
1407 1423
1408 1424 :param message: message of the commit
1409 1425 :param author: full username, e.g. "Joe Doe <joe.doe@example.com>"
1410 1426 :param parents: single parent or sequence of parents from which commit
1411 1427 would be derived
1412 1428 :param date: ``datetime.datetime`` instance. Defaults to
1413 1429 ``datetime.datetime.now()``.
1414 1430 :param branch: branch name, as string. If none given, the backend's
1415 1431 default branch would be used.
1416 1432
1417 1433 :raises ``CommitError``: if any error occurs while committing
1418 1434 """
1419 1435 raise NotImplementedError
1420 1436
1421 1437
1422 1438 class BaseInMemoryChangesetClass(type):
1423 1439
1424 1440 def __instancecheck__(self, instance):
1425 1441 return isinstance(instance, BaseInMemoryCommit)
1426 1442
1427 1443
1428 1444 class BaseInMemoryChangeset(BaseInMemoryCommit):
1429 1445
1430 1446 __metaclass__ = BaseInMemoryChangesetClass
1431 1447
1432 1448 def __new__(cls, *args, **kwargs):
1433 1449 warnings.warn(
1434 1450 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1435 1451 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1436 1452
1437 1453
1438 1454 class EmptyCommit(BaseCommit):
1439 1455 """
1440 1456 A dummy empty commit. It's possible to pass a hash when creating
1441 1457 an EmptyCommit
1442 1458 """
1443 1459
1444 1460 def __init__(
1445 1461 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1446 1462 message='', author='', date=None):
1447 1463 self._empty_commit_id = commit_id
1448 1464 # TODO: johbo: Solve idx parameter, default value does not make
1449 1465 # too much sense
1450 1466 self.idx = idx
1451 1467 self.message = message
1452 1468 self.author = author
1453 1469 self.date = date or datetime.datetime.fromtimestamp(0)
1454 1470 self.repository = repo
1455 1471 self.alias = alias
1456 1472
1457 1473 @LazyProperty
1458 1474 def raw_id(self):
1459 1475 """
1460 1476 Returns raw string identifying this commit, useful for web
1461 1477 representation.
1462 1478 """
1463 1479
1464 1480 return self._empty_commit_id
1465 1481
1466 1482 @LazyProperty
1467 1483 def branch(self):
1468 1484 if self.alias:
1469 1485 from rhodecode.lib.vcs.backends import get_backend
1470 1486 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1471 1487
1472 1488 @LazyProperty
1473 1489 def short_id(self):
1474 1490 return self.raw_id[:12]
1475 1491
1476 1492 @LazyProperty
1477 1493 def id(self):
1478 1494 return self.raw_id
1479 1495
1480 1496 def get_file_commit(self, path):
1481 1497 return self
1482 1498
1483 1499 def get_file_content(self, path):
1484 1500 return u''
1485 1501
1486 1502 def get_file_size(self, path):
1487 1503 return 0
1488 1504
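# A minimal usage sketch: an ``EmptyCommit`` can serve as the base of a
# comparison, e.g. to diff a repository's tip against the empty state,
# assuming ``repo`` is a concrete backend instance:
#
#     diff = repo.get_diff(repo.EMPTY_COMMIT, repo.get_commit())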
1489 1505
1490 1506 class EmptyChangesetClass(type):
1491 1507
1492 1508 def __instancecheck__(self, instance):
1493 1509 return isinstance(instance, EmptyCommit)
1494 1510
1495 1511
1496 1512 class EmptyChangeset(EmptyCommit):
1497 1513
1498 1514 __metaclass__ = EmptyChangesetClass
1499 1515
1500 1516 def __new__(cls, *args, **kwargs):
1501 1517 warnings.warn(
1502 1518 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1503 1519 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1504 1520
1505 1521 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1506 1522 alias=None, revision=-1, message='', author='', date=None):
1507 1523 if requested_revision is not None:
1508 1524 warnings.warn(
1509 1525 "Parameter requested_revision not supported anymore",
1510 1526 DeprecationWarning)
1511 1527 super(EmptyChangeset, self).__init__(
1512 1528 commit_id=cs, repo=repo, alias=alias, idx=revision,
1513 1529 message=message, author=author, date=date)
1514 1530
1515 1531 @property
1516 1532 def revision(self):
1517 1533 warnings.warn("Use idx instead", DeprecationWarning)
1518 1534 return self.idx
1519 1535
1520 1536 @revision.setter
1521 1537 def revision(self, value):
1522 1538 warnings.warn("Use idx instead", DeprecationWarning)
1523 1539 self.idx = value
1524 1540
1525 1541
1526 1542 class EmptyRepository(BaseRepository):
1527 1543 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1528 1544 pass
1529 1545
1530 1546 def get_diff(self, *args, **kwargs):
1531 1547 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1532 1548 return GitDiff('')
1533 1549
1534 1550
1535 1551 class CollectionGenerator(object):
1536 1552
1537 1553 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1538 1554 self.repo = repo
1539 1555 self.commit_ids = commit_ids
1540 1556 # TODO: (oliver) this isn't currently hooked up
1541 1557 self.collection_size = None
1542 1558 self.pre_load = pre_load
1543 1559
1544 1560 def __len__(self):
1545 1561 if self.collection_size is not None:
1546 1562 return self.collection_size
1547 1563 return self.commit_ids.__len__()
1548 1564
1549 1565 def __iter__(self):
1550 1566 for commit_id in self.commit_ids:
1551 1567 # TODO: johbo: Mercurial passes in commit indices or commit ids
1552 1568 yield self._commit_factory(commit_id)
1553 1569
1554 1570 def _commit_factory(self, commit_id):
1555 1571 """
1556 1572 Allows backends to override the way commits are generated.
1557 1573 """
1558 1574 return self.repo.get_commit(commit_id=commit_id,
1559 1575 pre_load=self.pre_load)
1560 1576
1561 1577 def __getslice__(self, i, j):
1562 1578 """
1563 1579 Returns an iterator over a slice of the repository's commits
1564 1580 """
1565 1581 commit_ids = self.commit_ids[i:j]
1566 1582 return self.__class__(
1567 1583 self.repo, commit_ids, pre_load=self.pre_load)
1568 1584
1569 1585 def __repr__(self):
1570 1586 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1571 1587
1572 1588
1573 1589 class Config(object):
1574 1590 """
1575 1591 Represents the configuration for a repository.
1576 1592
1577 1593 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1578 1594 standard library. It implements only the needed subset.
1579 1595 """
1580 1596
1581 1597 def __init__(self):
1582 1598 self._values = {}
1583 1599
1584 1600 def copy(self):
1585 1601 clone = Config()
1586 1602 for section, values in self._values.items():
1587 1603 clone._values[section] = values.copy()
1588 1604 return clone
1589 1605
1590 1606 def __repr__(self):
1591 1607 return '<Config(%s sections) at %s>' % (
1592 1608 len(self._values), hex(id(self)))
1593 1609
1594 1610 def items(self, section):
1595 1611 return self._values.get(section, {}).iteritems()
1596 1612
1597 1613 def get(self, section, option):
1598 1614 return self._values.get(section, {}).get(option)
1599 1615
1600 1616 def set(self, section, option, value):
1601 1617 section_values = self._values.setdefault(section, {})
1602 1618 section_values[option] = value
1603 1619
1604 1620 def clear_section(self, section):
1605 1621 self._values[section] = {}
1606 1622
1607 1623 def serialize(self):
1608 1624 """
1609 1625 Creates a list of three tuples (section, key, value) representing
1610 1626 this config object.
1611 1627 """
1612 1628 items = []
1613 1629 for section in self._values:
1614 1630 for option, value in self._values[section].items():
1615 1631 items.append(
1616 1632 (safe_str(section), safe_str(option), safe_str(value)))
1617 1633 return items
1618 1634
1619 1635
1620 1636 class Diff(object):
1621 1637 """
1622 1638 Represents a diff result from a repository backend.
1623 1639
1624 1640 Subclasses have to provide a backend specific value for
1625 1641 :attr:`_header_re` and :attr:`_meta_re`.
1626 1642 """
1627 1643 _meta_re = None
1628 1644 _header_re = None
1629 1645
1630 1646 def __init__(self, raw_diff):
1631 1647 self.raw = raw_diff
1632 1648
1633 1649 def chunks(self):
1634 1650 """
1635 1651 split the diff in chunks of separate --git a/file b/file chunks
1636 1652 to make diffs consistent we must prepend with \n, and make sure
1637 1653 we can detect last chunk as this was also has special rule
1638 1654 """
1639 1655
1640 1656 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1641 1657 header = diff_parts[0]
1642 1658
1643 1659 if self._meta_re:
1644 1660 match = self._meta_re.match(header)
1645 1661
1646 1662 chunks = diff_parts[1:]
1647 1663 total_chunks = len(chunks)
1648 1664
1649 1665 return (
1650 1666 DiffChunk(chunk, self, cur_chunk == total_chunks)
1651 1667 for cur_chunk, chunk in enumerate(chunks, start=1))
1652 1668
1653 1669
1654 1670 class DiffChunk(object):
1655 1671
1656 1672 def __init__(self, chunk, diff, last_chunk):
1657 1673 self._diff = diff
1658 1674
1659 1675 # since we split by \ndiff --git, that part is lost from the original
1660 1676 # diff; we need to re-apply it at the end, EXCEPT if it's the last chunk
1661 1677 if not last_chunk:
1662 1678 chunk += '\n'
1663 1679
1664 1680 match = self._diff._header_re.match(chunk)
1665 1681 self.header = match.groupdict()
1666 1682 self.diff = chunk[match.end():]
1667 1683 self.raw = chunk
1668 1684
1669 1685
1670 1686 class BasePathPermissionChecker(object):
1671 1687
1672 1688 @staticmethod
1673 1689 def create_from_patterns(includes, excludes):
1674 1690 if includes and '*' in includes and not excludes:
1675 1691 return AllPathPermissionChecker()
1676 1692 elif excludes and '*' in excludes:
1677 1693 return NonePathPermissionChecker()
1678 1694 else:
1679 1695 return PatternPathPermissionChecker(includes, excludes)
1680 1696
1681 1697 @property
1682 1698 def has_full_access(self):
1683 1699 raise NotImplementedError()
1684 1700
1685 1701 def has_access(self, path):
1686 1702 raise NotImplementedError()
1687 1703
1688 1704
1689 1705 class AllPathPermissionChecker(BasePathPermissionChecker):
1690 1706
1691 1707 @property
1692 1708 def has_full_access(self):
1693 1709 return True
1694 1710
1695 1711 def has_access(self, path):
1696 1712 return True
1697 1713
1698 1714
1699 1715 class NonePathPermissionChecker(BasePathPermissionChecker):
1700 1716
1701 1717 @property
1702 1718 def has_full_access(self):
1703 1719 return False
1704 1720
1705 1721 def has_access(self, path):
1706 1722 return False
1707 1723
1708 1724
1709 1725 class PatternPathPermissionChecker(BasePathPermissionChecker):
1710 1726
1711 1727 def __init__(self, includes, excludes):
1712 1728 self.includes = includes
1713 1729 self.excludes = excludes
1714 1730 self.includes_re = [] if not includes else [
1715 1731 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1716 1732 self.excludes_re = [] if not excludes else [
1717 1733 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1718 1734
1719 1735 @property
1720 1736 def has_full_access(self):
1721 1737 return '*' in self.includes and not self.excludes
1722 1738
1723 1739 def has_access(self, path):
1724 1740 for regex in self.excludes_re:
1725 1741 if regex.match(path):
1726 1742 return False
1727 1743 for regex in self.includes_re:
1728 1744 if regex.match(path):
1729 1745 return True
1730 1746 return False
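# A minimal usage sketch of ``create_from_patterns`` with hypothetical
# fnmatch-style patterns:
#
#     checker = BasePathPermissionChecker.create_from_patterns(
#         includes=['docs/*'], excludes=['docs/secret*'])
#     checker.has_access('docs/index.rst')   # True
#     checker.has_access('docs/secret.txt')  # False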
@@ -1,1006 +1,1006 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.vcs import connection, path as vcspath
36 36 from rhodecode.lib.vcs.backends.base import (
37 37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 38 MergeFailureReason, Reference)
39 39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 CommitDoesNotExistError, EmptyRepositoryError,
44 44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
45 45
46 46
47 47 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
48 48
49 49 log = logging.getLogger(__name__)
50 50
51 51
52 52 class GitRepository(BaseRepository):
53 53 """
54 54 Git repository backend.
55 55 """
56 56 DEFAULT_BRANCH_NAME = 'master'
57 57
58 58 contact = BaseRepository.DEFAULT_CONTACT
59 59
60 60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 61 update_after_clone=False, with_wire=None, bare=False):
62 62
63 63 self.path = safe_str(os.path.abspath(repo_path))
64 64 self.config = config if config else self.get_default_config()
65 65 self._remote = connection.Git(
66 66 self.path, self.config, with_wire=with_wire)
67 67
68 68 self._init_repo(create, src_url, update_after_clone, bare)
69 69
70 70 # caches
71 71 self._commit_ids = {}
72 72
73 73 @LazyProperty
74 74 def bare(self):
75 75 return self._remote.bare()
76 76
77 77 @LazyProperty
78 78 def head(self):
79 79 return self._remote.head()
80 80
81 81 @LazyProperty
82 82 def commit_ids(self):
83 83 """
84 84 Returns list of commit ids, in ascending order. Being a lazy
85 85 attribute allows external tools to inject commit ids from cache.
86 86 """
87 87 commit_ids = self._get_all_commit_ids()
88 88 self._rebuild_cache(commit_ids)
89 89 return commit_ids
90 90
91 91 def _rebuild_cache(self, commit_ids):
92 92 self._commit_ids = dict((commit_id, index)
93 93 for index, commit_id in enumerate(commit_ids))
94 94
95 95 def run_git_command(self, cmd, **opts):
96 96 """
97 97 Runs the given ``cmd`` as a git command and returns a tuple
98 98 (stdout, stderr).
99 99
100 100 :param cmd: git command to be executed
101 101 :param opts: env options to pass to the subprocess command
102 102 """
103 103 if not isinstance(cmd, list):
104 104 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
105 105
106 106 skip_stderr_log = opts.pop('skip_stderr_log', False)
107 107 out, err = self._remote.run_git_command(cmd, **opts)
108 108 if err and not skip_stderr_log:
109 109 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
110 110 return out, err
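# --- Editor's sketch (not part of the original diff): a minimal example of
# calling run_git_command(). The repository path and the module import path
# are assumptions for illustration; run_git_command() accepts only a list,
# never a shell string.
def _example_run_git_command():
    from rhodecode.lib.vcs.backends.git.repository import GitRepository
    repo = GitRepository('/srv/repos/example')  # hypothetical repo path
    stdout, stderr = repo.run_git_command(
        ['rev-list', '--count', 'HEAD'], skip_stderr_log=True)
    return stdout.strip()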
111 111
112 112 @staticmethod
113 113 def check_url(url, config):
114 114 """
115 115 Checks the given url and tries to verify that it's a valid
116 116 link. Sometimes it may happen that git issues a basic
117 117 auth request, which can cause the whole API to hang when used
118 118 from python or other external calls.
119 119
120 120 On failures it raises urllib2.HTTPError; the exception is also
121 121 thrown when the return code is not 200.
122 122 """
123 123 # check first if it's a local path and not a url
124 124 if os.path.isdir(url) or url.startswith('file:'):
125 125 return True
126 126
127 127 if '+' in url.split('://', 1)[0]:
128 128 url = url.split('+', 1)[1]
129 129
130 130 # Request the _remote to verify the url
131 131 return connection.Git.check_url(url, config.serialize())
132 132
133 133 @staticmethod
134 134 def is_valid_repository(path):
135 135 if os.path.isdir(os.path.join(path, '.git')):
136 136 return True
137 137 # check case of bare repository
138 138 try:
139 139 GitRepository(path)
140 140 return True
141 141 except VCSError:
142 142 pass
143 143 return False
144 144
145 145 def _init_repo(self, create, src_url=None, update_after_clone=False,
146 146 bare=False):
147 147 if create and os.path.exists(self.path):
148 148 raise RepositoryError(
149 149 "Cannot create repository at %s, location already exist"
150 150 % self.path)
151 151
152 152 try:
153 153 if create and src_url:
154 154 GitRepository.check_url(src_url, self.config)
155 155 self.clone(src_url, update_after_clone, bare)
156 156 elif create:
157 157 os.makedirs(self.path, mode=0755)
158 158
159 159 if bare:
160 160 self._remote.init_bare()
161 161 else:
162 162 self._remote.init()
163 163 else:
164 164 if not self._remote.assert_correct_path():
165 165 raise RepositoryError(
166 166 'Path "%s" does not contain a Git repository' %
167 167 (self.path,))
168 168
169 169 # TODO: johbo: check if we have to translate the OSError here
170 170 except OSError as err:
171 171 raise RepositoryError(err)
172 172
173 173 def _get_all_commit_ids(self, filters=None):
174 174 # we must check if this repo is not empty, since later commands
175 175 # fail if it is. And it's cheaper to ask than to handle the
176 176 # subprocess errors
177 177 try:
178 178 self._remote.head()
179 179 except KeyError:
180 180 return []
181 181
182 182 rev_filter = ['--branches', '--tags']
183 183 extra_filter = []
184 184
185 185 if filters:
186 186 if filters.get('since'):
187 187 extra_filter.append('--since=%s' % (filters['since']))
188 188 if filters.get('until'):
189 189 extra_filter.append('--until=%s' % (filters['until']))
190 190 if filters.get('branch_name'):
191 191 rev_filter = ['--tags']
192 192 extra_filter.append(filters['branch_name'])
193 193 rev_filter.extend(extra_filter)
194 194
195 195 # if filters.get('start') or filters.get('end'):
196 196 # # skip is offset, max-count is limit
197 197 # if filters.get('start'):
198 198 # extra_filter += ' --skip=%s' % filters['start']
199 199 # if filters.get('end'):
200 200 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
201 201
202 202 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
203 203 try:
204 204 output, __ = self.run_git_command(cmd)
205 205 except RepositoryError:
206 206 # Can be raised for empty repositories
207 207 return []
208 208 return output.splitlines()
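# --- Editor's sketch (not part of the original diff): the filters dict that
# _get_all_commit_ids() understands, mirroring the logic above. A
# branch_name filter replaces the default ['--branches', '--tags'] rev
# filter; the date strings follow the '%m/%d/%y %H:%M:%S' format used by
# get_commits() below. All values are illustrative.
def _example_commit_id_filters(repo):
    filters = {
        'since': '01/01/18 00:00:00',  # becomes --since=...
        'until': '06/30/18 23:59:59',  # becomes --until=...
        'branch_name': 'master',       # limits rev-list to this branch
    }
    return repo._get_all_commit_ids(filters=filters)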
209 209
210 210 def _get_commit_id(self, commit_id_or_idx):
211 211 def is_null(value):
212 212 return len(value) == commit_id_or_idx.count('0')
213 213
214 214 if self.is_empty():
215 215 raise EmptyRepositoryError("There are no commits yet")
216 216
217 217 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
218 218 return self.commit_ids[-1]
219 219
220 220 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
221 221 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
222 222 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
223 223 try:
224 224 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
225 225 except Exception:
226 226 msg = "Commit %s does not exist for %s" % (
227 227 commit_id_or_idx, self)
228 228 raise CommitDoesNotExistError(msg)
229 229
230 230 elif is_bstr:
231 231 # check full path ref, eg. refs/heads/master
232 232 ref_id = self._refs.get(commit_id_or_idx)
233 233 if ref_id:
234 234 return ref_id
235 235
236 236 # check branch name
237 237 branch_ids = self.branches.values()
238 238 ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
239 239 if ref_id:
240 240 return ref_id
241 241
242 242 # check tag name
243 243 ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
244 244 if ref_id:
245 245 return ref_id
246 246
247 247 if (not SHA_PATTERN.match(commit_id_or_idx) or
248 248 commit_id_or_idx not in self.commit_ids):
249 249 msg = "Commit %s does not exist for %s" % (
250 250 commit_id_or_idx, self)
251 251 raise CommitDoesNotExistError(msg)
252 252
253 253 # Ensure we return full id
254 254 if not SHA_PATTERN.match(str(commit_id_or_idx)):
255 255 raise CommitDoesNotExistError(
256 256 "Given commit id %s not recognized" % commit_id_or_idx)
257 257 return commit_id_or_idx
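# --- Editor's sketch (not part of the original diff): the identifier forms
# that _get_commit_id() resolves to a full sha, assuming `repo` is a
# non-empty GitRepository and the branch/tag names below exist.
def _example_resolve_commit_id(repo):
    head = repo._get_commit_id('tip')          # also None, '', 'HEAD', -1
    first = repo._get_commit_id(0)             # numeric index into commit_ids
    by_branch = repo._get_commit_id('master')  # tries refs/heads/master
    by_tag = repo._get_commit_id('v1.0.0')     # tries refs/tags/v1.0.0
    return head, first, by_branch, by_tag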
258 258
259 259 def get_hook_location(self):
260 260 """
261 261 returns absolute path to location where hooks are stored
262 262 """
263 263 loc = os.path.join(self.path, 'hooks')
264 264 if not self.bare:
265 265 loc = os.path.join(self.path, '.git', 'hooks')
266 266 return loc
267 267
268 268 @LazyProperty
269 269 def last_change(self):
270 270 """
271 271 Returns last change made on this repository as
272 272 `datetime.datetime` object.
273 273 """
274 274 try:
275 275 return self.get_commit().date
276 276 except RepositoryError:
277 277 tzoffset = makedate()[1]
278 278 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
279 279
280 280 def _get_fs_mtime(self):
281 281 idx_loc = '' if self.bare else '.git'
282 282 # fallback to filesystem
283 283 in_path = os.path.join(self.path, idx_loc, "index")
284 284 he_path = os.path.join(self.path, idx_loc, "HEAD")
285 285 if os.path.exists(in_path):
286 286 return os.stat(in_path).st_mtime
287 287 else:
288 288 return os.stat(he_path).st_mtime
289 289
290 290 @LazyProperty
291 291 def description(self):
292 292 description = self._remote.get_description()
293 293 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
294 294
295 295 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
296 296 if self.is_empty():
297 297 return OrderedDict()
298 298
299 299 result = []
300 300 for ref, sha in self._refs.iteritems():
301 301 if ref.startswith(prefix):
302 302 ref_name = ref
303 303 if strip_prefix:
304 304 ref_name = ref[len(prefix):]
305 305 result.append((safe_unicode(ref_name), sha))
306 306
307 307 def get_name(entry):
308 308 return entry[0]
309 309
310 310 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
311 311
312 312 def _get_branches(self):
313 313 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
314 314
315 315 @LazyProperty
316 316 def branches(self):
317 317 return self._get_branches()
318 318
319 319 @LazyProperty
320 320 def branches_closed(self):
321 321 return {}
322 322
323 323 @LazyProperty
324 324 def bookmarks(self):
325 325 return {}
326 326
327 327 @LazyProperty
328 328 def branches_all(self):
329 329 all_branches = {}
330 330 all_branches.update(self.branches)
331 331 all_branches.update(self.branches_closed)
332 332 return all_branches
333 333
334 334 @LazyProperty
335 335 def tags(self):
336 336 return self._get_tags()
337 337
338 338 def _get_tags(self):
339 339 return self._get_refs_entries(
340 340 prefix='refs/tags/', strip_prefix=True, reverse=True)
341 341
342 342 def tag(self, name, user, commit_id=None, message=None, date=None,
343 343 **kwargs):
344 344 # TODO: fix this method to apply annotated tags correctly with message
345 345 """
346 346 Creates and returns a tag for the given ``commit_id``.
347 347
348 348 :param name: name for new tag
349 349 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
350 350 :param commit_id: commit id for which new tag would be created
351 351 :param message: message of the tag's commit
352 352 :param date: date of tag's commit
353 353
354 354 :raises TagAlreadyExistError: if tag with same name already exists
355 355 """
356 356 if name in self.tags:
357 357 raise TagAlreadyExistError("Tag %s already exists" % name)
358 358 commit = self.get_commit(commit_id=commit_id)
359 359 message = message or "Added tag %s for commit %s" % (
360 360 name, commit.raw_id)
361 361 self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])
362 362
363 363 self._refs = self._get_refs()
364 364 self.tags = self._get_tags()
365 365 return commit
366 366
367 367 def remove_tag(self, name, user, message=None, date=None):
368 368 """
369 369 Removes tag with the given ``name``.
370 370
371 371 :param name: name of the tag to be removed
372 372 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
373 373 :param message: message of the tag's removal commit
374 374 :param date: date of tag's removal commit
375 375
376 376 :raises TagDoesNotExistError: if tag with given name does not exist
377 377 """
378 378 if name not in self.tags:
379 379 raise TagDoesNotExistError("Tag %s does not exist" % name)
380 380 tagpath = vcspath.join(
381 381 self._remote.get_refs_path(), 'refs', 'tags', name)
382 382 try:
383 383 os.remove(tagpath)
384 384 self._refs = self._get_refs()
385 385 self.tags = self._get_tags()
386 386 except OSError as e:
387 387 raise RepositoryError(e.strerror)
388 388
389 389 def _get_refs(self):
390 390 return self._remote.get_refs()
391 391
392 392 @LazyProperty
393 393 def _refs(self):
394 394 return self._get_refs()
395 395
396 396 @property
397 397 def _ref_tree(self):
398 398 node = tree = {}
399 399 for ref, sha in self._refs.iteritems():
400 400 path = ref.split('/')
401 401 for bit in path[:-1]:
402 402 node = node.setdefault(bit, {})
403 403 node[path[-1]] = sha
404 404 node = tree
405 405 return tree
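# --- Editor's sketch (not part of the original diff): the nested dict shape
# produced by _ref_tree for refs such as 'refs/heads/master' and
# 'refs/tags/v1.0'. The shas are shortened placeholders.
_EXAMPLE_REF_TREE = {
    'refs': {
        'heads': {'master': 'aaaa...'},
        'tags': {'v1.0': 'bbbb...'},
    },
    'HEAD': 'aaaa...',
}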
406 406
407 407 def get_remote_ref(self, ref_name):
408 408 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
409 409 try:
410 410 return self._refs[ref_key]
411 411 except Exception:
412 412 return
413 413
414 414 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
415 415 """
416 416 Returns `GitCommit` object representing commit from git repository
417 417 at the given `commit_id` or head (most recent commit) if None given.
418 418 """
419 419 if commit_id is not None:
420 420 self._validate_commit_id(commit_id)
421 421 elif commit_idx is not None:
422 422 self._validate_commit_idx(commit_idx)
423 423 commit_id = commit_idx
424 424 commit_id = self._get_commit_id(commit_id)
425 425 try:
426 426 # Need to call remote to translate id for tagging scenario
427 427 commit_id = self._remote.get_object(commit_id)["commit_id"]
428 428 idx = self._commit_ids[commit_id]
429 429 except KeyError:
430 430 raise RepositoryError("Cannot get object with id %s" % commit_id)
431 431
432 432 return GitCommit(self, commit_id, idx, pre_load=pre_load)
433 433
434 434 def get_commits(
435 435 self, start_id=None, end_id=None, start_date=None, end_date=None,
436 436 branch_name=None, show_hidden=False, pre_load=None):
437 437 """
438 438 Returns generator of `GitCommit` objects from start to end (both
439 439 are inclusive), in ascending date order.
440 440
441 441 :param start_id: None, str(commit_id)
442 442 :param end_id: None, str(commit_id)
443 443 :param start_date: if specified, commits with commit date less than
444 444 ``start_date`` would be filtered out from returned set
445 445 :param end_date: if specified, commits with commit date greater than
446 446 ``end_date`` would be filtered out from returned set
447 447 :param branch_name: if specified, commits not reachable from given
448 448 branch would be filtered out from returned set
449 449 :param show_hidden: Show hidden commits such as obsolete or hidden from
450 450 Mercurial evolve
451 451 :raise BranchDoesNotExistError: If given `branch_name` does not
452 452 exist.
453 453 :raise CommitDoesNotExistError: If commits for given `start` or
454 454 `end` could not be found.
455 455
456 456 """
457 457 if self.is_empty():
458 458 raise EmptyRepositoryError("There are no commits yet")
459 459 self._validate_branch_name(branch_name)
460 460
461 461 if start_id is not None:
462 462 self._validate_commit_id(start_id)
463 463 if end_id is not None:
464 464 self._validate_commit_id(end_id)
465 465
466 466 start_raw_id = self._get_commit_id(start_id)
467 467 start_pos = self._commit_ids[start_raw_id] if start_id else None
468 468 end_raw_id = self._get_commit_id(end_id)
469 469 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
470 470
471 471 if None not in [start_id, end_id] and start_pos > end_pos:
472 472 raise RepositoryError(
473 473 "Start commit '%s' cannot be after end commit '%s'" %
474 474 (start_id, end_id))
475 475
476 476 if end_pos is not None:
477 477 end_pos += 1
478 478
479 479 filter_ = []
480 480 if branch_name:
481 481 filter_.append({'branch_name': branch_name})
482 482 if start_date and not end_date:
483 483 filter_.append({'since': start_date})
484 484 if end_date and not start_date:
485 485 filter_.append({'until': end_date})
486 486 if start_date and end_date:
487 487 filter_.append({'since': start_date})
488 488 filter_.append({'until': end_date})
489 489
490 490 # if start_pos or end_pos:
491 491 # filter_.append({'start': start_pos})
492 492 # filter_.append({'end': end_pos})
493 493
494 494 if filter_:
495 495 revfilters = {
496 496 'branch_name': branch_name,
497 497 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
498 498 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
499 499 'start': start_pos,
500 500 'end': end_pos,
501 501 }
502 502 commit_ids = self._get_all_commit_ids(filters=revfilters)
503 503
504 504 # pure python stuff, it's slow due to walker walking whole repo
505 505 # def get_revs(walker):
506 506 # for walker_entry in walker:
507 507 # yield walker_entry.commit.id
508 508 # revfilters = {}
509 509 # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
510 510 else:
511 511 commit_ids = self.commit_ids
512 512
513 513 if start_pos or end_pos:
514 514 commit_ids = commit_ids[start_pos: end_pos]
515 515
516 516 return CollectionGenerator(self, commit_ids, pre_load=pre_load)
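# --- Editor's sketch (not part of the original diff): a typical
# get_commits() call, assuming `repo` has a 'master' branch. Dates are
# datetime objects; they are serialized with '%m/%d/%y %H:%M:%S' above
# before being handed to rev-list.
def _example_get_commits(repo):
    import datetime
    commits = repo.get_commits(
        start_date=datetime.datetime(2018, 1, 1),
        end_date=datetime.datetime(2018, 6, 30),
        branch_name='master')
    return [c.raw_id for c in commits]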
517 517
518 518 def get_diff(
519 519 self, commit1, commit2, path='', ignore_whitespace=False,
520 520 context=3, path1=None):
521 521 """
522 522 Returns (git like) *diff*, as plain text. Shows changes introduced by
523 523 ``commit2`` since ``commit1``.
524 524
525 525 :param commit1: Entry point from which diff is shown. Can be
526 526 ``self.EMPTY_COMMIT`` - in this case, patch showing all
527 527 the changes since empty state of the repository until ``commit2``
528 528 :param commit2: Until which commit changes should be shown.
529 529 :param ignore_whitespace: If set to ``True``, would not show whitespace
530 530 changes. Defaults to ``False``.
531 531 :param context: How many lines before/after changed lines should be
532 532 shown. Defaults to ``3``.
533 533 """
534 534 self._validate_diff_commits(commit1, commit2)
535 535 if path1 is not None and path1 != path:
536 536 raise ValueError("Diff of two different paths not supported.")
537 537
538 538 flags = [
539 539 '-U%s' % context, '--full-index', '--binary', '-p',
540 540 '-M', '--abbrev=40']
541 541 if ignore_whitespace:
542 542 flags.append('-w')
543 543
544 544 if commit1 == self.EMPTY_COMMIT:
545 545 cmd = ['show'] + flags + [commit2.raw_id]
546 546 else:
547 547 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
548 548
549 549 if path:
550 550 cmd.extend(['--', path])
551 551
552 552 stdout, __ = self.run_git_command(cmd)
553 553 # If we used 'show' command, strip first few lines (until actual diff
554 554 # starts)
555 555 if commit1 == self.EMPTY_COMMIT:
556 556 lines = stdout.splitlines()
557 557 x = 0
558 558 for line in lines:
559 559 if line.startswith('diff'):
560 560 break
561 561 x += 1
562 562 # Append a new line just like the 'diff' command does
563 563 stdout = '\n'.join(lines[x:]) + '\n'
564 564 return GitDiff(stdout)
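# --- Editor's sketch (not part of the original diff): diffing a commit
# against its parent, falling back to the EMPTY_COMMIT special case above
# for a root commit.
def _example_get_diff(repo):
    tip = repo.get_commit()
    parent = tip.parents[0] if tip.parents else repo.EMPTY_COMMIT
    return repo.get_diff(parent, tip, ignore_whitespace=True, context=5)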
565 565
566 566 def strip(self, commit_id, branch_name):
567 567 commit = self.get_commit(commit_id=commit_id)
568 568 if commit.merge:
569 569 raise Exception('Cannot reset to merge commit')
570 570
571 571 # parent is going to be the new head now
572 572 commit = commit.parents[0]
573 573 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
574 574
575 575 self.commit_ids = self._get_all_commit_ids()
576 576 self._rebuild_cache(self.commit_ids)
577 577
578 578 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
579 579 if commit_id1 == commit_id2:
580 580 return commit_id1
581 581
582 582 if self != repo2:
583 583 commits = self._remote.get_missing_revs(
584 584 commit_id1, commit_id2, repo2.path)
585 585 if commits:
586 586 commit = repo2.get_commit(commits[-1])
587 587 if commit.parents:
588 588 ancestor_id = commit.parents[0].raw_id
589 589 else:
590 590 ancestor_id = None
591 591 else:
592 592 # no commits from the other repo, so ancestor_id is commit_id2
593 593 ancestor_id = commit_id2
594 594 else:
595 595 output, __ = self.run_git_command(
596 596 ['merge-base', commit_id1, commit_id2])
597 597 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
598 598
599 599 return ancestor_id
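# --- Editor's sketch (not part of the original diff): resolving the common
# ancestor of two branch heads in the same repository, which takes the
# `git merge-base` path above. The 'develop' branch name is hypothetical.
def _example_common_ancestor(repo):
    master_head = repo.branches['master']
    develop_head = repo.branches['develop']
    return repo.get_common_ancestor(master_head, develop_head, repo)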
600 600
601 601 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
602 602 repo1 = self
603 603 ancestor_id = None
604 604
605 605 if commit_id1 == commit_id2:
606 606 commits = []
607 607 elif repo1 != repo2:
608 608 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
609 609 repo2.path)
610 610 commits = [
611 611 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
612 612 for commit_id in reversed(missing_ids)]
613 613 else:
614 614 output, __ = repo1.run_git_command(
615 615 ['log', '--reverse', '--pretty=format: %H', '-s',
616 616 '%s..%s' % (commit_id1, commit_id2)])
617 617 commits = [
618 618 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
619 619 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
620 620
621 621 return commits
622 622
623 623 @LazyProperty
624 624 def in_memory_commit(self):
625 625 """
626 626 Returns ``GitInMemoryCommit`` object for this repository.
627 627 """
628 628 return GitInMemoryCommit(self)
629 629
630 630 def clone(self, url, update_after_clone=True, bare=False):
631 631 """
632 632 Tries to clone commits from external location.
633 633
634 634 :param update_after_clone: If set to ``False``, git won't check out
635 635 the working directory
636 636 :param bare: If set to ``True``, repository would be cloned into
637 637 *bare* git repository (no working directory at all).
638 638 """
639 639 # init_bare and init expect empty dir created to proceed
640 640 if not os.path.exists(self.path):
641 641 os.mkdir(self.path)
642 642
643 643 if bare:
644 644 self._remote.init_bare()
645 645 else:
646 646 self._remote.init()
647 647
648 648 deferred = '^{}'
649 649 valid_refs = ('refs/heads', 'refs/tags', 'HEAD')
650 650
651 651 return self._remote.clone(
652 652 url, deferred, valid_refs, update_after_clone)
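# --- Editor's sketch (not part of the original diff): creating a bare
# repository by cloning, via the constructor path that funnels into
# _init_repo() -> clone() above. The paths/urls and the module import path
# are assumptions for illustration.
def _example_create_by_clone():
    from rhodecode.lib.vcs.backends.git.repository import GitRepository
    return GitRepository(
        '/srv/repos/example-clone', create=True, bare=True,
        src_url='file:///srv/repos/example')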
653 653
654 654 def pull(self, url, commit_ids=None):
655 655 """
656 656 Tries to pull changes from external location. We use fetch here since
657 657 pull in git does merges and we want to be compatible with the hg
658 658 backend, so pull == fetch in this case.
659 659 """
660 660 self.fetch(url, commit_ids=commit_ids)
661 661
662 662 def fetch(self, url, commit_ids=None):
663 663 """
664 664 Tries to fetch changes from external location.
665 665 """
666 666 refs = None
667 667
668 668 if commit_ids is not None:
669 669 remote_refs = self._remote.get_remote_refs(url)
670 670 refs = [
671 671 ref for ref in remote_refs if remote_refs[ref] in commit_ids]
672 672 self._remote.fetch(url, refs=refs)
673 673
674 674 def push(self, url):
675 675 refs = None
676 676 self._remote.sync_push(url, refs=refs)
677 677
678 678 def set_refs(self, ref_name, commit_id):
679 679 self._remote.set_refs(ref_name, commit_id)
680 680
681 681 def remove_ref(self, ref_name):
682 682 self._remote.remove_ref(ref_name)
683 683
684 684 def _update_server_info(self):
685 685 """
686 686 runs git's update-server-info command in this repo instance
687 687 """
688 688 self._remote.update_server_info()
689 689
690 690 def _current_branch(self):
691 691 """
692 692 Return the name of the current branch.
693 693
694 694 It only works for non-bare repositories (i.e. repositories with a
695 695 working copy)
696 696 """
697 697 if self.bare:
698 698 raise RepositoryError('Bare git repos do not have active branches')
699 699
700 700 if self.is_empty():
701 701 return None
702 702
703 703 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
704 704 return stdout.strip()
705 705
706 706 def _checkout(self, branch_name, create=False, force=False):
707 707 """
708 708 Checkout a branch in the working directory.
709 709
710 710 It tries to create the branch if create is True, failing if the branch
711 711 already exists.
712 712
713 713 It only works for non-bare repositories (i.e. repositories with a
714 714 working copy)
715 715 """
716 716 if self.bare:
717 717 raise RepositoryError('Cannot checkout branches in a bare git repo')
718 718
719 719 cmd = ['checkout']
720 720 if force:
721 721 cmd.append('-f')
722 722 if create:
723 723 cmd.append('-b')
724 724 cmd.append(branch_name)
725 725 self.run_git_command(cmd, fail_on_stderr=False)
726 726
727 727 def _identify(self):
728 728 """
729 729 Return the current state of the working directory.
730 730 """
731 731 if self.bare:
732 732 raise RepositoryError('Bare git repos do not have active branches')
733 733
734 734 if self.is_empty():
735 735 return None
736 736
737 737 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
738 738 return stdout.strip()
739 739
740 740 def _local_clone(self, clone_path, branch_name, source_branch=None):
741 741 """
742 742 Create a local clone of the current repo.
743 743 """
744 744 # N.B.(skreft): the --branch option is required as otherwise the shallow
745 745 # clone will only fetch the active branch.
746 746 cmd = ['clone', '--branch', branch_name,
747 747 self.path, os.path.abspath(clone_path)]
748 748
749 749 self.run_git_command(cmd, fail_on_stderr=False)
750 750
751 751 # if we get the different source branch, make sure we also fetch it for
752 752 # merge conditions
753 753 if source_branch and source_branch != branch_name:
754 754 # check if the ref exists.
755 755 shadow_repo = GitRepository(os.path.abspath(clone_path))
756 756 if shadow_repo.get_remote_ref(source_branch):
757 757 cmd = ['fetch', self.path, source_branch]
758 758 self.run_git_command(cmd, fail_on_stderr=False)
759 759
760 760 def _local_fetch(self, repository_path, branch_name, use_origin=False):
761 761 """
762 762 Fetch a branch from a local repository.
763 763 """
764 764 repository_path = os.path.abspath(repository_path)
765 765 if repository_path == self.path:
766 766 raise ValueError('Cannot fetch from the same repository')
767 767
768 768 if use_origin:
769 769 branch_name = '+{branch}:refs/heads/{branch}'.format(
770 770 branch=branch_name)
771 771
772 772 cmd = ['fetch', '--no-tags', '--update-head-ok',
773 773 repository_path, branch_name]
774 774 self.run_git_command(cmd, fail_on_stderr=False)
775 775
776 776 def _local_reset(self, branch_name):
777 777 branch_name = '{}'.format(branch_name)
778 778 cmd = ['reset', '--hard', branch_name]
779 779 self.run_git_command(cmd, fail_on_stderr=False)
780 780
781 781 def _last_fetch_heads(self):
782 782 """
783 783 Return the last fetched heads that need merging.
784 784
785 785 The algorithm is defined at
786 786 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
787 787 """
788 788 if not self.bare:
789 789 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
790 790 else:
791 791 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
792 792
793 793 heads = []
794 794 with open(fetch_heads_path) as f:
795 795 for line in f:
796 796 if ' not-for-merge ' in line:
797 797 continue
798 798 line = re.sub('\t.*', '', line, flags=re.DOTALL)
799 799 heads.append(line)
800 800
801 801 return heads
802 802
803 803 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
804 804 return GitRepository(shadow_repository_path)
805 805
806 806 def _local_pull(self, repository_path, branch_name, ff_only=True):
807 807 """
808 808 Pull a branch from a local repository.
809 809 """
810 810 if self.bare:
811 811 raise RepositoryError('Cannot pull into a bare git repository')
812 812 # N.B.(skreft): The --ff-only option is to make sure this is a
813 813 # fast-forward (i.e., we are only pulling new changes and there are no
814 814 # conflicts with our current branch)
815 815 # Additionally, that option needs to go before --no-tags, otherwise git
816 816 # pull complains about it being an unknown flag.
817 817 cmd = ['pull']
818 818 if ff_only:
819 819 cmd.append('--ff-only')
820 820 cmd.extend(['--no-tags', repository_path, branch_name])
821 821 self.run_git_command(cmd, fail_on_stderr=False)
822 822
823 823 def _local_merge(self, merge_message, user_name, user_email, heads):
824 824 """
825 825 Merge the given head into the checked out branch.
826 826
827 827 It will force a merge commit.
828 828
829 829 Currently it raises an error if the repo is empty, as it is not possible
830 830 to create a merge commit in an empty repo.
831 831
832 832 :param merge_message: The message to use for the merge commit.
833 833 :param heads: the heads to merge.
834 834 """
835 835 if self.bare:
836 836 raise RepositoryError('Cannot merge into a bare git repository')
837 837
838 838 if not heads:
839 839 return
840 840
841 841 if self.is_empty():
842 842 # TODO(skreft): do something more robust in this case.
843 843 raise RepositoryError(
844 844 'Do not know how to merge into empty repositories yet')
845 845
846 846 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
847 847 # merge commit. We also specify the user who is doing the merge.
848 848 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
849 849 '-c', 'user.email=%s' % safe_str(user_email),
850 850 'merge', '--no-ff', '-m', safe_str(merge_message)]
851 851 cmd.extend(heads)
852 852 try:
853 853 output = self.run_git_command(cmd, fail_on_stderr=False)
854 854 except RepositoryError:
855 855 # Cleanup any merge leftovers
856 856 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
857 857 raise
858 858
859 859 def _local_push(
860 860 self, source_branch, repository_path, target_branch,
861 861 enable_hooks=False, rc_scm_data=None):
862 862 """
863 863 Push the source_branch to the given repository and target_branch.
864 864
865 865 Currently, if the target_branch is not master and the target repo is
866 866 empty, the push will work, but then GitRepository won't be able to find
867 867 the pushed branch or the commits, as the HEAD will be corrupted (i.e.,
868 868 pointing to master, which does not exist).
869 869
870 870 It does not run the hooks in the target repo.
871 871 """
872 872 # TODO(skreft): deal with the case in which the target repo is empty,
873 873 # and the target_branch is not master.
874 874 target_repo = GitRepository(repository_path)
875 875 if (not target_repo.bare and
876 876 target_repo._current_branch() == target_branch):
877 877 # Git prevents pushing to the checked out branch, so simulate it by
878 878 # pulling into the target repository.
879 879 target_repo._local_pull(self.path, source_branch)
880 880 else:
881 881 cmd = ['push', os.path.abspath(repository_path),
882 882 '%s:%s' % (source_branch, target_branch)]
883 883 gitenv = {}
884 884 if rc_scm_data:
885 885 gitenv.update({'RC_SCM_DATA': rc_scm_data})
886 886
887 887 if not enable_hooks:
888 888 gitenv['RC_SKIP_HOOKS'] = '1'
889 889 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
890 890
891 891 def _get_new_pr_branch(self, source_branch, target_branch):
892 892 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
893 893 pr_branches = []
894 894 for branch in self.branches:
895 895 if branch.startswith(prefix):
896 896 pr_branches.append(int(branch[len(prefix):]))
897 897
898 898 if not pr_branches:
899 899 branch_id = 0
900 900 else:
901 901 branch_id = max(pr_branches) + 1
902 902
903 903 return '%s%d' % (prefix, branch_id)
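# --- Editor's sketch (not part of the original diff): given existing
# branches 'pr_feature-master_0' and 'pr_feature-master_1', the call below
# would return 'pr_feature-master_2'.
def _example_new_pr_branch(repo):
    return repo._get_new_pr_branch('feature', 'master')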
904 904
905 def _merge_repo(self, shadow_repository_path, target_ref,
905 def _maybe_prepare_merge_workspace(
906 self, repo_id, workspace_id, target_ref, source_ref):
907 shadow_repository_path = self._get_shadow_repository_path(
908 repo_id, workspace_id)
909 if not os.path.exists(shadow_repository_path):
910 self._local_clone(
911 shadow_repository_path, target_ref.name, source_ref.name)
912 log.debug(
913 'Prepared shadow repository in %s', shadow_repository_path)
914
915 return shadow_repository_path
916
917 def _merge_repo(self, repo_id, workspace_id, target_ref,
906 918 source_repo, source_ref, merge_message,
907 919 merger_name, merger_email, dry_run=False,
908 920 use_rebase=False, close_branch=False):
909 921 if target_ref.commit_id != self.branches[target_ref.name]:
910 922 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
911 923 target_ref.commit_id, self.branches[target_ref.name])
912 924 return MergeResponse(
913 925 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
914 926
915 shadow_repo = GitRepository(shadow_repository_path)
927 shadow_repository_path = self._maybe_prepare_merge_workspace(
928 repo_id, workspace_id, target_ref, source_ref)
929 shadow_repo = self._get_shadow_instance(shadow_repository_path)
930
916 931 # checkout source, if it's different. Otherwise we could not
917 932 # fetch proper commits for merge testing
918 933 if source_ref.name != target_ref.name:
919 934 if shadow_repo.get_remote_ref(source_ref.name):
920 935 shadow_repo._checkout(source_ref.name, force=True)
921 936
922 937 # checkout target, and fetch changes
923 938 shadow_repo._checkout(target_ref.name, force=True)
924 939
925 940 # fetch/reset pull the target, in case it is changed
926 941 # this handles even force changes
927 942 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
928 943 shadow_repo._local_reset(target_ref.name)
929 944
930 945 # Need to reload repo to invalidate the cache, or otherwise we cannot
931 946 # retrieve the last target commit.
932 shadow_repo = GitRepository(shadow_repository_path)
947 shadow_repo = self._get_shadow_instance(shadow_repository_path)
933 948 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
934 949 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
935 950 target_ref, target_ref.commit_id,
936 951 shadow_repo.branches[target_ref.name])
937 952 return MergeResponse(
938 953 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
939 954
940 955 # calculate new branch
941 956 pr_branch = shadow_repo._get_new_pr_branch(
942 957 source_ref.name, target_ref.name)
943 958 log.debug('using pull-request merge branch: `%s`', pr_branch)
944 959 # checkout to temp branch, and fetch changes
945 960 shadow_repo._checkout(pr_branch, create=True)
946 961 try:
947 962 shadow_repo._local_fetch(source_repo.path, source_ref.name)
948 963 except RepositoryError:
949 964 log.exception('Failure when doing local fetch on git shadow repo')
950 965 return MergeResponse(
951 966 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
952 967
953 968 merge_ref = None
954 969 merge_failure_reason = MergeFailureReason.NONE
955 970 try:
956 971 shadow_repo._local_merge(merge_message, merger_name, merger_email,
957 972 [source_ref.commit_id])
958 973 merge_possible = True
959 974
960 975 # Need to reload repo to invalidate the cache, or otherwise we
961 976 # cannot retrieve the merge commit.
962 977 shadow_repo = GitRepository(shadow_repository_path)
963 978 merge_commit_id = shadow_repo.branches[pr_branch]
964 979
965 980 # Set a reference pointing to the merge commit. This reference may
966 981 # be used to easily identify the last successful merge commit in
967 982 # the shadow repository.
968 983 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
969 984 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
970 985 except RepositoryError:
971 986 log.exception('Failure when doing local merge on git shadow repo')
972 987 merge_possible = False
973 988 merge_failure_reason = MergeFailureReason.MERGE_FAILED
974 989
975 990 if merge_possible and not dry_run:
976 991 try:
977 992 shadow_repo._local_push(
978 993 pr_branch, self.path, target_ref.name, enable_hooks=True,
979 994 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
980 995 merge_succeeded = True
981 996 except RepositoryError:
982 997 log.exception(
983 998 'Failure when doing local push on git shadow repo')
984 999 merge_succeeded = False
985 1000 merge_failure_reason = MergeFailureReason.PUSH_FAILED
986 1001 else:
987 1002 merge_succeeded = False
988 1003
989 1004 return MergeResponse(
990 1005 merge_possible, merge_succeeded, merge_ref,
991 1006 merge_failure_reason)
992
993 def _get_shadow_repository_path(self, workspace_id):
994 # The name of the shadow repository must start with '.', so it is
995 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
996 return os.path.join(
997 os.path.dirname(self.path),
998 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
999
1000 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref, source_ref):
1001 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
1002 if not os.path.exists(shadow_repository_path):
1003 self._local_clone(
1004 shadow_repository_path, target_ref.name, source_ref.name)
1005
1006 return shadow_repository_path
@@ -1,918 +1,915 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.vcs import connection, exceptions
36 36 from rhodecode.lib.vcs.backends.base import (
37 37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 38 MergeFailureReason, Reference, BasePathPermissionChecker)
39 39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 45 from rhodecode.lib.vcs.compat import configparser
46 46
47 47 hexlify = binascii.hexlify
48 48 nullid = "\0" * 20
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 class MercurialRepository(BaseRepository):
54 54 """
55 55 Mercurial repository backend
56 56 """
57 57 DEFAULT_BRANCH_NAME = 'default'
58 58
59 59 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 60 update_after_clone=False, with_wire=None):
61 61 """
62 62 Raises RepositoryError if repository could not be found at the given
63 63 ``repo_path``.
64 64
65 65 :param repo_path: local path of the repository
66 66 :param config: config object containing the repo configuration
67 67 :param create=False: if set to True, would try to create repository if
68 68 it does not exist rather than raising exception
69 69 :param src_url=None: would try to clone repository from given location
70 70 :param update_after_clone=False: sets update of working copy after
71 71 making a clone
72 72 """
73 73
74 74 self.path = safe_str(os.path.abspath(repo_path))
75 75 # mercurial since 4.4.X requires certain configuration to be present;
76 76 # because we sometimes init the repos with a custom config, we need
77 77 # to meet these special requirements
78 78 self.config = config if config else self.get_default_config(
79 79 default=[('extensions', 'largefiles', '1')])
80 80
81 81 self._remote = connection.Hg(
82 82 self.path, self.config, with_wire=with_wire)
83 83
84 84 self._init_repo(create, src_url, update_after_clone)
85 85
86 86 # caches
87 87 self._commit_ids = {}
88 88
89 89 @LazyProperty
90 90 def commit_ids(self):
91 91 """
92 92 Returns list of commit ids, in ascending order. Being a lazy
93 93 attribute allows external tools to inject shas from cache.
94 94 """
95 95 commit_ids = self._get_all_commit_ids()
96 96 self._rebuild_cache(commit_ids)
97 97 return commit_ids
98 98
99 99 def _rebuild_cache(self, commit_ids):
100 100 self._commit_ids = dict((commit_id, index)
101 101 for index, commit_id in enumerate(commit_ids))
102 102
103 103 @LazyProperty
104 104 def branches(self):
105 105 return self._get_branches()
106 106
107 107 @LazyProperty
108 108 def branches_closed(self):
109 109 return self._get_branches(active=False, closed=True)
110 110
111 111 @LazyProperty
112 112 def branches_all(self):
113 113 all_branches = {}
114 114 all_branches.update(self.branches)
115 115 all_branches.update(self.branches_closed)
116 116 return all_branches
117 117
118 118 def _get_branches(self, active=True, closed=False):
119 119 """
120 120 Gets branches for this repository.
121 121 Returns only active (not closed) branches by default.
122 122
123 123 :param active: also return active branches
124 124 :param closed: also return closed branches
125 125
126 126 """
127 127 if self.is_empty():
128 128 return {}
129 129
130 130 def get_name(ctx):
131 131 return ctx[0]
132 132
133 133 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
134 134 self._remote.branches(active, closed).items()]
135 135
136 136 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
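# --- Editor's sketch (not part of the original diff): the two flag
# combinations behind the `branches` and `branches_closed` properties
# defined above.
def _example_hg_branches(repo):
    active_only = repo._get_branches(active=True, closed=False)
    closed_only = repo._get_branches(active=False, closed=True)
    return active_only, closed_only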
137 137
138 138 @LazyProperty
139 139 def tags(self):
140 140 """
141 141 Gets tags for this repository
142 142 """
143 143 return self._get_tags()
144 144
145 145 def _get_tags(self):
146 146 if self.is_empty():
147 147 return {}
148 148
149 149 def get_name(ctx):
150 150 return ctx[0]
151 151
152 152 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
153 153 self._remote.tags().items()]
154 154
155 155 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
156 156
157 157 def tag(self, name, user, commit_id=None, message=None, date=None,
158 158 **kwargs):
159 159 """
160 160 Creates and returns a tag for the given ``commit_id``.
161 161
162 162 :param name: name for new tag
163 163 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
164 164 :param commit_id: commit id for which new tag would be created
165 165 :param message: message of the tag's commit
166 166 :param date: date of tag's commit
167 167
168 168 :raises TagAlreadyExistError: if tag with same name already exists
169 169 """
170 170 if name in self.tags:
171 171 raise TagAlreadyExistError("Tag %s already exists" % name)
172 172 commit = self.get_commit(commit_id=commit_id)
173 173 local = kwargs.setdefault('local', False)
174 174
175 175 if message is None:
176 176 message = "Added tag %s for commit %s" % (name, commit.short_id)
177 177
178 178 date, tz = date_to_timestamp_plus_offset(date)
179 179
180 180 self._remote.tag(
181 181 name, commit.raw_id, message, local, user, date, tz)
182 182 self._remote.invalidate_vcs_cache()
183 183
184 184 # Reinitialize tags
185 185 self.tags = self._get_tags()
186 186 tag_id = self.tags[name]
187 187
188 188 return self.get_commit(commit_id=tag_id)
189 189
190 190 def remove_tag(self, name, user, message=None, date=None):
191 191 """
192 192 Removes tag with the given `name`.
193 193
194 194 :param name: name of the tag to be removed
195 195 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
196 196 :param message: message of the tag's removal commit
197 197 :param date: date of tag's removal commit
198 198
199 199 :raises TagDoesNotExistError: if tag with given name does not exist
200 200 """
201 201 if name not in self.tags:
202 202 raise TagDoesNotExistError("Tag %s does not exist" % name)
203 203 if message is None:
204 204 message = "Removed tag %s" % name
205 205 local = False
206 206
207 207 date, tz = date_to_timestamp_plus_offset(date)
208 208
209 209 self._remote.tag(name, nullid, message, local, user, date, tz)
210 210 self._remote.invalidate_vcs_cache()
211 211 self.tags = self._get_tags()
212 212
213 213 @LazyProperty
214 214 def bookmarks(self):
215 215 """
216 216 Gets bookmarks for this repository
217 217 """
218 218 return self._get_bookmarks()
219 219
220 220 def _get_bookmarks(self):
221 221 if self.is_empty():
222 222 return {}
223 223
224 224 def get_name(ctx):
225 225 return ctx[0]
226 226
227 227 _bookmarks = [
228 228 (safe_unicode(n), hexlify(h)) for n, h in
229 229 self._remote.bookmarks().items()]
230 230
231 231 return OrderedDict(sorted(_bookmarks, key=get_name))
232 232
233 233 def _get_all_commit_ids(self):
234 234 return self._remote.get_all_commit_ids('visible')
235 235
236 236 def get_diff(
237 237 self, commit1, commit2, path='', ignore_whitespace=False,
238 238 context=3, path1=None):
239 239 """
240 240 Returns (git like) *diff*, as plain text. Shows changes introduced by
241 241 `commit2` since `commit1`.
242 242
243 243 :param commit1: Entry point from which diff is shown. Can be
244 244 ``self.EMPTY_COMMIT`` - in this case, patch showing all
245 245 the changes since empty state of the repository until `commit2`
246 246 :param commit2: Until which commit changes should be shown.
247 247 :param ignore_whitespace: If set to ``True``, would not show whitespace
248 248 changes. Defaults to ``False``.
249 249 :param context: How many lines before/after changed lines should be
250 250 shown. Defaults to ``3``.
251 251 """
252 252 self._validate_diff_commits(commit1, commit2)
253 253 if path1 is not None and path1 != path:
254 254 raise ValueError("Diff of two different paths not supported.")
255 255
256 256 if path:
257 257 file_filter = [self.path, path]
258 258 else:
259 259 file_filter = None
260 260
261 261 diff = self._remote.diff(
262 262 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
263 263 opt_git=True, opt_ignorews=ignore_whitespace,
264 264 context=context)
265 265 return MercurialDiff(diff)
266 266
267 267 def strip(self, commit_id, branch=None):
268 268 self._remote.strip(commit_id, update=False, backup="none")
269 269
270 270 self._remote.invalidate_vcs_cache()
271 271 self.commit_ids = self._get_all_commit_ids()
272 272 self._rebuild_cache(self.commit_ids)
273 273
274 274 def verify(self):
275 275 verify = self._remote.verify()
276 276
277 277 self._remote.invalidate_vcs_cache()
278 278 return verify
279 279
280 280 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
281 281 if commit_id1 == commit_id2:
282 282 return commit_id1
283 283
284 284 ancestors = self._remote.revs_from_revspec(
285 285 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
286 286 other_path=repo2.path)
287 287 return repo2[ancestors[0]].raw_id if ancestors else None
288 288
289 289 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
290 290 if commit_id1 == commit_id2:
291 291 commits = []
292 292 else:
293 293 if merge:
294 294 indexes = self._remote.revs_from_revspec(
295 295 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
296 296 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
297 297 else:
298 298 indexes = self._remote.revs_from_revspec(
299 299 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
300 300 commit_id1, other_path=repo2.path)
301 301
302 302 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
303 303 for idx in indexes]
304 304
305 305 return commits
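# --- Editor's sketch (not part of the original diff): the two revspec
# templates used above, spelled out with placeholders. With merge=True the
# ancestry difference is taken; otherwise the range form excludes
# commit_id1 itself.
_EXAMPLE_COMPARE_REVSPECS = [
    "ancestors(id(<c2>)) - ancestors(id(<c1>)) - id(<c1>)",  # merge=True
    "id(<c1>)..id(<c2>) - id(<c1>)",                         # merge=False
]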
306 306
307 307 @staticmethod
308 308 def check_url(url, config):
309 309 """
310 310 Checks the given url and tries to verify that it's a valid
311 311 link. Sometimes it may happen that mercurial issues a basic
312 312 auth request, which can cause the whole API to hang when used
313 313 from python or other external calls.
314 314
315 315 On failures it raises urllib2.HTTPError; the exception is also
316 316 thrown when the return code is not 200.
317 317 """
318 318 # check first if it's not a local url
319 319 if os.path.isdir(url) or url.startswith('file:'):
320 320 return True
321 321
322 322 # Request the _remote to verify the url
323 323 return connection.Hg.check_url(url, config.serialize())
324 324
325 325 @staticmethod
326 326 def is_valid_repository(path):
327 327 return os.path.isdir(os.path.join(path, '.hg'))
328 328
329 329 def _init_repo(self, create, src_url=None, update_after_clone=False):
330 330 """
331 331 Checks for a mercurial repository in the given path. If there
332 332 is no repository in that path it will raise an exception unless
333 333 the `create` parameter is set to True - in that case the repository
334 334 would be created.
335 335
336 336 If `src_url` is given, it will try to clone the repository from
337 337 that location. Additionally it'll update the working copy
338 338 according to the `update_after_clone` flag.
339 339 """
340 340 if create and os.path.exists(self.path):
341 341 raise RepositoryError(
342 342 "Cannot create repository at %s, location already exist"
343 343 % self.path)
344 344
345 345 if src_url:
346 346 url = str(self._get_url(src_url))
347 347 MercurialRepository.check_url(url, self.config)
348 348
349 349 self._remote.clone(url, self.path, update_after_clone)
350 350
351 351 # Don't try to create if we've already cloned repo
352 352 create = False
353 353
354 354 if create:
355 355 os.makedirs(self.path, mode=0755)
356 356
357 357 self._remote.localrepository(create)
358 358
359 359 @LazyProperty
360 360 def in_memory_commit(self):
361 361 return MercurialInMemoryCommit(self)
362 362
363 363 @LazyProperty
364 364 def description(self):
365 365 description = self._remote.get_config_value(
366 366 'web', 'description', untrusted=True)
367 367 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
368 368
369 369 @LazyProperty
370 370 def contact(self):
371 371 contact = (
372 372 self._remote.get_config_value("web", "contact") or
373 373 self._remote.get_config_value("ui", "username"))
374 374 return safe_unicode(contact or self.DEFAULT_CONTACT)
375 375
376 376 @LazyProperty
377 377 def last_change(self):
378 378 """
379 379 Returns last change made on this repository as
380 380 `datetime.datetime` object.
381 381 """
382 382 try:
383 383 return self.get_commit().date
384 384 except RepositoryError:
385 385 tzoffset = makedate()[1]
386 386 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
387 387
388 388 def _get_fs_mtime(self):
389 389 # fallback to filesystem
390 390 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
391 391 st_path = os.path.join(self.path, '.hg', "store")
392 392 if os.path.exists(cl_path):
393 393 return os.stat(cl_path).st_mtime
394 394 else:
395 395 return os.stat(st_path).st_mtime
396 396
397 397 def _get_url(self, url):
398 398 """
399 399 Returns normalized url. If no schema is given, it falls back
400 400 to the filesystem
401 401 (``file:///``) schema.
402 402 """
403 403 url = url.encode('utf8')
404 404 if url != 'default' and '://' not in url:
405 405 url = "file:" + urllib.pathname2url(url)
406 406 return url
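# --- Editor's sketch (not part of the original diff): how _get_url()
# normalizes its input; paths are hypothetical.
def _example_hg_get_url(repo):
    local = repo._get_url('/srv/repos/example')  # gains a file: prefix
    remote = repo._get_url('https://host/repo')  # left untouched
    default = repo._get_url('default')           # left untouched
    return local, remote, default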
407 407
408 408 def get_hook_location(self):
409 409 """
410 410 returns absolute path to location where hooks are stored
411 411 """
412 412 return os.path.join(self.path, '.hg', '.hgrc')
413 413
414 414 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
415 415 """
416 416 Returns ``MercurialCommit`` object representing repository's
417 417 commit at the given `commit_id` or `commit_idx`.
418 418 """
419 419 if self.is_empty():
420 420 raise EmptyRepositoryError("There are no commits yet")
421 421
422 422 if commit_id is not None:
423 423 self._validate_commit_id(commit_id)
424 424 try:
425 425 idx = self._commit_ids[commit_id]
426 426 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
427 427 except KeyError:
428 428 pass
429 429 elif commit_idx is not None:
430 430 self._validate_commit_idx(commit_idx)
431 431 try:
432 432 id_ = self.commit_ids[commit_idx]
433 433 if commit_idx < 0:
434 434 commit_idx += len(self.commit_ids)
435 435 return MercurialCommit(
436 436 self, id_, commit_idx, pre_load=pre_load)
437 437 except IndexError:
438 438 commit_id = commit_idx
439 439 else:
440 440 commit_id = "tip"
441 441
442 442 if isinstance(commit_id, unicode):
443 443 commit_id = safe_str(commit_id)
444 444
445 445 try:
446 446 raw_id, idx = self._remote.lookup(commit_id, both=True)
447 447 except CommitDoesNotExistError:
448 448 msg = "Commit %s does not exist for %s" % (
449 449 commit_id, self)
450 450 raise CommitDoesNotExistError(msg)
451 451
452 452 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
453 453
454 454 def get_commits(
455 455 self, start_id=None, end_id=None, start_date=None, end_date=None,
456 456 branch_name=None, show_hidden=False, pre_load=None):
457 457 """
458 458 Returns generator of ``MercurialCommit`` objects from start to end
459 459 (both are inclusive)
460 460
461 461 :param start_id: None, str(commit_id)
462 462 :param end_id: None, str(commit_id)
463 463 :param start_date: if specified, commits with commit date less than
464 464 ``start_date`` would be filtered out from returned set
465 465 :param end_date: if specified, commits with commit date greater than
466 466 ``end_date`` would be filtered out from returned set
467 467 :param branch_name: if specified, commits not reachable from given
468 468 branch would be filtered out from returned set
469 469 :param show_hidden: Show hidden commits such as obsolete or hidden from
470 470 Mercurial evolve
471 471 :raise BranchDoesNotExistError: If given ``branch_name`` does not
472 472 exist.
473 473 :raise CommitDoesNotExistError: If commit for given ``start`` or
474 474 ``end`` could not be found.
475 475 """
476 476 # actually we should check now if it's not an empty repo
477 477 branch_ancestors = False
478 478 if self.is_empty():
479 479 raise EmptyRepositoryError("There are no commits yet")
480 480 self._validate_branch_name(branch_name)
481 481
482 482 if start_id is not None:
483 483 self._validate_commit_id(start_id)
484 484 c_start = self.get_commit(commit_id=start_id)
485 485 start_pos = self._commit_ids[c_start.raw_id]
486 486 else:
487 487 start_pos = None
488 488
489 489 if end_id is not None:
490 490 self._validate_commit_id(end_id)
491 491 c_end = self.get_commit(commit_id=end_id)
492 492 end_pos = max(0, self._commit_ids[c_end.raw_id])
493 493 else:
494 494 end_pos = None
495 495
496 496 if None not in [start_id, end_id] and start_pos > end_pos:
497 497 raise RepositoryError(
498 498 "Start commit '%s' cannot be after end commit '%s'" %
499 499 (start_id, end_id))
500 500
501 501 if end_pos is not None:
502 502 end_pos += 1
503 503
504 504 commit_filter = []
505 505
506 506 if branch_name and not branch_ancestors:
507 507 commit_filter.append('branch("%s")' % (branch_name,))
508 508 elif branch_name and branch_ancestors:
509 509 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
510 510
511 511 if start_date and not end_date:
512 512 commit_filter.append('date(">%s")' % (start_date,))
513 513 if end_date and not start_date:
514 514 commit_filter.append('date("<%s")' % (end_date,))
515 515 if start_date and end_date:
516 516 commit_filter.append(
517 517 'date(">%s") and date("<%s")' % (start_date, end_date))
518 518
519 519 if not show_hidden:
520 520 commit_filter.append('not obsolete()')
521 521 commit_filter.append('not hidden()')
522 522
523 523 # TODO: johbo: Figure out a simpler way for this solution
524 524 collection_generator = CollectionGenerator
525 525 if commit_filter:
526 526 commit_filter = ' and '.join(map(safe_str, commit_filter))
527 527 revisions = self._remote.rev_range([commit_filter])
528 528 collection_generator = MercurialIndexBasedCollectionGenerator
529 529 else:
530 530 revisions = self.commit_ids
531 531
532 532 if start_pos or end_pos:
533 533 revisions = revisions[start_pos:end_pos]
534 534
535 535 return collection_generator(self, revisions, pre_load=pre_load)
536 536
537 537 def pull(self, url, commit_ids=None):
538 538 """
539 539 Tries to pull changes from external location.
540 540
541 541 :param commit_ids: Optional. Can be set to a list of commit ids
542 542 which shall be pulled from the other repository.
543 543 """
544 544 url = self._get_url(url)
545 545 self._remote.pull(url, commit_ids=commit_ids)
546 546 self._remote.invalidate_vcs_cache()
547 547
548 548 def push(self, url):
549 549 url = self._get_url(url)
550 550 self._remote.sync_push(url)
551 551
552 552 def _local_clone(self, clone_path):
553 553 """
554 554 Create a local clone of the current repo.
555 555 """
556 556 self._remote.clone(self.path, clone_path, update_after_clone=True,
557 557 hooks=False)
558 558
559 559 def _update(self, revision, clean=False):
560 560 """
561 561 Update the working copy to the specified revision.
562 562 """
563 563 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
564 564 self._remote.update(revision, clean=clean)
565 565
566 566 def _identify(self):
567 567 """
568 568 Return the current state of the working directory.
569 569 """
570 570 return self._remote.identify().strip().rstrip('+')
571 571
572 572 def _heads(self, branch=None):
573 573 """
574 574 Return the commit ids of the repository heads.
575 575 """
576 576 return self._remote.heads(branch=branch).strip().split(' ')
577 577
578 578 def _ancestor(self, revision1, revision2):
579 579 """
580 580 Return the common ancestor of the two revisions.
581 581 """
582 582 return self._remote.ancestor(revision1, revision2)
583 583
584 584 def _local_push(
585 585 self, revision, repository_path, push_branches=False,
586 586 enable_hooks=False):
587 587 """
588 588 Push the given revision to the specified repository.
589 589
590 590 :param push_branches: allow to create branches in the target repo.
591 591 """
592 592 self._remote.push(
593 593 [revision], repository_path, hooks=enable_hooks,
594 594 push_branches=push_branches)
595 595
596 596 def _local_merge(self, target_ref, merge_message, user_name, user_email,
597 597 source_ref, use_rebase=False, dry_run=False):
598 598 """
599 599 Merge the given source_revision into the checked out revision.
600 600
601 601 Returns the commit id of the merge and a boolean indicating if the
602 602 commit needs to be pushed.
603 603 """
604 604 self._update(target_ref.commit_id)
605 605
606 606 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
607 607 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
608 608
609 609 if ancestor == source_ref.commit_id:
610 610 # Nothing to do, the changes were already integrated
611 611 return target_ref.commit_id, False
612 612
613 613 elif ancestor == target_ref.commit_id and is_the_same_branch:
614 614 # In this case we should force a commit message
615 615 return source_ref.commit_id, True
616 616
617 617 if use_rebase:
618 618 try:
619 619 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
620 620 target_ref.commit_id)
621 621 self.bookmark(bookmark_name, revision=source_ref.commit_id)
622 622 self._remote.rebase(
623 623 source=source_ref.commit_id, dest=target_ref.commit_id)
624 624 self._remote.invalidate_vcs_cache()
625 625 self._update(bookmark_name)
626 626 return self._identify(), True
627 627 except RepositoryError:
628 628 # The rebase-abort may raise another exception which 'hides'
629 629 # the original one, therefore we log it here.
630 630 log.exception('Error while rebasing shadow repo during merge.')
631 631
632 632 # Cleanup any rebase leftovers
633 633 self._remote.invalidate_vcs_cache()
634 634 self._remote.rebase(abort=True)
635 635 self._remote.invalidate_vcs_cache()
636 636 self._remote.update(clean=True)
637 637 raise
638 638 else:
639 639 try:
640 640 self._remote.merge(source_ref.commit_id)
641 641 self._remote.invalidate_vcs_cache()
642 642 self._remote.commit(
643 643 message=safe_str(merge_message),
644 644 username=safe_str('%s <%s>' % (user_name, user_email)))
645 645 self._remote.invalidate_vcs_cache()
646 646 return self._identify(), True
647 647 except RepositoryError:
648 648 # Cleanup any merge leftovers
649 649 self._remote.update(clean=True)
650 650 raise
651 651
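A minimal sketch of the fast-forward decisions above (hypothetical helper, not part of this diff): given the common ancestor, it picks the resulting commit and whether anything must be pushed.

    def merge_strategy(ancestor, target_id, source_id, same_branch):
        if ancestor == source_id:
            # target already contains the source changes
            return target_id, False   # nothing new to push
        if ancestor == target_id and same_branch:
            # plain fast-forward within a single branch
            return source_id, True    # existing commits still need pushing
        return None, True             # a real merge/rebase commit is required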
652 652 def _local_close(self, target_ref, user_name, user_email,
653 653 source_ref, close_message=''):
654 654 """
655 655 Close the branch of the given source_revision.
656 656
657 657 Returns the commit id of the close and a boolean indicating if the
658 658 commit needs to be pushed.
659 659 """
660 660 self._update(source_ref.commit_id)
661 661 message = close_message or "Closing branch: `{}`".format(source_ref.name)
662 662 try:
663 663 self._remote.commit(
664 664 message=safe_str(message),
665 665 username=safe_str('%s <%s>' % (user_name, user_email)),
666 666 close_branch=True)
667 667 self._remote.invalidate_vcs_cache()
668 668 return self._identify(), True
669 669 except RepositoryError:
670 670 # Cleanup any commit leftovers
671 671 self._remote.update(clean=True)
672 672 raise
673 673
674 674 def _is_the_same_branch(self, target_ref, source_ref):
675 675 return (
676 676 self._get_branch_name(target_ref) ==
677 677 self._get_branch_name(source_ref))
678 678
679 679 def _get_branch_name(self, ref):
680 680 if ref.type == 'branch':
681 681 return ref.name
682 682 return self._remote.ctx_branch(ref.commit_id)
683 683
684 def _get_shadow_repository_path(self, workspace_id):
685 # The name of the shadow repository must start with '.', so it is
686 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
687 return os.path.join(
688 os.path.dirname(self.path),
689 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
690
691 def _maybe_prepare_merge_workspace(self, workspace_id, unused_target_ref, unused_source_ref):
692 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
684 def _maybe_prepare_merge_workspace(
685 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
686 shadow_repository_path = self._get_shadow_repository_path(
687 repo_id, workspace_id)
693 688 if not os.path.exists(shadow_repository_path):
694 689 self._local_clone(shadow_repository_path)
695 690 log.debug(
696 691 'Prepared shadow repository in %s', shadow_repository_path)
697 692
698 693 return shadow_repository_path
699 694
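This hunk is the heart of the change: the shadow repository path is no longer built here from the workspace id alone, but requested from a shared helper that also receives the numeric repo id. A hedged before/after sketch (the new helper's body lives outside this hunk, so its exact format string is an assumption):

    # old scheme (removed above): keyed on the repository *name*
    old = os.path.join(
        os.path.dirname(repo_path),
        '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))

    # assumed new scheme: keyed on the immutable numeric repo_id, e.g.
    new = os.path.join(
        os.path.dirname(repo_path),
        '.__shadow_repo_%s_%s' % (repo_id, workspace_id))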
700 def _merge_repo(self, shadow_repository_path, target_ref,
695 def _merge_repo(self, repo_id, workspace_id, target_ref,
701 696 source_repo, source_ref, merge_message,
702 697 merger_name, merger_email, dry_run=False,
703 698 use_rebase=False, close_branch=False):
704 699
705 700 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
706 701 'rebase' if use_rebase else 'merge', dry_run)
707 702 if target_ref.commit_id not in self._heads():
708 703 return MergeResponse(
709 704 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
710 705
711 706 try:
712 707 if (target_ref.type == 'branch' and
713 708 len(self._heads(target_ref.name)) != 1):
714 709 return MergeResponse(
715 710 False, False, None,
716 711 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
717 712 except CommitDoesNotExistError:
718 713 log.exception('Failure when looking up branch heads on hg target')
719 714 return MergeResponse(
720 715 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
721 716
717 shadow_repository_path = self._maybe_prepare_merge_workspace(
718 repo_id, workspace_id, target_ref, source_ref)
722 719 shadow_repo = self._get_shadow_instance(shadow_repository_path)
723 720
724 721 log.debug('Pulling in target reference %s', target_ref)
725 722 self._validate_pull_reference(target_ref)
726 723 shadow_repo._local_pull(self.path, target_ref)
727 724 try:
728 725 log.debug('Pulling in source reference %s', source_ref)
729 726 source_repo._validate_pull_reference(source_ref)
730 727 shadow_repo._local_pull(source_repo.path, source_ref)
731 728 except CommitDoesNotExistError:
732 729 log.exception('Failure when doing local pull on hg shadow repo')
733 730 return MergeResponse(
734 731 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
735 732
736 733 merge_ref = None
737 734 merge_commit_id = None
738 735 close_commit_id = None
739 736 merge_failure_reason = MergeFailureReason.NONE
740 737
741 738 # enforce that close branch is used only when we source from
742 739 # an actual branch
743 740 close_branch = close_branch and source_ref.type == 'branch'
744 741
745 742 # don't allow closing the branch if source and target are the same
746 743 close_branch = close_branch and source_ref.name != target_ref.name
747 744
748 745 needs_push_on_close = False
749 746 if close_branch and not use_rebase and not dry_run:
750 747 try:
751 748 close_commit_id, needs_push_on_close = shadow_repo._local_close(
752 749 target_ref, merger_name, merger_email, source_ref)
753 750 merge_possible = True
754 751 except RepositoryError:
755 752 log.exception(
756 753 'Failure when doing close branch on hg shadow repo')
757 754 merge_possible = False
758 755 merge_failure_reason = MergeFailureReason.MERGE_FAILED
759 756 else:
760 757 merge_possible = True
761 758
762 759 needs_push = False
763 760 if merge_possible:
764 761 try:
765 762 merge_commit_id, needs_push = shadow_repo._local_merge(
766 763 target_ref, merge_message, merger_name, merger_email,
767 764 source_ref, use_rebase=use_rebase, dry_run=dry_run)
768 765 merge_possible = True
769 766
770 767 # read the state of the close action, as it
771 768 # may have required a push
772 769 needs_push = needs_push or needs_push_on_close
773 770
774 771 # Set a bookmark pointing to the merge commit. This bookmark
775 772 # may be used to easily identify the last successful merge
776 773 # commit in the shadow repository.
777 774 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
778 775 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
779 776 except SubrepoMergeError:
780 777 log.exception(
781 778 'Subrepo merge error during local merge on hg shadow repo.')
782 779 merge_possible = False
783 780 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
784 781 needs_push = False
785 782 except RepositoryError:
786 783 log.exception('Failure when doing local merge on hg shadow repo')
787 784 merge_possible = False
788 785 merge_failure_reason = MergeFailureReason.MERGE_FAILED
789 786 needs_push = False
790 787
791 788 if merge_possible and not dry_run:
792 789 if needs_push:
793 790 # In case the target is a bookmark, update it, so after pushing
794 791 # the bookmark is also updated in the target.
795 792 if target_ref.type == 'book':
796 793 shadow_repo.bookmark(
797 794 target_ref.name, revision=merge_commit_id)
798 795 try:
799 796 shadow_repo_with_hooks = self._get_shadow_instance(
800 797 shadow_repository_path,
801 798 enable_hooks=True)
802 799 # This is the actual merge action, we push from shadow
803 800 # into origin.
804 801 # Note: the push_branches option will push any new branch
805 802 # defined in the source repository to the target. This may
806 803 # be dangerous as branches are permanent in Mercurial.
807 804 # This feature was requested in issue #441.
808 805 shadow_repo_with_hooks._local_push(
809 806 merge_commit_id, self.path, push_branches=True,
810 807 enable_hooks=True)
811 808
812 809 # maybe we also need to push the close_commit_id
813 810 if close_commit_id:
814 811 shadow_repo_with_hooks._local_push(
815 812 close_commit_id, self.path, push_branches=True,
816 813 enable_hooks=True)
817 814 merge_succeeded = True
818 815 except RepositoryError:
819 816 log.exception(
820 817 'Failure when doing local push from the shadow '
821 818 'repository to the target repository.')
822 819 merge_succeeded = False
823 820 merge_failure_reason = MergeFailureReason.PUSH_FAILED
824 821 else:
825 822 merge_succeeded = True
826 823 else:
827 824 merge_succeeded = False
828 825
829 826 return MergeResponse(
830 827 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
831 828
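Under the new signature the caller passes repo_id and workspace_id instead of a pre-built shadow path; a hedged usage sketch:

    response = target_repo._merge_repo(
        repo_id, workspace_id, target_ref,
        source_repo, source_ref, merge_message,
        merger_name, merger_email, dry_run=True)
    # MergeResponse carries (merge_possible, merge_succeeded,
    # merge_ref, merge_failure_reason), as constructed above.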
832 829 def _get_shadow_instance(
833 830 self, shadow_repository_path, enable_hooks=False):
834 831 config = self.config.copy()
835 832 if not enable_hooks:
836 833 config.clear_section('hooks')
837 834 return MercurialRepository(shadow_repository_path, config)
838 835
839 836 def _validate_pull_reference(self, reference):
840 837 if not (reference.name in self.bookmarks or
841 838 reference.name in self.branches or
842 839 self.get_commit(reference.commit_id)):
843 840 raise CommitDoesNotExistError(
844 841 'Unknown branch, bookmark or commit id')
845 842
846 843 def _local_pull(self, repository_path, reference):
847 844 """
848 845 Fetch a branch, bookmark or commit from a local repository.
849 846 """
850 847 repository_path = os.path.abspath(repository_path)
851 848 if repository_path == self.path:
852 849 raise ValueError('Cannot pull from the same repository')
853 850
854 851 reference_type_to_option_name = {
855 852 'book': 'bookmark',
856 853 'branch': 'branch',
857 854 }
858 855 option_name = reference_type_to_option_name.get(
859 856 reference.type, 'revision')
860 857
861 858 if option_name == 'revision':
862 859 ref = reference.commit_id
863 860 else:
864 861 ref = reference.name
865 862
866 863 options = {option_name: [ref]}
867 864 self._remote.pull_cmd(repository_path, hooks=False, **options)
868 865 self._remote.invalidate_vcs_cache()
869 866
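For reference, how a Reference maps onto the pull options built above (illustrative values):

    # Reference('book', 'pr-merge', sha)   -> pull_cmd(path, hooks=False, bookmark=['pr-merge'])
    # Reference('branch', 'default', sha)  -> pull_cmd(path, hooks=False, branch=['default'])
    # any other type, e.g. a tag or sha    -> pull_cmd(path, hooks=False, revision=[sha])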
870 867 def bookmark(self, bookmark, revision=None):
871 868 if isinstance(bookmark, unicode):
872 869 bookmark = safe_str(bookmark)
873 870 self._remote.bookmark(bookmark, revision=revision)
874 871 self._remote.invalidate_vcs_cache()
875 872
876 873 def get_path_permissions(self, username):
877 874 hgacl_file = os.path.join(self.path, '.hg/hgacl')
878 875
879 876 def read_patterns(suffix):
880 877 svalue = None
881 878 try:
882 879 svalue = hgacl.get('narrowhgacl', username + suffix)
883 880 except configparser.NoOptionError:
884 881 try:
885 882 svalue = hgacl.get('narrowhgacl', 'default' + suffix)
886 883 except configparser.NoOptionError:
887 884 pass
888 885 if not svalue:
889 886 return None
890 887 result = ['/']
891 888 for pattern in svalue.split():
892 889 result.append(pattern)
893 890 if '*' not in pattern and '?' not in pattern:
894 891 result.append(pattern + '/*')
895 892 return result
896 893
897 894 if os.path.exists(hgacl_file):
898 895 try:
899 896 hgacl = configparser.RawConfigParser()
900 897 hgacl.read(hgacl_file)
901 898
902 899 includes = read_patterns('.includes')
903 900 excludes = read_patterns('.excludes')
904 901 return BasePathPermissionChecker.create_from_patterns(
905 902 includes, excludes)
906 903 except BaseException as e:
907 904 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
908 905 hgacl_file, self.name, e)
909 906 raise exceptions.RepositoryRequirementError(msg)
910 907 else:
911 908 return None
912 909
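For reference, a sample .hg/hgacl file that read_patterns() above would parse (illustrative user and patterns); 'default.*' entries apply when no per-user entry exists, and non-glob patterns implicitly also match '<pattern>/*':

    [narrowhgacl]
    bob.includes = docs scripts/*
    bob.excludes = secrets
    default.includes = public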
913 910
914 911 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
915 912
916 913 def _commit_factory(self, commit_id):
917 914 return self.repo.get_commit(
918 915 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,4524 +1,4530 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import hashlib
29 29 import logging
30 30 import datetime
31 31 import warnings
32 32 import ipaddress
33 33 import functools
34 34 import traceback
35 35 import collections
36 36
37 37 from sqlalchemy import (
38 38 or_, and_, not_, func, TypeDecorator, event,
39 39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 41 Text, Float, PickleType)
42 42 from sqlalchemy.sql.expression import true, false
43 43 from sqlalchemy.sql.functions import coalesce, count # noqa
44 44 from sqlalchemy.orm import (
45 45 relationship, joinedload, class_mapper, validates, aliased)
46 46 from sqlalchemy.ext.declarative import declared_attr
47 47 from sqlalchemy.ext.hybrid import hybrid_property
48 48 from sqlalchemy.exc import IntegrityError # noqa
49 49 from sqlalchemy.dialects.mysql import LONGTEXT
50 50 from beaker.cache import cache_region
51 51 from zope.cachedescriptors.property import Lazy as LazyProperty
52 52
53 53 from pyramid.threadlocal import get_current_request
54 54
55 55 from rhodecode.translation import _
56 56 from rhodecode.lib.vcs import get_vcs_instance
57 57 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
58 58 from rhodecode.lib.utils2 import (
59 59 str2bool, safe_str, get_commit_safe, safe_unicode, md5_safe,
60 60 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
61 61 glob2re, StrictAttributeDict, cleaned_uri)
62 62 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
63 63 JsonRaw
64 64 from rhodecode.lib.ext_json import json
65 65 from rhodecode.lib.caching_query import FromCache
66 66 from rhodecode.lib.encrypt import AESCipher
67 67
68 68 from rhodecode.model.meta import Base, Session
69 69
70 70 URL_SEP = '/'
71 71 log = logging.getLogger(__name__)
72 72
73 73 # =============================================================================
74 74 # BASE CLASSES
75 75 # =============================================================================
76 76
77 77 # this is propagated from .ini file rhodecode.encrypted_values.secret or
78 78 # beaker.session.secret if first is not set.
79 79 # and initialized at environment.py
80 80 ENCRYPTION_KEY = None
81 81
82 82 # used to sort permissions by types, '#' used here is not allowed to be in
83 83 # usernames, and it's very early in sorted string.printable table.
84 84 PERMISSION_TYPE_SORT = {
85 85 'admin': '####',
86 86 'write': '###',
87 87 'read': '##',
88 88 'none': '#',
89 89 }
90 90
91 91
92 92 def display_user_sort(obj):
93 93 """
94 94 Sort function used to sort permissions in .permissions() function of
95 95 Repository, RepoGroup, UserGroup. Also it puts the default user in front
96 96 of all other resources
97 97 """
98 98
99 99 if obj.username == User.DEFAULT_USER:
100 100 return '#####'
101 101 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
102 102 return prefix + obj.username
103 103
104 104
105 105 def display_user_group_sort(obj):
106 106 """
107 107 Sort function used to sort permissions in .permissions() function of
108 108 Repository, RepoGroup, UserGroup. Also it puts the default user in front
109 109 of all other resources
110 110 """
111 111
112 112 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
113 113 return prefix + obj.users_group_name
114 114
115 115
116 116 def _hash_key(k):
117 117 return md5_safe(k)
118 118
119 119
120 120 def in_filter_generator(qry, items, limit=500):
121 121 """
122 122 Splits a large IN() clause into multiple IN() clauses combined with OR
123 123 e.g.::
124 124 cnt = Repository.query().filter(
125 125 or_(
126 126 *in_filter_generator(Repository.repo_id, range(100000))
127 127 )).count()
128 128 """
129 129 if not items:
130 130 # empty list will cause empty query which might cause security issues
131 131 # this can lead to hidden unpleasant results
132 132 items = [-1]
133 133
134 134 parts = []
135 135 for chunk in xrange(0, len(items), limit):
136 136 parts.append(
137 137 qry.in_(items[chunk: chunk + limit])
138 138 )
139 139
140 140 return parts
141 141
142 142
143 143 class EncryptedTextValue(TypeDecorator):
144 144 """
145 145 Special column for encrypted long text data, use like::
146 146
147 147 value = Column("encrypted_value", EncryptedValue(), nullable=False)
148 148
149 149 This column is intelligent: if the value is in unencrypted form it returns
150 150 the unencrypted form, but on save it always encrypts
151 151 """
152 152 impl = Text
153 153
154 154 def process_bind_param(self, value, dialect):
155 155 if not value:
156 156 return value
157 157 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
158 158 # protect against double encryption if someone manually starts
159 159 # doing it
160 160 raise ValueError('value needs to be in unencrypted format, i.e. '
161 161 'not starting with enc$aes')
162 162 return 'enc$aes_hmac$%s' % AESCipher(
163 163 ENCRYPTION_KEY, hmac=True).encrypt(value)
164 164
165 165 def process_result_value(self, value, dialect):
166 166 import rhodecode
167 167
168 168 if not value:
169 169 return value
170 170
171 171 parts = value.split('$', 3)
172 172 if not len(parts) == 3:
173 173 # probably not encrypted values
174 174 return value
175 175 else:
176 176 if parts[0] != 'enc':
177 177 # parts ok, but without our header?
178 178 return value
179 179 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
180 180 'rhodecode.encrypted_values.strict') or True)
181 181 # at that stage we know it's our encryption
182 182 if parts[1] == 'aes':
183 183 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
184 184 elif parts[1] == 'aes_hmac':
185 185 decrypted_data = AESCipher(
186 186 ENCRYPTION_KEY, hmac=True,
187 187 strict_verification=enc_strict_mode).decrypt(parts[2])
188 188 else:
189 189 raise ValueError(
190 190 'Encryption type part is wrong, must be `aes` '
191 191 'or `aes_hmac`, got `%s` instead' % (parts[1]))
192 192 return decrypted_data
193 193
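The stored wire format handled above, with illustrative values:

    raw = 'enc$aes_hmac$<ciphertext>'
    parts = raw.split('$', 3)   # -> ['enc', 'aes_hmac', '<ciphertext>']
    # parts[0] must be 'enc'; parts[1] selects plain AES ('aes') or
    # AES with HMAC verification ('aes_hmac'). Anything that does not
    # match this shape is returned as-is as a non-encrypted value.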
194 194
195 195 class BaseModel(object):
196 196 """
197 197 Base Model for all classes
198 198 """
199 199
200 200 @classmethod
201 201 def _get_keys(cls):
202 202 """return column names for this model """
203 203 return class_mapper(cls).c.keys()
204 204
205 205 def get_dict(self):
206 206 """
207 207 return dict with keys and values corresponding
208 208 to this model data """
209 209
210 210 d = {}
211 211 for k in self._get_keys():
212 212 d[k] = getattr(self, k)
213 213
214 214 # also use __json__() if present to get additional fields
215 215 _json_attr = getattr(self, '__json__', None)
216 216 if _json_attr:
217 217 # update with attributes from __json__
218 218 if callable(_json_attr):
219 219 _json_attr = _json_attr()
220 220 for k, val in _json_attr.iteritems():
221 221 d[k] = val
222 222 return d
223 223
224 224 def get_appstruct(self):
225 225 """return list with keys and values tuples corresponding
226 226 to this model data """
227 227
228 228 lst = []
229 229 for k in self._get_keys():
230 230 lst.append((k, getattr(self, k),))
231 231 return lst
232 232
233 233 def populate_obj(self, populate_dict):
234 234 """populate model with data from given populate_dict"""
235 235
236 236 for k in self._get_keys():
237 237 if k in populate_dict:
238 238 setattr(self, k, populate_dict[k])
239 239
240 240 @classmethod
241 241 def query(cls):
242 242 return Session().query(cls)
243 243
244 244 @classmethod
245 245 def get(cls, id_):
246 246 if id_:
247 247 return cls.query().get(id_)
248 248
249 249 @classmethod
250 250 def get_or_404(cls, id_):
251 251 from pyramid.httpexceptions import HTTPNotFound
252 252
253 253 try:
254 254 id_ = int(id_)
255 255 except (TypeError, ValueError):
256 256 raise HTTPNotFound()
257 257
258 258 res = cls.query().get(id_)
259 259 if not res:
260 260 raise HTTPNotFound()
261 261 return res
262 262
263 263 @classmethod
264 264 def getAll(cls):
265 265 # deprecated and left for backward compatibility
266 266 return cls.get_all()
267 267
268 268 @classmethod
269 269 def get_all(cls):
270 270 return cls.query().all()
271 271
272 272 @classmethod
273 273 def delete(cls, id_):
274 274 obj = cls.query().get(id_)
275 275 Session().delete(obj)
276 276
277 277 @classmethod
278 278 def identity_cache(cls, session, attr_name, value):
279 279 exist_in_session = []
280 280 for (item_cls, pkey), instance in session.identity_map.items():
281 281 if cls == item_cls and getattr(instance, attr_name) == value:
282 282 exist_in_session.append(instance)
283 283 if exist_in_session:
284 284 if len(exist_in_session) == 1:
285 285 return exist_in_session[0]
286 286 log.exception(
287 287 'multiple objects with attr %s and '
288 288 'value %s found with same name: %r',
289 289 attr_name, value, exist_in_session)
290 290
291 291 def __repr__(self):
292 292 if hasattr(self, '__unicode__'):
293 293 # python repr needs to return str
294 294 try:
295 295 return safe_str(self.__unicode__())
296 296 except UnicodeDecodeError:
297 297 pass
298 298 return '<DB:%s>' % (self.__class__.__name__)
299 299
300 300
301 301 class RhodeCodeSetting(Base, BaseModel):
302 302 __tablename__ = 'rhodecode_settings'
303 303 __table_args__ = (
304 304 UniqueConstraint('app_settings_name'),
305 305 {'extend_existing': True, 'mysql_engine': 'InnoDB',
306 306 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
307 307 )
308 308
309 309 SETTINGS_TYPES = {
310 310 'str': safe_str,
311 311 'int': safe_int,
312 312 'unicode': safe_unicode,
313 313 'bool': str2bool,
314 314 'list': functools.partial(aslist, sep=',')
315 315 }
316 316 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
317 317 GLOBAL_CONF_KEY = 'app_settings'
318 318
319 319 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
320 320 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
321 321 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
322 322 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
323 323
324 324 def __init__(self, key='', val='', type='unicode'):
325 325 self.app_settings_name = key
326 326 self.app_settings_type = type
327 327 self.app_settings_value = val
328 328
329 329 @validates('_app_settings_value')
330 330 def validate_settings_value(self, key, val):
331 331 assert type(val) == unicode
332 332 return val
333 333
334 334 @hybrid_property
335 335 def app_settings_value(self):
336 336 v = self._app_settings_value
337 337 _type = self.app_settings_type
338 338 if _type:
339 339 _type = self.app_settings_type.split('.')[0]
340 340 # decode the encrypted value
341 341 if 'encrypted' in self.app_settings_type:
342 342 cipher = EncryptedTextValue()
343 343 v = safe_unicode(cipher.process_result_value(v, None))
344 344
345 345 converter = self.SETTINGS_TYPES.get(_type) or \
346 346 self.SETTINGS_TYPES['unicode']
347 347 return converter(v)
348 348
349 349 @app_settings_value.setter
350 350 def app_settings_value(self, val):
351 351 """
352 352 Setter that will always make sure we use unicode in app_settings_value
353 353
354 354 :param val:
355 355 """
356 356 val = safe_unicode(val)
357 357 # encode the encrypted value
358 358 if 'encrypted' in self.app_settings_type:
359 359 cipher = EncryptedTextValue()
360 360 val = safe_unicode(cipher.process_bind_param(val, None))
361 361 self._app_settings_value = val
362 362
363 363 @hybrid_property
364 364 def app_settings_type(self):
365 365 return self._app_settings_type
366 366
367 367 @app_settings_type.setter
368 368 def app_settings_type(self, val):
369 369 if val.split('.')[0] not in self.SETTINGS_TYPES:
370 370 raise Exception('type must be one of %s got %s'
371 371 % (self.SETTINGS_TYPES.keys(), val))
372 372 self._app_settings_type = val
373 373
374 374 def __unicode__(self):
375 375 return u"<%s('%s:%s[%s]')>" % (
376 376 self.__class__.__name__,
377 377 self.app_settings_name, self.app_settings_value,
378 378 self.app_settings_type
379 379 )
380 380
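Conversion happens on read: the stored string is passed through the converter registered for the setting's type. A small sketch with an illustrative key name:

    setting = RhodeCodeSetting('use_gravatar', 'True', 'bool')
    setting.app_settings_value   # str2bool(u'True') -> True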
381 381
382 382 class RhodeCodeUi(Base, BaseModel):
383 383 __tablename__ = 'rhodecode_ui'
384 384 __table_args__ = (
385 385 UniqueConstraint('ui_key'),
386 386 {'extend_existing': True, 'mysql_engine': 'InnoDB',
387 387 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
388 388 )
389 389
390 390 HOOK_REPO_SIZE = 'changegroup.repo_size'
391 391 # HG
392 392 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
393 393 HOOK_PULL = 'outgoing.pull_logger'
394 394 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
395 395 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
396 396 HOOK_PUSH = 'changegroup.push_logger'
397 397 HOOK_PUSH_KEY = 'pushkey.key_push'
398 398
399 399 # TODO: johbo: Unify way how hooks are configured for git and hg,
400 400 # git part is currently hardcoded.
401 401
402 402 # SVN PATTERNS
403 403 SVN_BRANCH_ID = 'vcs_svn_branch'
404 404 SVN_TAG_ID = 'vcs_svn_tag'
405 405
406 406 ui_id = Column(
407 407 "ui_id", Integer(), nullable=False, unique=True, default=None,
408 408 primary_key=True)
409 409 ui_section = Column(
410 410 "ui_section", String(255), nullable=True, unique=None, default=None)
411 411 ui_key = Column(
412 412 "ui_key", String(255), nullable=True, unique=None, default=None)
413 413 ui_value = Column(
414 414 "ui_value", String(255), nullable=True, unique=None, default=None)
415 415 ui_active = Column(
416 416 "ui_active", Boolean(), nullable=True, unique=None, default=True)
417 417
418 418 def __repr__(self):
419 419 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
420 420 self.ui_key, self.ui_value)
421 421
422 422
423 423 class RepoRhodeCodeSetting(Base, BaseModel):
424 424 __tablename__ = 'repo_rhodecode_settings'
425 425 __table_args__ = (
426 426 UniqueConstraint(
427 427 'app_settings_name', 'repository_id',
428 428 name='uq_repo_rhodecode_setting_name_repo_id'),
429 429 {'extend_existing': True, 'mysql_engine': 'InnoDB',
430 430 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
431 431 )
432 432
433 433 repository_id = Column(
434 434 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
435 435 nullable=False)
436 436 app_settings_id = Column(
437 437 "app_settings_id", Integer(), nullable=False, unique=True,
438 438 default=None, primary_key=True)
439 439 app_settings_name = Column(
440 440 "app_settings_name", String(255), nullable=True, unique=None,
441 441 default=None)
442 442 _app_settings_value = Column(
443 443 "app_settings_value", String(4096), nullable=True, unique=None,
444 444 default=None)
445 445 _app_settings_type = Column(
446 446 "app_settings_type", String(255), nullable=True, unique=None,
447 447 default=None)
448 448
449 449 repository = relationship('Repository')
450 450
451 451 def __init__(self, repository_id, key='', val='', type='unicode'):
452 452 self.repository_id = repository_id
453 453 self.app_settings_name = key
454 454 self.app_settings_type = type
455 455 self.app_settings_value = val
456 456
457 457 @validates('_app_settings_value')
458 458 def validate_settings_value(self, key, val):
459 459 assert type(val) == unicode
460 460 return val
461 461
462 462 @hybrid_property
463 463 def app_settings_value(self):
464 464 v = self._app_settings_value
465 465 type_ = self.app_settings_type
466 466 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
467 467 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
468 468 return converter(v)
469 469
470 470 @app_settings_value.setter
471 471 def app_settings_value(self, val):
472 472 """
473 473 Setter that will always make sure we use unicode in app_settings_value
474 474
475 475 :param val:
476 476 """
477 477 self._app_settings_value = safe_unicode(val)
478 478
479 479 @hybrid_property
480 480 def app_settings_type(self):
481 481 return self._app_settings_type
482 482
483 483 @app_settings_type.setter
484 484 def app_settings_type(self, val):
485 485 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
486 486 if val not in SETTINGS_TYPES:
487 487 raise Exception('type must be one of %s got %s'
488 488 % (SETTINGS_TYPES.keys(), val))
489 489 self._app_settings_type = val
490 490
491 491 def __unicode__(self):
492 492 return u"<%s('%s:%s:%s[%s]')>" % (
493 493 self.__class__.__name__, self.repository.repo_name,
494 494 self.app_settings_name, self.app_settings_value,
495 495 self.app_settings_type
496 496 )
497 497
498 498
499 499 class RepoRhodeCodeUi(Base, BaseModel):
500 500 __tablename__ = 'repo_rhodecode_ui'
501 501 __table_args__ = (
502 502 UniqueConstraint(
503 503 'repository_id', 'ui_section', 'ui_key',
504 504 name='uq_repo_rhodecode_ui_repository_id_section_key'),
505 505 {'extend_existing': True, 'mysql_engine': 'InnoDB',
506 506 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
507 507 )
508 508
509 509 repository_id = Column(
510 510 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
511 511 nullable=False)
512 512 ui_id = Column(
513 513 "ui_id", Integer(), nullable=False, unique=True, default=None,
514 514 primary_key=True)
515 515 ui_section = Column(
516 516 "ui_section", String(255), nullable=True, unique=None, default=None)
517 517 ui_key = Column(
518 518 "ui_key", String(255), nullable=True, unique=None, default=None)
519 519 ui_value = Column(
520 520 "ui_value", String(255), nullable=True, unique=None, default=None)
521 521 ui_active = Column(
522 522 "ui_active", Boolean(), nullable=True, unique=None, default=True)
523 523
524 524 repository = relationship('Repository')
525 525
526 526 def __repr__(self):
527 527 return '<%s[%s:%s]%s=>%s]>' % (
528 528 self.__class__.__name__, self.repository.repo_name,
529 529 self.ui_section, self.ui_key, self.ui_value)
530 530
531 531
532 532 class User(Base, BaseModel):
533 533 __tablename__ = 'users'
534 534 __table_args__ = (
535 535 UniqueConstraint('username'), UniqueConstraint('email'),
536 536 Index('u_username_idx', 'username'),
537 537 Index('u_email_idx', 'email'),
538 538 {'extend_existing': True, 'mysql_engine': 'InnoDB',
539 539 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
540 540 )
541 541 DEFAULT_USER = 'default'
542 542 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
543 543 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
544 544
545 545 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
546 546 username = Column("username", String(255), nullable=True, unique=None, default=None)
547 547 password = Column("password", String(255), nullable=True, unique=None, default=None)
548 548 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
549 549 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
550 550 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
551 551 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
552 552 _email = Column("email", String(255), nullable=True, unique=None, default=None)
553 553 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
554 554 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
555 555
556 556 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
557 557 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
558 558 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
559 559 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
560 560 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
561 561 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
562 562
563 563 user_log = relationship('UserLog')
564 564 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
565 565
566 566 repositories = relationship('Repository')
567 567 repository_groups = relationship('RepoGroup')
568 568 user_groups = relationship('UserGroup')
569 569
570 570 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
571 571 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
572 572
573 573 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
574 574 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
575 575 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
576 576
577 577 group_member = relationship('UserGroupMember', cascade='all')
578 578
579 579 notifications = relationship('UserNotification', cascade='all')
580 580 # notifications assigned to this user
581 581 user_created_notifications = relationship('Notification', cascade='all')
582 582 # comments created by this user
583 583 user_comments = relationship('ChangesetComment', cascade='all')
584 584 # user profile extra info
585 585 user_emails = relationship('UserEmailMap', cascade='all')
586 586 user_ip_map = relationship('UserIpMap', cascade='all')
587 587 user_auth_tokens = relationship('UserApiKeys', cascade='all')
588 588 user_ssh_keys = relationship('UserSshKeys', cascade='all')
589 589
590 590 # gists
591 591 user_gists = relationship('Gist', cascade='all')
592 592 # user pull requests
593 593 user_pull_requests = relationship('PullRequest', cascade='all')
594 594 # external identities
595 595 extenal_identities = relationship(
596 596 'ExternalIdentity',
597 597 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
598 598 cascade='all')
599 599 # review rules
600 600 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
601 601
602 602 def __unicode__(self):
603 603 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
604 604 self.user_id, self.username)
605 605
606 606 @hybrid_property
607 607 def email(self):
608 608 return self._email
609 609
610 610 @email.setter
611 611 def email(self, val):
612 612 self._email = val.lower() if val else None
613 613
614 614 @hybrid_property
615 615 def first_name(self):
616 616 from rhodecode.lib import helpers as h
617 617 if self.name:
618 618 return h.escape(self.name)
619 619 return self.name
620 620
621 621 @hybrid_property
622 622 def last_name(self):
623 623 from rhodecode.lib import helpers as h
624 624 if self.lastname:
625 625 return h.escape(self.lastname)
626 626 return self.lastname
627 627
628 628 @hybrid_property
629 629 def api_key(self):
630 630 """
631 631 Fetch the auth-token with role ALL connected to this user, if one exists
632 632 """
633 633 user_auth_token = UserApiKeys.query()\
634 634 .filter(UserApiKeys.user_id == self.user_id)\
635 635 .filter(or_(UserApiKeys.expires == -1,
636 636 UserApiKeys.expires >= time.time()))\
637 637 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
638 638 if user_auth_token:
639 639 user_auth_token = user_auth_token.api_key
640 640
641 641 return user_auth_token
642 642
643 643 @api_key.setter
644 644 def api_key(self, val):
645 645 # don't allow setting the API key; this is deprecated for now
646 646 self._api_key = None
647 647
648 648 @property
649 649 def reviewer_pull_requests(self):
650 650 return PullRequestReviewers.query() \
651 651 .options(joinedload(PullRequestReviewers.pull_request)) \
652 652 .filter(PullRequestReviewers.user_id == self.user_id) \
653 653 .all()
654 654
655 655 @property
656 656 def firstname(self):
657 657 # alias for future
658 658 return self.name
659 659
660 660 @property
661 661 def emails(self):
662 662 other = UserEmailMap.query()\
663 663 .filter(UserEmailMap.user == self) \
664 664 .order_by(UserEmailMap.email_id.asc()) \
665 665 .all()
666 666 return [self.email] + [x.email for x in other]
667 667
668 668 @property
669 669 def auth_tokens(self):
670 670 auth_tokens = self.get_auth_tokens()
671 671 return [x.api_key for x in auth_tokens]
672 672
673 673 def get_auth_tokens(self):
674 674 return UserApiKeys.query()\
675 675 .filter(UserApiKeys.user == self)\
676 676 .order_by(UserApiKeys.user_api_key_id.asc())\
677 677 .all()
678 678
679 679 @LazyProperty
680 680 def feed_token(self):
681 681 return self.get_feed_token()
682 682
683 683 def get_feed_token(self, cache=True):
684 684 feed_tokens = UserApiKeys.query()\
685 685 .filter(UserApiKeys.user == self)\
686 686 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
687 687 if cache:
688 688 feed_tokens = feed_tokens.options(
689 689 FromCache("long_term", "get_user_feed_token_%s" % self.user_id))
690 690
691 691 feed_tokens = feed_tokens.all()
692 692 if feed_tokens:
693 693 return feed_tokens[0].api_key
694 694 return 'NO_FEED_TOKEN_AVAILABLE'
695 695
696 696 @classmethod
697 697 def get(cls, user_id, cache=False):
698 698 if not user_id:
699 699 return
700 700
701 701 user = cls.query()
702 702 if cache:
703 703 user = user.options(
704 704 FromCache("sql_cache_short", "get_users_%s" % user_id))
705 705 return user.get(user_id)
706 706
707 707 @classmethod
708 708 def extra_valid_auth_tokens(cls, user, role=None):
709 709 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
710 710 .filter(or_(UserApiKeys.expires == -1,
711 711 UserApiKeys.expires >= time.time()))
712 712 if role:
713 713 tokens = tokens.filter(or_(UserApiKeys.role == role,
714 714 UserApiKeys.role == UserApiKeys.ROLE_ALL))
715 715 return tokens.all()
716 716
717 717 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
718 718 from rhodecode.lib import auth
719 719
720 720 log.debug('Trying to authenticate user: %s via auth-token, '
721 721 'and roles: %s', self, roles)
722 722
723 723 if not auth_token:
724 724 return False
725 725
726 726 crypto_backend = auth.crypto_backend()
727 727
728 728 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
729 729 tokens_q = UserApiKeys.query()\
730 730 .filter(UserApiKeys.user_id == self.user_id)\
731 731 .filter(or_(UserApiKeys.expires == -1,
732 732 UserApiKeys.expires >= time.time()))
733 733
734 734 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
735 735
736 736 plain_tokens = []
737 737 hash_tokens = []
738 738
739 739 for token in tokens_q.all():
740 740 # verify scope first
741 741 if token.repo_id:
742 742 # token has a scope, we need to verify it
743 743 if scope_repo_id != token.repo_id:
744 744 log.debug(
745 745 'Scope mismatch: token has a set repo scope: %s, '
746 746 'and calling scope is:%s, skipping further checks',
747 747 token.repo, scope_repo_id)
748 748 # token has a scope, and it doesn't match, skip token
749 749 continue
750 750
751 751 if token.api_key.startswith(crypto_backend.ENC_PREF):
752 752 hash_tokens.append(token.api_key)
753 753 else:
754 754 plain_tokens.append(token.api_key)
755 755
756 756 is_plain_match = auth_token in plain_tokens
757 757 if is_plain_match:
758 758 return True
759 759
760 760 for hashed in hash_tokens:
761 761 # TODO(marcink): this is expensive to calculate, but most secure
762 762 match = crypto_backend.hash_check(auth_token, hashed)
763 763 if match:
764 764 return True
765 765
766 766 return False
767 767
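The token matching above in brief: plain tokens are compared directly, while hashed tokens (recognisable by the crypto backend's ENC_PREF prefix) go through the expensive hash check. A condensed sketch:

    if auth_token in plain_tokens:
        matched = True
    else:
        matched = any(
            crypto_backend.hash_check(auth_token, hashed)
            for hashed in hash_tokens)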
768 768 @property
769 769 def ip_addresses(self):
770 770 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
771 771 return [x.ip_addr for x in ret]
772 772
773 773 @property
774 774 def username_and_name(self):
775 775 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
776 776
777 777 @property
778 778 def username_or_name_or_email(self):
779 779 full_name = self.full_name if self.full_name != ' ' else None
780 780 return self.username or full_name or self.email
781 781
782 782 @property
783 783 def full_name(self):
784 784 return '%s %s' % (self.first_name, self.last_name)
785 785
786 786 @property
787 787 def full_name_or_username(self):
788 788 return ('%s %s' % (self.first_name, self.last_name)
789 789 if (self.first_name and self.last_name) else self.username)
790 790
791 791 @property
792 792 def full_contact(self):
793 793 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
794 794
795 795 @property
796 796 def short_contact(self):
797 797 return '%s %s' % (self.first_name, self.last_name)
798 798
799 799 @property
800 800 def is_admin(self):
801 801 return self.admin
802 802
803 803 def AuthUser(self, **kwargs):
804 804 """
805 805 Returns instance of AuthUser for this user
806 806 """
807 807 from rhodecode.lib.auth import AuthUser
808 808 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
809 809
810 810 @hybrid_property
811 811 def user_data(self):
812 812 if not self._user_data:
813 813 return {}
814 814
815 815 try:
816 816 return json.loads(self._user_data)
817 817 except TypeError:
818 818 return {}
819 819
820 820 @user_data.setter
821 821 def user_data(self, val):
822 822 if not isinstance(val, dict):
823 823 raise Exception('user_data must be dict, got %s' % type(val))
824 824 try:
825 825 self._user_data = json.dumps(val)
826 826 except Exception:
827 827 log.error(traceback.format_exc())
828 828
829 829 @classmethod
830 830 def get_by_username(cls, username, case_insensitive=False,
831 831 cache=False, identity_cache=False):
832 832 session = Session()
833 833
834 834 if case_insensitive:
835 835 q = cls.query().filter(
836 836 func.lower(cls.username) == func.lower(username))
837 837 else:
838 838 q = cls.query().filter(cls.username == username)
839 839
840 840 if cache:
841 841 if identity_cache:
842 842 val = cls.identity_cache(session, 'username', username)
843 843 if val:
844 844 return val
845 845 else:
846 846 cache_key = "get_user_by_name_%s" % _hash_key(username)
847 847 q = q.options(
848 848 FromCache("sql_cache_short", cache_key))
849 849
850 850 return q.scalar()
851 851
852 852 @classmethod
853 853 def get_by_auth_token(cls, auth_token, cache=False):
854 854 q = UserApiKeys.query()\
855 855 .filter(UserApiKeys.api_key == auth_token)\
856 856 .filter(or_(UserApiKeys.expires == -1,
857 857 UserApiKeys.expires >= time.time()))
858 858 if cache:
859 859 q = q.options(
860 860 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
861 861
862 862 match = q.first()
863 863 if match:
864 864 return match.user
865 865
866 866 @classmethod
867 867 def get_by_email(cls, email, case_insensitive=False, cache=False):
868 868
869 869 if case_insensitive:
870 870 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
871 871
872 872 else:
873 873 q = cls.query().filter(cls.email == email)
874 874
875 875 email_key = _hash_key(email)
876 876 if cache:
877 877 q = q.options(
878 878 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
879 879
880 880 ret = q.scalar()
881 881 if ret is None:
882 882 q = UserEmailMap.query()
883 883 # try fetching in alternate email map
884 884 if case_insensitive:
885 885 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
886 886 else:
887 887 q = q.filter(UserEmailMap.email == email)
888 888 q = q.options(joinedload(UserEmailMap.user))
889 889 if cache:
890 890 q = q.options(
891 891 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
892 892 ret = getattr(q.scalar(), 'user', None)
893 893
894 894 return ret
895 895
896 896 @classmethod
897 897 def get_from_cs_author(cls, author):
898 898 """
899 899 Tries to get User objects out of commit author string
900 900
901 901 :param author:
902 902 """
903 903 from rhodecode.lib.helpers import email, author_name
904 904 # Valid email in the passed attribute, see if it's in the system
905 905 _email = email(author)
906 906 if _email:
907 907 user = cls.get_by_email(_email, case_insensitive=True)
908 908 if user:
909 909 return user
910 910 # Maybe we can match by username?
911 911 _author = author_name(author)
912 912 user = cls.get_by_username(_author, case_insensitive=True)
913 913 if user:
914 914 return user
915 915
916 916 def update_userdata(self, **kwargs):
917 917 usr = self
918 918 old = usr.user_data
919 919 old.update(**kwargs)
920 920 usr.user_data = old
921 921 Session().add(usr)
922 922 log.debug('updated userdata with %s', kwargs)
923 923
924 924 def update_lastlogin(self):
925 925 """Update user lastlogin"""
926 926 self.last_login = datetime.datetime.now()
927 927 Session().add(self)
928 928 log.debug('updated user %s lastlogin', self.username)
929 929
930 930 def update_lastactivity(self):
931 931 """Update user lastactivity"""
932 932 self.last_activity = datetime.datetime.now()
933 933 Session().add(self)
934 934 log.debug('updated user `%s` last activity', self.username)
935 935
936 936 def update_password(self, new_password):
937 937 from rhodecode.lib.auth import get_crypt_password
938 938
939 939 self.password = get_crypt_password(new_password)
940 940 Session().add(self)
941 941
942 942 @classmethod
943 943 def get_first_super_admin(cls):
944 944 user = User.query().filter(User.admin == true()).first()
945 945 if user is None:
946 946 raise Exception('FATAL: Missing administrative account!')
947 947 return user
948 948
949 949 @classmethod
950 950 def get_all_super_admins(cls):
951 951 """
952 952 Returns all admin accounts sorted by username
953 953 """
954 954 return User.query().filter(User.admin == true())\
955 955 .order_by(User.username.asc()).all()
956 956
957 957 @classmethod
958 958 def get_default_user(cls, cache=False, refresh=False):
959 959 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
960 960 if user is None:
961 961 raise Exception('FATAL: Missing default account!')
962 962 if refresh:
963 963 # The default user might be based on outdated state which
964 964 # has been loaded from the cache.
965 965 # A call to refresh() ensures that the
966 966 # latest state from the database is used.
967 967 Session().refresh(user)
968 968 return user
969 969
970 970 def _get_default_perms(self, user, suffix=''):
971 971 from rhodecode.model.permission import PermissionModel
972 972 return PermissionModel().get_default_perms(user.user_perms, suffix)
973 973
974 974 def get_default_perms(self, suffix=''):
975 975 return self._get_default_perms(self, suffix)
976 976
977 977 def get_api_data(self, include_secrets=False, details='full'):
978 978 """
979 979 Common function for generating user related data for API
980 980
981 981 :param include_secrets: By default secrets in the API data will be replaced
982 982 by a placeholder value to prevent exposing this data by accident. If
983 983 this data should be exposed, set this flag to ``True``.
984 984
985 985 :param details: can be 'basic' or 'full'; 'basic' gives only a subset of
986 986 the available user information that includes user_id, name and emails.
987 987 """
988 988 user = self
989 989 user_data = self.user_data
990 990 data = {
991 991 'user_id': user.user_id,
992 992 'username': user.username,
993 993 'firstname': user.name,
994 994 'lastname': user.lastname,
995 995 'email': user.email,
996 996 'emails': user.emails,
997 997 }
998 998 if details == 'basic':
999 999 return data
1000 1000
1001 1001 auth_token_length = 40
1002 1002 auth_token_replacement = '*' * auth_token_length
1003 1003
1004 1004 extras = {
1005 1005 'auth_tokens': [auth_token_replacement],
1006 1006 'active': user.active,
1007 1007 'admin': user.admin,
1008 1008 'extern_type': user.extern_type,
1009 1009 'extern_name': user.extern_name,
1010 1010 'last_login': user.last_login,
1011 1011 'last_activity': user.last_activity,
1012 1012 'ip_addresses': user.ip_addresses,
1013 1013 'language': user_data.get('language')
1014 1014 }
1015 1015 data.update(extras)
1016 1016
1017 1017 if include_secrets:
1018 1018 data['auth_tokens'] = user.auth_tokens
1019 1019 return data
1020 1020
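A hedged usage sketch of get_api_data(): with details='basic' only the identity subset is returned, and secrets stay masked unless include_secrets=True is passed explicitly.

    data = user.get_api_data(details='basic')
    sorted(data)
    # -> ['email', 'emails', 'firstname', 'lastname', 'user_id', 'username']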
1021 1021 def __json__(self):
1022 1022 data = {
1023 1023 'full_name': self.full_name,
1024 1024 'full_name_or_username': self.full_name_or_username,
1025 1025 'short_contact': self.short_contact,
1026 1026 'full_contact': self.full_contact,
1027 1027 }
1028 1028 data.update(self.get_api_data())
1029 1029 return data
1030 1030
1031 1031
1032 1032 class UserApiKeys(Base, BaseModel):
1033 1033 __tablename__ = 'user_api_keys'
1034 1034 __table_args__ = (
1035 1035 Index('uak_api_key_idx', 'api_key', unique=True),
1036 1036 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1037 1037 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1038 1038 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1039 1039 )
1040 1040 __mapper_args__ = {}
1041 1041
1042 1042 # ApiKey role
1043 1043 ROLE_ALL = 'token_role_all'
1044 1044 ROLE_HTTP = 'token_role_http'
1045 1045 ROLE_VCS = 'token_role_vcs'
1046 1046 ROLE_API = 'token_role_api'
1047 1047 ROLE_FEED = 'token_role_feed'
1048 1048 ROLE_PASSWORD_RESET = 'token_password_reset'
1049 1049
1050 1050 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
1051 1051
1052 1052 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1053 1053 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1054 1054 api_key = Column("api_key", String(255), nullable=False, unique=True)
1055 1055 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1056 1056 expires = Column('expires', Float(53), nullable=False)
1057 1057 role = Column('role', String(255), nullable=True)
1058 1058 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1059 1059
1060 1060 # scope columns
1061 1061 repo_id = Column(
1062 1062 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1063 1063 nullable=True, unique=None, default=None)
1064 1064 repo = relationship('Repository', lazy='joined')
1065 1065
1066 1066 repo_group_id = Column(
1067 1067 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1068 1068 nullable=True, unique=None, default=None)
1069 1069 repo_group = relationship('RepoGroup', lazy='joined')
1070 1070
1071 1071 user = relationship('User', lazy='joined')
1072 1072
1073 1073 def __unicode__(self):
1074 1074 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1075 1075
1076 1076 def __json__(self):
1077 1077 data = {
1078 1078 'auth_token': self.api_key,
1079 1079 'role': self.role,
1080 1080 'scope': self.scope_humanized,
1081 1081 'expired': self.expired
1082 1082 }
1083 1083 return data
1084 1084
1085 1085 def get_api_data(self, include_secrets=False):
1086 1086 data = self.__json__()
1087 1087 if include_secrets:
1088 1088 return data
1089 1089 else:
1090 1090 data['auth_token'] = self.token_obfuscated
1091 1091 return data
1092 1092
1093 1093 @hybrid_property
1094 1094 def description_safe(self):
1095 1095 from rhodecode.lib import helpers as h
1096 1096 return h.escape(self.description)
1097 1097
1098 1098 @property
1099 1099 def expired(self):
1100 1100 if self.expires == -1:
1101 1101 return False
1102 1102 return time.time() > self.expires
1103 1103
1104 1104 @classmethod
1105 1105 def _get_role_name(cls, role):
1106 1106 return {
1107 1107 cls.ROLE_ALL: _('all'),
1108 1108 cls.ROLE_HTTP: _('http/web interface'),
1109 1109 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1110 1110 cls.ROLE_API: _('api calls'),
1111 1111 cls.ROLE_FEED: _('feed access'),
1112 1112 }.get(role, role)
1113 1113
1114 1114 @property
1115 1115 def role_humanized(self):
1116 1116 return self._get_role_name(self.role)
1117 1117
1118 1118 def _get_scope(self):
1119 1119 if self.repo:
1120 1120 return repr(self.repo)
1121 1121 if self.repo_group:
1122 1122 return repr(self.repo_group) + ' (recursive)'
1123 1123 return 'global'
1124 1124
1125 1125 @property
1126 1126 def scope_humanized(self):
1127 1127 return self._get_scope()
1128 1128
1129 1129 @property
1130 1130 def token_obfuscated(self):
1131 1131 if self.api_key:
1132 1132 return self.api_key[:4] + "****"
1133 1133
1134 1134
1135 1135 class UserEmailMap(Base, BaseModel):
1136 1136 __tablename__ = 'user_email_map'
1137 1137 __table_args__ = (
1138 1138 Index('uem_email_idx', 'email'),
1139 1139 UniqueConstraint('email'),
1140 1140 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1141 1141 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1142 1142 )
1143 1143 __mapper_args__ = {}
1144 1144
1145 1145 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1146 1146 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1147 1147 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1148 1148 user = relationship('User', lazy='joined')
1149 1149
1150 1150 @validates('_email')
1151 1151 def validate_email(self, key, email):
1152 1152 # check that this email is not the main one
1153 1153 main_email = Session().query(User).filter(User.email == email).scalar()
1154 1154 if main_email is not None:
1155 1155 raise AttributeError('email %s is already present in the user table' % email)
1156 1156 return email
1157 1157
1158 1158 @hybrid_property
1159 1159 def email(self):
1160 1160 return self._email
1161 1161
1162 1162 @email.setter
1163 1163 def email(self, val):
1164 1164 self._email = val.lower() if val else None
1165 1165
1166 1166
1167 1167 class UserIpMap(Base, BaseModel):
1168 1168 __tablename__ = 'user_ip_map'
1169 1169 __table_args__ = (
1170 1170 UniqueConstraint('user_id', 'ip_addr'),
1171 1171 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1172 1172 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1173 1173 )
1174 1174 __mapper_args__ = {}
1175 1175
1176 1176 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1177 1177 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1178 1178 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1179 1179 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1180 1180 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1181 1181 user = relationship('User', lazy='joined')
1182 1182
1183 1183 @hybrid_property
1184 1184 def description_safe(self):
1185 1185 from rhodecode.lib import helpers as h
1186 1186 return h.escape(self.description)
1187 1187
1188 1188 @classmethod
1189 1189 def _get_ip_range(cls, ip_addr):
1190 1190 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1191 1191 return [str(net.network_address), str(net.broadcast_address)]
1192 1192
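_get_ip_range() expands an address or CIDR block into its boundary addresses, e.g.:

    import ipaddress
    net = ipaddress.ip_network(u'10.0.0.0/30', strict=False)
    [str(net.network_address), str(net.broadcast_address)]
    # -> ['10.0.0.0', '10.0.0.3']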
1193 1193 def __json__(self):
1194 1194 return {
1195 1195 'ip_addr': self.ip_addr,
1196 1196 'ip_range': self._get_ip_range(self.ip_addr),
1197 1197 }
1198 1198
1199 1199 def __unicode__(self):
1200 1200 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1201 1201 self.user_id, self.ip_addr)
1202 1202
1203 1203
1204 1204 class UserSshKeys(Base, BaseModel):
1205 1205 __tablename__ = 'user_ssh_keys'
1206 1206 __table_args__ = (
1207 1207 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1208 1208
1209 1209 UniqueConstraint('ssh_key_fingerprint'),
1210 1210
1211 1211 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1212 1212 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1213 1213 )
1214 1214 __mapper_args__ = {}
1215 1215
1216 1216 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1217 1217 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1218 1218 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1219 1219
1220 1220 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1221 1221
1222 1222 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1223 1223 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1224 1224 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1225 1225
1226 1226 user = relationship('User', lazy='joined')
1227 1227
1228 1228 def __json__(self):
1229 1229 data = {
1230 1230 'ssh_fingerprint': self.ssh_key_fingerprint,
1231 1231 'description': self.description,
1232 1232 'created_on': self.created_on
1233 1233 }
1234 1234 return data
1235 1235
1236 1236 def get_api_data(self):
1237 1237 data = self.__json__()
1238 1238 return data
1239 1239
1240 1240
1241 1241 class UserLog(Base, BaseModel):
1242 1242 __tablename__ = 'user_logs'
1243 1243 __table_args__ = (
1244 1244 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1245 1245 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1246 1246 )
1247 1247 VERSION_1 = 'v1'
1248 1248 VERSION_2 = 'v2'
1249 1249 VERSIONS = [VERSION_1, VERSION_2]
1250 1250
1251 1251 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1252 1252 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1253 1253 username = Column("username", String(255), nullable=True, unique=None, default=None)
1254 1254 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1255 1255 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1256 1256 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1257 1257 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1258 1258 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1259 1259
1260 1260 version = Column("version", String(255), nullable=True, default=VERSION_1)
1261 1261 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1262 1262 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1263 1263
1264 1264 def __unicode__(self):
1265 1265 return u"<%s('id:%s:%s')>" % (
1266 1266 self.__class__.__name__, self.repository_name, self.action)
1267 1267
1268 1268 def __json__(self):
1269 1269 return {
1270 1270 'user_id': self.user_id,
1271 1271 'username': self.username,
1272 1272 'repository_id': self.repository_id,
1273 1273 'repository_name': self.repository_name,
1274 1274 'user_ip': self.user_ip,
1275 1275 'action_date': self.action_date,
1276 1276 'action': self.action,
1277 1277 }
1278 1278
1279 1279 @hybrid_property
1280 1280 def entry_id(self):
1281 1281 return self.user_log_id
1282 1282
1283 1283 @property
1284 1284 def action_as_day(self):
1285 1285 return datetime.date(*self.action_date.timetuple()[:3])
1286 1286
1287 1287 user = relationship('User')
1288 1288 repository = relationship('Repository', cascade='')
1289 1289
1290 1290
1291 1291 class UserGroup(Base, BaseModel):
1292 1292 __tablename__ = 'users_groups'
1293 1293 __table_args__ = (
1294 1294 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1295 1295 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1296 1296 )
1297 1297
1298 1298 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1299 1299 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1300 1300 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1301 1301 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1302 1302 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1303 1303 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1304 1304 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1305 1305 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1306 1306
1307 1307 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1308 1308 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1309 1309 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1310 1310 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1311 1311 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1312 1312 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1313 1313
1314 1314 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1315 1315 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1316 1316
1317 1317 @classmethod
1318 1318 def _load_group_data(cls, column):
1319 1319 if not column:
1320 1320 return {}
1321 1321
1322 1322 try:
1323 1323 return json.loads(column) or {}
1324 1324 except TypeError:
1325 1325 return {}
1326 1326
1327 1327 @hybrid_property
1328 1328 def description_safe(self):
1329 1329 from rhodecode.lib import helpers as h
1330 1330 return h.escape(self.user_group_description)
1331 1331
1332 1332 @hybrid_property
1333 1333 def group_data(self):
1334 1334 return self._load_group_data(self._group_data)
1335 1335
1336 1336 @group_data.expression
1337 1337 def group_data(self, **kwargs):
1338 1338 return self._group_data
1339 1339
1340 1340 @group_data.setter
1341 1341 def group_data(self, val):
1342 1342 try:
1343 1343 self._group_data = json.dumps(val)
1344 1344 except Exception:
1345 1345 log.error(traceback.format_exc())
1346 1346
1347 1347 @classmethod
1348 1348 def _load_sync(cls, group_data):
1349 1349 if group_data:
1350 1350 return group_data.get('extern_type')
1351 1351
1352 1352 @property
1353 1353 def sync(self):
1354 1354 return self._load_sync(self.group_data)
1355 1355
1356 1356 def __unicode__(self):
1357 1357 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1358 1358 self.users_group_id,
1359 1359 self.users_group_name)
1360 1360
1361 1361 @classmethod
1362 1362 def get_by_group_name(cls, group_name, cache=False,
1363 1363 case_insensitive=False):
1364 1364 if case_insensitive:
1365 1365 q = cls.query().filter(func.lower(cls.users_group_name) ==
1366 1366 func.lower(group_name))
1367 1367
1368 1368 else:
1369 1369 q = cls.query().filter(cls.users_group_name == group_name)
1370 1370 if cache:
1371 1371 q = q.options(
1372 1372 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1373 1373 return q.scalar()
1374 1374
1375 1375 @classmethod
1376 1376 def get(cls, user_group_id, cache=False):
1377 1377 if not user_group_id:
1378 1378 return
1379 1379
1380 1380 user_group = cls.query()
1381 1381 if cache:
1382 1382 user_group = user_group.options(
1383 1383 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1384 1384 return user_group.get(user_group_id)
1385 1385
1386 1386 def permissions(self, with_admins=True, with_owner=True):
1387 1387 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1388 1388 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1389 1389 joinedload(UserUserGroupToPerm.user),
1390 1390 joinedload(UserUserGroupToPerm.permission),)
1391 1391
1392 1392 # get owners, admins and their permissions. We re-write the sqlalchemy
1393 1393 # objects into named-tuples because the sqlalchemy session keeps a
1394 1394 # global reference, so changing one object would propagate to all
1395 1395 # others. This means that if an admin is also the owner, an admin_row
1396 1396 # change would propagate to both objects
1397 1397 perm_rows = []
1398 1398 for _usr in q.all():
1399 1399 usr = AttributeDict(_usr.user.get_dict())
1400 1400 usr.permission = _usr.permission.permission_name
1401 1401 perm_rows.append(usr)
1402 1402
1403 1403 # filter the perm rows by 'default' first, then sort them by
1404 1404 # admin, write, read, none permission, and alphabetically within
1405 1405 # each group
1406 1406 perm_rows = sorted(perm_rows, key=display_user_sort)
1407 1407
1408 1408 _admin_perm = 'usergroup.admin'
1409 1409 owner_row = []
1410 1410 if with_owner:
1411 1411 usr = AttributeDict(self.user.get_dict())
1412 1412 usr.owner_row = True
1413 1413 usr.permission = _admin_perm
1414 1414 owner_row.append(usr)
1415 1415
1416 1416 super_admin_rows = []
1417 1417 if with_admins:
1418 1418 for usr in User.get_all_super_admins():
1419 1419 # if this admin is also owner, don't double the record
1420 1420 if usr.user_id == owner_row[0].user_id:
1421 1421 owner_row[0].admin_row = True
1422 1422 else:
1423 1423 usr = AttributeDict(usr.get_dict())
1424 1424 usr.admin_row = True
1425 1425 usr.permission = _admin_perm
1426 1426 super_admin_rows.append(usr)
1427 1427
1428 1428 return super_admin_rows + owner_row + perm_rows
1429 1429
1430 1430 def permission_user_groups(self):
1431 1431 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1432 1432 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1433 1433 joinedload(UserGroupUserGroupToPerm.target_user_group),
1434 1434 joinedload(UserGroupUserGroupToPerm.permission),)
1435 1435
1436 1436 perm_rows = []
1437 1437 for _user_group in q.all():
1438 1438 usr = AttributeDict(_user_group.user_group.get_dict())
1439 1439 usr.permission = _user_group.permission.permission_name
1440 1440 perm_rows.append(usr)
1441 1441
1442 1442 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1443 1443 return perm_rows
1444 1444
1445 1445 def _get_default_perms(self, user_group, suffix=''):
1446 1446 from rhodecode.model.permission import PermissionModel
1447 1447 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1448 1448
1449 1449 def get_default_perms(self, suffix=''):
1450 1450 return self._get_default_perms(self, suffix)
1451 1451
1452 1452 def get_api_data(self, with_group_members=True, include_secrets=False):
1453 1453 """
1454 1454 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1455 1455 basically forwarded.
1456 1456
1457 1457 """
1458 1458 user_group = self
1459 1459 data = {
1460 1460 'users_group_id': user_group.users_group_id,
1461 1461 'group_name': user_group.users_group_name,
1462 1462 'group_description': user_group.user_group_description,
1463 1463 'active': user_group.users_group_active,
1464 1464 'owner': user_group.user.username,
1465 1465 'sync': user_group.sync,
1466 1466 'owner_email': user_group.user.email,
1467 1467 }
1468 1468
1469 1469 if with_group_members:
1470 1470 users = []
1471 1471 for user in user_group.members:
1472 1472 user = user.user
1473 1473 users.append(user.get_api_data(include_secrets=include_secrets))
1474 1474 data['users'] = users
1475 1475
1476 1476 return data
1477 1477
1478 1478
1479 1479 class UserGroupMember(Base, BaseModel):
1480 1480 __tablename__ = 'users_groups_members'
1481 1481 __table_args__ = (
1482 1482 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1483 1483 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1484 1484 )
1485 1485
1486 1486 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1487 1487 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1488 1488 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1489 1489
1490 1490 user = relationship('User', lazy='joined')
1491 1491 users_group = relationship('UserGroup')
1492 1492
1493 1493 def __init__(self, gr_id='', u_id=''):
1494 1494 self.users_group_id = gr_id
1495 1495 self.user_id = u_id
1496 1496
1497 1497
1498 1498 class RepositoryField(Base, BaseModel):
1499 1499 __tablename__ = 'repositories_fields'
1500 1500 __table_args__ = (
1501 1501 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1502 1502 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1503 1503 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1504 1504 )
1505 1505 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1506 1506
1507 1507 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1508 1508 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1509 1509 field_key = Column("field_key", String(250))
1510 1510 field_label = Column("field_label", String(1024), nullable=False)
1511 1511 field_value = Column("field_value", String(10000), nullable=False)
1512 1512 field_desc = Column("field_desc", String(1024), nullable=False)
1513 1513 field_type = Column("field_type", String(255), nullable=False, unique=None)
1514 1514 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1515 1515
1516 1516 repository = relationship('Repository')
1517 1517
1518 1518 @property
1519 1519 def field_key_prefixed(self):
1520 1520 return 'ex_%s' % self.field_key
1521 1521
1522 1522 @classmethod
1523 1523 def un_prefix_key(cls, key):
1524 1524 if key.startswith(cls.PREFIX):
1525 1525 return key[len(cls.PREFIX):]
1526 1526 return key
1527 1527
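    # Illustrative sketch: the 'ex_' PREFIX is added for form rendering and
    # stripped again on the way back, e.g.:
    #
    #   >>> RepositoryField.un_prefix_key('ex_ticket_url')
    #   'ticket_url'
    #
    # ('ticket_url' is a hypothetical field key used only as an example.)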
1528 1528 @classmethod
1529 1529 def get_by_key_name(cls, key, repo):
1530 1530 row = cls.query()\
1531 1531 .filter(cls.repository == repo)\
1532 1532 .filter(cls.field_key == key).scalar()
1533 1533 return row
1534 1534
1535 1535
1536 1536 class Repository(Base, BaseModel):
1537 1537 __tablename__ = 'repositories'
1538 1538 __table_args__ = (
1539 1539 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1540 1540 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1541 1541 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1542 1542 )
1543 1543 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1544 1544 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1545 1545 DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'
1546 1546
1547 1547 STATE_CREATED = 'repo_state_created'
1548 1548 STATE_PENDING = 'repo_state_pending'
1549 1549 STATE_ERROR = 'repo_state_error'
1550 1550
1551 1551 LOCK_AUTOMATIC = 'lock_auto'
1552 1552 LOCK_API = 'lock_api'
1553 1553 LOCK_WEB = 'lock_web'
1554 1554 LOCK_PULL = 'lock_pull'
1555 1555
1556 1556 NAME_SEP = URL_SEP
1557 1557
1558 1558 repo_id = Column(
1559 1559 "repo_id", Integer(), nullable=False, unique=True, default=None,
1560 1560 primary_key=True)
1561 1561 _repo_name = Column(
1562 1562 "repo_name", Text(), nullable=False, default=None)
1563 1563 _repo_name_hash = Column(
1564 1564 "repo_name_hash", String(255), nullable=False, unique=True)
1565 1565 repo_state = Column("repo_state", String(255), nullable=True)
1566 1566
1567 1567 clone_uri = Column(
1568 1568 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1569 1569 default=None)
1570 1570 push_uri = Column(
1571 1571 "push_uri", EncryptedTextValue(), nullable=True, unique=False,
1572 1572 default=None)
1573 1573 repo_type = Column(
1574 1574 "repo_type", String(255), nullable=False, unique=False, default=None)
1575 1575 user_id = Column(
1576 1576 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1577 1577 unique=False, default=None)
1578 1578 private = Column(
1579 1579 "private", Boolean(), nullable=True, unique=None, default=None)
1580 1580 enable_statistics = Column(
1581 1581 "statistics", Boolean(), nullable=True, unique=None, default=True)
1582 1582 enable_downloads = Column(
1583 1583 "downloads", Boolean(), nullable=True, unique=None, default=True)
1584 1584 description = Column(
1585 1585 "description", String(10000), nullable=True, unique=None, default=None)
1586 1586 created_on = Column(
1587 1587 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1588 1588 default=datetime.datetime.now)
1589 1589 updated_on = Column(
1590 1590 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1591 1591 default=datetime.datetime.now)
1592 1592 _landing_revision = Column(
1593 1593 "landing_revision", String(255), nullable=False, unique=False,
1594 1594 default=None)
1595 1595 enable_locking = Column(
1596 1596 "enable_locking", Boolean(), nullable=False, unique=None,
1597 1597 default=False)
1598 1598 _locked = Column(
1599 1599 "locked", String(255), nullable=True, unique=False, default=None)
1600 1600 _changeset_cache = Column(
1601 1601 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1602 1602
1603 1603 fork_id = Column(
1604 1604 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1605 1605 nullable=True, unique=False, default=None)
1606 1606 group_id = Column(
1607 1607 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1608 1608 unique=False, default=None)
1609 1609
1610 1610 user = relationship('User', lazy='joined')
1611 1611 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1612 1612 group = relationship('RepoGroup', lazy='joined')
1613 1613 repo_to_perm = relationship(
1614 1614 'UserRepoToPerm', cascade='all',
1615 1615 order_by='UserRepoToPerm.repo_to_perm_id')
1616 1616 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1617 1617 stats = relationship('Statistics', cascade='all', uselist=False)
1618 1618
1619 1619 followers = relationship(
1620 1620 'UserFollowing',
1621 1621 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1622 1622 cascade='all')
1623 1623 extra_fields = relationship(
1624 1624 'RepositoryField', cascade="all, delete, delete-orphan")
1625 1625 logs = relationship('UserLog')
1626 1626 comments = relationship(
1627 1627 'ChangesetComment', cascade="all, delete, delete-orphan")
1628 1628 pull_requests_source = relationship(
1629 1629 'PullRequest',
1630 1630 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1631 1631 cascade="all, delete, delete-orphan")
1632 1632 pull_requests_target = relationship(
1633 1633 'PullRequest',
1634 1634 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1635 1635 cascade="all, delete, delete-orphan")
1636 1636 ui = relationship('RepoRhodeCodeUi', cascade="all")
1637 1637 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1638 1638 integrations = relationship('Integration',
1639 1639 cascade="all, delete, delete-orphan")
1640 1640
1641 1641 scoped_tokens = relationship('UserApiKeys', cascade="all")
1642 1642
1643 1643 def __unicode__(self):
1644 1644 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1645 1645 safe_unicode(self.repo_name))
1646 1646
1647 1647 @hybrid_property
1648 1648 def description_safe(self):
1649 1649 from rhodecode.lib import helpers as h
1650 1650 return h.escape(self.description)
1651 1651
1652 1652 @hybrid_property
1653 1653 def landing_rev(self):
1654 1654 # always should return [rev_type, rev]
1655 1655 if self._landing_revision:
1656 1656 _rev_info = self._landing_revision.split(':')
1657 1657 if len(_rev_info) < 2:
1658 1658 _rev_info.insert(0, 'rev')
1659 1659 return [_rev_info[0], _rev_info[1]]
1660 1660 return [None, None]
1661 1661
1662 1662 @landing_rev.setter
1663 1663 def landing_rev(self, val):
1664 1664 if ':' not in val:
1665 1665 raise ValueError('value must be delimited with `:` and consist '
1666 1666 'of <rev_type>:<rev>, got %s instead' % val)
1667 1667 self._landing_revision = val
1668 1668
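    # Illustrative sketch: the landing revision is stored as a single
    # '<rev_type>:<rev>' string and exposed as a [rev_type, rev] pair, e.g.:
    #
    #   repo.landing_rev = 'branch:default'   # stored as-is
    #   repo.landing_rev                      # -> ['branch', 'default']
    #   repo.landing_rev = 'default'          # -> ValueError, ':' required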
1669 1669 @hybrid_property
1670 1670 def locked(self):
1671 1671 if self._locked:
1672 1672 user_id, timelocked, reason = self._locked.split(':')
1673 1673 lock_values = int(user_id), timelocked, reason
1674 1674 else:
1675 1675 lock_values = [None, None, None]
1676 1676 return lock_values
1677 1677
1678 1678 @locked.setter
1679 1679 def locked(self, val):
1680 1680 if val and isinstance(val, (list, tuple)):
1681 1681 self._locked = ':'.join(map(str, val))
1682 1682 else:
1683 1683 self._locked = None
1684 1684
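    # Illustrative sketch: the lock is persisted as 'user_id:time:reason'
    # and read back as (int_user_id, time_str, reason), e.g.:
    #
    #   repo.locked = [2, 1530000000, Repository.LOCK_API]
    #   repo.locked   # -> (2, '1530000000', 'lock_api')
    #
    #   repo.locked = None   # clears the lock -> [None, None, None]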
1685 1685 @hybrid_property
1686 1686 def changeset_cache(self):
1687 1687 from rhodecode.lib.vcs.backends.base import EmptyCommit
1688 1688 dummy = EmptyCommit().__json__()
1689 1689 if not self._changeset_cache:
1690 1690 return dummy
1691 1691 try:
1692 1692 return json.loads(self._changeset_cache)
1693 1693 except TypeError:
1694 1694 return dummy
1695 1695 except Exception:
1696 1696 log.error(traceback.format_exc())
1697 1697 return dummy
1698 1698
1699 1699 @changeset_cache.setter
1700 1700 def changeset_cache(self, val):
1701 1701 try:
1702 1702 self._changeset_cache = json.dumps(val)
1703 1703 except Exception:
1704 1704 log.error(traceback.format_exc())
1705 1705
1706 1706 @hybrid_property
1707 1707 def repo_name(self):
1708 1708 return self._repo_name
1709 1709
1710 1710 @repo_name.setter
1711 1711 def repo_name(self, value):
1712 1712 self._repo_name = value
1713 1713 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1714 1714
1715 1715 @classmethod
1716 1716 def normalize_repo_name(cls, repo_name):
1717 1717 """
1718 1718 Normalizes an OS-specific repo_name to the format stored internally
1719 1719 in the database, using URL_SEP
1720 1720
1721 1721 :param cls:
1722 1722 :param repo_name:
1723 1723 """
1724 1724 return cls.NAME_SEP.join(repo_name.split(os.sep))
1725 1725
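    # Illustrative sketch: on a system where os.sep is '\\' this turns a
    # filesystem-style name into the stored form, e.g.:
    #
    #   >>> Repository.normalize_repo_name('mygroup\\myrepo')  # Windows
    #   'mygroup/myrepo'
    #
    # On POSIX ('/' separator) the name passes through unchanged.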
1726 1726 @classmethod
1727 1727 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1728 1728 session = Session()
1729 1729 q = session.query(cls).filter(cls.repo_name == repo_name)
1730 1730
1731 1731 if cache:
1732 1732 if identity_cache:
1733 1733 val = cls.identity_cache(session, 'repo_name', repo_name)
1734 1734 if val:
1735 1735 return val
1736 1736 else:
1737 1737 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1738 1738 q = q.options(
1739 1739 FromCache("sql_cache_short", cache_key))
1740 1740
1741 1741 return q.scalar()
1742 1742
1743 1743 @classmethod
1744 1744 def get_by_id_or_repo_name(cls, repoid):
1745 1745 if isinstance(repoid, (int, long)):
1746 1746 try:
1747 1747 repo = cls.get(repoid)
1748 1748 except ValueError:
1749 1749 repo = None
1750 1750 else:
1751 1751 repo = cls.get_by_repo_name(repoid)
1752 1752 return repo
1753 1753
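    # Illustrative sketch: the lookup is polymorphic over the id type, e.g.:
    #
    #   Repository.get_by_id_or_repo_name(42)               # by numeric id
    #   Repository.get_by_id_or_repo_name('group/my-repo')  # by name
    #
    # (42 and 'group/my-repo' are hypothetical values.)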
1754 1754 @classmethod
1755 1755 def get_by_full_path(cls, repo_full_path):
1756 1756 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1757 1757 repo_name = cls.normalize_repo_name(repo_name)
1758 1758 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1759 1759
1760 1760 @classmethod
1761 1761 def get_repo_forks(cls, repo_id):
1762 1762 return cls.query().filter(Repository.fork_id == repo_id)
1763 1763
1764 1764 @classmethod
1765 1765 def base_path(cls):
1766 1766 """
1767 1767 Returns the base path where all repos are stored
1768 1768
1769 1769 :param cls:
1770 1770 """
1771 1771 q = Session().query(RhodeCodeUi)\
1772 1772 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1773 1773 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1774 1774 return q.one().ui_value
1775 1775
1776 1776 @classmethod
1777 1777 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1778 1778 case_insensitive=True):
1779 1779 q = Repository.query()
1780 1780
1781 1781 if not isinstance(user_id, Optional):
1782 1782 q = q.filter(Repository.user_id == user_id)
1783 1783
1784 1784 if not isinstance(group_id, Optional):
1785 1785 q = q.filter(Repository.group_id == group_id)
1786 1786
1787 1787 if case_insensitive:
1788 1788 q = q.order_by(func.lower(Repository.repo_name))
1789 1789 else:
1790 1790 q = q.order_by(Repository.repo_name)
1791 1791 return q.all()
1792 1792
1793 1793 @property
1794 1794 def forks(self):
1795 1795 """
1796 1796 Return forks of this repo
1797 1797 """
1798 1798 return Repository.get_repo_forks(self.repo_id)
1799 1799
1800 1800 @property
1801 1801 def parent(self):
1802 1802 """
1803 1803 Returns fork parent
1804 1804 """
1805 1805 return self.fork
1806 1806
1807 1807 @property
1808 1808 def just_name(self):
1809 1809 return self.repo_name.split(self.NAME_SEP)[-1]
1810 1810
1811 1811 @property
1812 1812 def groups_with_parents(self):
1813 1813 groups = []
1814 1814 if self.group is None:
1815 1815 return groups
1816 1816
1817 1817 cur_gr = self.group
1818 1818 groups.insert(0, cur_gr)
1819 1819 while 1:
1820 1820 gr = getattr(cur_gr, 'parent_group', None)
1821 1821 cur_gr = cur_gr.parent_group
1822 1822 if gr is None:
1823 1823 break
1824 1824 groups.insert(0, gr)
1825 1825
1826 1826 return groups
1827 1827
1828 1828 @property
1829 1829 def groups_and_repo(self):
1830 1830 return self.groups_with_parents, self
1831 1831
1832 1832 @LazyProperty
1833 1833 def repo_path(self):
1834 1834 """
1835 1835 Returns the full base path for this repository, i.e. where it
1836 1836 actually exists on the filesystem
1837 1837 """
1838 1838 q = Session().query(RhodeCodeUi).filter(
1839 1839 RhodeCodeUi.ui_key == self.NAME_SEP)
1840 1840 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1841 1841 return q.one().ui_value
1842 1842
1843 1843 @property
1844 1844 def repo_full_path(self):
1845 1845 p = [self.repo_path]
1846 1846 # we need to split the name by / since this is how we store the
1847 1847 # names in the database, but that eventually needs to be converted
1848 1848 # into a valid system path
1849 1849 p += self.repo_name.split(self.NAME_SEP)
1850 1850 return os.path.join(*map(safe_unicode, p))
1851 1851
1852 1852 @property
1853 1853 def cache_keys(self):
1854 1854 """
1855 1855 Returns associated cache keys for that repo
1856 1856 """
1857 1857 return CacheKey.query()\
1858 1858 .filter(CacheKey.cache_args == self.repo_name)\
1859 1859 .order_by(CacheKey.cache_key)\
1860 1860 .all()
1861 1861
1862 1862 @property
1863 1863 def cached_diffs_relative_dir(self):
1864 1864 """
1865 1865 Return the path of cached diffs relative to the repository store,
1866 1866 safe to display to users who shouldn't know the absolute store
1867 1867 path
1868 1868 """
1869 1869 return os.path.join(
1870 1870 os.path.dirname(self.repo_name),
1871 1871 self.cached_diffs_dir.split(os.path.sep)[-1])
1872 1872
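    # Illustrative sketch, assuming a repo 'mygroup/myrepo' with repo_id 5:
    # cached_diffs_dir would end in '.__shadow_diff_cache_repo_5' next to the
    # repository, and this property would return the store-relative form
    # 'mygroup/.__shadow_diff_cache_repo_5'.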
1873 1873 @property
1874 1874 def cached_diffs_dir(self):
1875 1875 path = self.repo_full_path
1876 1876 return os.path.join(
1877 1877 os.path.dirname(path),
1878 1878 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1879 1879
1880 1880 def cached_diffs(self):
1881 1881 diff_cache_dir = self.cached_diffs_dir
1882 1882 if os.path.isdir(diff_cache_dir):
1883 1883 return os.listdir(diff_cache_dir)
1884 1884 return []
1885 1885
1886 def shadow_repos(self):
1887 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
1888 return [
1889 x for x in os.listdir(os.path.dirname(self.repo_full_path))
1890 if x.startswith(shadow_repos_pattern)]
1891
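    # Illustrative sketch: for repo_id 5 this matches any directory in the
    # repo's parent dir whose name starts with '.__shadow_repo_5', e.g. a
    # hypothetical '.__shadow_repo_5_pr_12' created for a pull request.
    # Keying on the numeric id keeps the pattern independent of the repo name.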
1886 1892 def get_new_name(self, repo_name):
1887 1893 """
1888 1894 returns new full repository name based on assigned group and new name
1889 1895
1890 1896 :param repo_name:
1891 1897 """
1892 1898 path_prefix = self.group.full_path_splitted if self.group else []
1893 1899 return self.NAME_SEP.join(path_prefix + [repo_name])
1894 1900
1895 1901 @property
1896 1902 def _config(self):
1897 1903 """
1898 1904 Returns db based config object.
1899 1905 """
1900 1906 from rhodecode.lib.utils import make_db_config
1901 1907 return make_db_config(clear_session=False, repo=self)
1902 1908
1903 1909 def permissions(self, with_admins=True, with_owner=True):
1904 1910 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1905 1911 q = q.options(joinedload(UserRepoToPerm.repository),
1906 1912 joinedload(UserRepoToPerm.user),
1907 1913 joinedload(UserRepoToPerm.permission),)
1908 1914
1909 1915 # get owners, admins and their permissions. We re-write the sqlalchemy
1910 1916 # objects into named-tuples because the sqlalchemy session keeps a
1911 1917 # global reference, so changing one object would propagate to all
1912 1918 # others. This means that if an admin is also the owner, an admin_row
1913 1919 # change would propagate to both objects
1914 1920 perm_rows = []
1915 1921 for _usr in q.all():
1916 1922 usr = AttributeDict(_usr.user.get_dict())
1917 1923 usr.permission = _usr.permission.permission_name
1918 1924 perm_rows.append(usr)
1919 1925
1920 1926 # filter the perm rows by 'default' first, then sort them by
1921 1927 # admin, write, read, none permission, and alphabetically within
1922 1928 # each group
1923 1929 perm_rows = sorted(perm_rows, key=display_user_sort)
1924 1930
1925 1931 _admin_perm = 'repository.admin'
1926 1932 owner_row = []
1927 1933 if with_owner:
1928 1934 usr = AttributeDict(self.user.get_dict())
1929 1935 usr.owner_row = True
1930 1936 usr.permission = _admin_perm
1931 1937 owner_row.append(usr)
1932 1938
1933 1939 super_admin_rows = []
1934 1940 if with_admins:
1935 1941 for usr in User.get_all_super_admins():
1936 1942 # if this admin is also owner, don't double the record
1937 1943 if usr.user_id == owner_row[0].user_id:
1938 1944 owner_row[0].admin_row = True
1939 1945 else:
1940 1946 usr = AttributeDict(usr.get_dict())
1941 1947 usr.admin_row = True
1942 1948 usr.permission = _admin_perm
1943 1949 super_admin_rows.append(usr)
1944 1950
1945 1951 return super_admin_rows + owner_row + perm_rows
1946 1952
1947 1953 def permission_user_groups(self):
1948 1954 q = UserGroupRepoToPerm.query().filter(
1949 1955 UserGroupRepoToPerm.repository == self)
1950 1956 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1951 1957 joinedload(UserGroupRepoToPerm.users_group),
1952 1958 joinedload(UserGroupRepoToPerm.permission),)
1953 1959
1954 1960 perm_rows = []
1955 1961 for _user_group in q.all():
1956 1962 usr = AttributeDict(_user_group.users_group.get_dict())
1957 1963 usr.permission = _user_group.permission.permission_name
1958 1964 perm_rows.append(usr)
1959 1965
1960 1966 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1961 1967 return perm_rows
1962 1968
1963 1969 def get_api_data(self, include_secrets=False):
1964 1970 """
1965 1971 Common function for generating repo api data
1966 1972
1967 1973 :param include_secrets: See :meth:`User.get_api_data`.
1968 1974
1969 1975 """
1970 1976 # TODO: mikhail: there is an anti-pattern here; we probably need to
1971 1977 # move these methods to the model level.
1972 1978 from rhodecode.model.settings import SettingsModel
1973 1979 from rhodecode.model.repo import RepoModel
1974 1980
1975 1981 repo = self
1976 1982 _user_id, _time, _reason = self.locked
1977 1983
1978 1984 data = {
1979 1985 'repo_id': repo.repo_id,
1980 1986 'repo_name': repo.repo_name,
1981 1987 'repo_type': repo.repo_type,
1982 1988 'clone_uri': repo.clone_uri or '',
1983 1989 'push_uri': repo.push_uri or '',
1984 1990 'url': RepoModel().get_url(self),
1985 1991 'private': repo.private,
1986 1992 'created_on': repo.created_on,
1987 1993 'description': repo.description_safe,
1988 1994 'landing_rev': repo.landing_rev,
1989 1995 'owner': repo.user.username,
1990 1996 'fork_of': repo.fork.repo_name if repo.fork else None,
1991 1997 'fork_of_id': repo.fork.repo_id if repo.fork else None,
1992 1998 'enable_statistics': repo.enable_statistics,
1993 1999 'enable_locking': repo.enable_locking,
1994 2000 'enable_downloads': repo.enable_downloads,
1995 2001 'last_changeset': repo.changeset_cache,
1996 2002 'locked_by': User.get(_user_id).get_api_data(
1997 2003 include_secrets=include_secrets) if _user_id else None,
1998 2004 'locked_date': time_to_datetime(_time) if _time else None,
1999 2005 'lock_reason': _reason if _reason else None,
2000 2006 }
2001 2007
2002 2008 # TODO: mikhail: should be per-repo settings here
2003 2009 rc_config = SettingsModel().get_all_settings()
2004 2010 repository_fields = str2bool(
2005 2011 rc_config.get('rhodecode_repository_fields'))
2006 2012 if repository_fields:
2007 2013 for f in self.extra_fields:
2008 2014 data[f.field_key_prefixed] = f.field_value
2009 2015
2010 2016 return data
2011 2017
2012 2018 @classmethod
2013 2019 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2014 2020 if not lock_time:
2015 2021 lock_time = time.time()
2016 2022 if not lock_reason:
2017 2023 lock_reason = cls.LOCK_AUTOMATIC
2018 2024 repo.locked = [user_id, lock_time, lock_reason]
2019 2025 Session().add(repo)
2020 2026 Session().commit()
2021 2027
2022 2028 @classmethod
2023 2029 def unlock(cls, repo):
2024 2030 repo.locked = None
2025 2031 Session().add(repo)
2026 2032 Session().commit()
2027 2033
2028 2034 @classmethod
2029 2035 def getlock(cls, repo):
2030 2036 return repo.locked
2031 2037
2032 2038 def is_user_lock(self, user_id):
2033 2039 if self.locked[0]:
2034 2040 lock_user_id = safe_int(self.locked[0])
2035 2041 user_id = safe_int(user_id)
2036 2042 # both are ints, and they are equal
2037 2043 return all([lock_user_id, user_id]) and lock_user_id == user_id
2038 2044
2039 2045 return False
2040 2046
2041 2047 def get_locking_state(self, action, user_id, only_when_enabled=True):
2042 2048 """
2043 2049 Checks locking on this repository, if locking is enabled and lock is
2044 2050 present returns a tuple of make_lock, locked, locked_by.
2045 2051 make_lock can have 3 states: None (do nothing), True (make a lock),
2046 2052 False (release the lock). This value is later propagated to hooks,
2047 2053 which do the locking. Think of it as a signal telling the hooks what to do.
2048 2054
2049 2055 """
2050 2056 # TODO: johbo: This is part of the business logic and should be moved
2051 2057 # into the RepositoryModel.
2052 2058
2053 2059 if action not in ('push', 'pull'):
2054 2060 raise ValueError("Invalid action value: %s" % repr(action))
2055 2061
2056 2062 # defines if locked error should be thrown to user
2057 2063 currently_locked = False
2058 2064 # defines if new lock should be made, tri-state
2059 2065 make_lock = None
2060 2066 repo = self
2061 2067 user = User.get(user_id)
2062 2068
2063 2069 lock_info = repo.locked
2064 2070
2065 2071 if repo and (repo.enable_locking or not only_when_enabled):
2066 2072 if action == 'push':
2067 2073 # check if it's already locked! if it is, compare users
2068 2074 locked_by_user_id = lock_info[0]
2069 2075 if user.user_id == locked_by_user_id:
2070 2076 log.debug(
2071 2077 'Got `push` action from user %s, now unlocking', user)
2072 2078 # unlock if we have push from user who locked
2073 2079 make_lock = False
2074 2080 else:
2075 2081 # we're not the same user who locked it; reject with the status
2076 2082 # code defined in settings (default is HTTP 423 Locked) !
2077 2083 log.debug('Repo %s is currently locked by %s', repo, user)
2078 2084 currently_locked = True
2079 2085 elif action == 'pull':
2080 2086 # [0] user [1] date
2081 2087 if lock_info[0] and lock_info[1]:
2082 2088 log.debug('Repo %s is currently locked by %s', repo, user)
2083 2089 currently_locked = True
2084 2090 else:
2085 2091 log.debug('Setting lock on repo %s by %s', repo, user)
2086 2092 make_lock = True
2087 2093
2088 2094 else:
2089 2095 log.debug('Repository %s does not have locking enabled', repo)
2090 2096
2091 2097 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2092 2098 make_lock, currently_locked, lock_info)
2093 2099
2094 2100 from rhodecode.lib.auth import HasRepoPermissionAny
2095 2101 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2096 2102 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2097 2103 # if we don't have at least write permission we cannot make a lock
2098 2104 log.debug('lock state reset back to FALSE due to lack '
2099 2105 'of at least write permission')
2100 2106 make_lock = False
2101 2107
2102 2108 return make_lock, currently_locked, lock_info
2103 2109
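    # Illustrative use of the tri-state result (values are hypothetical):
    #
    #   make_lock, locked, locked_by = repo.get_locking_state('push', user_id)
    #   # make_lock is None  -> leave the lock state untouched
    #   # make_lock is True  -> hooks should acquire the lock
    #   # make_lock is False -> hooks should release the lock
    #   # locked is True     -> reject the operation (HTTP 423 by default)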
2104 2110 @property
2105 2111 def last_db_change(self):
2106 2112 return self.updated_on
2107 2113
2108 2114 @property
2109 2115 def clone_uri_hidden(self):
2110 2116 clone_uri = self.clone_uri
2111 2117 if clone_uri:
2112 2118 import urlobject
2113 2119 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2114 2120 if url_obj.password:
2115 2121 clone_uri = url_obj.with_password('*****')
2116 2122 return clone_uri
2117 2123
2118 2124 @property
2119 2125 def push_uri_hidden(self):
2120 2126 push_uri = self.push_uri
2121 2127 if push_uri:
2122 2128 import urlobject
2123 2129 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2124 2130 if url_obj.password:
2125 2131 push_uri = url_obj.with_password('*****')
2126 2132 return push_uri
2127 2133
2128 2134 def clone_url(self, **override):
2129 2135 from rhodecode.model.settings import SettingsModel
2130 2136
2131 2137 uri_tmpl = None
2132 2138 if 'with_id' in override:
2133 2139 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2134 2140 del override['with_id']
2135 2141
2136 2142 if 'uri_tmpl' in override:
2137 2143 uri_tmpl = override['uri_tmpl']
2138 2144 del override['uri_tmpl']
2139 2145
2140 2146 ssh = False
2141 2147 if 'ssh' in override:
2142 2148 ssh = True
2143 2149 del override['ssh']
2144 2150
2145 2151 # we didn't override our tmpl from **overrides
2146 2152 if not uri_tmpl:
2147 2153 rc_config = SettingsModel().get_all_settings(cache=True)
2148 2154 if ssh:
2149 2155 uri_tmpl = rc_config.get(
2150 2156 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2151 2157 else:
2152 2158 uri_tmpl = rc_config.get(
2153 2159 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2154 2160
2155 2161 request = get_current_request()
2156 2162 return get_clone_url(request=request,
2157 2163 uri_tmpl=uri_tmpl,
2158 2164 repo_name=self.repo_name,
2159 2165 repo_id=self.repo_id, **override)
2160 2166
2161 2167 def set_state(self, state):
2162 2168 self.repo_state = state
2163 2169 Session().add(self)
2164 2170 #==========================================================================
2165 2171 # SCM PROPERTIES
2166 2172 #==========================================================================
2167 2173
2168 2174 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2169 2175 return get_commit_safe(
2170 2176 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2171 2177
2172 2178 def get_changeset(self, rev=None, pre_load=None):
2173 2179 warnings.warn("Use get_commit", DeprecationWarning)
2174 2180 commit_id = None
2175 2181 commit_idx = None
2176 2182 if isinstance(rev, basestring):
2177 2183 commit_id = rev
2178 2184 else:
2179 2185 commit_idx = rev
2180 2186 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2181 2187 pre_load=pre_load)
2182 2188
2183 2189 def get_landing_commit(self):
2184 2190 """
2185 2191 Returns the landing commit, or the tip if the landing commit doesn't exist
2186 2192 """
2187 2193 _rev_type, _rev = self.landing_rev
2188 2194 commit = self.get_commit(_rev)
2189 2195 if isinstance(commit, EmptyCommit):
2190 2196 return self.get_commit()
2191 2197 return commit
2192 2198
2193 2199 def update_commit_cache(self, cs_cache=None, config=None):
2194 2200 """
2195 2201 Update cache of last changeset for repository, keys should be::
2196 2202
2197 2203 short_id
2198 2204 raw_id
2199 2205 revision
2200 2206 parents
2201 2207 message
2202 2208 date
2203 2209 author
2204 2210
2205 2211 :param cs_cache:
2206 2212 """
2207 2213 from rhodecode.lib.vcs.backends.base import BaseChangeset
2208 2214 if cs_cache is None:
2209 2215 # use no-cache version here
2210 2216 scm_repo = self.scm_instance(cache=False, config=config)
2211 2217 if scm_repo:
2212 2218 cs_cache = scm_repo.get_commit(
2213 2219 pre_load=["author", "date", "message", "parents"])
2214 2220 else:
2215 2221 cs_cache = EmptyCommit()
2216 2222
2217 2223 if isinstance(cs_cache, BaseChangeset):
2218 2224 cs_cache = cs_cache.__json__()
2219 2225
2220 2226 def is_outdated(new_cs_cache):
2221 2227 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2222 2228 new_cs_cache['revision'] != self.changeset_cache['revision']):
2223 2229 return True
2224 2230 return False
2225 2231
2226 2232 # check if we maybe already have the latest cached revision
2227 2233 if is_outdated(cs_cache) or not self.changeset_cache:
2228 2234 _default = datetime.datetime.fromtimestamp(0)
2229 2235 last_change = cs_cache.get('date') or _default
2230 2236 log.debug('updated repo %s with new cs cache %s',
2231 2237 self.repo_name, cs_cache)
2232 2238 self.updated_on = last_change
2233 2239 self.changeset_cache = cs_cache
2234 2240 Session().add(self)
2235 2241 Session().commit()
2236 2242 else:
2237 2243 log.debug('Skipping update_commit_cache for repo:`%s` '
2238 2244 'commit already with latest changes', self.repo_name)
2239 2245
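    # Illustrative sketch of the cached structure (values are hypothetical):
    #
    #   repo.changeset_cache  # -> {'short_id': 'abc123ef', 'raw_id': '...',
    #                         #     'revision': 7, 'parents': [...],
    #                         #     'message': '...', 'date': ..., 'author': ...}
    #
    # update_commit_cache() refreshes this and bumps updated_on whenever the
    # raw_id or revision differs from what is already stored.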
2240 2246 @property
2241 2247 def tip(self):
2242 2248 return self.get_commit('tip')
2243 2249
2244 2250 @property
2245 2251 def author(self):
2246 2252 return self.tip.author
2247 2253
2248 2254 @property
2249 2255 def last_change(self):
2250 2256 return self.scm_instance().last_change
2251 2257
2252 2258 def get_comments(self, revisions=None):
2253 2259 """
2254 2260 Returns comments for this repository grouped by revisions
2255 2261
2256 2262 :param revisions: filter query by revisions only
2257 2263 """
2258 2264 cmts = ChangesetComment.query()\
2259 2265 .filter(ChangesetComment.repo == self)
2260 2266 if revisions:
2261 2267 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2262 2268 grouped = collections.defaultdict(list)
2263 2269 for cmt in cmts.all():
2264 2270 grouped[cmt.revision].append(cmt)
2265 2271 return grouped
2266 2272
2267 2273 def statuses(self, revisions=None):
2268 2274 """
2269 2275 Returns statuses for this repository
2270 2276
2271 2277 :param revisions: list of revisions to get statuses for
2272 2278 """
2273 2279 statuses = ChangesetStatus.query()\
2274 2280 .filter(ChangesetStatus.repo == self)\
2275 2281 .filter(ChangesetStatus.version == 0)
2276 2282
2277 2283 if revisions:
2278 2284 # Try doing the filtering in chunks to avoid hitting limits
2279 2285 size = 500
2280 2286 status_results = []
2281 2287 for chunk in xrange(0, len(revisions), size):
2282 2288 status_results += statuses.filter(
2283 2289 ChangesetStatus.revision.in_(
2284 2290 revisions[chunk: chunk+size])
2285 2291 ).all()
2286 2292 else:
2287 2293 status_results = statuses.all()
2288 2294
2289 2295 grouped = {}
2290 2296
2291 2297 # maybe we have a newly opened pull request without a status?
2292 2298 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2293 2299 status_lbl = ChangesetStatus.get_status_lbl(stat)
2294 2300 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2295 2301 for rev in pr.revisions:
2296 2302 pr_id = pr.pull_request_id
2297 2303 pr_repo = pr.target_repo.repo_name
2298 2304 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2299 2305
2300 2306 for stat in status_results:
2301 2307 pr_id = pr_repo = None
2302 2308 if stat.pull_request:
2303 2309 pr_id = stat.pull_request.pull_request_id
2304 2310 pr_repo = stat.pull_request.target_repo.repo_name
2305 2311 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2306 2312 pr_id, pr_repo]
2307 2313 return grouped
2308 2314
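    # Illustrative shape of the result (values are hypothetical):
    #
    #   {
    #       '<raw_id_1>': ['approved', 'Approved', None, None],
    #       '<raw_id_2>': ['under_review', 'Under Review', 12, 'group/target'],
    #   }
    #
    # where the last two items carry the pull request id and target repo
    # name when the status originates from a pull request.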
2309 2315 # ==========================================================================
2310 2316 # SCM CACHE INSTANCE
2311 2317 # ==========================================================================
2312 2318
2313 2319 def scm_instance(self, **kwargs):
2314 2320 import rhodecode
2315 2321
2316 2322 # Passing a config will not hit the cache; currently this is only
2317 2323 # used for repo2dbmapper
2318 2324 config = kwargs.pop('config', None)
2319 2325 cache = kwargs.pop('cache', None)
2320 2326 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2321 2327 # if cache is NOT defined use the global default, else we have full
2322 2328 # control over cache behaviour
2323 2329 if cache is None and full_cache and not config:
2324 2330 return self._get_instance_cached()
2325 2331 return self._get_instance(cache=bool(cache), config=config)
2326 2332
2327 2333 def _get_instance_cached(self):
2328 2334 @cache_region('long_term')
2329 2335 def _get_repo(cache_key):
2330 2336 return self._get_instance()
2331 2337
2332 2338 invalidator_context = CacheKey.repo_context_cache(
2333 2339 _get_repo, self.repo_name, None, thread_scoped=True)
2334 2340
2335 2341 with invalidator_context as context:
2336 2342 context.invalidate()
2337 2343 repo = context.compute()
2338 2344
2339 2345 return repo
2340 2346
2341 2347 def _get_instance(self, cache=True, config=None):
2342 2348 config = config or self._config
2343 2349 custom_wire = {
2344 2350 'cache': cache # controls the vcs.remote cache
2345 2351 }
2346 2352 repo = get_vcs_instance(
2347 2353 repo_path=safe_str(self.repo_full_path),
2348 2354 config=config,
2349 2355 with_wire=custom_wire,
2350 2356 create=False,
2351 2357 _vcs_alias=self.repo_type)
2352 2358
2353 2359 return repo
2354 2360
2355 2361 def __json__(self):
2356 2362 return {'landing_rev': self.landing_rev}
2357 2363
2358 2364 def get_dict(self):
2359 2365
2360 2366 # Since we transformed `repo_name` to a hybrid property, we need to
2361 2367 # keep compatibility with the code which uses `repo_name` field.
2362 2368
2363 2369 result = super(Repository, self).get_dict()
2364 2370 result['repo_name'] = result.pop('_repo_name', None)
2365 2371 return result
2366 2372
2367 2373
2368 2374 class RepoGroup(Base, BaseModel):
2369 2375 __tablename__ = 'groups'
2370 2376 __table_args__ = (
2371 2377 UniqueConstraint('group_name', 'group_parent_id'),
2372 2378 CheckConstraint('group_id != group_parent_id'),
2373 2379 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2374 2380 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2375 2381 )
2376 2382 __mapper_args__ = {'order_by': 'group_name'}
2377 2383
2378 2384 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2379 2385
2380 2386 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2381 2387 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2382 2388 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2383 2389 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2384 2390 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2385 2391 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2386 2392 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2387 2393 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2388 2394 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2389 2395
2390 2396 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2391 2397 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2392 2398 parent_group = relationship('RepoGroup', remote_side=group_id)
2393 2399 user = relationship('User')
2394 2400 integrations = relationship('Integration',
2395 2401 cascade="all, delete, delete-orphan")
2396 2402
2397 2403 def __init__(self, group_name='', parent_group=None):
2398 2404 self.group_name = group_name
2399 2405 self.parent_group = parent_group
2400 2406
2401 2407 def __unicode__(self):
2402 2408 return u"<%s('id:%s:%s')>" % (
2403 2409 self.__class__.__name__, self.group_id, self.group_name)
2404 2410
2405 2411 @hybrid_property
2406 2412 def description_safe(self):
2407 2413 from rhodecode.lib import helpers as h
2408 2414 return h.escape(self.group_description)
2409 2415
2410 2416 @classmethod
2411 2417 def _generate_choice(cls, repo_group):
2412 2418 from webhelpers.html import literal as _literal
2413 2419 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2414 2420 return repo_group.group_id, _name(repo_group.full_path_splitted)
2415 2421
2416 2422 @classmethod
2417 2423 def groups_choices(cls, groups=None, show_empty_group=True):
2418 2424 if not groups:
2419 2425 groups = cls.query().all()
2420 2426
2421 2427 repo_groups = []
2422 2428 if show_empty_group:
2423 2429 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2424 2430
2425 2431 repo_groups.extend([cls._generate_choice(x) for x in groups])
2426 2432
2427 2433 repo_groups = sorted(
2428 2434 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2429 2435 return repo_groups
2430 2436
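    # Illustrative output, suitable for a select2 widget (ids and names are
    # hypothetical):
    #
    #   >>> RepoGroup.groups_choices()
    #   [(-1, u'-- No parent --'), (3, u'docs'), (7, u'docs/internal')]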
2431 2437 @classmethod
2432 2438 def url_sep(cls):
2433 2439 return URL_SEP
2434 2440
2435 2441 @classmethod
2436 2442 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2437 2443 if case_insensitive:
2438 2444 gr = cls.query().filter(func.lower(cls.group_name)
2439 2445 == func.lower(group_name))
2440 2446 else:
2441 2447 gr = cls.query().filter(cls.group_name == group_name)
2442 2448 if cache:
2443 2449 name_key = _hash_key(group_name)
2444 2450 gr = gr.options(
2445 2451 FromCache("sql_cache_short", "get_group_%s" % name_key))
2446 2452 return gr.scalar()
2447 2453
2448 2454 @classmethod
2449 2455 def get_user_personal_repo_group(cls, user_id):
2450 2456 user = User.get(user_id)
2451 2457 if user.username == User.DEFAULT_USER:
2452 2458 return None
2453 2459
2454 2460 return cls.query()\
2455 2461 .filter(cls.personal == true()) \
2456 2462 .filter(cls.user == user).scalar()
2457 2463
2458 2464 @classmethod
2459 2465 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2460 2466 case_insensitive=True):
2461 2467 q = RepoGroup.query()
2462 2468
2463 2469 if not isinstance(user_id, Optional):
2464 2470 q = q.filter(RepoGroup.user_id == user_id)
2465 2471
2466 2472 if not isinstance(group_id, Optional):
2467 2473 q = q.filter(RepoGroup.group_parent_id == group_id)
2468 2474
2469 2475 if case_insensitive:
2470 2476 q = q.order_by(func.lower(RepoGroup.group_name))
2471 2477 else:
2472 2478 q = q.order_by(RepoGroup.group_name)
2473 2479 return q.all()
2474 2480
2475 2481 @property
2476 2482 def parents(self):
2477 2483 parents_recursion_limit = 10
2478 2484 groups = []
2479 2485 if self.parent_group is None:
2480 2486 return groups
2481 2487 cur_gr = self.parent_group
2482 2488 groups.insert(0, cur_gr)
2483 2489 cnt = 0
2484 2490 while 1:
2485 2491 cnt += 1
2486 2492 gr = getattr(cur_gr, 'parent_group', None)
2487 2493 cur_gr = cur_gr.parent_group
2488 2494 if gr is None:
2489 2495 break
2490 2496 if cnt == parents_recursion_limit:
2491 2497 # this will prevent accidental infinite loops
2492 2498 log.error(('more than %s parents found for group %s, stopping '
2493 2499 'recursive parent fetching' % (parents_recursion_limit, self)))
2494 2500 break
2495 2501
2496 2502 groups.insert(0, gr)
2497 2503 return groups
2498 2504
2499 2505 @property
2500 2506 def last_db_change(self):
2501 2507 return self.updated_on
2502 2508
2503 2509 @property
2504 2510 def children(self):
2505 2511 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2506 2512
2507 2513 @property
2508 2514 def name(self):
2509 2515 return self.group_name.split(RepoGroup.url_sep())[-1]
2510 2516
2511 2517 @property
2512 2518 def full_path(self):
2513 2519 return self.group_name
2514 2520
2515 2521 @property
2516 2522 def full_path_splitted(self):
2517 2523 return self.group_name.split(RepoGroup.url_sep())
2518 2524
2519 2525 @property
2520 2526 def repositories(self):
2521 2527 return Repository.query()\
2522 2528 .filter(Repository.group == self)\
2523 2529 .order_by(Repository.repo_name)
2524 2530
2525 2531 @property
2526 2532 def repositories_recursive_count(self):
2527 2533 cnt = self.repositories.count()
2528 2534
2529 2535 def children_count(group):
2530 2536 cnt = 0
2531 2537 for child in group.children:
2532 2538 cnt += child.repositories.count()
2533 2539 cnt += children_count(child)
2534 2540 return cnt
2535 2541
2536 2542 return cnt + children_count(self)
2537 2543
2538 2544 def _recursive_objects(self, include_repos=True):
2539 2545 all_ = []
2540 2546
2541 2547 def _get_members(root_gr):
2542 2548 if include_repos:
2543 2549 for r in root_gr.repositories:
2544 2550 all_.append(r)
2545 2551 childs = root_gr.children.all()
2546 2552 if childs:
2547 2553 for gr in childs:
2548 2554 all_.append(gr)
2549 2555 _get_members(gr)
2550 2556
2551 2557 _get_members(self)
2552 2558 return [self] + all_
2553 2559
2554 2560 def recursive_groups_and_repos(self):
2555 2561 """
2556 2562 Recursive return all groups, with repositories in those groups
2557 2563 """
2558 2564 return self._recursive_objects()
2559 2565
2560 2566 def recursive_groups(self):
2561 2567 """
2562 2568 Returns all children groups for this group including children of children
2563 2569 """
2564 2570 return self._recursive_objects(include_repos=False)
2565 2571
2566 2572 def get_new_name(self, group_name):
2567 2573 """
2568 2574 returns new full group name based on parent and new name
2569 2575
2570 2576 :param group_name:
2571 2577 """
2572 2578 path_prefix = (self.parent_group.full_path_splitted if
2573 2579 self.parent_group else [])
2574 2580 return RepoGroup.url_sep().join(path_prefix + [group_name])
2575 2581
2576 2582 def permissions(self, with_admins=True, with_owner=True):
2577 2583 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2578 2584 q = q.options(joinedload(UserRepoGroupToPerm.group),
2579 2585 joinedload(UserRepoGroupToPerm.user),
2580 2586 joinedload(UserRepoGroupToPerm.permission),)
2581 2587
2582 2588 # get owners, admins and their permissions. We re-write the sqlalchemy
2583 2589 # objects into named-tuples because the sqlalchemy session keeps a
2584 2590 # global reference, so changing one object would propagate to all
2585 2591 # others. This means that if an admin is also the owner, an admin_row
2586 2592 # change would propagate to both objects
2587 2593 perm_rows = []
2588 2594 for _usr in q.all():
2589 2595 usr = AttributeDict(_usr.user.get_dict())
2590 2596 usr.permission = _usr.permission.permission_name
2591 2597 perm_rows.append(usr)
2592 2598
2593 2599 # filter the perm rows by 'default' first, then sort them by
2594 2600 # admin, write, read, none permission, and alphabetically within
2595 2601 # each group
2596 2602 perm_rows = sorted(perm_rows, key=display_user_sort)
2597 2603
2598 2604 _admin_perm = 'group.admin'
2599 2605 owner_row = []
2600 2606 if with_owner:
2601 2607 usr = AttributeDict(self.user.get_dict())
2602 2608 usr.owner_row = True
2603 2609 usr.permission = _admin_perm
2604 2610 owner_row.append(usr)
2605 2611
2606 2612 super_admin_rows = []
2607 2613 if with_admins:
2608 2614 for usr in User.get_all_super_admins():
2609 2615 # if this admin is also owner, don't double the record
2610 2616 if usr.user_id == owner_row[0].user_id:
2611 2617 owner_row[0].admin_row = True
2612 2618 else:
2613 2619 usr = AttributeDict(usr.get_dict())
2614 2620 usr.admin_row = True
2615 2621 usr.permission = _admin_perm
2616 2622 super_admin_rows.append(usr)
2617 2623
2618 2624 return super_admin_rows + owner_row + perm_rows
2619 2625
2620 2626 def permission_user_groups(self):
2621 2627 q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
2622 2628 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2623 2629 joinedload(UserGroupRepoGroupToPerm.users_group),
2624 2630 joinedload(UserGroupRepoGroupToPerm.permission),)
2625 2631
2626 2632 perm_rows = []
2627 2633 for _user_group in q.all():
2628 2634 usr = AttributeDict(_user_group.users_group.get_dict())
2629 2635 usr.permission = _user_group.permission.permission_name
2630 2636 perm_rows.append(usr)
2631 2637
2632 2638 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2633 2639 return perm_rows
2634 2640
2635 2641 def get_api_data(self):
2636 2642 """
2637 2643 Common function for generating api data
2638 2644
2639 2645 """
2640 2646 group = self
2641 2647 data = {
2642 2648 'group_id': group.group_id,
2643 2649 'group_name': group.group_name,
2644 2650 'group_description': group.description_safe,
2645 2651 'parent_group': group.parent_group.group_name if group.parent_group else None,
2646 2652 'repositories': [x.repo_name for x in group.repositories],
2647 2653 'owner': group.user.username,
2648 2654 }
2649 2655 return data
2650 2656
2651 2657
2652 2658 class Permission(Base, BaseModel):
2653 2659 __tablename__ = 'permissions'
2654 2660 __table_args__ = (
2655 2661 Index('p_perm_name_idx', 'permission_name'),
2656 2662 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2657 2663 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2658 2664 )
2659 2665 PERMS = [
2660 2666 ('hg.admin', _('RhodeCode Super Administrator')),
2661 2667
2662 2668 ('repository.none', _('Repository no access')),
2663 2669 ('repository.read', _('Repository read access')),
2664 2670 ('repository.write', _('Repository write access')),
2665 2671 ('repository.admin', _('Repository admin access')),
2666 2672
2667 2673 ('group.none', _('Repository group no access')),
2668 2674 ('group.read', _('Repository group read access')),
2669 2675 ('group.write', _('Repository group write access')),
2670 2676 ('group.admin', _('Repository group admin access')),
2671 2677
2672 2678 ('usergroup.none', _('User group no access')),
2673 2679 ('usergroup.read', _('User group read access')),
2674 2680 ('usergroup.write', _('User group write access')),
2675 2681 ('usergroup.admin', _('User group admin access')),
2676 2682
2677 2683 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2678 2684 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2679 2685
2680 2686 ('hg.usergroup.create.false', _('User Group creation disabled')),
2681 2687 ('hg.usergroup.create.true', _('User Group creation enabled')),
2682 2688
2683 2689 ('hg.create.none', _('Repository creation disabled')),
2684 2690 ('hg.create.repository', _('Repository creation enabled')),
2685 2691 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2686 2692 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2687 2693
2688 2694 ('hg.fork.none', _('Repository forking disabled')),
2689 2695 ('hg.fork.repository', _('Repository forking enabled')),
2690 2696
2691 2697 ('hg.register.none', _('Registration disabled')),
2692 2698 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2693 2699 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2694 2700
2695 2701 ('hg.password_reset.enabled', _('Password reset enabled')),
2696 2702 ('hg.password_reset.hidden', _('Password reset hidden')),
2697 2703 ('hg.password_reset.disabled', _('Password reset disabled')),
2698 2704
2699 2705 ('hg.extern_activate.manual', _('Manual activation of external account')),
2700 2706 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2701 2707
2702 2708 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2703 2709 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2704 2710 ]
2705 2711
2706 2712 # definition of system default permissions for DEFAULT user
2707 2713 DEFAULT_USER_PERMISSIONS = [
2708 2714 'repository.read',
2709 2715 'group.read',
2710 2716 'usergroup.read',
2711 2717 'hg.create.repository',
2712 2718 'hg.repogroup.create.false',
2713 2719 'hg.usergroup.create.false',
2714 2720 'hg.create.write_on_repogroup.true',
2715 2721 'hg.fork.repository',
2716 2722 'hg.register.manual_activate',
2717 2723 'hg.password_reset.enabled',
2718 2724 'hg.extern_activate.auto',
2719 2725 'hg.inherit_default_perms.true',
2720 2726 ]
2721 2727
2722 2728 # Weight defines which permissions are more important:
2723 2729 # the higher the number, the more important the permission.
2724 2730 # When several permissions apply, the higher-weight one wins.
2725 2731 PERM_WEIGHTS = {
2726 2732 'repository.none': 0,
2727 2733 'repository.read': 1,
2728 2734 'repository.write': 3,
2729 2735 'repository.admin': 4,
2730 2736
2731 2737 'group.none': 0,
2732 2738 'group.read': 1,
2733 2739 'group.write': 3,
2734 2740 'group.admin': 4,
2735 2741
2736 2742 'usergroup.none': 0,
2737 2743 'usergroup.read': 1,
2738 2744 'usergroup.write': 3,
2739 2745 'usergroup.admin': 4,
2740 2746
2741 2747 'hg.repogroup.create.false': 0,
2742 2748 'hg.repogroup.create.true': 1,
2743 2749
2744 2750 'hg.usergroup.create.false': 0,
2745 2751 'hg.usergroup.create.true': 1,
2746 2752
2747 2753 'hg.fork.none': 0,
2748 2754 'hg.fork.repository': 1,
2749 2755 'hg.create.none': 0,
2750 2756 'hg.create.repository': 1
2751 2757 }
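# A minimal sketch of how these weights resolve conflicts: when several
# permissions apply to the same object, the higher-weight one wins.
#
#   candidates = ['repository.read', 'repository.write']
#   strongest = max(candidates, key=PERM_WEIGHTS.get)
#   assert strongest == 'repository.write'  # weight 3 beats weight 1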
2752 2758
2753 2759 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2754 2760 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2755 2761 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2756 2762
2757 2763 def __unicode__(self):
2758 2764 return u"<%s('%s:%s')>" % (
2759 2765 self.__class__.__name__, self.permission_id, self.permission_name
2760 2766 )
2761 2767
2762 2768 @classmethod
2763 2769 def get_by_key(cls, key):
2764 2770 return cls.query().filter(cls.permission_name == key).scalar()
2765 2771
2766 2772 @classmethod
2767 2773 def get_default_repo_perms(cls, user_id, repo_id=None):
2768 2774 q = Session().query(UserRepoToPerm, Repository, Permission)\
2769 2775 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2770 2776 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2771 2777 .filter(UserRepoToPerm.user_id == user_id)
2772 2778 if repo_id:
2773 2779 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2774 2780 return q.all()
2775 2781
2776 2782 @classmethod
2777 2783 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2778 2784 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2779 2785 .join(
2780 2786 Permission,
2781 2787 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2782 2788 .join(
2783 2789 Repository,
2784 2790 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2785 2791 .join(
2786 2792 UserGroup,
2787 2793 UserGroupRepoToPerm.users_group_id ==
2788 2794 UserGroup.users_group_id)\
2789 2795 .join(
2790 2796 UserGroupMember,
2791 2797 UserGroupRepoToPerm.users_group_id ==
2792 2798 UserGroupMember.users_group_id)\
2793 2799 .filter(
2794 2800 UserGroupMember.user_id == user_id,
2795 2801 UserGroup.users_group_active == true())
2796 2802 if repo_id:
2797 2803 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2798 2804 return q.all()
2799 2805
2800 2806 @classmethod
2801 2807 def get_default_group_perms(cls, user_id, repo_group_id=None):
2802 2808 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2803 2809 .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
2804 2810 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
2805 2811 .filter(UserRepoGroupToPerm.user_id == user_id)
2806 2812 if repo_group_id:
2807 2813 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2808 2814 return q.all()
2809 2815
2810 2816 @classmethod
2811 2817 def get_default_group_perms_from_user_group(
2812 2818 cls, user_id, repo_group_id=None):
2813 2819 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2814 2820 .join(
2815 2821 Permission,
2816 2822 UserGroupRepoGroupToPerm.permission_id ==
2817 2823 Permission.permission_id)\
2818 2824 .join(
2819 2825 RepoGroup,
2820 2826 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2821 2827 .join(
2822 2828 UserGroup,
2823 2829 UserGroupRepoGroupToPerm.users_group_id ==
2824 2830 UserGroup.users_group_id)\
2825 2831 .join(
2826 2832 UserGroupMember,
2827 2833 UserGroupRepoGroupToPerm.users_group_id ==
2828 2834 UserGroupMember.users_group_id)\
2829 2835 .filter(
2830 2836 UserGroupMember.user_id == user_id,
2831 2837 UserGroup.users_group_active == true())
2832 2838 if repo_group_id:
2833 2839 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2834 2840 return q.all()
2835 2841
2836 2842 @classmethod
2837 2843 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2838 2844 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2839 2845 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2840 2846 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2841 2847 .filter(UserUserGroupToPerm.user_id == user_id)
2842 2848 if user_group_id:
2843 2849 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2844 2850 return q.all()
2845 2851
2846 2852 @classmethod
2847 2853 def get_default_user_group_perms_from_user_group(
2848 2854 cls, user_id, user_group_id=None):
2849 2855 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2850 2856 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2851 2857 .join(
2852 2858 Permission,
2853 2859 UserGroupUserGroupToPerm.permission_id ==
2854 2860 Permission.permission_id)\
2855 2861 .join(
2856 2862 TargetUserGroup,
2857 2863 UserGroupUserGroupToPerm.target_user_group_id ==
2858 2864 TargetUserGroup.users_group_id)\
2859 2865 .join(
2860 2866 UserGroup,
2861 2867 UserGroupUserGroupToPerm.user_group_id ==
2862 2868 UserGroup.users_group_id)\
2863 2869 .join(
2864 2870 UserGroupMember,
2865 2871 UserGroupUserGroupToPerm.user_group_id ==
2866 2872 UserGroupMember.users_group_id)\
2867 2873 .filter(
2868 2874 UserGroupMember.user_id == user_id,
2869 2875 UserGroup.users_group_active == true())
2870 2876 if user_group_id:
2871 2877 q = q.filter(
2872 2878 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2873 2879
2874 2880 return q.all()
2875 2881
2876 2882
2877 2883 class UserRepoToPerm(Base, BaseModel):
2878 2884 __tablename__ = 'repo_to_perm'
2879 2885 __table_args__ = (
2880 2886 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2881 2887 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2882 2888 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2883 2889 )
2884 2890 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2885 2891 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2886 2892 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2887 2893 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2888 2894
2889 2895 user = relationship('User')
2890 2896 repository = relationship('Repository')
2891 2897 permission = relationship('Permission')
2892 2898
2893 2899 @classmethod
2894 2900 def create(cls, user, repository, permission):
2895 2901 n = cls()
2896 2902 n.user = user
2897 2903 n.repository = repository
2898 2904 n.permission = permission
2899 2905 Session().add(n)
2900 2906 return n
2901 2907
2902 2908 def __unicode__(self):
2903 2909 return u'<%s => %s >' % (self.user, self.repository)
2904 2910
2905 2911
2906 2912 class UserUserGroupToPerm(Base, BaseModel):
2907 2913 __tablename__ = 'user_user_group_to_perm'
2908 2914 __table_args__ = (
2909 2915 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
2910 2916 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2911 2917 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2912 2918 )
2913 2919 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2914 2920 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2915 2921 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2916 2922 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2917 2923
2918 2924 user = relationship('User')
2919 2925 user_group = relationship('UserGroup')
2920 2926 permission = relationship('Permission')
2921 2927
2922 2928 @classmethod
2923 2929 def create(cls, user, user_group, permission):
2924 2930 n = cls()
2925 2931 n.user = user
2926 2932 n.user_group = user_group
2927 2933 n.permission = permission
2928 2934 Session().add(n)
2929 2935 return n
2930 2936
2931 2937 def __unicode__(self):
2932 2938 return u'<%s => %s >' % (self.user, self.user_group)
2933 2939
2934 2940
2935 2941 class UserToPerm(Base, BaseModel):
2936 2942 __tablename__ = 'user_to_perm'
2937 2943 __table_args__ = (
2938 2944 UniqueConstraint('user_id', 'permission_id'),
2939 2945 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2940 2946 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2941 2947 )
2942 2948 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2943 2949 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2944 2950 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2945 2951
2946 2952 user = relationship('User')
2947 2953 permission = relationship('Permission', lazy='joined')
2948 2954
2949 2955 def __unicode__(self):
2950 2956 return u'<%s => %s >' % (self.user, self.permission)
2951 2957
2952 2958
2953 2959 class UserGroupRepoToPerm(Base, BaseModel):
2954 2960 __tablename__ = 'users_group_repo_to_perm'
2955 2961 __table_args__ = (
2956 2962 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
2957 2963 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2958 2964 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2959 2965 )
2960 2966 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2961 2967 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2962 2968 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2963 2969 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2964 2970
2965 2971 users_group = relationship('UserGroup')
2966 2972 permission = relationship('Permission')
2967 2973 repository = relationship('Repository')
2968 2974
2969 2975 @classmethod
2970 2976 def create(cls, users_group, repository, permission):
2971 2977 n = cls()
2972 2978 n.users_group = users_group
2973 2979 n.repository = repository
2974 2980 n.permission = permission
2975 2981 Session().add(n)
2976 2982 return n
2977 2983
2978 2984 def __unicode__(self):
2979 2985 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2980 2986
2981 2987
2982 2988 class UserGroupUserGroupToPerm(Base, BaseModel):
2983 2989 __tablename__ = 'user_group_user_group_to_perm'
2984 2990 __table_args__ = (
2985 2991 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
2986 2992 CheckConstraint('target_user_group_id != user_group_id'),
2987 2993 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2988 2994 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2989 2995 )
2990 2996 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2991 2997 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2992 2998 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2993 2999 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2994 3000
2995 3001 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
2996 3002 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
2997 3003 permission = relationship('Permission')
2998 3004
2999 3005 @classmethod
3000 3006 def create(cls, target_user_group, user_group, permission):
3001 3007 n = cls()
3002 3008 n.target_user_group = target_user_group
3003 3009 n.user_group = user_group
3004 3010 n.permission = permission
3005 3011 Session().add(n)
3006 3012 return n
3007 3013
3008 3014 def __unicode__(self):
3009 3015 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3010 3016
3011 3017
3012 3018 class UserGroupToPerm(Base, BaseModel):
3013 3019 __tablename__ = 'users_group_to_perm'
3014 3020 __table_args__ = (
3015 3021 UniqueConstraint('users_group_id', 'permission_id',),
3016 3022 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3017 3023 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3018 3024 )
3019 3025 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3020 3026 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3021 3027 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3022 3028
3023 3029 users_group = relationship('UserGroup')
3024 3030 permission = relationship('Permission')
3025 3031
3026 3032
3027 3033 class UserRepoGroupToPerm(Base, BaseModel):
3028 3034 __tablename__ = 'user_repo_group_to_perm'
3029 3035 __table_args__ = (
3030 3036 UniqueConstraint('user_id', 'group_id', 'permission_id'),
3031 3037 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3032 3038 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3033 3039 )
3034 3040
3035 3041 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3036 3042 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3037 3043 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3038 3044 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3039 3045
3040 3046 user = relationship('User')
3041 3047 group = relationship('RepoGroup')
3042 3048 permission = relationship('Permission')
3043 3049
3044 3050 @classmethod
3045 3051 def create(cls, user, repository_group, permission):
3046 3052 n = cls()
3047 3053 n.user = user
3048 3054 n.group = repository_group
3049 3055 n.permission = permission
3050 3056 Session().add(n)
3051 3057 return n
3052 3058
3053 3059
3054 3060 class UserGroupRepoGroupToPerm(Base, BaseModel):
3055 3061 __tablename__ = 'users_group_repo_group_to_perm'
3056 3062 __table_args__ = (
3057 3063 UniqueConstraint('users_group_id', 'group_id'),
3058 3064 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3059 3065 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3060 3066 )
3061 3067
3062 3068 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3063 3069 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3064 3070 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3065 3071 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3066 3072
3067 3073 users_group = relationship('UserGroup')
3068 3074 permission = relationship('Permission')
3069 3075 group = relationship('RepoGroup')
3070 3076
3071 3077 @classmethod
3072 3078 def create(cls, user_group, repository_group, permission):
3073 3079 n = cls()
3074 3080 n.users_group = user_group
3075 3081 n.group = repository_group
3076 3082 n.permission = permission
3077 3083 Session().add(n)
3078 3084 return n
3079 3085
3080 3086 def __unicode__(self):
3081 3087 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3082 3088
3083 3089
3084 3090 class Statistics(Base, BaseModel):
3085 3091 __tablename__ = 'statistics'
3086 3092 __table_args__ = (
3087 3093 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3088 3094 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3089 3095 )
3090 3096 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3091 3097 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3092 3098 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3093 3099 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
3094 3100 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
3095 3101 languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data
3096 3102
3097 3103 repository = relationship('Repository', single_parent=True)
3098 3104
3099 3105
3100 3106 class UserFollowing(Base, BaseModel):
3101 3107 __tablename__ = 'user_followings'
3102 3108 __table_args__ = (
3103 3109 UniqueConstraint('user_id', 'follows_repository_id'),
3104 3110 UniqueConstraint('user_id', 'follows_user_id'),
3105 3111 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3106 3112 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3107 3113 )
3108 3114
3109 3115 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3110 3116 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3111 3117 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3112 3118 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3113 3119 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3114 3120
3115 3121 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3116 3122
3117 3123 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3118 3124 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3119 3125
3120 3126 @classmethod
3121 3127 def get_repo_followers(cls, repo_id):
3122 3128 return cls.query().filter(cls.follows_repo_id == repo_id)
3123 3129
3124 3130
3125 3131 class CacheKey(Base, BaseModel):
3126 3132 __tablename__ = 'cache_invalidation'
3127 3133 __table_args__ = (
3128 3134 UniqueConstraint('cache_key'),
3129 3135 Index('key_idx', 'cache_key'),
3130 3136 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3131 3137 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3132 3138 )
3133 3139 CACHE_TYPE_ATOM = 'ATOM'
3134 3140 CACHE_TYPE_RSS = 'RSS'
3135 3141 CACHE_TYPE_README = 'README'
3136 3142
3137 3143 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3138 3144 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3139 3145 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3140 3146 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3141 3147
3142 3148 def __init__(self, cache_key, cache_args=''):
3143 3149 self.cache_key = cache_key
3144 3150 self.cache_args = cache_args
3145 3151 self.cache_active = False
3146 3152
3147 3153 def __unicode__(self):
3148 3154 return u"<%s('%s:%s[%s]')>" % (
3149 3155 self.__class__.__name__,
3150 3156 self.cache_id, self.cache_key, self.cache_active)
3151 3157
3152 3158 def _cache_key_partition(self):
3153 3159 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3154 3160 return prefix, repo_name, suffix
3155 3161
3156 3162 def get_prefix(self):
3157 3163 """
3158 3164 Try to extract the prefix from an existing cache key. The key may
3159 3165 consist of a prefix, repo_name and suffix
3160 3166 """
3161 3167 # this returns prefix, repo_name, suffix
3162 3168 return self._cache_key_partition()[0]
3163 3169
3164 3170 def get_suffix(self):
3165 3171 """
3166 3172 Get the suffix that might have been used in _get_cache_key to
3167 3173 generate self.cache_key. Only used for informational purposes
3168 3174 in repo_edit.mako.
3169 3175 """
3170 3176 # prefix, repo_name, suffix
3171 3177 return self._cache_key_partition()[2]
3172 3178
3173 3179 @classmethod
3174 3180 def delete_all_cache(cls):
3175 3181 """
3176 3182 Delete all cache keys from database.
3177 3183 Should only be run when all instances are down and all entries
3178 3184 thus stale.
3179 3185 """
3180 3186 cls.query().delete()
3181 3187 Session().commit()
3182 3188
3183 3189 @classmethod
3184 3190 def get_cache_key(cls, repo_name, cache_type):
3185 3191 """
3186 3192
3187 3193 Generate a cache key for this RhodeCode instance.
3188 3194 The prefix will most likely be the process id, or an explicitly set
3189 3195 instance_id from the .ini file.
3190 3196 """
3191 3197 import rhodecode
3192 3198 prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')
3193 3199
3194 3200 repo_as_unicode = safe_unicode(repo_name)
3195 3201 key = u'{}_{}'.format(repo_as_unicode, cache_type) \
3196 3202 if cache_type else repo_as_unicode
3197 3203
3198 3204 return u'{}{}'.format(prefix, key)
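# Usage sketch, assuming instance_id = 'prod1' is set in the .ini file:
#
#   CacheKey.get_cache_key('some/repo', CacheKey.CACHE_TYPE_README)
#   # -> u'prod1some/repo_README'
#   CacheKey.get_cache_key('some/repo', None)
#   # -> u'prod1some/repo'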
3199 3205
3200 3206 @classmethod
3201 3207 def set_invalidate(cls, repo_name, delete=False):
3202 3208 """
3203 3209 Mark all caches of a repo as invalid in the database.
3204 3210 """
3205 3211
3206 3212 try:
3207 3213 qry = Session().query(cls).filter(cls.cache_args == repo_name)
3208 3214 if delete:
3209 3215 log.debug('cache objects deleted for repo %s',
3210 3216 safe_str(repo_name))
3211 3217 qry.delete()
3212 3218 else:
3213 3219 log.debug('cache objects marked as invalid for repo %s',
3214 3220 safe_str(repo_name))
3215 3221 qry.update({"cache_active": False})
3216 3222
3217 3223 Session().commit()
3218 3224 except Exception:
3219 3225 log.exception(
3220 3226 'Cache key invalidation failed for repository %s',
3221 3227 safe_str(repo_name))
3222 3228 Session().rollback()
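# Usage sketch: CacheKey.set_invalidate('some/repo') marks every cache key
# of that repository as inactive; with delete=True the matching rows are
# removed from the database instead.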
3223 3229
3224 3230 @classmethod
3225 3231 def get_active_cache(cls, cache_key):
3226 3232 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3227 3233 if inv_obj:
3228 3234 return inv_obj
3229 3235 return None
3230 3236
3231 3237 @classmethod
3232 3238 def repo_context_cache(cls, compute_func, repo_name, cache_type,
3233 3239 thread_scoped=False):
3234 3240 """
3235 3241 @cache_region('long_term')
3236 3242 def _heavy_calculation(cache_key):
3237 3243 return 'result'
3238 3244
3239 3245 cache_context = CacheKey.repo_context_cache(
3240 3246 _heavy_calculation, repo_name, cache_type)
3241 3247
3242 3248 with cache_context as context:
3243 3249 context.invalidate()
3244 3250 computed = context.compute()
3245 3251
3246 3252 assert computed == 'result'
3247 3253 """
3248 3254 from rhodecode.lib import caches
3249 3255 return caches.InvalidationContext(
3250 3256 compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
3251 3257
3252 3258
3253 3259 class ChangesetComment(Base, BaseModel):
3254 3260 __tablename__ = 'changeset_comments'
3255 3261 __table_args__ = (
3256 3262 Index('cc_revision_idx', 'revision'),
3257 3263 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3258 3264 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3259 3265 )
3260 3266
3261 3267 COMMENT_OUTDATED = u'comment_outdated'
3262 3268 COMMENT_TYPE_NOTE = u'note'
3263 3269 COMMENT_TYPE_TODO = u'todo'
3264 3270 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3265 3271
3266 3272 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3267 3273 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3268 3274 revision = Column('revision', String(40), nullable=True)
3269 3275 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3270 3276 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3271 3277 line_no = Column('line_no', Unicode(10), nullable=True)
3272 3278 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3273 3279 f_path = Column('f_path', Unicode(1000), nullable=True)
3274 3280 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3275 3281 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3276 3282 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3277 3283 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3278 3284 renderer = Column('renderer', Unicode(64), nullable=True)
3279 3285 display_state = Column('display_state', Unicode(128), nullable=True)
3280 3286
3281 3287 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3282 3288 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3283 3289 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
3284 3290 author = relationship('User', lazy='joined')
3285 3291 repo = relationship('Repository')
3286 3292 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
3287 3293 pull_request = relationship('PullRequest', lazy='joined')
3288 3294 pull_request_version = relationship('PullRequestVersion')
3289 3295
3290 3296 @classmethod
3291 3297 def get_users(cls, revision=None, pull_request_id=None):
3292 3298 """
3293 3299 Returns the users associated with this ChangesetComment, i.e. those
3294 3300 who actually commented
3295 3301
3296 3302 :param cls:
3297 3303 :param revision:
3298 3304 """
3299 3305 q = Session().query(User)\
3300 3306 .join(ChangesetComment.author)
3301 3307 if revision:
3302 3308 q = q.filter(cls.revision == revision)
3303 3309 elif pull_request_id:
3304 3310 q = q.filter(cls.pull_request_id == pull_request_id)
3305 3311 return q.all()
3306 3312
3307 3313 @classmethod
3308 3314 def get_index_from_version(cls, pr_version, versions):
3309 3315 num_versions = [x.pull_request_version_id for x in versions]
3310 3316 try:
3311 3317 return num_versions.index(pr_version) + 1
3312 3318 except (IndexError, ValueError):
3313 3319 return
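# Usage sketch: for versions whose pull_request_version_id values are
# [10, 12, 15], get_index_from_version(12, versions) returns the 1-based
# index 2; an unknown id returns None.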
3314 3320
3315 3321 @property
3316 3322 def outdated(self):
3317 3323 return self.display_state == self.COMMENT_OUTDATED
3318 3324
3319 3325 def outdated_at_version(self, version):
3320 3326 """
3321 3327 Checks if the comment is outdated for the given pull request version
3322 3328 """
3323 3329 return self.outdated and self.pull_request_version_id != version
3324 3330
3325 3331 def older_than_version(self, version):
3326 3332 """
3327 3333 Checks if the comment was made against an earlier version than the given one
3328 3334 """
3329 3335 if version is None:
3330 3336 return self.pull_request_version_id is not None
3331 3337
3332 3338 return self.pull_request_version_id < version
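# Example: a comment bound to version 2 satisfies older_than_version(3) but
# not older_than_version(1); older_than_version(None) is True only when the
# comment belongs to some recorded (i.e. older) version at all.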
3333 3339
3334 3340 @property
3335 3341 def resolved(self):
3336 3342 return self.resolved_by[0] if self.resolved_by else None
3337 3343
3338 3344 @property
3339 3345 def is_todo(self):
3340 3346 return self.comment_type == self.COMMENT_TYPE_TODO
3341 3347
3342 3348 @property
3343 3349 def is_inline(self):
3344 3350 return self.line_no and self.f_path
3345 3351
3346 3352 def get_index_version(self, versions):
3347 3353 return self.get_index_from_version(
3348 3354 self.pull_request_version_id, versions)
3349 3355
3350 3356 def __repr__(self):
3351 3357 if self.comment_id:
3352 3358 return '<DB:Comment #%s>' % self.comment_id
3353 3359 else:
3354 3360 return '<DB:Comment at %#x>' % id(self)
3355 3361
3356 3362 def get_api_data(self):
3357 3363 comment = self
3358 3364 data = {
3359 3365 'comment_id': comment.comment_id,
3360 3366 'comment_type': comment.comment_type,
3361 3367 'comment_text': comment.text,
3362 3368 'comment_status': comment.status_change,
3363 3369 'comment_f_path': comment.f_path,
3364 3370 'comment_lineno': comment.line_no,
3365 3371 'comment_author': comment.author,
3366 3372 'comment_created_on': comment.created_on
3367 3373 }
3368 3374 return data
3369 3375
3370 3376 def __json__(self):
3371 3377 data = dict()
3372 3378 data.update(self.get_api_data())
3373 3379 return data
3374 3380
3375 3381
3376 3382 class ChangesetStatus(Base, BaseModel):
3377 3383 __tablename__ = 'changeset_statuses'
3378 3384 __table_args__ = (
3379 3385 Index('cs_revision_idx', 'revision'),
3380 3386 Index('cs_version_idx', 'version'),
3381 3387 UniqueConstraint('repo_id', 'revision', 'version'),
3382 3388 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3383 3389 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3384 3390 )
3385 3391 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3386 3392 STATUS_APPROVED = 'approved'
3387 3393 STATUS_REJECTED = 'rejected'
3388 3394 STATUS_UNDER_REVIEW = 'under_review'
3389 3395
3390 3396 STATUSES = [
3391 3397 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3392 3398 (STATUS_APPROVED, _("Approved")),
3393 3399 (STATUS_REJECTED, _("Rejected")),
3394 3400 (STATUS_UNDER_REVIEW, _("Under Review")),
3395 3401 ]
3396 3402
3397 3403 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3398 3404 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3399 3405 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3400 3406 revision = Column('revision', String(40), nullable=False)
3401 3407 status = Column('status', String(128), nullable=False, default=DEFAULT)
3402 3408 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3403 3409 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3404 3410 version = Column('version', Integer(), nullable=False, default=0)
3405 3411 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3406 3412
3407 3413 author = relationship('User', lazy='joined')
3408 3414 repo = relationship('Repository')
3409 3415 comment = relationship('ChangesetComment', lazy='joined')
3410 3416 pull_request = relationship('PullRequest', lazy='joined')
3411 3417
3412 3418 def __unicode__(self):
3413 3419 return u"<%s('%s[v%s]:%s')>" % (
3414 3420 self.__class__.__name__,
3415 3421 self.status, self.version, self.author
3416 3422 )
3417 3423
3418 3424 @classmethod
3419 3425 def get_status_lbl(cls, value):
3420 3426 return dict(cls.STATUSES).get(value)
3421 3427
3422 3428 @property
3423 3429 def status_lbl(self):
3424 3430 return ChangesetStatus.get_status_lbl(self.status)
3425 3431
3426 3432 def get_api_data(self):
3427 3433 status = self
3428 3434 data = {
3429 3435 'status_id': status.changeset_status_id,
3430 3436 'status': status.status,
3431 3437 }
3432 3438 return data
3433 3439
3434 3440 def __json__(self):
3435 3441 data = dict()
3436 3442 data.update(self.get_api_data())
3437 3443 return data
3438 3444
3439 3445
3440 3446 class _PullRequestBase(BaseModel):
3441 3447 """
3442 3448 Common attributes of pull request and version entries.
3443 3449 """
3444 3450
3445 3451 # .status values
3446 3452 STATUS_NEW = u'new'
3447 3453 STATUS_OPEN = u'open'
3448 3454 STATUS_CLOSED = u'closed'
3449 3455
3450 3456 title = Column('title', Unicode(255), nullable=True)
3451 3457 description = Column(
3452 3458 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3453 3459 nullable=True)
3454 3460 # new/open/closed status of pull request (not approve/reject/etc)
3455 3461 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3456 3462 created_on = Column(
3457 3463 'created_on', DateTime(timezone=False), nullable=False,
3458 3464 default=datetime.datetime.now)
3459 3465 updated_on = Column(
3460 3466 'updated_on', DateTime(timezone=False), nullable=False,
3461 3467 default=datetime.datetime.now)
3462 3468
3463 3469 @declared_attr
3464 3470 def user_id(cls):
3465 3471 return Column(
3466 3472 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3467 3473 unique=None)
3468 3474
3469 3475 # 500 revisions max
3470 3476 _revisions = Column(
3471 3477 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3472 3478
3473 3479 @declared_attr
3474 3480 def source_repo_id(cls):
3475 3481 # TODO: dan: rename column to source_repo_id
3476 3482 return Column(
3477 3483 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3478 3484 nullable=False)
3479 3485
3480 3486 source_ref = Column('org_ref', Unicode(255), nullable=False)
3481 3487
3482 3488 @declared_attr
3483 3489 def target_repo_id(cls):
3484 3490 # TODO: dan: rename column to target_repo_id
3485 3491 return Column(
3486 3492 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3487 3493 nullable=False)
3488 3494
3489 3495 target_ref = Column('other_ref', Unicode(255), nullable=False)
3490 3496 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3491 3497
3492 3498 # TODO: dan: rename column to last_merge_source_rev
3493 3499 _last_merge_source_rev = Column(
3494 3500 'last_merge_org_rev', String(40), nullable=True)
3495 3501 # TODO: dan: rename column to last_merge_target_rev
3496 3502 _last_merge_target_rev = Column(
3497 3503 'last_merge_other_rev', String(40), nullable=True)
3498 3504 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3499 3505 merge_rev = Column('merge_rev', String(40), nullable=True)
3500 3506
3501 3507 reviewer_data = Column(
3502 3508 'reviewer_data_json', MutationObj.as_mutable(
3503 3509 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3504 3510
3505 3511 @property
3506 3512 def reviewer_data_json(self):
3507 3513 return json.dumps(self.reviewer_data)
3508 3514
3509 3515 @hybrid_property
3510 3516 def description_safe(self):
3511 3517 from rhodecode.lib import helpers as h
3512 3518 return h.escape(self.description)
3513 3519
3514 3520 @hybrid_property
3515 3521 def revisions(self):
3516 3522 return self._revisions.split(':') if self._revisions else []
3517 3523
3518 3524 @revisions.setter
3519 3525 def revisions(self, val):
3520 3526 self._revisions = ':'.join(val)
3521 3527
3522 3528 @hybrid_property
3523 3529 def last_merge_status(self):
3524 3530 return safe_int(self._last_merge_status)
3525 3531
3526 3532 @last_merge_status.setter
3527 3533 def last_merge_status(self, val):
3528 3534 self._last_merge_status = val
3529 3535
3530 3536 @declared_attr
3531 3537 def author(cls):
3532 3538 return relationship('User', lazy='joined')
3533 3539
3534 3540 @declared_attr
3535 3541 def source_repo(cls):
3536 3542 return relationship(
3537 3543 'Repository',
3538 3544 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3539 3545
3540 3546 @property
3541 3547 def source_ref_parts(self):
3542 3548 return self.unicode_to_reference(self.source_ref)
3543 3549
3544 3550 @declared_attr
3545 3551 def target_repo(cls):
3546 3552 return relationship(
3547 3553 'Repository',
3548 3554 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3549 3555
3550 3556 @property
3551 3557 def target_ref_parts(self):
3552 3558 return self.unicode_to_reference(self.target_ref)
3553 3559
3554 3560 @property
3555 3561 def shadow_merge_ref(self):
3556 3562 return self.unicode_to_reference(self._shadow_merge_ref)
3557 3563
3558 3564 @shadow_merge_ref.setter
3559 3565 def shadow_merge_ref(self, ref):
3560 3566 self._shadow_merge_ref = self.reference_to_unicode(ref)
3561 3567
3562 3568 def unicode_to_reference(self, raw):
3563 3569 """
3564 3570 Convert a unicode (or string) value to a reference object.
3565 3571 If the value evaluates to False, it returns None.
3566 3572 """
3567 3573 if raw:
3568 3574 refs = raw.split(':')
3569 3575 return Reference(*refs)
3570 3576 else:
3571 3577 return None
3572 3578
3573 3579 def reference_to_unicode(self, ref):
3574 3580 """
3575 3581 Convert a reference object to unicode.
3576 3582 If the reference is None, it returns None.
3577 3583 """
3578 3584 if ref:
3579 3585 return u':'.join(ref)
3580 3586 else:
3581 3587 return None
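# Round-trip sketch with a hypothetical value: u'branch:default:abc123'
# becomes Reference('branch', 'default', 'abc123') via unicode_to_reference,
# and reference_to_unicode turns that Reference back into the same string;
# falsy input maps to None in both directions.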
3582 3588
3583 3589 def get_api_data(self, with_merge_state=True):
3584 3590 from rhodecode.model.pull_request import PullRequestModel
3585 3591
3586 3592 pull_request = self
3587 3593 if with_merge_state:
3588 3594 merge_status = PullRequestModel().merge_status(pull_request)
3589 3595 merge_state = {
3590 3596 'status': merge_status[0],
3591 3597 'message': safe_unicode(merge_status[1]),
3592 3598 }
3593 3599 else:
3594 3600 merge_state = {'status': 'not_available',
3595 3601 'message': 'not_available'}
3596 3602
3597 3603 merge_data = {
3598 3604 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3599 3605 'reference': (
3600 3606 pull_request.shadow_merge_ref._asdict()
3601 3607 if pull_request.shadow_merge_ref else None),
3602 3608 }
3603 3609
3604 3610 data = {
3605 3611 'pull_request_id': pull_request.pull_request_id,
3606 3612 'url': PullRequestModel().get_url(pull_request),
3607 3613 'title': pull_request.title,
3608 3614 'description': pull_request.description,
3609 3615 'status': pull_request.status,
3610 3616 'created_on': pull_request.created_on,
3611 3617 'updated_on': pull_request.updated_on,
3612 3618 'commit_ids': pull_request.revisions,
3613 3619 'review_status': pull_request.calculated_review_status(),
3614 3620 'mergeable': merge_state,
3615 3621 'source': {
3616 3622 'clone_url': pull_request.source_repo.clone_url(),
3617 3623 'repository': pull_request.source_repo.repo_name,
3618 3624 'reference': {
3619 3625 'name': pull_request.source_ref_parts.name,
3620 3626 'type': pull_request.source_ref_parts.type,
3621 3627 'commit_id': pull_request.source_ref_parts.commit_id,
3622 3628 },
3623 3629 },
3624 3630 'target': {
3625 3631 'clone_url': pull_request.target_repo.clone_url(),
3626 3632 'repository': pull_request.target_repo.repo_name,
3627 3633 'reference': {
3628 3634 'name': pull_request.target_ref_parts.name,
3629 3635 'type': pull_request.target_ref_parts.type,
3630 3636 'commit_id': pull_request.target_ref_parts.commit_id,
3631 3637 },
3632 3638 },
3633 3639 'merge': merge_data,
3634 3640 'author': pull_request.author.get_api_data(include_secrets=False,
3635 3641 details='basic'),
3636 3642 'reviewers': [
3637 3643 {
3638 3644 'user': reviewer.get_api_data(include_secrets=False,
3639 3645 details='basic'),
3640 3646 'reasons': reasons,
3641 3647 'review_status': st[0][1].status if st else 'not_reviewed',
3642 3648 }
3643 3649 for obj, reviewer, reasons, mandatory, st in
3644 3650 pull_request.reviewers_statuses()
3645 3651 ]
3646 3652 }
3647 3653
3648 3654 return data
3649 3655
3650 3656
3651 3657 class PullRequest(Base, _PullRequestBase):
3652 3658 __tablename__ = 'pull_requests'
3653 3659 __table_args__ = (
3654 3660 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3655 3661 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3656 3662 )
3657 3663
3658 3664 pull_request_id = Column(
3659 3665 'pull_request_id', Integer(), nullable=False, primary_key=True)
3660 3666
3661 3667 def __repr__(self):
3662 3668 if self.pull_request_id:
3663 3669 return '<DB:PullRequest #%s>' % self.pull_request_id
3664 3670 else:
3665 3671 return '<DB:PullRequest at %#x>' % id(self)
3666 3672
3667 3673 reviewers = relationship('PullRequestReviewers',
3668 3674 cascade="all, delete, delete-orphan")
3669 3675 statuses = relationship('ChangesetStatus',
3670 3676 cascade="all, delete, delete-orphan")
3671 3677 comments = relationship('ChangesetComment',
3672 3678 cascade="all, delete, delete-orphan")
3673 3679 versions = relationship('PullRequestVersion',
3674 3680 cascade="all, delete, delete-orphan",
3675 3681 lazy='dynamic')
3676 3682
3677 3683 @classmethod
3678 3684 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
3679 3685 internal_methods=None):
3680 3686
3681 3687 class PullRequestDisplay(object):
3682 3688 """
3683 3689 Special object wrapper for showing PullRequest data via Versions.
3684 3690 It mimics the PR object as closely as possible. This is a read-only
3685 3691 object, intended for display only
3686 3692 """
3687 3693
3688 3694 def __init__(self, attrs, internal=None):
3689 3695 self.attrs = attrs
3690 3696 # internal attributes have priority over the ones given via attrs
3691 3697 self.internal = internal or ['versions']
3692 3698
3693 3699 def __getattr__(self, item):
3694 3700 if item in self.internal:
3695 3701 return getattr(self, item)
3696 3702 try:
3697 3703 return self.attrs[item]
3698 3704 except KeyError:
3699 3705 raise AttributeError(
3700 3706 '%s object has no attribute %s' % (self, item))
3701 3707
3702 3708 def __repr__(self):
3703 3709 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
3704 3710
3705 3711 def versions(self):
3706 3712 return pull_request_obj.versions.order_by(
3707 3713 PullRequestVersion.pull_request_version_id).all()
3708 3714
3709 3715 def is_closed(self):
3710 3716 return pull_request_obj.is_closed()
3711 3717
3712 3718 @property
3713 3719 def pull_request_version_id(self):
3714 3720 return getattr(pull_request_obj, 'pull_request_version_id', None)
3715 3721
3716 3722 attrs = StrictAttributeDict(pull_request_obj.get_api_data())
3717 3723
3718 3724 attrs.author = StrictAttributeDict(
3719 3725 pull_request_obj.author.get_api_data())
3720 3726 if pull_request_obj.target_repo:
3721 3727 attrs.target_repo = StrictAttributeDict(
3722 3728 pull_request_obj.target_repo.get_api_data())
3723 3729 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
3724 3730
3725 3731 if pull_request_obj.source_repo:
3726 3732 attrs.source_repo = StrictAttributeDict(
3727 3733 pull_request_obj.source_repo.get_api_data())
3728 3734 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
3729 3735
3730 3736 attrs.source_ref_parts = pull_request_obj.source_ref_parts
3731 3737 attrs.target_ref_parts = pull_request_obj.target_ref_parts
3732 3738 attrs.revisions = pull_request_obj.revisions
3733 3739
3734 3740 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
3735 3741 attrs.reviewer_data = org_pull_request_obj.reviewer_data
3736 3742 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
3737 3743
3738 3744 return PullRequestDisplay(attrs, internal=internal_methods)
3739 3745
3740 3746 def is_closed(self):
3741 3747 return self.status == self.STATUS_CLOSED
3742 3748
3743 3749 def __json__(self):
3744 3750 return {
3745 3751 'revisions': self.revisions,
3746 3752 }
3747 3753
3748 3754 def calculated_review_status(self):
3749 3755 from rhodecode.model.changeset_status import ChangesetStatusModel
3750 3756 return ChangesetStatusModel().calculated_review_status(self)
3751 3757
3752 3758 def reviewers_statuses(self):
3753 3759 from rhodecode.model.changeset_status import ChangesetStatusModel
3754 3760 return ChangesetStatusModel().reviewers_statuses(self)
3755 3761
3756 3762 @property
3757 3763 def workspace_id(self):
3758 3764 from rhodecode.model.pull_request import PullRequestModel
3759 3765 return PullRequestModel()._workspace_id(self)
3760 3766
3761 3767 def get_shadow_repo(self):
3762 3768 workspace_id = self.workspace_id
3763 3769 vcs_obj = self.target_repo.scm_instance()
3764 3770 shadow_repository_path = vcs_obj._get_shadow_repository_path(
3765 workspace_id)
3771 self.target_repo.repo_id, workspace_id)
3766 3772 if os.path.isdir(shadow_repository_path):
3767 3773 return vcs_obj._get_shadow_instance(shadow_repository_path)
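# Returns a vcs instance for the pull request's shadow repository when its
# directory already exists on disk; implicitly returns None otherwise.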
3768 3774
3769 3775
3770 3776 class PullRequestVersion(Base, _PullRequestBase):
3771 3777 __tablename__ = 'pull_request_versions'
3772 3778 __table_args__ = (
3773 3779 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3774 3780 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3775 3781 )
3776 3782
3777 3783 pull_request_version_id = Column(
3778 3784 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3779 3785 pull_request_id = Column(
3780 3786 'pull_request_id', Integer(),
3781 3787 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3782 3788 pull_request = relationship('PullRequest')
3783 3789
3784 3790 def __repr__(self):
3785 3791 if self.pull_request_version_id:
3786 3792 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3787 3793 else:
3788 3794 return '<DB:PullRequestVersion at %#x>' % id(self)
3789 3795
3790 3796 @property
3791 3797 def reviewers(self):
3792 3798 return self.pull_request.reviewers
3793 3799
3794 3800 @property
3795 3801 def versions(self):
3796 3802 return self.pull_request.versions
3797 3803
3798 3804 def is_closed(self):
3799 3805 # calculate from original
3800 3806 return self.pull_request.status == self.STATUS_CLOSED
3801 3807
3802 3808 def calculated_review_status(self):
3803 3809 return self.pull_request.calculated_review_status()
3804 3810
3805 3811 def reviewers_statuses(self):
3806 3812 return self.pull_request.reviewers_statuses()
3807 3813
3808 3814
3809 3815 class PullRequestReviewers(Base, BaseModel):
3810 3816 __tablename__ = 'pull_request_reviewers'
3811 3817 __table_args__ = (
3812 3818 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3813 3819 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3814 3820 )
3815 3821
3816 3822 @hybrid_property
3817 3823 def reasons(self):
3818 3824 if not self._reasons:
3819 3825 return []
3820 3826 return self._reasons
3821 3827
3822 3828 @reasons.setter
3823 3829 def reasons(self, val):
3824 3830 val = val or []
3825 3831 if any(not isinstance(x, basestring) for x in val):
3826 3832 raise Exception('invalid reasons type, must be list of strings')
3827 3833 self._reasons = val
3828 3834
3829 3835 pull_requests_reviewers_id = Column(
3830 3836 'pull_requests_reviewers_id', Integer(), nullable=False,
3831 3837 primary_key=True)
3832 3838 pull_request_id = Column(
3833 3839 "pull_request_id", Integer(),
3834 3840 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3835 3841 user_id = Column(
3836 3842 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3837 3843 _reasons = Column(
3838 3844 'reason', MutationList.as_mutable(
3839 3845 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
3840 3846
3841 3847 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3842 3848 user = relationship('User')
3843 3849 pull_request = relationship('PullRequest')
3844 3850
3845 3851 rule_data = Column(
3846 3852 'rule_data_json',
3847 3853 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
3848 3854
3849 3855 def rule_user_group_data(self):
3850 3856 """
3851 3857 Returns the voting user group rule data for this reviewer
3852 3858 """
3853 3859
3854 3860 if self.rule_data and 'vote_rule' in self.rule_data:
3855 3861 user_group_data = {}
3856 3862 if 'rule_user_group_entry_id' in self.rule_data:
3857 3863 # means a group with voting rules!
3858 3864 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
3859 3865 user_group_data['name'] = self.rule_data['rule_name']
3860 3866 user_group_data['vote_rule'] = self.rule_data['vote_rule']
3861 3867
3862 3868 return user_group_data
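# Sketch of the possible results: a reviewer added through a user-group
# voting rule yields {'id': ..., 'name': ..., 'vote_rule': ...}; a vote_rule
# without a group entry yields an empty dict; no vote_rule yields None.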
3863 3869
3864 3870 def __unicode__(self):
3865 3871 return u"<%s('id:%s')>" % (self.__class__.__name__,
3866 3872 self.pull_requests_reviewers_id)
3867 3873
3868 3874
3869 3875 class Notification(Base, BaseModel):
3870 3876 __tablename__ = 'notifications'
3871 3877 __table_args__ = (
3872 3878 Index('notification_type_idx', 'type'),
3873 3879 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3874 3880 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3875 3881 )
3876 3882
3877 3883 TYPE_CHANGESET_COMMENT = u'cs_comment'
3878 3884 TYPE_MESSAGE = u'message'
3879 3885 TYPE_MENTION = u'mention'
3880 3886 TYPE_REGISTRATION = u'registration'
3881 3887 TYPE_PULL_REQUEST = u'pull_request'
3882 3888 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3883 3889
3884 3890 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3885 3891 subject = Column('subject', Unicode(512), nullable=True)
3886 3892 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3887 3893 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3888 3894 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3889 3895 type_ = Column('type', Unicode(255))
3890 3896
3891 3897 created_by_user = relationship('User')
3892 3898 notifications_to_users = relationship('UserNotification', lazy='joined',
3893 3899 cascade="all, delete, delete-orphan")
3894 3900
3895 3901 @property
3896 3902 def recipients(self):
3897 3903 return [x.user for x in UserNotification.query()\
3898 3904 .filter(UserNotification.notification == self)\
3899 3905 .order_by(UserNotification.user_id.asc()).all()]
3900 3906
3901 3907 @classmethod
3902 3908 def create(cls, created_by, subject, body, recipients, type_=None):
3903 3909 if type_ is None:
3904 3910 type_ = Notification.TYPE_MESSAGE
3905 3911
3906 3912 notification = cls()
3907 3913 notification.created_by_user = created_by
3908 3914 notification.subject = subject
3909 3915 notification.body = body
3910 3916 notification.type_ = type_
3911 3917 notification.created_on = datetime.datetime.now()
3912 3918
3913 3919 for u in recipients:
3914 3920 assoc = UserNotification()
3915 3921 assoc.notification = notification
3916 3922
3917 3923 # if created_by is among the recipients, mark their notification
3918 3924 # as read
3919 3925 if u.user_id == created_by.user_id:
3920 3926 assoc.read = True
3921 3927
3922 3928 u.notifications.append(assoc)
3923 3929 Session().add(notification)
3924 3930
3925 3931 return notification
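# Usage sketch with hypothetical objects:
#
#   notification = Notification.create(
#       created_by=admin_user, subject=u'Build failed',
#       body=u'See the CI output', recipients=[user_a, user_b])
#   Session().commit()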
3926 3932
3927 3933
3928 3934 class UserNotification(Base, BaseModel):
3929 3935 __tablename__ = 'user_to_notification'
3930 3936 __table_args__ = (
3931 3937 UniqueConstraint('user_id', 'notification_id'),
3932 3938 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3933 3939 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3934 3940 )
3935 3941 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
3936 3942 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
3937 3943 read = Column('read', Boolean, default=False)
3938 3944 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
3939 3945
3940 3946 user = relationship('User', lazy="joined")
3941 3947 notification = relationship('Notification', lazy="joined",
3942 3948 order_by=lambda: Notification.created_on.desc(),)
3943 3949
3944 3950 def mark_as_read(self):
3945 3951 self.read = True
3946 3952 Session().add(self)
3947 3953
3948 3954
3949 3955 class Gist(Base, BaseModel):
3950 3956 __tablename__ = 'gists'
3951 3957 __table_args__ = (
3952 3958 Index('g_gist_access_id_idx', 'gist_access_id'),
3953 3959 Index('g_created_on_idx', 'created_on'),
3954 3960 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3955 3961 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3956 3962 )
3957 3963 GIST_PUBLIC = u'public'
3958 3964 GIST_PRIVATE = u'private'
3959 3965 DEFAULT_FILENAME = u'gistfile1.txt'
3960 3966
3961 3967 ACL_LEVEL_PUBLIC = u'acl_public'
3962 3968 ACL_LEVEL_PRIVATE = u'acl_private'
3963 3969
3964 3970 gist_id = Column('gist_id', Integer(), primary_key=True)
3965 3971 gist_access_id = Column('gist_access_id', Unicode(250))
3966 3972 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
3967 3973 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
3968 3974 gist_expires = Column('gist_expires', Float(53), nullable=False)
3969 3975 gist_type = Column('gist_type', Unicode(128), nullable=False)
3970 3976 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3971 3977 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3972 3978 acl_level = Column('acl_level', Unicode(128), nullable=True)
3973 3979
3974 3980 owner = relationship('User')
3975 3981
3976 3982 def __repr__(self):
3977 3983 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
3978 3984
3979 3985 @hybrid_property
3980 3986 def description_safe(self):
3981 3987 from rhodecode.lib import helpers as h
3982 3988 return h.escape(self.gist_description)
3983 3989
3984 3990 @classmethod
3985 3991 def get_or_404(cls, id_):
3986 3992 from pyramid.httpexceptions import HTTPNotFound
3987 3993
3988 3994 res = cls.query().filter(cls.gist_access_id == id_).scalar()
3989 3995 if not res:
3990 3996 raise HTTPNotFound()
3991 3997 return res
3992 3998
3993 3999 @classmethod
3994 4000 def get_by_access_id(cls, gist_access_id):
3995 4001 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
3996 4002
3997 4003 def gist_url(self):
3998 4004 from rhodecode.model.gist import GistModel
3999 4005 return GistModel().get_url(self)
4000 4006
4001 4007 @classmethod
4002 4008 def base_path(cls):
4003 4009 """
4004 4010         Returns the base path where all gists are stored
4005 4011
4006 4012 :param cls:
4007 4013 """
4008 4014 from rhodecode.model.gist import GIST_STORE_LOC
4009 4015 q = Session().query(RhodeCodeUi)\
4010 4016 .filter(RhodeCodeUi.ui_key == URL_SEP)
4011 4017 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4012 4018 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4013 4019
4014 4020 def get_api_data(self):
4015 4021 """
4016 4022 Common function for generating gist related data for API
4017 4023 """
4018 4024 gist = self
4019 4025 data = {
4020 4026 'gist_id': gist.gist_id,
4021 4027 'type': gist.gist_type,
4022 4028 'access_id': gist.gist_access_id,
4023 4029 'description': gist.gist_description,
4024 4030 'url': gist.gist_url(),
4025 4031 'expires': gist.gist_expires,
4026 4032 'created_on': gist.created_on,
4027 4033 'modified_at': gist.modified_at,
4028 4034 'content': None,
4029 4035 'acl_level': gist.acl_level,
4030 4036 }
4031 4037 return data
4032 4038
4033 4039 def __json__(self):
4034 4040 data = dict(
4035 4041 )
4036 4042 data.update(self.get_api_data())
4037 4043 return data
4038 4044 # SCM functions
4039 4045
4040 4046 def scm_instance(self, **kwargs):
4041 4047 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4042 4048 return get_vcs_instance(
4043 4049 repo_path=safe_str(full_repo_path), create=False)
4044 4050
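# Usage sketch (illustrative only; the access id value is made up):
#
#   gist = Gist.get_by_access_id(u'9b9999aa99')   # returns None when missing
#   gist = Gist.get_or_404(u'9b9999aa99')         # raises HTTPNotFound instead
#   data = gist.get_api_data()                    # dict as built above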
4045 4051
4046 4052 class ExternalIdentity(Base, BaseModel):
4047 4053 __tablename__ = 'external_identities'
4048 4054 __table_args__ = (
4049 4055 Index('local_user_id_idx', 'local_user_id'),
4050 4056 Index('external_id_idx', 'external_id'),
4051 4057 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4052 4058 'mysql_charset': 'utf8'})
4053 4059
4054 4060 external_id = Column('external_id', Unicode(255), default=u'',
4055 4061 primary_key=True)
4056 4062 external_username = Column('external_username', Unicode(1024), default=u'')
4057 4063 local_user_id = Column('local_user_id', Integer(),
4058 4064 ForeignKey('users.user_id'), primary_key=True)
4059 4065 provider_name = Column('provider_name', Unicode(255), default=u'',
4060 4066 primary_key=True)
4061 4067 access_token = Column('access_token', String(1024), default=u'')
4062 4068 alt_token = Column('alt_token', String(1024), default=u'')
4063 4069 token_secret = Column('token_secret', String(1024), default=u'')
4064 4070
4065 4071 @classmethod
4066 4072 def by_external_id_and_provider(cls, external_id, provider_name,
4067 4073 local_user_id=None):
4068 4074 """
4069 4075 Returns ExternalIdentity instance based on search params
4070 4076
4071 4077 :param external_id:
4072 4078 :param provider_name:
4073 4079 :return: ExternalIdentity
4074 4080 """
4075 4081 query = cls.query()
4076 4082 query = query.filter(cls.external_id == external_id)
4077 4083 query = query.filter(cls.provider_name == provider_name)
4078 4084 if local_user_id:
4079 4085 query = query.filter(cls.local_user_id == local_user_id)
4080 4086 return query.first()
4081 4087
4082 4088 @classmethod
4083 4089 def user_by_external_id_and_provider(cls, external_id, provider_name):
4084 4090 """
4085 4091 Returns User instance based on search params
4086 4092
4087 4093 :param external_id:
4088 4094 :param provider_name:
4089 4095 :return: User
4090 4096 """
4091 4097 query = User.query()
4092 4098 query = query.filter(cls.external_id == external_id)
4093 4099 query = query.filter(cls.provider_name == provider_name)
4094 4100 query = query.filter(User.user_id == cls.local_user_id)
4095 4101 return query.first()
4096 4102
4097 4103 @classmethod
4098 4104 def by_local_user_id(cls, local_user_id):
4099 4105 """
4100 4106 Returns all tokens for user
4101 4107
4102 4108 :param local_user_id:
4103 4109 :return: ExternalIdentity
4104 4110 """
4105 4111 query = cls.query()
4106 4112 query = query.filter(cls.local_user_id == local_user_id)
4107 4113 return query
4108 4114
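# Usage sketch (illustrative only; the external id and provider name are
# made-up example values):
#
#   identity = ExternalIdentity.by_external_id_and_provider(
#       external_id=u'12345', provider_name=u'github')
#   user = ExternalIdentity.user_by_external_id_and_provider(
#       external_id=u'12345', provider_name=u'github')
#   tokens = ExternalIdentity.by_local_user_id(user.user_id).all()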
4109 4115
4110 4116 class Integration(Base, BaseModel):
4111 4117 __tablename__ = 'integrations'
4112 4118 __table_args__ = (
4113 4119 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4114 4120 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
4115 4121 )
4116 4122
4117 4123 integration_id = Column('integration_id', Integer(), primary_key=True)
4118 4124 integration_type = Column('integration_type', String(255))
4119 4125 enabled = Column('enabled', Boolean(), nullable=False)
4120 4126 name = Column('name', String(255), nullable=False)
4121 4127 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4122 4128 default=False)
4123 4129
4124 4130 settings = Column(
4125 4131 'settings_json', MutationObj.as_mutable(
4126 4132 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4127 4133 repo_id = Column(
4128 4134 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4129 4135 nullable=True, unique=None, default=None)
4130 4136 repo = relationship('Repository', lazy='joined')
4131 4137
4132 4138 repo_group_id = Column(
4133 4139 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4134 4140 nullable=True, unique=None, default=None)
4135 4141 repo_group = relationship('RepoGroup', lazy='joined')
4136 4142
4137 4143 @property
4138 4144 def scope(self):
4139 4145 if self.repo:
4140 4146 return repr(self.repo)
4141 4147 if self.repo_group:
4142 4148 if self.child_repos_only:
4143 4149 return repr(self.repo_group) + ' (child repos only)'
4144 4150 else:
4145 4151 return repr(self.repo_group) + ' (recursive)'
4146 4152 if self.child_repos_only:
4147 4153 return 'root_repos'
4148 4154 return 'global'
4149 4155
4150 4156 def __repr__(self):
4151 4157 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4152 4158
4153 4159
4154 4160 class RepoReviewRuleUser(Base, BaseModel):
4155 4161 __tablename__ = 'repo_review_rules_users'
4156 4162 __table_args__ = (
4157 4163 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4158 4164 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4159 4165 )
4160 4166
4161 4167 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4162 4168 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4163 4169 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4164 4170 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4165 4171 user = relationship('User')
4166 4172
4167 4173 def rule_data(self):
4168 4174 return {
4169 4175 'mandatory': self.mandatory
4170 4176 }
4171 4177
4172 4178
4173 4179 class RepoReviewRuleUserGroup(Base, BaseModel):
4174 4180 __tablename__ = 'repo_review_rules_users_groups'
4175 4181 __table_args__ = (
4176 4182 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4177 4183 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4178 4184 )
4179 4185 VOTE_RULE_ALL = -1
4180 4186
4181 4187 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4182 4188 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4183 4189 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
4184 4190 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4185 4191 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
4186 4192 users_group = relationship('UserGroup')
4187 4193
4188 4194 def rule_data(self):
4189 4195 return {
4190 4196 'mandatory': self.mandatory,
4191 4197 'vote_rule': self.vote_rule
4192 4198 }
4193 4199
4194 4200 @property
4195 4201 def vote_rule_label(self):
4196 4202 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
4197 4203 return 'all must vote'
4198 4204 else:
4199 4205 return 'min. vote {}'.format(self.vote_rule)
4200 4206
4201 4207
4202 4208 class RepoReviewRule(Base, BaseModel):
4203 4209 __tablename__ = 'repo_review_rules'
4204 4210 __table_args__ = (
4205 4211 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4206 4212 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4207 4213 )
4208 4214
4209 4215 repo_review_rule_id = Column(
4210 4216 'repo_review_rule_id', Integer(), primary_key=True)
4211 4217 repo_id = Column(
4212 4218 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4213 4219 repo = relationship('Repository', backref='review_rules')
4214 4220
4215 4221 review_rule_name = Column('review_rule_name', String(255))
4216 4222 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4217 4223 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4218 4224 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4219 4225
4220 4226 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4221 4227 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4222 4228 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4223 4229 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4224 4230
4225 4231 rule_users = relationship('RepoReviewRuleUser')
4226 4232 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4227 4233
4228 4234 def _validate_glob(self, value):
4229 4235 re.compile('^' + glob2re(value) + '$')
4230 4236
4231 4237 @hybrid_property
4232 4238 def source_branch_pattern(self):
4233 4239 return self._branch_pattern or '*'
4234 4240
4235 4241 @source_branch_pattern.setter
4236 4242 def source_branch_pattern(self, value):
4237 4243 self._validate_glob(value)
4238 4244 self._branch_pattern = value or '*'
4239 4245
4240 4246 @hybrid_property
4241 4247 def target_branch_pattern(self):
4242 4248 return self._target_branch_pattern or '*'
4243 4249
4244 4250 @target_branch_pattern.setter
4245 4251 def target_branch_pattern(self, value):
4246 4252 self._validate_glob(value)
4247 4253 self._target_branch_pattern = value or '*'
4248 4254
4249 4255 @hybrid_property
4250 4256 def file_pattern(self):
4251 4257 return self._file_pattern or '*'
4252 4258
4253 4259 @file_pattern.setter
4254 4260 def file_pattern(self, value):
4255 4261 self._validate_glob(value)
4256 4262 self._file_pattern = value or '*'
4257 4263
4258 4264 def matches(self, source_branch, target_branch, files_changed):
4259 4265 """
4260 4266 Check if this review rule matches a branch/files in a pull request
4261 4267
4262 4268 :param source_branch: source branch name for the commit
4263 4269 :param target_branch: target branch name for the commit
4264 4270 :param files_changed: list of file paths changed in the pull request
4265 4271 """
4266 4272
4267 4273 source_branch = source_branch or ''
4268 4274 target_branch = target_branch or ''
4269 4275 files_changed = files_changed or []
4270 4276
4271 4277 branch_matches = True
4272 4278 if source_branch or target_branch:
4273 4279 if self.source_branch_pattern == '*':
4274 4280 source_branch_match = True
4275 4281 else:
4276 4282 source_branch_regex = re.compile(
4277 4283 '^' + glob2re(self.source_branch_pattern) + '$')
4278 4284 source_branch_match = bool(source_branch_regex.search(source_branch))
4279 4285 if self.target_branch_pattern == '*':
4280 4286 target_branch_match = True
4281 4287 else:
4282 4288 target_branch_regex = re.compile(
4283 4289 '^' + glob2re(self.target_branch_pattern) + '$')
4284 4290 target_branch_match = bool(target_branch_regex.search(target_branch))
4285 4291
4286 4292 branch_matches = source_branch_match and target_branch_match
4287 4293
4288 4294 files_matches = True
4289 4295 if self.file_pattern != '*':
4290 4296 files_matches = False
4291 4297 file_regex = re.compile(glob2re(self.file_pattern))
4292 4298 for filename in files_changed:
4293 4299 if file_regex.search(filename):
4294 4300 files_matches = True
4295 4301 break
4296 4302
4297 4303 return branch_matches and files_matches
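
    # Matching sketch (illustrative values; assumes `glob2re` performs the
    # usual shell-glob translation, so 'feature/*' covers 'feature/x'):
    #
    #   rule = RepoReviewRule()
    #   rule.source_branch_pattern = u'feature/*'
    #   rule.target_branch_pattern = u'*'
    #   rule.file_pattern = u'*.py'
    #   rule.matches('feature/x', 'master', ['model/db.py'])  # -> True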
4298 4304
4299 4305 @property
4300 4306 def review_users(self):
4301 4307 """ Returns the users which this rule applies to """
4302 4308
4303 4309 users = collections.OrderedDict()
4304 4310
4305 4311 for rule_user in self.rule_users:
4306 4312 if rule_user.user.active:
4307 4313                 if rule_user.user.username not in users:
4308 4314 users[rule_user.user.username] = {
4309 4315 'user': rule_user.user,
4310 4316 'source': 'user',
4311 4317 'source_data': {},
4312 4318 'data': rule_user.rule_data()
4313 4319 }
4314 4320
4315 4321 for rule_user_group in self.rule_user_groups:
4316 4322 source_data = {
4317 4323 'user_group_id': rule_user_group.users_group.users_group_id,
4318 4324 'name': rule_user_group.users_group.users_group_name,
4319 4325 'members': len(rule_user_group.users_group.members)
4320 4326 }
4321 4327 for member in rule_user_group.users_group.members:
4322 4328 if member.user.active:
4323 4329 key = member.user.username
4324 4330 if key in users:
4325 4331                         # skip this member as we already have them;
4326 4332                         # this prevents duplicates from multiple groups
4327 4333                         # overriding the "first" matched users
4328 4334 continue
4329 4335
4330 4336 users[key] = {
4331 4337 'user': member.user,
4332 4338 'source': 'user_group',
4333 4339 'source_data': source_data,
4334 4340 'data': rule_user_group.rule_data()
4335 4341 }
4336 4342
4337 4343 return users
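
    # Shape of the mapping returned by `review_users` (illustrative; keys
    # are usernames, 'source' records whether the reviewer came from a
    # direct user rule or from a user group):
    #
    #   {u'alice': {'user': <User>, 'source': 'user',
    #               'source_data': {}, 'data': {'mandatory': True}},
    #    u'bob': {'user': <User>, 'source': 'user_group',
    #             'source_data': {'user_group_id': 1, 'name': u'devs',
    #                             'members': 3},
    #             'data': {'mandatory': False, 'vote_rule': -1}}}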
4338 4344
4339 4345 def user_group_vote_rule(self):
4340 4346 rules = []
4341 4347 if self.rule_user_groups:
4342 4348 for user_group in self.rule_user_groups:
4343 4349 rules.append(user_group)
4344 4350 return rules
4345 4351
4346 4352 def __repr__(self):
4347 4353         return '<RepoReviewRule(id=%r, repo=%r)>' % (
4348 4354 self.repo_review_rule_id, self.repo)
4349 4355
4350 4356
4351 4357 class ScheduleEntry(Base, BaseModel):
4352 4358 __tablename__ = 'schedule_entries'
4353 4359 __table_args__ = (
4354 4360 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
4355 4361 UniqueConstraint('task_uid', name='s_task_uid_idx'),
4356 4362 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4357 4363 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4358 4364 )
4359 4365 schedule_types = ['crontab', 'timedelta', 'integer']
4360 4366 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
4361 4367
4362 4368 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
4363 4369 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
4364 4370 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
4365 4371
4366 4372 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
4367 4373 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
4368 4374
4369 4375 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
4370 4376 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
4371 4377
4372 4378 # task
4373 4379 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
4374 4380 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
4375 4381 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
4376 4382 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
4377 4383
4378 4384 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4379 4385 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
4380 4386
4381 4387 @hybrid_property
4382 4388 def schedule_type(self):
4383 4389 return self._schedule_type
4384 4390
4385 4391 @schedule_type.setter
4386 4392 def schedule_type(self, val):
4387 4393 if val not in self.schedule_types:
4388 4394             raise ValueError('Value must be one of `{}` and got `{}`'.format(
4389 4395                 self.schedule_types, val))
4390 4396
4391 4397 self._schedule_type = val
4392 4398
4393 4399 @classmethod
4394 4400 def get_uid(cls, obj):
4395 4401 args = obj.task_args
4396 4402 kwargs = obj.task_kwargs
4397 4403 if isinstance(args, JsonRaw):
4398 4404 try:
4399 4405 args = json.loads(args)
4400 4406 except ValueError:
4401 4407 args = tuple()
4402 4408
4403 4409 if isinstance(kwargs, JsonRaw):
4404 4410 try:
4405 4411 kwargs = json.loads(kwargs)
4406 4412 except ValueError:
4407 4413 kwargs = dict()
4408 4414
4409 4415 dot_notation = obj.task_dot_notation
4410 4416 val = '.'.join(map(safe_str, [
4411 4417 sorted(dot_notation), args, sorted(kwargs.items())]))
4412 4418 return hashlib.sha1(val).hexdigest()
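
    # Sketch of the uid contract (illustrative task path): identical task
    # definitions always hash to the same task_uid, which backs the
    # s_task_uid_idx unique constraint above.
    #
    #   entry = ScheduleEntry()
    #   entry.task_dot_notation = 'rhodecode.lib.celerylib.tasks.send_email'
    #   entry.task_args, entry.task_kwargs = [], {}
    #   ScheduleEntry.get_uid(entry)  # deterministic sha1 hexdigest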
4413 4419
4414 4420 @classmethod
4415 4421 def get_by_schedule_name(cls, schedule_name):
4416 4422 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
4417 4423
4418 4424 @classmethod
4419 4425 def get_by_schedule_id(cls, schedule_id):
4420 4426 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
4421 4427
4422 4428 @property
4423 4429 def task(self):
4424 4430 return self.task_dot_notation
4425 4431
4426 4432 @property
4427 4433 def schedule(self):
4428 4434 from rhodecode.lib.celerylib.utils import raw_2_schedule
4429 4435 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
4430 4436 return schedule
4431 4437
4432 4438 @property
4433 4439 def args(self):
4434 4440 try:
4435 4441 return list(self.task_args or [])
4436 4442 except ValueError:
4437 4443 return list()
4438 4444
4439 4445 @property
4440 4446 def kwargs(self):
4441 4447 try:
4442 4448 return dict(self.task_kwargs or {})
4443 4449 except ValueError:
4444 4450 return dict()
4445 4451
4446 4452 def _as_raw(self, val):
4447 4453 if hasattr(val, 'de_coerce'):
4448 4454 val = val.de_coerce()
4449 4455 if val:
4450 4456 val = json.dumps(val)
4451 4457
4452 4458 return val
4453 4459
4454 4460 @property
4455 4461 def schedule_definition_raw(self):
4456 4462 return self._as_raw(self.schedule_definition)
4457 4463
4458 4464 @property
4459 4465 def args_raw(self):
4460 4466 return self._as_raw(self.task_args)
4461 4467
4462 4468 @property
4463 4469 def kwargs_raw(self):
4464 4470 return self._as_raw(self.task_kwargs)
4465 4471
4466 4472 def __repr__(self):
4467 4473 return '<DB:ScheduleEntry({}:{})>'.format(
4468 4474 self.schedule_entry_id, self.schedule_name)
4469 4475
4470 4476
4471 4477 @event.listens_for(ScheduleEntry, 'before_update')
4472 4478 def update_task_uid(mapper, connection, target):
4473 4479 target.task_uid = ScheduleEntry.get_uid(target)
4474 4480
4475 4481
4476 4482 @event.listens_for(ScheduleEntry, 'before_insert')
4477 4483 def set_task_uid(mapper, connection, target):
4478 4484 target.task_uid = ScheduleEntry.get_uid(target)
4479 4485
4480 4486
4481 4487 class DbMigrateVersion(Base, BaseModel):
4482 4488 __tablename__ = 'db_migrate_version'
4483 4489 __table_args__ = (
4484 4490 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4485 4491 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4486 4492 )
4487 4493 repository_id = Column('repository_id', String(250), primary_key=True)
4488 4494 repository_path = Column('repository_path', Text)
4489 4495 version = Column('version', Integer)
4490 4496
4491 4497
4492 4498 class DbSession(Base, BaseModel):
4493 4499 __tablename__ = 'db_session'
4494 4500 __table_args__ = (
4495 4501 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4496 4502 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4497 4503 )
4498 4504
4499 4505 def __repr__(self):
4500 4506 return '<DB:DbSession({})>'.format(self.id)
4501 4507
4502 4508 id = Column('id', Integer())
4503 4509 namespace = Column('namespace', String(255), primary_key=True)
4504 4510 accessed = Column('accessed', DateTime, nullable=False)
4505 4511 created = Column('created', DateTime, nullable=False)
4506 4512 data = Column('data', PickleType, nullable=False)
4507 4513
4508 4514
4509 4515
4510 4516 class BeakerCache(Base, BaseModel):
4511 4517 __tablename__ = 'beaker_cache'
4512 4518 __table_args__ = (
4513 4519 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4514 4520 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4515 4521 )
4516 4522
4517 4523 def __repr__(self):
4518 4524         return '<DB:BeakerCache({})>'.format(self.id)
4519 4525
4520 4526 id = Column('id', Integer())
4521 4527 namespace = Column('namespace', String(255), primary_key=True)
4522 4528 accessed = Column('accessed', DateTime, nullable=False)
4523 4529 created = Column('created', DateTime, nullable=False)
4524 4530 data = Column('data', PickleType, nullable=False)
@@ -1,1695 +1,1700 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31 import collections
32 32
33 33 from pyramid.threadlocal import get_current_request
34 34
35 35 from rhodecode import events
36 36 from rhodecode.translation import lazy_ugettext#, _
37 37 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 38 from rhodecode.lib import audit_logger
39 39 from rhodecode.lib.compat import OrderedDict
40 40 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 41 from rhodecode.lib.markup_renderer import (
42 42 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
43 43 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
44 44 from rhodecode.lib.vcs.backends.base import (
45 45 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
46 46 from rhodecode.lib.vcs.conf import settings as vcs_settings
47 47 from rhodecode.lib.vcs.exceptions import (
48 48 CommitDoesNotExistError, EmptyRepositoryError)
49 49 from rhodecode.model import BaseModel
50 50 from rhodecode.model.changeset_status import ChangesetStatusModel
51 51 from rhodecode.model.comment import CommentsModel
52 52 from rhodecode.model.db import (
53 53 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
54 54 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
55 55 from rhodecode.model.meta import Session
56 56 from rhodecode.model.notification import NotificationModel, \
57 57 EmailNotificationModel
58 58 from rhodecode.model.scm import ScmModel
59 59 from rhodecode.model.settings import VcsSettingsModel
60 60
61 61
62 62 log = logging.getLogger(__name__)
63 63
64 64
65 65 # Data structure holding the response data produced when updating the
66 66 # commits of a pull request.
67 67 UpdateResponse = collections.namedtuple('UpdateResponse', [
68 68 'executed', 'reason', 'new', 'old', 'changes',
69 69 'source_changed', 'target_changed'])
70 70
71 71
72 72 class PullRequestModel(BaseModel):
73 73
74 74 cls = PullRequest
75 75
76 76 DIFF_CONTEXT = 3
77 77
78 78 MERGE_STATUS_MESSAGES = {
79 79 MergeFailureReason.NONE: lazy_ugettext(
80 80 'This pull request can be automatically merged.'),
81 81 MergeFailureReason.UNKNOWN: lazy_ugettext(
82 82 'This pull request cannot be merged because of an unhandled'
83 83 ' exception.'),
84 84 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
85 85 'This pull request cannot be merged because of merge conflicts.'),
86 86 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
87 87 'This pull request could not be merged because push to target'
88 88 ' failed.'),
89 89 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
90 90 'This pull request cannot be merged because the target is not a'
91 91 ' head.'),
92 92 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
93 93 'This pull request cannot be merged because the source contains'
94 94 ' more branches than the target.'),
95 95 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
96 96 'This pull request cannot be merged because the target has'
97 97 ' multiple heads.'),
98 98 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
99 99 'This pull request cannot be merged because the target repository'
100 100 ' is locked.'),
101 101 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
102 102 'This pull request cannot be merged because the target or the '
103 103 'source reference is missing.'),
104 104 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
105 105 'This pull request cannot be merged because the target '
106 106 'reference is missing.'),
107 107 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
108 108 'This pull request cannot be merged because the source '
109 109 'reference is missing.'),
110 110 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
111 111 'This pull request cannot be merged because of conflicts related '
112 112 'to sub repositories.'),
113 113 }
114 114
115 115 UPDATE_STATUS_MESSAGES = {
116 116 UpdateFailureReason.NONE: lazy_ugettext(
117 117 'Pull request update successful.'),
118 118 UpdateFailureReason.UNKNOWN: lazy_ugettext(
119 119 'Pull request update failed because of an unknown error.'),
120 120 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
121 121 'No update needed because the source and target have not changed.'),
122 122 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
123 123 'Pull request cannot be updated because the reference type is '
124 124 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
125 125 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
126 126 'This pull request cannot be updated because the target '
127 127 'reference is missing.'),
128 128 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
129 129 'This pull request cannot be updated because the source '
130 130 'reference is missing.'),
131 131 }
132 132
133 133 def __get_pull_request(self, pull_request):
134 134 return self._get_instance((
135 135 PullRequest, PullRequestVersion), pull_request)
136 136
137 137 def _check_perms(self, perms, pull_request, user, api=False):
138 138 if not api:
139 139 return h.HasRepoPermissionAny(*perms)(
140 140 user=user, repo_name=pull_request.target_repo.repo_name)
141 141 else:
142 142 return h.HasRepoPermissionAnyApi(*perms)(
143 143 user=user, repo_name=pull_request.target_repo.repo_name)
144 144
145 145 def check_user_read(self, pull_request, user, api=False):
146 146 _perms = ('repository.admin', 'repository.write', 'repository.read',)
147 147 return self._check_perms(_perms, pull_request, user, api)
148 148
149 149 def check_user_merge(self, pull_request, user, api=False):
150 150 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
151 151 return self._check_perms(_perms, pull_request, user, api)
152 152
153 153 def check_user_update(self, pull_request, user, api=False):
154 154 owner = user.user_id == pull_request.user_id
155 155 return self.check_user_merge(pull_request, user, api) or owner
156 156
157 157 def check_user_delete(self, pull_request, user):
158 158 owner = user.user_id == pull_request.user_id
159 159 _perms = ('repository.admin',)
160 160 return self._check_perms(_perms, pull_request, user) or owner
161 161
162 162 def check_user_change_status(self, pull_request, user, api=False):
163 163 reviewer = user.user_id in [x.user_id for x in
164 164 pull_request.reviewers]
165 165 return self.check_user_update(pull_request, user, api) or reviewer
166 166
167 167 def check_user_comment(self, pull_request, user):
168 168 owner = user.user_id == pull_request.user_id
169 169 return self.check_user_read(pull_request, user) or owner
170 170
171 171 def get(self, pull_request):
172 172 return self.__get_pull_request(pull_request)
173 173
174 174 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
175 175 opened_by=None, order_by=None,
176 176 order_dir='desc'):
177 177 repo = None
178 178 if repo_name:
179 179 repo = self._get_repo(repo_name)
180 180
181 181 q = PullRequest.query()
182 182
183 183 # source or target
184 184 if repo and source:
185 185 q = q.filter(PullRequest.source_repo == repo)
186 186 elif repo:
187 187 q = q.filter(PullRequest.target_repo == repo)
188 188
189 189 # closed,opened
190 190 if statuses:
191 191 q = q.filter(PullRequest.status.in_(statuses))
192 192
193 193 # opened by filter
194 194 if opened_by:
195 195 q = q.filter(PullRequest.user_id.in_(opened_by))
196 196
197 197 if order_by:
198 198 order_map = {
199 199 'name_raw': PullRequest.pull_request_id,
200 200 'title': PullRequest.title,
201 201 'updated_on_raw': PullRequest.updated_on,
202 202 'target_repo': PullRequest.target_repo_id
203 203 }
204 204 if order_dir == 'asc':
205 205 q = q.order_by(order_map[order_by].asc())
206 206 else:
207 207 q = q.order_by(order_map[order_by].desc())
208 208
209 209 return q
210 210
211 211 def count_all(self, repo_name, source=False, statuses=None,
212 212 opened_by=None):
213 213 """
214 214 Count the number of pull requests for a specific repository.
215 215
216 216 :param repo_name: target or source repo
217 217 :param source: boolean flag to specify if repo_name refers to source
218 218 :param statuses: list of pull request statuses
219 219 :param opened_by: author user of the pull request
220 220 :returns: int number of pull requests
221 221 """
222 222 q = self._prepare_get_all_query(
223 223 repo_name, source=source, statuses=statuses, opened_by=opened_by)
224 224
225 225 return q.count()
226 226
227 227 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
228 228 offset=0, length=None, order_by=None, order_dir='desc'):
229 229 """
230 230 Get all pull requests for a specific repository.
231 231
232 232 :param repo_name: target or source repo
233 233 :param source: boolean flag to specify if repo_name refers to source
234 234 :param statuses: list of pull request statuses
235 235 :param opened_by: author user of the pull request
236 236 :param offset: pagination offset
237 237 :param length: length of returned list
238 238 :param order_by: order of the returned list
239 239 :param order_dir: 'asc' or 'desc' ordering direction
240 240 :returns: list of pull requests
241 241 """
242 242 q = self._prepare_get_all_query(
243 243 repo_name, source=source, statuses=statuses, opened_by=opened_by,
244 244 order_by=order_by, order_dir=order_dir)
245 245
246 246 if length:
247 247 pull_requests = q.limit(length).offset(offset).all()
248 248 else:
249 249 pull_requests = q.all()
250 250
251 251 return pull_requests
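
    # Call-shape sketch (illustrative repo name and status value; see
    # _prepare_get_all_query for the supported order_by keys):
    #
    #   prs = PullRequestModel().get_all(
    #       'some/repo', statuses=[u'new'], opened_by=None,
    #       offset=0, length=20, order_by='updated_on_raw', order_dir='desc')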
252 252
253 253 def count_awaiting_review(self, repo_name, source=False, statuses=None,
254 254 opened_by=None):
255 255 """
256 256 Count the number of pull requests for a specific repository that are
257 257 awaiting review.
258 258
259 259 :param repo_name: target or source repo
260 260 :param source: boolean flag to specify if repo_name refers to source
261 261 :param statuses: list of pull request statuses
262 262 :param opened_by: author user of the pull request
263 263 :returns: int number of pull requests
264 264 """
265 265 pull_requests = self.get_awaiting_review(
266 266 repo_name, source=source, statuses=statuses, opened_by=opened_by)
267 267
268 268 return len(pull_requests)
269 269
270 270 def get_awaiting_review(self, repo_name, source=False, statuses=None,
271 271 opened_by=None, offset=0, length=None,
272 272 order_by=None, order_dir='desc'):
273 273 """
274 274 Get all pull requests for a specific repository that are awaiting
275 275 review.
276 276
277 277 :param repo_name: target or source repo
278 278 :param source: boolean flag to specify if repo_name refers to source
279 279 :param statuses: list of pull request statuses
280 280 :param opened_by: author user of the pull request
281 281 :param offset: pagination offset
282 282 :param length: length of returned list
283 283 :param order_by: order of the returned list
284 284 :param order_dir: 'asc' or 'desc' ordering direction
285 285 :returns: list of pull requests
286 286 """
287 287 pull_requests = self.get_all(
288 288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
289 289 order_by=order_by, order_dir=order_dir)
290 290
291 291 _filtered_pull_requests = []
292 292 for pr in pull_requests:
293 293 status = pr.calculated_review_status()
294 294 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
295 295 ChangesetStatus.STATUS_UNDER_REVIEW]:
296 296 _filtered_pull_requests.append(pr)
297 297 if length:
298 298 return _filtered_pull_requests[offset:offset+length]
299 299 else:
300 300 return _filtered_pull_requests
301 301
302 302 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
303 303 opened_by=None, user_id=None):
304 304 """
305 305 Count the number of pull requests for a specific repository that are
306 306 awaiting review from a specific user.
307 307
308 308 :param repo_name: target or source repo
309 309 :param source: boolean flag to specify if repo_name refers to source
310 310 :param statuses: list of pull request statuses
311 311 :param opened_by: author user of the pull request
312 312 :param user_id: reviewer user of the pull request
313 313 :returns: int number of pull requests
314 314 """
315 315 pull_requests = self.get_awaiting_my_review(
316 316 repo_name, source=source, statuses=statuses, opened_by=opened_by,
317 317 user_id=user_id)
318 318
319 319 return len(pull_requests)
320 320
321 321 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
322 322 opened_by=None, user_id=None, offset=0,
323 323 length=None, order_by=None, order_dir='desc'):
324 324 """
325 325 Get all pull requests for a specific repository that are awaiting
326 326 review from a specific user.
327 327
328 328 :param repo_name: target or source repo
329 329 :param source: boolean flag to specify if repo_name refers to source
330 330 :param statuses: list of pull request statuses
331 331 :param opened_by: author user of the pull request
332 332 :param user_id: reviewer user of the pull request
333 333 :param offset: pagination offset
334 334 :param length: length of returned list
335 335 :param order_by: order of the returned list
336 336 :param order_dir: 'asc' or 'desc' ordering direction
337 337 :returns: list of pull requests
338 338 """
339 339 pull_requests = self.get_all(
340 340 repo_name, source=source, statuses=statuses, opened_by=opened_by,
341 341 order_by=order_by, order_dir=order_dir)
342 342
343 343 _my = PullRequestModel().get_not_reviewed(user_id)
344 344 my_participation = []
345 345 for pr in pull_requests:
346 346 if pr in _my:
347 347 my_participation.append(pr)
348 348 _filtered_pull_requests = my_participation
349 349 if length:
350 350 return _filtered_pull_requests[offset:offset+length]
351 351 else:
352 352 return _filtered_pull_requests
353 353
354 354 def get_not_reviewed(self, user_id):
355 355 return [
356 356 x.pull_request for x in PullRequestReviewers.query().filter(
357 357 PullRequestReviewers.user_id == user_id).all()
358 358 ]
359 359
360 360 def _prepare_participating_query(self, user_id=None, statuses=None,
361 361 order_by=None, order_dir='desc'):
362 362 q = PullRequest.query()
363 363 if user_id:
364 364 reviewers_subquery = Session().query(
365 365 PullRequestReviewers.pull_request_id).filter(
366 366 PullRequestReviewers.user_id == user_id).subquery()
367 367 user_filter = or_(
368 368 PullRequest.user_id == user_id,
369 369 PullRequest.pull_request_id.in_(reviewers_subquery)
370 370 )
371 371 q = PullRequest.query().filter(user_filter)
372 372
373 373 # closed,opened
374 374 if statuses:
375 375 q = q.filter(PullRequest.status.in_(statuses))
376 376
377 377 if order_by:
378 378 order_map = {
379 379 'name_raw': PullRequest.pull_request_id,
380 380 'title': PullRequest.title,
381 381 'updated_on_raw': PullRequest.updated_on,
382 382 'target_repo': PullRequest.target_repo_id
383 383 }
384 384 if order_dir == 'asc':
385 385 q = q.order_by(order_map[order_by].asc())
386 386 else:
387 387 q = q.order_by(order_map[order_by].desc())
388 388
389 389 return q
390 390
391 391 def count_im_participating_in(self, user_id=None, statuses=None):
392 392 q = self._prepare_participating_query(user_id, statuses=statuses)
393 393 return q.count()
394 394
395 395 def get_im_participating_in(
396 396 self, user_id=None, statuses=None, offset=0,
397 397 length=None, order_by=None, order_dir='desc'):
398 398 """
399 399         Get all pull requests that I'm participating in, or that I have opened
400 400 """
401 401
402 402 q = self._prepare_participating_query(
403 403 user_id, statuses=statuses, order_by=order_by,
404 404 order_dir=order_dir)
405 405
406 406 if length:
407 407 pull_requests = q.limit(length).offset(offset).all()
408 408 else:
409 409 pull_requests = q.all()
410 410
411 411 return pull_requests
412 412
413 413 def get_versions(self, pull_request):
414 414 """
415 415         returns versions of the pull request sorted by ID ascending
416 416 """
417 417 return PullRequestVersion.query()\
418 418 .filter(PullRequestVersion.pull_request == pull_request)\
419 419 .order_by(PullRequestVersion.pull_request_version_id.asc())\
420 420 .all()
421 421
422 422 def get_pr_version(self, pull_request_id, version=None):
423 423 at_version = None
424 424
425 425 if version and version == 'latest':
426 426 pull_request_ver = PullRequest.get(pull_request_id)
427 427 pull_request_obj = pull_request_ver
428 428 _org_pull_request_obj = pull_request_obj
429 429 at_version = 'latest'
430 430 elif version:
431 431 pull_request_ver = PullRequestVersion.get_or_404(version)
432 432 pull_request_obj = pull_request_ver
433 433 _org_pull_request_obj = pull_request_ver.pull_request
434 434 at_version = pull_request_ver.pull_request_version_id
435 435 else:
436 436 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
437 437 pull_request_id)
438 438
439 439 pull_request_display_obj = PullRequest.get_pr_display_object(
440 440 pull_request_obj, _org_pull_request_obj)
441 441
442 442 return _org_pull_request_obj, pull_request_obj, \
443 443 pull_request_display_obj, at_version
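
    # Unpacking sketch for the 4-tuple above ('latest' resolves to the live
    # pull request itself; a numeric version id loads a PullRequestVersion):
    #
    #   org_pr, pr_at_ver, pr_display, at_version = \
    #       PullRequestModel().get_pr_version(pull_request_id, version='latest')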
444 444
445 445 def create(self, created_by, source_repo, source_ref, target_repo,
446 446 target_ref, revisions, reviewers, title, description=None,
447 447 reviewer_data=None, translator=None, auth_user=None):
448 448 translator = translator or get_current_request().translate
449 449
450 450 created_by_user = self._get_user(created_by)
451 451 auth_user = auth_user or created_by_user
452 452 source_repo = self._get_repo(source_repo)
453 453 target_repo = self._get_repo(target_repo)
454 454
455 455 pull_request = PullRequest()
456 456 pull_request.source_repo = source_repo
457 457 pull_request.source_ref = source_ref
458 458 pull_request.target_repo = target_repo
459 459 pull_request.target_ref = target_ref
460 460 pull_request.revisions = revisions
461 461 pull_request.title = title
462 462 pull_request.description = description
463 463 pull_request.author = created_by_user
464 464 pull_request.reviewer_data = reviewer_data
465 465
466 466 Session().add(pull_request)
467 467 Session().flush()
468 468
469 469 reviewer_ids = set()
470 470 # members / reviewers
471 471 for reviewer_object in reviewers:
472 472 user_id, reasons, mandatory, rules = reviewer_object
473 473 user = self._get_user(user_id)
474 474
475 475 # skip duplicates
476 476 if user.user_id in reviewer_ids:
477 477 continue
478 478
479 479 reviewer_ids.add(user.user_id)
480 480
481 481 reviewer = PullRequestReviewers()
482 482 reviewer.user = user
483 483 reviewer.pull_request = pull_request
484 484 reviewer.reasons = reasons
485 485 reviewer.mandatory = mandatory
486 486
487 487 # NOTE(marcink): pick only first rule for now
488 488 rule_id = rules[0] if rules else None
489 489 rule = RepoReviewRule.get(rule_id) if rule_id else None
490 490 if rule:
491 491 review_group = rule.user_group_vote_rule()
492 492 if review_group:
493 493 # NOTE(marcink):
494 494 # again, can be that user is member of more,
495 495 # but we pick the first same, as default reviewers algo
496 496 review_group = review_group[0]
497 497
498 498 rule_data = {
499 499 'rule_name':
500 500 rule.review_rule_name,
501 501 'rule_user_group_entry_id':
502 502 review_group.repo_review_rule_users_group_id,
503 503 'rule_user_group_name':
504 504 review_group.users_group.users_group_name,
505 505 'rule_user_group_members':
506 506 [x.user.username for x in review_group.users_group.members],
507 507 }
508 508 # e.g {'vote_rule': -1, 'mandatory': True}
509 509 rule_data.update(review_group.rule_data())
510 510
511 511 reviewer.rule_data = rule_data
512 512
513 513 Session().add(reviewer)
514 514 Session().flush()
515 515
516 516 # Set approval status to "Under Review" for all commits which are
517 517 # part of this pull request.
518 518 ChangesetStatusModel().set_status(
519 519 repo=target_repo,
520 520 status=ChangesetStatus.STATUS_UNDER_REVIEW,
521 521 user=created_by_user,
522 522 pull_request=pull_request
523 523 )
524 524         # we commit early at this point. This has to do with the fact
525 525         # that the queries above do some row-locking. Because of that
526 526         # we need to commit and finish the transaction before the validate
527 527         # call below, which for large repos could be slow, resulting in long row locks
528 528 Session().commit()
529 529
530 530 # prepare workspace, and run initial merge simulation
531 531 MergeCheck.validate(
532 532 pull_request, user=created_by_user, translator=translator)
533 533
534 534 self.notify_reviewers(pull_request, reviewer_ids)
535 535 self._trigger_pull_request_hook(
536 536 pull_request, created_by_user, 'create')
537 537
538 538 creation_data = pull_request.get_api_data(with_merge_state=False)
539 539 self._log_audit_action(
540 540 'repo.pull_request.create', {'data': creation_data},
541 541 auth_user, pull_request)
542 542
543 543 return pull_request
544 544
545 545 def _trigger_pull_request_hook(self, pull_request, user, action):
546 546 pull_request = self.__get_pull_request(pull_request)
547 547 target_scm = pull_request.target_repo.scm_instance()
548 548 if action == 'create':
549 549 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
550 550 elif action == 'merge':
551 551 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
552 552 elif action == 'close':
553 553 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
554 554 elif action == 'review_status_change':
555 555 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
556 556 elif action == 'update':
557 557 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
558 558 else:
559 559 return
560 560
561 561 trigger_hook(
562 562 username=user.username,
563 563 repo_name=pull_request.target_repo.repo_name,
564 564 repo_alias=target_scm.alias,
565 565 pull_request=pull_request)
566 566
567 567 def _get_commit_ids(self, pull_request):
568 568 """
569 569 Return the commit ids of the merged pull request.
570 570
571 571         This method does not yet deal correctly with the lack of autoupdates
572 572         or with implicit target updates.
573 573         For example: if a commit in the source repo is already in the target, it
574 574         will be reported anyway.
575 575 """
576 576 merge_rev = pull_request.merge_rev
577 577 if merge_rev is None:
578 578 raise ValueError('This pull request was not merged yet')
579 579
580 580 commit_ids = list(pull_request.revisions)
581 581 if merge_rev not in commit_ids:
582 582 commit_ids.append(merge_rev)
583 583
584 584 return commit_ids
585 585
586 def merge(self, pull_request, user, extras):
586 def merge_repo(self, pull_request, user, extras):
587 587 log.debug("Merging pull request %s", pull_request.pull_request_id)
588 588 merge_state = self._merge_pull_request(pull_request, user, extras)
589 589 if merge_state.executed:
590 590 log.debug(
591 591 "Merge was successful, updating the pull request comments.")
592 592 self._comment_and_close_pr(pull_request, user, merge_state)
593 593
594 594 self._log_audit_action(
595 595 'repo.pull_request.merge',
596 596 {'merge_state': merge_state.__dict__},
597 597 user, pull_request)
598 598
599 599 else:
600 600 log.warn("Merge failed, not updating the pull request.")
601 601 return merge_state
602 602
603 603 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
604 604 target_vcs = pull_request.target_repo.scm_instance()
605 605 source_vcs = pull_request.source_repo.scm_instance()
606 606 target_ref = self._refresh_reference(
607 607 pull_request.target_ref_parts, target_vcs)
608 608
609 609 message = merge_msg or (
610 610 'Merge pull request #%(pr_id)s from '
611 611 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
612 612 'pr_id': pull_request.pull_request_id,
613 613 'source_repo': source_vcs.name,
614 614 'source_ref_name': pull_request.source_ref_parts.name,
615 615 'pr_title': pull_request.title
616 616 }
617 617
618 618 workspace_id = self._workspace_id(pull_request)
619 repo_id = pull_request.target_repo.repo_id
619 620 use_rebase = self._use_rebase_for_merging(pull_request)
620 621 close_branch = self._close_branch_before_merging(pull_request)
621 622
622 623 callback_daemon, extras = prepare_callback_daemon(
623 624 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
624 625 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
625 626
626 627 with callback_daemon:
627 628 # TODO: johbo: Implement a clean way to run a config_override
628 629 # for a single call.
629 630 target_vcs.config.set(
630 631 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
631 632 merge_state = target_vcs.merge(
632 target_ref, source_vcs, pull_request.source_ref_parts,
633 workspace_id, user_name=user.username,
634 user_email=user.email, message=message, use_rebase=use_rebase,
633 repo_id, workspace_id, target_ref, source_vcs,
634 pull_request.source_ref_parts,
635 user_name=user.username, user_email=user.email,
636 message=message, use_rebase=use_rebase,
635 637 close_branch=close_branch)
636 638 return merge_state
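
    # Call-shape note for the updated merge API above (positional order as
    # used at this call site; an observation from this diff, not full
    # backend documentation):
    #
    #   target_vcs.merge(
    #       repo_id, workspace_id, target_ref, source_vcs, source_ref_parts,
    #       user_name=..., user_email=..., message=..., use_rebase=...,
    #       close_branch=...)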
637 639
638 640 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
639 641 pull_request.merge_rev = merge_state.merge_ref.commit_id
640 642 pull_request.updated_on = datetime.datetime.now()
641 643 close_msg = close_msg or 'Pull request merged and closed'
642 644
643 645 CommentsModel().create(
644 646 text=safe_unicode(close_msg),
645 647 repo=pull_request.target_repo.repo_id,
646 648 user=user.user_id,
647 649 pull_request=pull_request.pull_request_id,
648 650 f_path=None,
649 651 line_no=None,
650 652 closing_pr=True
651 653 )
652 654
653 655 Session().add(pull_request)
654 656 Session().flush()
655 657 # TODO: paris: replace invalidation with less radical solution
656 658 ScmModel().mark_for_invalidation(
657 659 pull_request.target_repo.repo_name)
658 660 self._trigger_pull_request_hook(pull_request, user, 'merge')
659 661
660 662 def has_valid_update_type(self, pull_request):
661 663 source_ref_type = pull_request.source_ref_parts.type
662 664 return source_ref_type in ['book', 'branch', 'tag']
663 665
664 666 def update_commits(self, pull_request):
665 667 """
666 668 Get the updated list of commits for the pull request
667 669 and return the new pull request version and the list
668 670 of commits processed by this update action
669 671 """
670 672 pull_request = self.__get_pull_request(pull_request)
671 673 source_ref_type = pull_request.source_ref_parts.type
672 674 source_ref_name = pull_request.source_ref_parts.name
673 675 source_ref_id = pull_request.source_ref_parts.commit_id
674 676
675 677 target_ref_type = pull_request.target_ref_parts.type
676 678 target_ref_name = pull_request.target_ref_parts.name
677 679 target_ref_id = pull_request.target_ref_parts.commit_id
678 680
679 681 if not self.has_valid_update_type(pull_request):
680 682 log.debug(
681 683 "Skipping update of pull request %s due to ref type: %s",
682 684 pull_request, source_ref_type)
683 685 return UpdateResponse(
684 686 executed=False,
685 687 reason=UpdateFailureReason.WRONG_REF_TYPE,
686 688 old=pull_request, new=None, changes=None,
687 689 source_changed=False, target_changed=False)
688 690
689 691 # source repo
690 692 source_repo = pull_request.source_repo.scm_instance()
691 693 try:
692 694 source_commit = source_repo.get_commit(commit_id=source_ref_name)
693 695 except CommitDoesNotExistError:
694 696 return UpdateResponse(
695 697 executed=False,
696 698 reason=UpdateFailureReason.MISSING_SOURCE_REF,
697 699 old=pull_request, new=None, changes=None,
698 700 source_changed=False, target_changed=False)
699 701
700 702 source_changed = source_ref_id != source_commit.raw_id
701 703
702 704 # target repo
703 705 target_repo = pull_request.target_repo.scm_instance()
704 706 try:
705 707 target_commit = target_repo.get_commit(commit_id=target_ref_name)
706 708 except CommitDoesNotExistError:
707 709 return UpdateResponse(
708 710 executed=False,
709 711 reason=UpdateFailureReason.MISSING_TARGET_REF,
710 712 old=pull_request, new=None, changes=None,
711 713 source_changed=False, target_changed=False)
712 714 target_changed = target_ref_id != target_commit.raw_id
713 715
714 716 if not (source_changed or target_changed):
715 717 log.debug("Nothing changed in pull request %s", pull_request)
716 718 return UpdateResponse(
717 719 executed=False,
718 720 reason=UpdateFailureReason.NO_CHANGE,
719 721 old=pull_request, new=None, changes=None,
720 722                 source_changed=source_changed, target_changed=target_changed)
721 723
722 724 change_in_found = 'target repo' if target_changed else 'source repo'
723 725 log.debug('Updating pull request because of change in %s detected',
724 726 change_in_found)
725 727
726 728 # Finally there is a need for an update, in case of source change
727 729 # we create a new version, else just an update
728 730 if source_changed:
729 731 pull_request_version = self._create_version_from_snapshot(pull_request)
730 732 self._link_comments_to_version(pull_request_version)
731 733 else:
732 734 try:
733 735 ver = pull_request.versions[-1]
734 736 except IndexError:
735 737 ver = None
736 738
737 739 pull_request.pull_request_version_id = \
738 740 ver.pull_request_version_id if ver else None
739 741 pull_request_version = pull_request
740 742
741 743 try:
742 744 if target_ref_type in ('tag', 'branch', 'book'):
743 745 target_commit = target_repo.get_commit(target_ref_name)
744 746 else:
745 747 target_commit = target_repo.get_commit(target_ref_id)
746 748 except CommitDoesNotExistError:
747 749 return UpdateResponse(
748 750 executed=False,
749 751 reason=UpdateFailureReason.MISSING_TARGET_REF,
750 752 old=pull_request, new=None, changes=None,
751 753 source_changed=source_changed, target_changed=target_changed)
752 754
753 755 # re-compute commit ids
754 756 old_commit_ids = pull_request.revisions
755 757 pre_load = ["author", "branch", "date", "message"]
756 758 commit_ranges = target_repo.compare(
757 759 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
758 760 pre_load=pre_load)
759 761
760 762 ancestor = target_repo.get_common_ancestor(
761 763 target_commit.raw_id, source_commit.raw_id, source_repo)
762 764
763 765 pull_request.source_ref = '%s:%s:%s' % (
764 766 source_ref_type, source_ref_name, source_commit.raw_id)
765 767 pull_request.target_ref = '%s:%s:%s' % (
766 768 target_ref_type, target_ref_name, ancestor)
767 769
768 770 pull_request.revisions = [
769 771 commit.raw_id for commit in reversed(commit_ranges)]
770 772 pull_request.updated_on = datetime.datetime.now()
771 773 Session().add(pull_request)
772 774 new_commit_ids = pull_request.revisions
773 775
774 776 old_diff_data, new_diff_data = self._generate_update_diffs(
775 777 pull_request, pull_request_version)
776 778
777 779 # calculate commit and file changes
778 780 changes = self._calculate_commit_id_changes(
779 781 old_commit_ids, new_commit_ids)
780 782 file_changes = self._calculate_file_changes(
781 783 old_diff_data, new_diff_data)
782 784
783 785 # set comments as outdated if DIFFS changed
784 786 CommentsModel().outdate_comments(
785 787 pull_request, old_diff_data=old_diff_data,
786 788 new_diff_data=new_diff_data)
787 789
788 790 commit_changes = (changes.added or changes.removed)
789 791 file_node_changes = (
790 792 file_changes.added or file_changes.modified or file_changes.removed)
791 793 pr_has_changes = commit_changes or file_node_changes
792 794
793 795 # Add an automatic comment to the pull request, in case
794 796 # anything has changed
795 797 if pr_has_changes:
796 798 update_comment = CommentsModel().create(
797 799 text=self._render_update_message(changes, file_changes),
798 800 repo=pull_request.target_repo,
799 801 user=pull_request.author,
800 802 pull_request=pull_request,
801 803 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
802 804
803 805 # Update status to "Under Review" for added commits
804 806 for commit_id in changes.added:
805 807 ChangesetStatusModel().set_status(
806 808 repo=pull_request.source_repo,
807 809 status=ChangesetStatus.STATUS_UNDER_REVIEW,
808 810 comment=update_comment,
809 811 user=pull_request.author,
810 812 pull_request=pull_request,
811 813 revision=commit_id)
812 814
813 815 log.debug(
814 816 'Updated pull request %s, added_ids: %s, common_ids: %s, '
815 817 'removed_ids: %s', pull_request.pull_request_id,
816 818 changes.added, changes.common, changes.removed)
817 819 log.debug(
818 820 'Updated pull request with the following file changes: %s',
819 821 file_changes)
820 822
821 823 log.info(
822 824 "Updated pull request %s from commit %s to commit %s, "
823 825 "stored new version %s of this pull request.",
824 826 pull_request.pull_request_id, source_ref_id,
825 827 pull_request.source_ref_parts.commit_id,
826 828 pull_request_version.pull_request_version_id)
827 829 Session().commit()
828 830 self._trigger_pull_request_hook(
829 831 pull_request, pull_request.author, 'update')
830 832
831 833 return UpdateResponse(
832 834 executed=True, reason=UpdateFailureReason.NONE,
833 835 old=pull_request, new=pull_request_version, changes=changes,
834 836 source_changed=source_changed, target_changed=target_changed)
835 837
836 838 def _create_version_from_snapshot(self, pull_request):
837 839 version = PullRequestVersion()
838 840 version.title = pull_request.title
839 841 version.description = pull_request.description
840 842 version.status = pull_request.status
841 843 version.created_on = datetime.datetime.now()
842 844 version.updated_on = pull_request.updated_on
843 845 version.user_id = pull_request.user_id
844 846 version.source_repo = pull_request.source_repo
845 847 version.source_ref = pull_request.source_ref
846 848 version.target_repo = pull_request.target_repo
847 849 version.target_ref = pull_request.target_ref
848 850
849 851 version._last_merge_source_rev = pull_request._last_merge_source_rev
850 852 version._last_merge_target_rev = pull_request._last_merge_target_rev
851 853 version.last_merge_status = pull_request.last_merge_status
852 854 version.shadow_merge_ref = pull_request.shadow_merge_ref
853 855 version.merge_rev = pull_request.merge_rev
854 856 version.reviewer_data = pull_request.reviewer_data
855 857
856 858 version.revisions = pull_request.revisions
857 859 version.pull_request = pull_request
858 860 Session().add(version)
859 861 Session().flush()
860 862
861 863 return version
862 864
863 865 def _generate_update_diffs(self, pull_request, pull_request_version):
864 866
865 867 diff_context = (
866 868 self.DIFF_CONTEXT +
867 869 CommentsModel.needed_extra_diff_context())
868 870
869 871 source_repo = pull_request_version.source_repo
870 872 source_ref_id = pull_request_version.source_ref_parts.commit_id
871 873 target_ref_id = pull_request_version.target_ref_parts.commit_id
872 874 old_diff = self._get_diff_from_pr_or_version(
873 875 source_repo, source_ref_id, target_ref_id, context=diff_context)
874 876
875 877 source_repo = pull_request.source_repo
876 878 source_ref_id = pull_request.source_ref_parts.commit_id
877 879 target_ref_id = pull_request.target_ref_parts.commit_id
878 880
879 881 new_diff = self._get_diff_from_pr_or_version(
880 882 source_repo, source_ref_id, target_ref_id, context=diff_context)
881 883
882 884 old_diff_data = diffs.DiffProcessor(old_diff)
883 885 old_diff_data.prepare()
884 886 new_diff_data = diffs.DiffProcessor(new_diff)
885 887 new_diff_data.prepare()
886 888
887 889 return old_diff_data, new_diff_data
888 890
889 891 def _link_comments_to_version(self, pull_request_version):
890 892 """
891 893 Link all unlinked comments of this pull request to the given version.
892 894
893 895 :param pull_request_version: The `PullRequestVersion` to which
894 896 the comments shall be linked.
895 897
896 898 """
897 899 pull_request = pull_request_version.pull_request
898 900 comments = ChangesetComment.query()\
899 901 .filter(
900 902 # TODO: johbo: Should we query for the repo at all here?
901 903 # Pending decision on how comments of PRs are to be related
902 904 # to either the source repo, the target repo or no repo at all.
903 905 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
904 906 ChangesetComment.pull_request == pull_request,
905 907 ChangesetComment.pull_request_version == None)\
906 908 .order_by(ChangesetComment.comment_id.asc())
907 909
908 910 # TODO: johbo: Find out why this breaks if it is done in a bulk
909 911 # operation.
910 912 for comment in comments:
911 913 comment.pull_request_version_id = (
912 914 pull_request_version.pull_request_version_id)
913 915 Session().add(comment)
914 916
915 917 def _calculate_commit_id_changes(self, old_ids, new_ids):
916 918 added = [x for x in new_ids if x not in old_ids]
917 919 common = [x for x in new_ids if x in old_ids]
918 920 removed = [x for x in old_ids if x not in new_ids]
919 921 total = new_ids
920 922 return ChangeTuple(added, common, removed, total)
921 923
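# Illustrative sketch (not part of this changeset): the comprehensions
# above are plain membership tests over commit-id lists, e.g. given
#   old_ids = ['aaa1', 'bbb2', 'ccc3']
#   new_ids = ['bbb2', 'ccc3', 'ddd4']
# the result is
#   ChangeTuple(added=['ddd4'], common=['bbb2', 'ccc3'],
#               removed=['aaa1'], total=new_ids)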
922 924 def _calculate_file_changes(self, old_diff_data, new_diff_data):
923 925
924 926 old_files = OrderedDict()
925 927 for diff_data in old_diff_data.parsed_diff:
926 928 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
927 929
928 930 added_files = []
929 931 modified_files = []
930 932 removed_files = []
931 933 for diff_data in new_diff_data.parsed_diff:
932 934 new_filename = diff_data['filename']
933 935 new_hash = md5_safe(diff_data['raw_diff'])
934 936
935 937 old_hash = old_files.get(new_filename)
936 938 if not old_hash:
937 939 # file is not present in the old diff, which means it was added
938 940 added_files.append(new_filename)
939 941 else:
940 942 if new_hash != old_hash:
941 943 modified_files.append(new_filename)
942 944 # now remove the file from old, since we have seen it already
943 945 del old_files[new_filename]
944 946
945 947 # removed files are those present in old, but not in NEW;
946 948 # since we remove old files that are present in the new diff,
947 949 # any left-overs are the removed files
948 950 removed_files.extend(old_files.keys())
949 951
950 952 return FileChangeTuple(added_files, modified_files, removed_files)
951 953
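# Illustrative sketch (not part of this changeset): files are classified
# by comparing md5 digests of their raw diffs. Given
#   old_files = {'a.py': 'h1', 'b.py': 'h2'}
# and a new diff where a.py hashes to 'h1', b.py to 'h9' and c.py to 'h3',
# the loop yields
#   FileChangeTuple(added=['c.py'], modified=['b.py'], removed=[])
# any entry still left in old_files afterwards would end up in removed.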
952 954 def _render_update_message(self, changes, file_changes):
953 955 """
954 956 Render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
955 957 so it always looks the same regardless of which default
956 958 renderer the system is using.
957 959
958 960 :param changes: changes named tuple
959 961 :param file_changes: file changes named tuple
960 962
961 963 """
962 964 new_status = ChangesetStatus.get_status_lbl(
963 965 ChangesetStatus.STATUS_UNDER_REVIEW)
964 966
965 967 changed_files = (
966 968 file_changes.added + file_changes.modified + file_changes.removed)
967 969
968 970 params = {
969 971 'under_review_label': new_status,
970 972 'added_commits': changes.added,
971 973 'removed_commits': changes.removed,
972 974 'changed_files': changed_files,
973 975 'added_files': file_changes.added,
974 976 'modified_files': file_changes.modified,
975 977 'removed_files': file_changes.removed,
976 978 }
977 979 renderer = RstTemplateRenderer()
978 980 return renderer.render('pull_request_update.mako', **params)
979 981
980 982 def edit(self, pull_request, title, description, user):
981 983 pull_request = self.__get_pull_request(pull_request)
982 984 old_data = pull_request.get_api_data(with_merge_state=False)
983 985 if pull_request.is_closed():
984 986 raise ValueError('This pull request is closed')
985 987 if title:
986 988 pull_request.title = title
987 989 pull_request.description = description
988 990 pull_request.updated_on = datetime.datetime.now()
989 991 Session().add(pull_request)
990 992 self._log_audit_action(
991 993 'repo.pull_request.edit', {'old_data': old_data},
992 994 user, pull_request)
993 995
994 996 def update_reviewers(self, pull_request, reviewer_data, user):
995 997 """
996 998 Update the reviewers in the pull request
997 999
998 1000 :param pull_request: the pr to update
999 1001 :param reviewer_data: list of tuples
1000 1002 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1001 1003 """
1002 1004 pull_request = self.__get_pull_request(pull_request)
1003 1005 if pull_request.is_closed():
1004 1006 raise ValueError('This pull request is closed')
1005 1007
1006 1008 reviewers = {}
1007 1009 for user_id, reasons, mandatory, rules in reviewer_data:
1008 1010 if isinstance(user_id, (int, basestring)):
1009 1011 user_id = self._get_user(user_id).user_id
1010 1012 reviewers[user_id] = {
1011 1013 'reasons': reasons, 'mandatory': mandatory}
1012 1014
1013 1015 reviewers_ids = set(reviewers.keys())
1014 1016 current_reviewers = PullRequestReviewers.query()\
1015 1017 .filter(PullRequestReviewers.pull_request ==
1016 1018 pull_request).all()
1017 1019 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1018 1020
1019 1021 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1020 1022 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1021 1023
1022 1024 log.debug("Adding %s reviewers", ids_to_add)
1023 1025 log.debug("Removing %s reviewers", ids_to_remove)
1024 1026 changed = False
1025 1027 for uid in ids_to_add:
1026 1028 changed = True
1027 1029 _usr = self._get_user(uid)
1028 1030 reviewer = PullRequestReviewers()
1029 1031 reviewer.user = _usr
1030 1032 reviewer.pull_request = pull_request
1031 1033 reviewer.reasons = reviewers[uid]['reasons']
1032 1034 # NOTE(marcink): mandatory shouldn't be changed now
1033 1035 # reviewer.mandatory = reviewers[uid]['reasons']
1034 1036 Session().add(reviewer)
1035 1037 self._log_audit_action(
1036 1038 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
1037 1039 user, pull_request)
1038 1040
1039 1041 for uid in ids_to_remove:
1040 1042 changed = True
1041 1043 reviewers = PullRequestReviewers.query()\
1042 1044 .filter(PullRequestReviewers.user_id == uid,
1043 1045 PullRequestReviewers.pull_request == pull_request)\
1044 1046 .all()
1045 1047 # use .all() in case we accidentally added the same person twice
1046 1048 # this CAN happen due to the lack of DB checks
1047 1049 for obj in reviewers:
1048 1050 old_data = obj.get_dict()
1049 1051 Session().delete(obj)
1050 1052 self._log_audit_action(
1051 1053 'repo.pull_request.reviewer.delete',
1052 1054 {'old_data': old_data}, user, pull_request)
1053 1055
1054 1056 if changed:
1055 1057 pull_request.updated_on = datetime.datetime.now()
1056 1058 Session().add(pull_request)
1057 1059
1058 1060 self.notify_reviewers(pull_request, ids_to_add)
1059 1061 return ids_to_add, ids_to_remove
1060 1062
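# Illustrative sketch (not part of this changeset): the reviewer update is
# a pair of set differences on user ids, e.g.
#   reviewers_ids         = {1, 2, 3}   # desired state from reviewer_data
#   current_reviewers_ids = {2, 3, 4}   # rows currently in the DB
#   ids_to_add    == {1}
#   ids_to_remove == {4}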
1061 1063 def get_url(self, pull_request, request=None, permalink=False):
1062 1064 if not request:
1063 1065 request = get_current_request()
1064 1066
1065 1067 if permalink:
1066 1068 return request.route_url(
1067 1069 'pull_requests_global',
1068 1070 pull_request_id=pull_request.pull_request_id,)
1069 1071 else:
1070 1072 return request.route_url('pullrequest_show',
1071 1073 repo_name=safe_str(pull_request.target_repo.repo_name),
1072 1074 pull_request_id=pull_request.pull_request_id,)
1073 1075
1074 1076 def get_shadow_clone_url(self, pull_request, request=None):
1075 1077 """
1076 1078 Returns a qualified URL pointing to the shadow repository. If this
1077 1079 pull request is closed, there is no shadow repository and ``None``
1078 1080 will be returned.
1079 1081 """
1080 1082 if pull_request.is_closed():
1081 1083 return None
1082 1084 else:
1083 1085 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1084 1086 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1085 1087
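# Illustrative sketch (not part of this changeset), using a hypothetical
# instance at https://code.example.com: for pull request 2 targeting
# 'group/repo', get_shadow_clone_url() yields
#   https://code.example.com/group/repo/pull-request/2/repository
# i.e. the unquoted pull request url with '/repository' appended.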
1086 1088 def notify_reviewers(self, pull_request, reviewers_ids):
1087 1089 # notification to reviewers
1088 1090 if not reviewers_ids:
1089 1091 return
1090 1092
1091 1093 pull_request_obj = pull_request
1092 1094 # get the current participants of this pull request
1093 1095 recipients = reviewers_ids
1094 1096 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1095 1097
1096 1098 pr_source_repo = pull_request_obj.source_repo
1097 1099 pr_target_repo = pull_request_obj.target_repo
1098 1100
1099 1101 pr_url = h.route_url('pullrequest_show',
1100 1102 repo_name=pr_target_repo.repo_name,
1101 1103 pull_request_id=pull_request_obj.pull_request_id,)
1102 1104
1103 1105 # set some variables for email notification
1104 1106 pr_target_repo_url = h.route_url(
1105 1107 'repo_summary', repo_name=pr_target_repo.repo_name)
1106 1108
1107 1109 pr_source_repo_url = h.route_url(
1108 1110 'repo_summary', repo_name=pr_source_repo.repo_name)
1109 1111
1110 1112 # pull request specifics
1111 1113 pull_request_commits = [
1112 1114 (x.raw_id, x.message)
1113 1115 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1114 1116
1115 1117 kwargs = {
1116 1118 'user': pull_request.author,
1117 1119 'pull_request': pull_request_obj,
1118 1120 'pull_request_commits': pull_request_commits,
1119 1121
1120 1122 'pull_request_target_repo': pr_target_repo,
1121 1123 'pull_request_target_repo_url': pr_target_repo_url,
1122 1124
1123 1125 'pull_request_source_repo': pr_source_repo,
1124 1126 'pull_request_source_repo_url': pr_source_repo_url,
1125 1127
1126 1128 'pull_request_url': pr_url,
1127 1129 }
1128 1130
1129 1131 # pre-generate the subject for notification itself
1130 1132 (subject,
1131 1133 _h, _e, # we don't care about those
1132 1134 body_plaintext) = EmailNotificationModel().render_email(
1133 1135 notification_type, **kwargs)
1134 1136
1135 1137 # create notification objects, and emails
1136 1138 NotificationModel().create(
1137 1139 created_by=pull_request.author,
1138 1140 notification_subject=subject,
1139 1141 notification_body=body_plaintext,
1140 1142 notification_type=notification_type,
1141 1143 recipients=recipients,
1142 1144 email_kwargs=kwargs,
1143 1145 )
1144 1146
1145 1147 def delete(self, pull_request, user):
1146 1148 pull_request = self.__get_pull_request(pull_request)
1147 1149 old_data = pull_request.get_api_data(with_merge_state=False)
1148 1150 self._cleanup_merge_workspace(pull_request)
1149 1151 self._log_audit_action(
1150 1152 'repo.pull_request.delete', {'old_data': old_data},
1151 1153 user, pull_request)
1152 1154 Session().delete(pull_request)
1153 1155
1154 1156 def close_pull_request(self, pull_request, user):
1155 1157 pull_request = self.__get_pull_request(pull_request)
1156 1158 self._cleanup_merge_workspace(pull_request)
1157 1159 pull_request.status = PullRequest.STATUS_CLOSED
1158 1160 pull_request.updated_on = datetime.datetime.now()
1159 1161 Session().add(pull_request)
1160 1162 self._trigger_pull_request_hook(
1161 1163 pull_request, pull_request.author, 'close')
1162 1164
1163 1165 pr_data = pull_request.get_api_data(with_merge_state=False)
1164 1166 self._log_audit_action(
1165 1167 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1166 1168
1167 1169 def close_pull_request_with_comment(
1168 1170 self, pull_request, user, repo, message=None):
1169 1171
1170 1172 pull_request_review_status = pull_request.calculated_review_status()
1171 1173
1172 1174 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1173 1175 # approved only if we have voting consent
1174 1176 status = ChangesetStatus.STATUS_APPROVED
1175 1177 else:
1176 1178 status = ChangesetStatus.STATUS_REJECTED
1177 1179 status_lbl = ChangesetStatus.get_status_lbl(status)
1178 1180
1179 1181 default_message = (
1180 1182 'Closing with status change {transition_icon} {status}.'
1181 1183 ).format(transition_icon='>', status=status_lbl)
1182 1184 text = message or default_message
1183 1185
1184 1186 # create a comment, and link it to new status
1185 1187 comment = CommentsModel().create(
1186 1188 text=text,
1187 1189 repo=repo.repo_id,
1188 1190 user=user.user_id,
1189 1191 pull_request=pull_request.pull_request_id,
1190 1192 status_change=status_lbl,
1191 1193 status_change_type=status,
1192 1194 closing_pr=True
1193 1195 )
1194 1196
1195 1197 # calculate old status before we change it
1196 1198 old_calculated_status = pull_request.calculated_review_status()
1197 1199 ChangesetStatusModel().set_status(
1198 1200 repo.repo_id,
1199 1201 status,
1200 1202 user.user_id,
1201 1203 comment=comment,
1202 1204 pull_request=pull_request.pull_request_id
1203 1205 )
1204 1206
1205 1207 Session().flush()
1206 1208 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1207 1209 # we now calculate the status of the pull request again, and based
1208 1210 # on that calculation trigger a status change. This can happen when
1209 1211 # a non-reviewer admin closes a pr: their vote doesn't change the
1210 1212 # status, while a reviewer's vote might change it.
1211 1213 calculated_status = pull_request.calculated_review_status()
1212 1214 if old_calculated_status != calculated_status:
1213 1215 self._trigger_pull_request_hook(
1214 1216 pull_request, user, 'review_status_change')
1215 1217
1216 1218 # finally close the PR
1217 1219 PullRequestModel().close_pull_request(
1218 1220 pull_request.pull_request_id, user)
1219 1221
1220 1222 return comment, status
1221 1223
1222 1224 def merge_status(self, pull_request, translator=None,
1223 1225 force_shadow_repo_refresh=False):
1224 1226 _ = translator or get_current_request().translate
1225 1227
1226 1228 if not self._is_merge_enabled(pull_request):
1227 1229 return False, _('Server-side pull request merging is disabled.')
1228 1230 if pull_request.is_closed():
1229 1231 return False, _('This pull request is closed.')
1230 1232 merge_possible, msg = self._check_repo_requirements(
1231 1233 target=pull_request.target_repo, source=pull_request.source_repo,
1232 1234 translator=_)
1233 1235 if not merge_possible:
1234 1236 return merge_possible, msg
1235 1237
1236 1238 try:
1237 1239 resp = self._try_merge(
1238 1240 pull_request,
1239 1241 force_shadow_repo_refresh=force_shadow_repo_refresh)
1240 1242 log.debug("Merge response: %s", resp)
1241 1243 status = resp.possible, self.merge_status_message(
1242 1244 resp.failure_reason)
1243 1245 except NotImplementedError:
1244 1246 status = False, _('Pull request merging is not supported.')
1245 1247
1246 1248 return status
1247 1249
1248 1250 def _check_repo_requirements(self, target, source, translator):
1249 1251 """
1250 1252 Check if `target` and `source` have compatible requirements.
1251 1253
1252 1254 Currently this is just checking for largefiles.
1253 1255 """
1254 1256 _ = translator
1255 1257 target_has_largefiles = self._has_largefiles(target)
1256 1258 source_has_largefiles = self._has_largefiles(source)
1257 1259 merge_possible = True
1258 1260 message = u''
1259 1261
1260 1262 if target_has_largefiles != source_has_largefiles:
1261 1263 merge_possible = False
1262 1264 if source_has_largefiles:
1263 1265 message = _(
1264 1266 'Target repository large files support is disabled.')
1265 1267 else:
1266 1268 message = _(
1267 1269 'Source repository large files support is disabled.')
1268 1270
1269 1271 return merge_possible, message
1270 1272
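# Illustrative sketch (not part of this changeset): the requirement check
# only refuses the merge when the largefiles flags differ, e.g.
#   target has largefiles, source has largefiles  -> merge possible
#   target lacks largefiles, source has them      -> blocked with
#       'Target repository large files support is disabled.'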
1271 1273 def _has_largefiles(self, repo):
1272 1274 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1273 1275 'extensions', 'largefiles')
1274 1276 return largefiles_ui and largefiles_ui[0].active
1275 1277
1276 1278 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1277 1279 """
1278 1280 Try to merge the pull request and return the merge status.
1279 1281 """
1280 1282 log.debug(
1281 1283 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1282 1284 pull_request.pull_request_id, force_shadow_repo_refresh)
1283 1285 target_vcs = pull_request.target_repo.scm_instance()
1284 1286
1285 1287 # Refresh the target reference.
1286 1288 try:
1287 1289 target_ref = self._refresh_reference(
1288 1290 pull_request.target_ref_parts, target_vcs)
1289 1291 except CommitDoesNotExistError:
1290 1292 merge_state = MergeResponse(
1291 1293 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
1292 1294 return merge_state
1293 1295
1294 1296 target_locked = pull_request.target_repo.locked
1295 1297 if target_locked and target_locked[0]:
1296 1298 log.debug("The target repository is locked.")
1297 1299 merge_state = MergeResponse(
1298 1300 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
1299 1301 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1300 1302 pull_request, target_ref):
1301 1303 log.debug("Refreshing the merge status of the repository.")
1302 1304 merge_state = self._refresh_merge_state(
1303 1305 pull_request, target_vcs, target_ref)
1304 1306 else:
1305 1307 possible = pull_request.\
1306 1308 last_merge_status == MergeFailureReason.NONE
1307 1309 merge_state = MergeResponse(
1308 1310 possible, False, None, pull_request.last_merge_status)
1309 1311
1310 1312 return merge_state
1311 1313
1312 1314 def _refresh_reference(self, reference, vcs_repository):
1313 1315 if reference.type in ('branch', 'book'):
1314 1316 name_or_id = reference.name
1315 1317 else:
1316 1318 name_or_id = reference.commit_id
1317 1319 refreshed_commit = vcs_repository.get_commit(name_or_id)
1318 1320 refreshed_reference = Reference(
1319 1321 reference.type, reference.name, refreshed_commit.raw_id)
1320 1322 return refreshed_reference
1321 1323
1322 1324 def _needs_merge_state_refresh(self, pull_request, target_reference):
1323 1325 return not(
1324 1326 pull_request.revisions and
1325 1327 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1326 1328 target_reference.commit_id == pull_request._last_merge_target_rev)
1327 1329
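# Illustrative sketch (not part of this changeset): the cached merge state
# is considered fresh only when all three conditions hold:
#   pull_request.revisions is non-empty
#   revisions[0] == _last_merge_source_rev                (source unchanged)
#   target_reference.commit_id == _last_merge_target_rev  (target unchanged)
# if any of them fails, _needs_merge_state_refresh returns True.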
1328 1330 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1329 1331 workspace_id = self._workspace_id(pull_request)
1330 1332 source_vcs = pull_request.source_repo.scm_instance()
1333 repo_id = pull_request.target_repo.repo_id
1331 1334 use_rebase = self._use_rebase_for_merging(pull_request)
1332 1335 close_branch = self._close_branch_before_merging(pull_request)
1333 1336 merge_state = target_vcs.merge(
1337 repo_id, workspace_id,
1334 1338 target_reference, source_vcs, pull_request.source_ref_parts,
1335 workspace_id, dry_run=True, use_rebase=use_rebase,
1339 dry_run=True, use_rebase=use_rebase,
1336 1340 close_branch=close_branch)
1337 1341
1338 1342 # Do not store the response if there was an unknown error.
1339 1343 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1340 1344 pull_request._last_merge_source_rev = \
1341 1345 pull_request.source_ref_parts.commit_id
1342 1346 pull_request._last_merge_target_rev = target_reference.commit_id
1343 1347 pull_request.last_merge_status = merge_state.failure_reason
1344 1348 pull_request.shadow_merge_ref = merge_state.merge_ref
1345 1349 Session().add(pull_request)
1346 1350 Session().commit()
1347 1351
1348 1352 return merge_state
1349 1353
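# Illustrative sketch (not part of this changeset): per this commit the
# numeric target repo id now namespaces the shadow repository, so the
# dry-run merge is keyed by (repo_id, workspace_id) rather than by the
# workspace id alone, e.g.
#   repo_id = 42            # pull_request.target_repo.repo_id
#   workspace_id = 'pr-7'   # see _workspace_id() below
#   target_vcs.merge(repo_id, workspace_id, target_reference, source_vcs,
#                    pull_request.source_ref_parts, dry_run=True,
#                    use_rebase=use_rebase, close_branch=close_branch)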
1350 1354 def _workspace_id(self, pull_request):
1351 1355 workspace_id = 'pr-%s' % pull_request.pull_request_id
1352 1356 return workspace_id
1353 1357
1354 1358 def merge_status_message(self, status_code):
1355 1359 """
1356 1360 Return a human friendly error message for the given merge status code.
1357 1361 """
1358 1362 return self.MERGE_STATUS_MESSAGES[status_code]
1359 1363
1360 1364 def generate_repo_data(self, repo, commit_id=None, branch=None,
1361 1365 bookmark=None, translator=None):
1362 1366 from rhodecode.model.repo import RepoModel
1363 1367
1364 1368 all_refs, selected_ref = \
1365 1369 self._get_repo_pullrequest_sources(
1366 1370 repo.scm_instance(), commit_id=commit_id,
1367 1371 branch=branch, bookmark=bookmark, translator=translator)
1368 1372
1369 1373 refs_select2 = []
1370 1374 for element in all_refs:
1371 1375 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1372 1376 refs_select2.append({'text': element[1], 'children': children})
1373 1377
1374 1378 return {
1375 1379 'user': {
1376 1380 'user_id': repo.user.user_id,
1377 1381 'username': repo.user.username,
1378 1382 'firstname': repo.user.first_name,
1379 1383 'lastname': repo.user.last_name,
1380 1384 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1381 1385 },
1382 1386 'name': repo.repo_name,
1383 1387 'link': RepoModel().get_url(repo),
1384 1388 'description': h.chop_at_smart(repo.description_safe, '\n'),
1385 1389 'refs': {
1386 1390 'all_refs': all_refs,
1387 1391 'selected_ref': selected_ref,
1388 1392 'select2_refs': refs_select2
1389 1393 }
1390 1394 }
1391 1395
1392 1396 def generate_pullrequest_title(self, source, source_ref, target):
1393 1397 return u'{source}#{at_ref} to {target}'.format(
1394 1398 source=source,
1395 1399 at_ref=source_ref,
1396 1400 target=target,
1397 1401 )
1398 1402
1399 1403 def _cleanup_merge_workspace(self, pull_request):
1400 1404 # Merging related cleanup
1405 repo_id = pull_request.target_repo.repo_id
1401 1406 target_scm = pull_request.target_repo.scm_instance()
1402 workspace_id = 'pr-%s' % pull_request.pull_request_id
1407 workspace_id = self._workspace_id(pull_request)
1403 1408
1404 1409 try:
1405 target_scm.cleanup_merge_workspace(workspace_id)
1410 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1406 1411 except NotImplementedError:
1407 1412 pass
1408 1413
1409 1414 def _get_repo_pullrequest_sources(
1410 1415 self, repo, commit_id=None, branch=None, bookmark=None,
1411 1416 translator=None):
1412 1417 """
1413 1418 Return a structure with the repo's interesting commits, suitable
1414 1419 for the selectors in the pullrequest controller
1415 1420
1416 1421 :param commit_id: a commit that must be in the list somehow
1417 1422 and selected by default
1418 1423 :param branch: a branch that must be in the list and selected
1419 1424 by default - even if closed
1420 1425 :param bookmark: a bookmark that must be in the list and selected
1421 1426 """
1422 1427 _ = translator or get_current_request().translate
1423 1428
1424 1429 commit_id = safe_str(commit_id) if commit_id else None
1425 1430 branch = safe_str(branch) if branch else None
1426 1431 bookmark = safe_str(bookmark) if bookmark else None
1427 1432
1428 1433 selected = None
1429 1434
1430 1435 # order matters: first source that has commit_id in it will be selected
1431 1436 sources = []
1432 1437 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1433 1438 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1434 1439
1435 1440 if commit_id:
1436 1441 ref_commit = (h.short_id(commit_id), commit_id)
1437 1442 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1438 1443
1439 1444 sources.append(
1440 1445 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1441 1446 )
1442 1447
1443 1448 groups = []
1444 1449 for group_key, ref_list, group_name, match in sources:
1445 1450 group_refs = []
1446 1451 for ref_name, ref_id in ref_list:
1447 1452 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1448 1453 group_refs.append((ref_key, ref_name))
1449 1454
1450 1455 if not selected:
1451 1456 if set([commit_id, match]) & set([ref_id, ref_name]):
1452 1457 selected = ref_key
1453 1458
1454 1459 if group_refs:
1455 1460 groups.append((group_refs, group_name))
1456 1461
1457 1462 if not selected:
1458 1463 ref = commit_id or branch or bookmark
1459 1464 if ref:
1460 1465 raise CommitDoesNotExistError(
1461 1466 'No commit refs could be found matching: %s' % ref)
1462 1467 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1463 1468 selected = 'branch:%s:%s' % (
1464 1469 repo.DEFAULT_BRANCH_NAME,
1465 1470 repo.branches[repo.DEFAULT_BRANCH_NAME]
1466 1471 )
1467 1472 elif repo.commit_ids:
1468 1473 # make the user select in this case
1469 1474 selected = None
1470 1475 else:
1471 1476 raise EmptyRepositoryError()
1472 1477 return groups, selected
1473 1478
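# Illustrative sketch (not part of this changeset): for an hg repo with a
# bookmark 'feature' and the default branch, the return value looks
# roughly like
#   groups = [
#       ([('book:feature:<sha>', 'feature')], u'Bookmarks'),
#       ([('branch:default:<sha>', 'default')], u'Branches'),
#   ]
#   selected = 'branch:default:<sha>'  # when nothing else matched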
1474 1479 def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
1475 1480 return self._get_diff_from_pr_or_version(
1476 1481 source_repo, source_ref_id, target_ref_id, context=context)
1477 1482
1478 1483 def _get_diff_from_pr_or_version(
1479 1484 self, source_repo, source_ref_id, target_ref_id, context):
1480 1485 target_commit = source_repo.get_commit(
1481 1486 commit_id=safe_str(target_ref_id))
1482 1487 source_commit = source_repo.get_commit(
1483 1488 commit_id=safe_str(source_ref_id))
1484 1489 if isinstance(source_repo, Repository):
1485 1490 vcs_repo = source_repo.scm_instance()
1486 1491 else:
1487 1492 vcs_repo = source_repo
1488 1493
1489 1494 # TODO: johbo: In the context of an update, we cannot reach
1490 1495 # the old commit anymore with our normal mechanisms. It needs
1491 1496 # some sort of special support in the vcs layer to avoid this
1492 1497 # workaround.
1493 1498 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1494 1499 vcs_repo.alias == 'git'):
1495 1500 source_commit.raw_id = safe_str(source_ref_id)
1496 1501
1497 1502 log.debug('calculating diff between '
1498 1503 'source_ref:%s and target_ref:%s for repo `%s`',
1499 1504 target_ref_id, source_ref_id,
1500 1505 safe_unicode(vcs_repo.path))
1501 1506
1502 1507 vcs_diff = vcs_repo.get_diff(
1503 1508 commit1=target_commit, commit2=source_commit, context=context)
1504 1509 return vcs_diff
1505 1510
1506 1511 def _is_merge_enabled(self, pull_request):
1507 1512 return self._get_general_setting(
1508 1513 pull_request, 'rhodecode_pr_merge_enabled')
1509 1514
1510 1515 def _use_rebase_for_merging(self, pull_request):
1511 1516 repo_type = pull_request.target_repo.repo_type
1512 1517 if repo_type == 'hg':
1513 1518 return self._get_general_setting(
1514 1519 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1515 1520 elif repo_type == 'git':
1516 1521 return self._get_general_setting(
1517 1522 pull_request, 'rhodecode_git_use_rebase_for_merging')
1518 1523
1519 1524 return False
1520 1525
1521 1526 def _close_branch_before_merging(self, pull_request):
1522 1527 repo_type = pull_request.target_repo.repo_type
1523 1528 if repo_type == 'hg':
1524 1529 return self._get_general_setting(
1525 1530 pull_request, 'rhodecode_hg_close_branch_before_merging')
1526 1531 elif repo_type == 'git':
1527 1532 return self._get_general_setting(
1528 1533 pull_request, 'rhodecode_git_close_branch_before_merging')
1529 1534
1530 1535 return False
1531 1536
1532 1537 def _get_general_setting(self, pull_request, settings_key, default=False):
1533 1538 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1534 1539 settings = settings_model.get_general_settings()
1535 1540 return settings.get(settings_key, default)
1536 1541
1537 1542 def _log_audit_action(self, action, action_data, user, pull_request):
1538 1543 audit_logger.store(
1539 1544 action=action,
1540 1545 action_data=action_data,
1541 1546 user=user,
1542 1547 repo=pull_request.target_repo)
1543 1548
1544 1549 def get_reviewer_functions(self):
1545 1550 """
1546 1551 Fetches functions for validation and fetching default reviewers.
1547 1552 If available we use the EE package, else we fall back to the CE
1548 1553 package functions
1549 1554 """
1550 1555 try:
1551 1556 from rc_reviewers.utils import get_default_reviewers_data
1552 1557 from rc_reviewers.utils import validate_default_reviewers
1553 1558 except ImportError:
1554 1559 from rhodecode.apps.repository.utils import \
1555 1560 get_default_reviewers_data
1556 1561 from rhodecode.apps.repository.utils import \
1557 1562 validate_default_reviewers
1558 1563
1559 1564 return get_default_reviewers_data, validate_default_reviewers
1560 1565
1561 1566
1562 1567 class MergeCheck(object):
1563 1568 """
1564 1569 Performs merge checks and returns a check object which stores
1565 1570 information about merge errors and merge conditions
1566 1571 """
1567 1572 TODO_CHECK = 'todo'
1568 1573 PERM_CHECK = 'perm'
1569 1574 REVIEW_CHECK = 'review'
1570 1575 MERGE_CHECK = 'merge'
1571 1576
1572 1577 def __init__(self):
1573 1578 self.review_status = None
1574 1579 self.merge_possible = None
1575 1580 self.merge_msg = ''
1576 1581 self.failed = None
1577 1582 self.errors = []
1578 1583 self.error_details = OrderedDict()
1579 1584
1580 1585 def push_error(self, error_type, message, error_key, details):
1581 1586 self.failed = True
1582 1587 self.errors.append([error_type, message])
1583 1588 self.error_details[error_key] = dict(
1584 1589 details=details,
1585 1590 error_type=error_type,
1586 1591 message=message
1587 1592 )
1588 1593
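# Illustrative sketch (not part of this changeset): each failed check
# records both a flat error entry and a keyed detail entry, e.g.
#   merge_check.push_error('warning', 'approval pending',
#                          MergeCheck.REVIEW_CHECK, 'under_review')
#   merge_check.failed is True
#   merge_check.errors == [['warning', 'approval pending']]
#   merge_check.error_details['review']['message'] == 'approval pending'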
1589 1594 @classmethod
1590 1595 def validate(cls, pull_request, user, translator, fail_early=False,
1591 1596 force_shadow_repo_refresh=False):
1592 1597 _ = translator
1593 1598 merge_check = cls()
1594 1599
1595 1600 # permissions to merge
1596 1601 user_allowed_to_merge = PullRequestModel().check_user_merge(
1597 1602 pull_request, user)
1598 1603 if not user_allowed_to_merge:
1599 1604 log.debug("MergeCheck: cannot merge, no merge permission.")
1600 1605
1601 1606 msg = _('User `{}` not allowed to perform merge.').format(user.username)
1602 1607 merge_check.push_error('error', msg, cls.PERM_CHECK, user.username)
1603 1608 if fail_early:
1604 1609 return merge_check
1605 1610
1606 1611 # review status, must be always present
1607 1612 review_status = pull_request.calculated_review_status()
1608 1613 merge_check.review_status = review_status
1609 1614
1610 1615 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1611 1616 if not status_approved:
1612 1617 log.debug("MergeCheck: cannot merge, approval is pending.")
1613 1618
1614 1619 msg = _('Pull request reviewer approval is pending.')
1615 1620
1616 1621 merge_check.push_error(
1617 1622 'warning', msg, cls.REVIEW_CHECK, review_status)
1618 1623
1619 1624 if fail_early:
1620 1625 return merge_check
1621 1626
1622 1627 # left over TODOs
1623 1628 todos = CommentsModel().get_unresolved_todos(pull_request)
1624 1629 if todos:
1625 1630 log.debug("MergeCheck: cannot merge, {} "
1626 1631 "unresolved todos left.".format(len(todos)))
1627 1632
1628 1633 if len(todos) == 1:
1629 1634 msg = _('Cannot merge, {} TODO still not resolved.').format(
1630 1635 len(todos))
1631 1636 else:
1632 1637 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1633 1638 len(todos))
1634 1639
1635 1640 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1636 1641
1637 1642 if fail_early:
1638 1643 return merge_check
1639 1644
1640 1645 # merge possible, here is the filesystem simulation + shadow repo
1641 1646 merge_status, msg = PullRequestModel().merge_status(
1642 1647 pull_request, translator=translator,
1643 1648 force_shadow_repo_refresh=force_shadow_repo_refresh)
1644 1649 merge_check.merge_possible = merge_status
1645 1650 merge_check.merge_msg = msg
1646 1651 if not merge_status:
1647 1652 log.debug(
1648 1653 "MergeCheck: cannot merge, pull request merge not possible.")
1649 1654 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1650 1655
1651 1656 if fail_early:
1652 1657 return merge_check
1653 1658
1654 1659 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1655 1660 return merge_check
1656 1661
1657 1662 @classmethod
1658 1663 def get_merge_conditions(cls, pull_request, translator):
1659 1664 _ = translator
1660 1665 merge_details = {}
1661 1666
1662 1667 model = PullRequestModel()
1663 1668 use_rebase = model._use_rebase_for_merging(pull_request)
1664 1669
1665 1670 if use_rebase:
1666 1671 merge_details['merge_strategy'] = dict(
1667 1672 details={},
1668 1673 message=_('Merge strategy: rebase')
1669 1674 )
1670 1675 else:
1671 1676 merge_details['merge_strategy'] = dict(
1672 1677 details={},
1673 1678 message=_('Merge strategy: explicit merge commit')
1674 1679 )
1675 1680
1676 1681 close_branch = model._close_branch_before_merging(pull_request)
1677 1682 if close_branch:
1678 1683 repo_type = pull_request.target_repo.repo_type
1679 1684 if repo_type == 'hg':
1680 1685 close_msg = _('Source branch will be closed after merge.')
1681 1686 elif repo_type == 'git':
1682 1687 close_msg = _('Source branch will be deleted after merge.')
1683 1688
1684 1689 merge_details['close_branch'] = dict(
1685 1690 details={},
1686 1691 message=close_msg
1687 1692 )
1688 1693
1689 1694 return merge_details
1690 1695
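# Illustrative sketch (not part of this changeset): for an hg target repo
# with rebase merging and branch closing enabled, the returned structure
# is roughly
#   {'merge_strategy': {'details': {},
#                       'message': u'Merge strategy: rebase'},
#    'close_branch': {'details': {},
#                     'message': u'Source branch will be closed after merge.'}}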
1691 1696 ChangeTuple = collections.namedtuple(
1692 1697 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1693 1698
1694 1699 FileChangeTuple = collections.namedtuple(
1695 1700 'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,236 +1,243 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import time
23 23 import logging
24 24 import datetime
25 25 import hashlib
26 26 import tempfile
27 27 from os.path import join as jn
28 28
29 29 from tempfile import _RandomNameSequence
30 30
31 31 import pytest
32 32
33 33 from rhodecode.model.db import User
34 34 from rhodecode.lib import auth
35 35 from rhodecode.lib import helpers as h
36 36 from rhodecode.lib.helpers import flash, link_to
37 37 from rhodecode.lib.utils2 import safe_str
38 38
39 39
40 40 log = logging.getLogger(__name__)
41 41
42 42 __all__ = [
43 43 'get_new_dir', 'TestController',
44 44 'link_to', 'clear_all_caches',
45 45 'assert_session_flash', 'login_user', 'no_newline_id_generator',
46 46 'TESTS_TMP_PATH', 'HG_REPO', 'GIT_REPO', 'SVN_REPO',
47 47 'NEW_HG_REPO', 'NEW_GIT_REPO',
48 48 'HG_FORK', 'GIT_FORK', 'TEST_USER_ADMIN_LOGIN', 'TEST_USER_ADMIN_PASS',
49 49 'TEST_USER_REGULAR_LOGIN', 'TEST_USER_REGULAR_PASS',
50 50 'TEST_USER_REGULAR_EMAIL', 'TEST_USER_REGULAR2_LOGIN',
51 51 'TEST_USER_REGULAR2_PASS', 'TEST_USER_REGULAR2_EMAIL', 'TEST_HG_REPO',
52 52 'TEST_HG_REPO_CLONE', 'TEST_HG_REPO_PULL', 'TEST_GIT_REPO',
53 53 'TEST_GIT_REPO_CLONE', 'TEST_GIT_REPO_PULL', 'SCM_TESTS',
54 54 ]
55 55
56 56
57 57 # SOME GLOBALS FOR TESTS
58 58 TEST_DIR = tempfile.gettempdir()
59 59
60 60 TESTS_TMP_PATH = jn(TEST_DIR, 'rc_test_%s' % _RandomNameSequence().next())
61 61 TEST_USER_ADMIN_LOGIN = 'test_admin'
62 62 TEST_USER_ADMIN_PASS = 'test12'
63 63 TEST_USER_ADMIN_EMAIL = 'test_admin@mail.com'
64 64
65 65 TEST_USER_REGULAR_LOGIN = 'test_regular'
66 66 TEST_USER_REGULAR_PASS = 'test12'
67 67 TEST_USER_REGULAR_EMAIL = 'test_regular@mail.com'
68 68
69 69 TEST_USER_REGULAR2_LOGIN = 'test_regular2'
70 70 TEST_USER_REGULAR2_PASS = 'test12'
71 71 TEST_USER_REGULAR2_EMAIL = 'test_regular2@mail.com'
72 72
73 73 HG_REPO = 'vcs_test_hg'
74 74 GIT_REPO = 'vcs_test_git'
75 75 SVN_REPO = 'vcs_test_svn'
76 76
77 77 NEW_HG_REPO = 'vcs_test_hg_new'
78 78 NEW_GIT_REPO = 'vcs_test_git_new'
79 79
80 80 HG_FORK = 'vcs_test_hg_fork'
81 81 GIT_FORK = 'vcs_test_git_fork'
82 82
83 83 ## VCS
84 84 SCM_TESTS = ['hg', 'git']
85 85 uniq_suffix = str(int(time.mktime(datetime.datetime.now().timetuple())))
86 86
87 87 TEST_GIT_REPO = jn(TESTS_TMP_PATH, GIT_REPO)
88 88 TEST_GIT_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcsgitclone%s' % uniq_suffix)
89 89 TEST_GIT_REPO_PULL = jn(TESTS_TMP_PATH, 'vcsgitpull%s' % uniq_suffix)
90 90
91 91 TEST_HG_REPO = jn(TESTS_TMP_PATH, HG_REPO)
92 92 TEST_HG_REPO_CLONE = jn(TESTS_TMP_PATH, 'vcshgclone%s' % uniq_suffix)
93 93 TEST_HG_REPO_PULL = jn(TESTS_TMP_PATH, 'vcshgpull%s' % uniq_suffix)
94 94
95 95 TEST_REPO_PREFIX = 'vcs-test'
96 96
97 97
98 98 def clear_all_caches():
99 99 from beaker.cache import cache_managers
100 100 for _cache in cache_managers.values():
101 101 _cache.clear()
102 102
103 103
104 104 def get_new_dir(title):
105 105 """
106 106 Always returns a new directory path.
107 107 """
108 108 from rhodecode.tests.vcs.utils import get_normalized_path
109 109 name_parts = [TEST_REPO_PREFIX]
110 110 if title:
111 111 name_parts.append(title)
112 112 hex_str = hashlib.sha1('%s %s' % (os.getpid(), time.time())).hexdigest()
113 113 name_parts.append(hex_str)
114 114 name = '-'.join(name_parts)
115 115 path = os.path.join(TEST_DIR, name)
116 116 return get_normalized_path(path)
117 117
118 118
119 def repo_id_generator(name):
120 numeric_hash = 0
121 for char in name:
122 numeric_hash += (ord(char))
123 return numeric_hash
124
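# Illustrative sketch (not part of this changeset): the generator is just
# a sum of character ordinals, stable for a given name, e.g.
#   repo_id_generator('ab') == ord('a') + ord('b') == 97 + 98 == 195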
125
119 126 @pytest.mark.usefixtures('app', 'index_location')
120 127 class TestController(object):
121 128
122 129 maxDiff = None
123 130
124 131 def log_user(self, username=TEST_USER_ADMIN_LOGIN,
125 132 password=TEST_USER_ADMIN_PASS):
126 133 self._logged_username = username
127 134 self._session = login_user_session(self.app, username, password)
128 135 self.csrf_token = auth.get_csrf_token(self._session)
129 136
130 137 return self._session['rhodecode_user']
131 138
132 139 def logout_user(self):
133 140 logout_user_session(self.app, auth.get_csrf_token(self._session))
134 141 self.csrf_token = None
135 142 self._logged_username = None
136 143 self._session = None
137 144
138 145 def _get_logged_user(self):
139 146 return User.get_by_username(self._logged_username)
140 147
141 148
142 149 def login_user_session(
143 150 app, username=TEST_USER_ADMIN_LOGIN, password=TEST_USER_ADMIN_PASS):
144 151
145 152 response = app.post(
146 153 h.route_path('login'),
147 154 {'username': username, 'password': password})
148 155 if 'invalid user name' in response.body:
149 156 pytest.fail('could not login using %s %s' % (username, password))
150 157
151 158 assert response.status == '302 Found'
152 159 response = response.follow()
153 160 assert response.status == '200 OK'
154 161
155 162 session = response.get_session_from_response()
156 163 assert 'rhodecode_user' in session
157 164 rc_user = session['rhodecode_user']
158 165 assert rc_user.get('username') == username
159 166 assert rc_user.get('is_authenticated')
160 167
161 168 return session
162 169
163 170
164 171 def logout_user_session(app, csrf_token):
165 172 app.post(h.route_path('logout'), {'csrf_token': csrf_token}, status=302)
166 173
167 174
168 175 def login_user(app, username=TEST_USER_ADMIN_LOGIN,
169 176 password=TEST_USER_ADMIN_PASS):
170 177 return login_user_session(app, username, password)['rhodecode_user']
171 178
172 179
173 180 def assert_session_flash(response, msg=None, category=None, no_=None):
174 181 """
175 182 Assert on a flash message in the current session.
176 183
177 184 :param response: Response from the given call; it will contain
178 185 flash messages or a session bound with them.
179 186 :param msg: The expected message. Will be evaluated if a
180 187 :class:`LazyString` is passed in.
181 188 :param category: Optional. If passed, the message category will be
182 189 checked as well.
183 190 :param no_: Optional. If passed, the message will be checked to NOT
184 191 be in the flash session
185 192 """
186 193 if msg is None and no_ is None:
187 194 raise ValueError("Parameter msg or no_ is required.")
188 195
189 196 if msg and no_:
190 197 raise ValueError("Please specify either msg or no_, but not both")
191 198
192 199 session = response.get_session_from_response()
193 200 messages = flash.pop_messages(session=session)
194 201 msg = _eval_if_lazy(msg)
195 202
196 203 if no_:
197 204 error_msg = 'unable to detect no_ message `%s` in empty flash list' % no_
198 205 else:
199 206 error_msg = 'unable to find message `%s` in empty flash list' % msg
200 207 assert messages, error_msg
201 208 message = messages[0]
202 209
203 210 message_text = _eval_if_lazy(message.message) or ''
204 211
205 212 if no_:
206 213 if no_ in message_text:
207 214 msg = u'msg `%s` found in session flash.' % (no_,)
208 215 pytest.fail(safe_str(msg))
209 216 else:
210 217 if msg not in message_text:
211 218 fail_msg = u'msg `%s` not found in session ' \
212 219 u'flash: got `%s` (type:%s) instead' % (
213 220 msg, message_text, type(message_text))
214 221
215 222 pytest.fail(safe_str(fail_msg))
216 223 if category:
217 224 assert category == message.category
218 225
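# Illustrative sketch (not part of this changeset), with hypothetical
# message text: typical assertions look like
#   assert_session_flash(response, msg='Created repository my-repo')
#   assert_session_flash(response, no_='An error occurred')  # must be absent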
219 226
220 227 def _eval_if_lazy(value):
221 228 return value.eval() if hasattr(value, 'eval') else value
222 229
223 230
224 231 def no_newline_id_generator(test_name):
225 232 """
226 233 Generates a test name without space or newline characters. Used for
227 234 nicer output of test progress
228 235 """
229 236 org_name = test_name
230 237 test_name = test_name\
231 238 .replace('\n', '_N') \
232 239 .replace('\r', '_N') \
233 240 .replace('\t', '_T') \
234 241 .replace(' ', '_S')
235 242
236 243 return test_name or 'test-with-empty-name'
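# Illustrative sketch (not part of this changeset): whitespace is encoded
# into readable markers, e.g.
#   no_newline_id_generator('my test\ncase') == 'my_Stest_Ncase'
#   no_newline_id_generator('')              == 'test-with-empty-name'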
@@ -1,472 +1,472 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import base64
22 22
23 23 import mock
24 24 import pytest
25 25
26 26 from rhodecode.lib.utils2 import AttributeDict
27 27 from rhodecode.tests.utils import CustomTestApp
28 28
29 29 from rhodecode.lib.caching_query import FromCache
30 30 from rhodecode.lib.hooks_daemon import DummyHooksCallbackDaemon
31 31 from rhodecode.lib.middleware import simplevcs
32 32 from rhodecode.lib.middleware.https_fixup import HttpsFixup
33 33 from rhodecode.lib.middleware.utils import scm_app_http
34 34 from rhodecode.model.db import User, _hash_key
35 35 from rhodecode.model.meta import Session
36 36 from rhodecode.tests import (
37 37 HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
38 38 from rhodecode.tests.lib.middleware import mock_scm_app
39 39
40 40
41 41 class StubVCSController(simplevcs.SimpleVCS):
42 42
43 43 SCM = 'hg'
44 44 stub_response_body = tuple()
45 45
46 46 def __init__(self, *args, **kwargs):
47 47 super(StubVCSController, self).__init__(*args, **kwargs)
48 48 self._action = 'pull'
49 49 self._is_shadow_repo_dir = True
50 50 self._name = HG_REPO
51 51 self.set_repo_names(None)
52 52
53 53 @property
54 54 def is_shadow_repo_dir(self):
55 55 return self._is_shadow_repo_dir
56 56
57 57 def _get_repository_name(self, environ):
58 58 return self._name
59 59
60 60 def _get_action(self, environ):
61 61 return self._action
62 62
63 63 def _create_wsgi_app(self, repo_path, repo_name, config):
64 64 def fake_app(environ, start_response):
65 65 headers = [
66 66 ('Http-Accept', 'application/mercurial')
67 67 ]
68 68 start_response('200 OK', headers)
69 69 return self.stub_response_body
70 70 return fake_app
71 71
72 72 def _create_config(self, extras, repo_name):
73 73 return None
74 74
75 75
76 76 @pytest.fixture
77 77 def vcscontroller(baseapp, config_stub, request_stub):
78 78 config_stub.testing_securitypolicy()
79 79 config_stub.include('rhodecode.authentication')
80 80
81 81 controller = StubVCSController(
82 82 baseapp.config.get_settings(), request_stub.registry)
83 83 app = HttpsFixup(controller, baseapp.config.get_settings())
84 84 app = CustomTestApp(app)
85 85
86 86 _remove_default_user_from_query_cache()
87 87
88 88 # Sanity checks that things are set up correctly
89 89 app.get('/' + HG_REPO, status=200)
90 90
91 91 app.controller = controller
92 92 return app
93 93
94 94
95 95 def _remove_default_user_from_query_cache():
96 96 user = User.get_default_user(cache=True)
97 97 query = Session().query(User).filter(User.username == user.username)
98 98 query = query.options(
99 99 FromCache("sql_cache_short", "get_user_%s" % _hash_key(user.username)))
100 100 query.invalidate()
101 101 Session().expire(user)
102 102
103 103
104 104 def test_handles_exceptions_during_permissions_checks(
105 105 vcscontroller, disable_anonymous_user):
106 106 user_and_pass = '%s:%s' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
107 107 auth_password = base64.encodestring(user_and_pass).strip()
108 108 extra_environ = {
109 109 'AUTH_TYPE': 'Basic',
110 110 'HTTP_AUTHORIZATION': 'Basic %s' % auth_password,
111 111 'REMOTE_USER': TEST_USER_ADMIN_LOGIN,
112 112 }
113 113
114 114 # Verify that things are hooked up correctly
115 115 vcscontroller.get('/', status=200, extra_environ=extra_environ)
116 116
117 117 # Simulate trouble during permission checks
118 118 with mock.patch('rhodecode.model.db.User.get_by_username',
119 119 side_effect=Exception) as get_user:
120 120 # Verify that a correct 500 is returned and check that the expected
121 121 # code path was hit.
122 122 vcscontroller.get('/', status=500, extra_environ=extra_environ)
123 123 assert get_user.called
124 124
125 125
126 126 def test_returns_forbidden_if_no_anonymous_access(
127 127 vcscontroller, disable_anonymous_user):
128 128 vcscontroller.get('/', status=401)
129 129
130 130
131 131 class StubFailVCSController(simplevcs.SimpleVCS):
132 132 def _handle_request(self, environ, start_response):
133 133 raise Exception("BOOM")
134 134
135 135
136 136 @pytest.fixture(scope='module')
137 137 def fail_controller(baseapp):
138 138 controller = StubFailVCSController(
139 139 baseapp.config.get_settings(), baseapp.config)
140 140 controller = HttpsFixup(controller, baseapp.config.get_settings())
141 141 controller = CustomTestApp(controller)
142 142 return controller
143 143
144 144
145 145 def test_handles_exceptions_as_internal_server_error(fail_controller):
146 146 fail_controller.get('/', status=500)
147 147
148 148
149 149 def test_provides_traceback_for_appenlight(fail_controller):
150 150 response = fail_controller.get(
151 151 '/', status=500, extra_environ={'appenlight.client': 'fake'})
152 152 assert 'appenlight.__traceback' in response.request.environ
153 153
154 154
155 155 def test_provides_utils_scm_app_as_scm_app_by_default(baseapp, request_stub):
156 156 controller = StubVCSController(baseapp.config.get_settings(), request_stub.registry)
157 157 assert controller.scm_app is scm_app_http
158 158
159 159
160 160 def test_allows_to_override_scm_app_via_config(baseapp, request_stub):
161 161 config = baseapp.config.get_settings().copy()
162 162 config['vcs.scm_app_implementation'] = (
163 163 'rhodecode.tests.lib.middleware.mock_scm_app')
164 164 controller = StubVCSController(config, request_stub.registry)
165 165 assert controller.scm_app is mock_scm_app
166 166
167 167
168 168 @pytest.mark.parametrize('query_string, expected', [
169 169 ('cmd=stub_command', True),
170 170 ('cmd=listkeys', False),
171 171 ])
172 172 def test_should_check_locking(query_string, expected):
173 173 result = simplevcs._should_check_locking(query_string)
174 174 assert result == expected
175 175
176 176
177 177 class TestShadowRepoRegularExpression(object):
178 178 pr_segment = 'pull-request'
179 179 shadow_segment = 'repository'
180 180
181 181 @pytest.mark.parametrize('url, expected', [
182 182 # repo with/without groups
183 183 ('My-Repo/{pr_segment}/1/{shadow_segment}', True),
184 184 ('Group/My-Repo/{pr_segment}/2/{shadow_segment}', True),
185 185 ('Group/Sub-Group/My-Repo/{pr_segment}/3/{shadow_segment}', True),
186 186 ('Group/Sub-Group1/Sub-Group2/My-Repo/{pr_segment}/3/{shadow_segment}', True),
187 187
188 188 # pull request ID
189 189 ('MyRepo/{pr_segment}/1/{shadow_segment}', True),
190 190 ('MyRepo/{pr_segment}/1234567890/{shadow_segment}', True),
191 191 ('MyRepo/{pr_segment}/-1/{shadow_segment}', False),
192 192 ('MyRepo/{pr_segment}/invalid/{shadow_segment}', False),
193 193
194 194 # unicode
195 195 (u'Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
196 196 (u'Sp€çîál-Gröüp/Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
197 197
198 198 # trailing/leading slash
199 199 ('/My-Repo/{pr_segment}/1/{shadow_segment}', False),
200 200 ('My-Repo/{pr_segment}/1/{shadow_segment}/', False),
201 201 ('/My-Repo/{pr_segment}/1/{shadow_segment}/', False),
202 202
203 203 # misc
204 204 ('My-Repo/{pr_segment}/1/{shadow_segment}/extra', False),
205 205 ('My-Repo/{pr_segment}/1/{shadow_segment}extra', False),
206 206 ])
207 207 def test_shadow_repo_regular_expression(self, url, expected):
208 208 from rhodecode.lib.middleware.simplevcs import SimpleVCS
209 209 url = url.format(
210 210 pr_segment=self.pr_segment,
211 211 shadow_segment=self.shadow_segment)
212 212 match_obj = SimpleVCS.shadow_repo_re.match(url)
213 213 assert (match_obj is not None) == expected
214 214
215 215
216 216 @pytest.mark.backends('git', 'hg')
217 217 class TestShadowRepoExposure(object):
218 218
219 219 def test_pull_on_shadow_repo_propagates_to_wsgi_app(
220 220 self, baseapp, request_stub):
221 221 """
222 222 Check that a pull action to a shadow repo is propagated to the
223 223 underlying wsgi app.
224 224 """
225 225 controller = StubVCSController(
226 226 baseapp.config.get_settings(), request_stub.registry)
227 227 controller._check_ssl = mock.Mock()
228 228 controller.is_shadow_repo = True
229 229 controller._action = 'pull'
230 230 controller._is_shadow_repo_dir = True
231 231 controller.stub_response_body = 'dummy body value'
232 232 controller._get_default_cache_ttl = mock.Mock(
233 233 return_value=(False, 0))
234 234
235 235 environ_stub = {
236 236 'HTTP_HOST': 'test.example.com',
237 237 'HTTP_ACCEPT': 'application/mercurial',
238 238 'REQUEST_METHOD': 'GET',
239 239 'wsgi.url_scheme': 'http',
240 240 }
241 241
242 242 response = controller(environ_stub, mock.Mock())
243 243 response_body = ''.join(response)
244 244
245 245 # Assert that we got the response from the wsgi app.
246 246 assert response_body == controller.stub_response_body
247 247
248 248 def test_pull_on_shadow_repo_that_is_missing(self, baseapp, request_stub):
249 249 """
250 250 Check that a pull action to a missing shadow repo directory
251 251 returns a 404 instead of being propagated to the wsgi app.
252 252 """
253 253 controller = StubVCSController(
254 254 baseapp.config.get_settings(), request_stub.registry)
255 255 controller._check_ssl = mock.Mock()
256 256 controller.is_shadow_repo = True
257 257 controller._action = 'pull'
258 258 controller._is_shadow_repo_dir = False
259 259 controller.stub_response_body = 'dummy body value'
260 260 environ_stub = {
261 261 'HTTP_HOST': 'test.example.com',
262 262 'HTTP_ACCEPT': 'application/mercurial',
263 263 'REQUEST_METHOD': 'GET',
264 264 'wsgi.url_scheme': 'http',
265 265 }
266 266
267 267 response = controller(environ_stub, mock.Mock())
268 268 response_body = ''.join(response)
269 269
270 270 # Assert that we got the response from the wsgi app.
271 271 assert '404 Not Found' in response_body
272 272
273 273 def test_push_on_shadow_repo_raises(self, baseapp, request_stub):
274 274 """
275 275 Check that a push action to a shadow repo is aborted.
276 276 """
277 277 controller = StubVCSController(
278 278 baseapp.config.get_settings(), request_stub.registry)
279 279 controller._check_ssl = mock.Mock()
280 280 controller.is_shadow_repo = True
281 281 controller._action = 'push'
282 282 controller.stub_response_body = 'dummy body value'
283 283 environ_stub = {
284 284 'HTTP_HOST': 'test.example.com',
285 285 'HTTP_ACCEPT': 'application/mercurial',
286 286 'REQUEST_METHOD': 'GET',
287 287 'wsgi.url_scheme': 'http',
288 288 }
289 289
290 290 response = controller(environ_stub, mock.Mock())
291 291 response_body = ''.join(response)
292 292
293 293 assert response_body != controller.stub_response_body
294 294 # Assert that a 406 error is returned.
295 295 assert '406 Not Acceptable' in response_body
296 296
297 297 def test_set_repo_names_no_shadow(self, baseapp, request_stub):
298 298 """
299 299 Check that the set_repo_names method sets all names to the one returned
300 300 by the _get_repository_name method on a request to a non-shadow repo.
301 301 """
302 302 environ_stub = {}
303 303 controller = StubVCSController(
304 304 baseapp.config.get_settings(), request_stub.registry)
305 305 controller._name = 'RepoGroup/MyRepo'
306 306 controller.set_repo_names(environ_stub)
307 307 assert not controller.is_shadow_repo
308 308 assert (controller.url_repo_name ==
309 309 controller.acl_repo_name ==
310 310 controller.vcs_repo_name ==
311 311 controller._get_repository_name(environ_stub))
312 312
313 313 def test_set_repo_names_with_shadow(
314 314 self, baseapp, pr_util, config_stub, request_stub):
315 315 """
316 316 Check that the set_repo_names method sets correct names on a request
317 317 to a shadow repo.
318 318 """
319 319 from rhodecode.model.pull_request import PullRequestModel
320 320
321 321 pull_request = pr_util.create_pull_request()
322 322 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
323 323 target=pull_request.target_repo.repo_name,
324 324 pr_id=pull_request.pull_request_id,
325 325 pr_segment=TestShadowRepoRegularExpression.pr_segment,
326 326 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
327 327 controller = StubVCSController(
328 328 baseapp.config.get_settings(), request_stub.registry)
329 329 controller._name = shadow_url
330 330 controller.set_repo_names({})
331 331
332 332 # Get file system path to shadow repo for assertions.
333 333 workspace_id = PullRequestModel()._workspace_id(pull_request)
334 334 target_vcs = pull_request.target_repo.scm_instance()
335 335 vcs_repo_name = target_vcs._get_shadow_repository_path(
336 workspace_id)
336 pull_request.target_repo.repo_id, workspace_id)
337 337
338 338 assert controller.vcs_repo_name == vcs_repo_name
339 339 assert controller.url_repo_name == shadow_url
340 340 assert controller.acl_repo_name == pull_request.target_repo.repo_name
341 341 assert controller.is_shadow_repo
342 342
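The assertion above pins down the updated contract: _get_shadow_repository_path now takes the numeric repo id alongside the workspace id. Below is a minimal sketch of one way such a path could be derived, assuming a hypothetical repo_store root and naming scheme rather than RhodeCode's actual on-disk layout; keying on the immutable numeric id keeps shadow workspaces reachable even if the repository is renamed.

    import os

    def get_shadow_repository_path(repo_store, repo_id, workspace_id):
        # Hypothetical layout: key the shadow repo on the numeric repo id
        # so renaming the repository cannot orphan its shadow workspaces.
        return os.path.join(
            repo_store, '.__shadow_repo_%s_%s' % (repo_id, workspace_id))

    # get_shadow_repository_path('/srv/repos', 42, 'pr-7')
    # -> '/srv/repos/.__shadow_repo_42_pr-7'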
343 343 def test_set_repo_names_with_shadow_but_missing_pr(
344 344 self, baseapp, pr_util, config_stub, request_stub):
345 345 """
346 346 Checks that the set_repo_names method enforces matching target repos
347 347 and pull request IDs.
348 348 """
349 349 pull_request = pr_util.create_pull_request()
350 350 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
351 351 target=pull_request.target_repo.repo_name,
352 352 pr_id=999999999,
353 353 pr_segment=TestShadowRepoRegularExpression.pr_segment,
354 354 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
355 355 controller = StubVCSController(
356 356 baseapp.config.get_settings(), request_stub.registry)
357 357 controller._name = shadow_url
358 358 controller.set_repo_names({})
359 359
360 360 assert not controller.is_shadow_repo
361 361 assert (controller.url_repo_name ==
362 362 controller.acl_repo_name ==
363 363 controller.vcs_repo_name)
364 364
365 365
366 366 @pytest.mark.usefixtures('baseapp')
367 367 class TestGenerateVcsResponse(object):
368 368
369 369 def test_ensures_that_start_response_is_called_early_enough(self):
370 370 self.call_controller_with_response_body(iter(['a', 'b']))
371 371 assert self.start_response.called
372 372
373 373 def test_invalidates_cache_after_body_is_consumed(self):
374 374 result = self.call_controller_with_response_body(iter(['a', 'b']))
375 375 assert not self.was_cache_invalidated()
376 376 # Consume the result
377 377 list(result)
378 378 assert self.was_cache_invalidated()
379 379
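The two assertions above encode a deferred side effect: the cache must be invalidated only once the WSGI body has actually been consumed, never before. A sketch of one way to get that shape by wrapping the body iterator; this is illustrative, not the controller's actual code.

    def with_post_consumption_hook(body_iter, hook):
        # Stream the body through unchanged; the finally clause fires the
        # hook exactly when the iterator is exhausted (or closed).
        try:
            for chunk in body_iter:
                yield chunk
        finally:
            hook()

    invalidated = []
    body = with_post_consumption_hook(
        iter(['a', 'b']), lambda: invalidated.append(True))
    assert not invalidated            # nothing consumed yet
    assert list(body) == ['a', 'b']
    assert invalidated                # hook fired after full consumption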
380 380 def test_raises_unknown_exceptions(self):
381 381 result = self.call_controller_with_response_body(
382 382 self.raise_result_iter(vcs_kind='unknown'))
383 383 with pytest.raises(Exception):
384 384 list(result)
385 385
386 386 def test_prepare_callback_daemon_is_called(self):
387 387 def side_effect(extras, environ, action, txn_id=None):
388 388 return DummyHooksCallbackDaemon(), extras
389 389
390 390 prepare_patcher = mock.patch.object(
391 391 StubVCSController, '_prepare_callback_daemon')
392 392 with prepare_patcher as prepare_mock:
393 393 prepare_mock.side_effect = side_effect
394 394 self.call_controller_with_response_body(iter(['a', 'b']))
395 395 assert prepare_mock.called
396 396 assert prepare_mock.call_count == 1
397 397
398 398 def call_controller_with_response_body(self, response_body):
399 399 settings = {
400 400 'base_path': 'fake_base_path',
401 401 'vcs.hooks.protocol': 'http',
402 402 'vcs.hooks.direct_calls': False,
403 403 }
404 404 registry = AttributeDict()
405 405 controller = StubVCSController(settings, registry)
406 406 controller._invalidate_cache = mock.Mock()
407 407 controller.stub_response_body = response_body
408 408 self.start_response = mock.Mock()
409 409 result = controller._generate_vcs_response(
410 410 environ={}, start_response=self.start_response,
411 411 repo_path='fake_repo_path',
412 412 extras={}, action='push')
413 413 self.controller = controller
414 414 return result
415 415
416 416 def raise_result_iter(self, vcs_kind='repo_locked'):
417 417 """
418 418 Simulates a result iterator that raises a vcs exception of the given vcs_kind when first consumed.
419 419 """
420 420 raise self.vcs_exception(vcs_kind=vcs_kind)
421 421 yield "never_reached"
422 422
423 423 def vcs_exception(self, vcs_kind='repo_locked'):
424 424 locked_exception = Exception('TEST_MESSAGE')
425 425 locked_exception._vcs_kind = vcs_kind
426 426 return locked_exception
427 427
428 428 def was_cache_invalidated(self):
429 429 return self.controller._invalidate_cache.called
430 430
431 431
432 432 class TestInitializeGenerator(object):
433 433
434 434 def test_drains_first_element(self):
435 435 gen = self.factory(['__init__', 1, 2])
436 436 result = list(gen)
437 437 assert result == [1, 2]
438 438
439 439 @pytest.mark.parametrize('values', [
440 440 [],
441 441 [1, 2],
442 442 ])
443 443 def test_raises_value_error(self, values):
444 444 with pytest.raises(ValueError):
445 445 self.factory(values)
446 446
447 447 @simplevcs.initialize_generator
448 448 def factory(self, iterable):
449 449 for elem in iterable:
450 450 yield elem
451 451
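Taken together, these tests fully specify the decorator's observable behaviour: the wrapped generator must yield an '__init__' marker first, which is swallowed, and anything else raises ValueError at call time. A minimal implementation consistent with the assertions above, assuming nothing about the real simplevcs helper beyond what they check:

    import functools

    def initialize_generator(factory):
        @functools.wraps(factory)
        def wrapper(*args, **kwargs):
            gen = factory(*args, **kwargs)
            try:
                first = next(gen)
            except StopIteration:
                raise ValueError('generator yielded no __init__ marker')
            if first != '__init__':
                raise ValueError('unexpected first element: %r' % (first,))
            # The generator body has now run up to its first yield, so any
            # setup errors surface here instead of at consumption time.
            return gen
        return wrapper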
452 452
453 453 class TestPrepareHooksDaemon(object):
454 454 def test_calls_imported_prepare_callback_daemon(self, app_settings, request_stub):
455 455 expected_extras = {'extra1': 'value1'}
456 456 daemon = DummyHooksCallbackDaemon()
457 457
458 458 controller = StubVCSController(app_settings, request_stub.registry)
459 459 prepare_patcher = mock.patch.object(
460 460 simplevcs, 'prepare_callback_daemon',
461 461 return_value=(daemon, expected_extras))
462 462 with prepare_patcher as prepare_mock:
463 463 callback_daemon, extras = controller._prepare_callback_daemon(
464 464 expected_extras.copy(), {}, 'push')
465 465 prepare_mock.assert_called_once_with(
466 466 expected_extras,
467 467 protocol=app_settings['vcs.hooks.protocol'],
468 468 txn_id=None,
469 469 use_direct_calls=app_settings['vcs.hooks.direct_calls'])
470 470
471 471 assert callback_daemon == daemon
472 472 assert extras == expected_extras
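Both test modules lean on the same pattern: mock.patch.object used as a context manager, so the patched attribute is restored even if an assertion fails mid-block. A self-contained illustration, with a hypothetical Service class standing in for the patched model objects:

    import mock

    class Service(object):
        def fetch(self):
            return 'real'

    # The attribute is replaced only inside the with-block and restored
    # afterwards, which is what lets each test patch model methods safely.
    with mock.patch.object(Service, 'fetch', return_value='stubbed') as m:
        assert Service().fetch() == 'stubbed'
        m.assert_called_once_with()
    assert Service().fetch() == 'real'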
@@ -1,860 +1,868 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 import textwrap
24 24
25 25 import rhodecode
26 26 from rhodecode.lib.utils2 import safe_unicode
27 27 from rhodecode.lib.vcs.backends import get_backend
28 28 from rhodecode.lib.vcs.backends.base import (
29 29 MergeResponse, MergeFailureReason, Reference)
30 30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 31 from rhodecode.lib.vcs.nodes import FileNode
32 32 from rhodecode.model.comment import CommentsModel
33 33 from rhodecode.model.db import PullRequest, Session
34 34 from rhodecode.model.pull_request import PullRequestModel
35 35 from rhodecode.model.user import UserModel
36 36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37 37
38 38
39 39 pytestmark = [
40 40 pytest.mark.backends("git", "hg"),
41 41 ]
42 42
43 43
44 44 @pytest.mark.usefixtures('config_stub')
45 45 class TestPullRequestModel(object):
46 46
47 47 @pytest.fixture
48 48 def pull_request(self, request, backend, pr_util):
49 49 """
50 50 A pull request combined with multiple patches.
51 51 """
52 52 BackendClass = get_backend(backend.alias)
53 53 self.merge_patcher = mock.patch.object(
54 54 BackendClass, 'merge', return_value=MergeResponse(
55 55 False, False, None, MergeFailureReason.UNKNOWN))
56 56 self.workspace_remove_patcher = mock.patch.object(
57 57 BackendClass, 'cleanup_merge_workspace')
58 58
59 59 self.workspace_remove_mock = self.workspace_remove_patcher.start()
60 60 self.merge_mock = self.merge_patcher.start()
61 61 self.comment_patcher = mock.patch(
62 62 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
63 63 self.comment_patcher.start()
64 64 self.notification_patcher = mock.patch(
65 65 'rhodecode.model.notification.NotificationModel.create')
66 66 self.notification_patcher.start()
67 67 self.helper_patcher = mock.patch(
68 68 'rhodecode.lib.helpers.route_path')
69 69 self.helper_patcher.start()
70 70
71 71 self.hook_patcher = mock.patch.object(PullRequestModel,
72 72 '_trigger_pull_request_hook')
73 73 self.hook_mock = self.hook_patcher.start()
74 74
75 75 self.invalidation_patcher = mock.patch(
76 76 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
77 77 self.invalidation_mock = self.invalidation_patcher.start()
78 78
79 79 self.pull_request = pr_util.create_pull_request(
80 80 mergeable=True, name_suffix=u'ąć')
81 81 self.source_commit = self.pull_request.source_ref_parts.commit_id
82 82 self.target_commit = self.pull_request.target_ref_parts.commit_id
83 83 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
84 self.repo_id = self.pull_request.target_repo.repo_id
84 85
85 86 @request.addfinalizer
86 87 def cleanup_pull_request():
87 88 calls = [mock.call(
88 89 self.pull_request, self.pull_request.author, 'create')]
89 90 self.hook_mock.assert_has_calls(calls)
90 91
91 92 self.workspace_remove_patcher.stop()
92 93 self.merge_patcher.stop()
93 94 self.comment_patcher.stop()
94 95 self.notification_patcher.stop()
95 96 self.helper_patcher.stop()
96 97 self.hook_patcher.stop()
97 98 self.invalidation_patcher.stop()
98 99
99 100 return self.pull_request
100 101
101 102 def test_get_all(self, pull_request):
102 103 prs = PullRequestModel().get_all(pull_request.target_repo)
103 104 assert isinstance(prs, list)
104 105 assert len(prs) == 1
105 106
106 107 def test_count_all(self, pull_request):
107 108 pr_count = PullRequestModel().count_all(pull_request.target_repo)
108 109 assert pr_count == 1
109 110
110 111 def test_get_awaiting_review(self, pull_request):
111 112 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
112 113 assert isinstance(prs, list)
113 114 assert len(prs) == 1
114 115
115 116 def test_count_awaiting_review(self, pull_request):
116 117 pr_count = PullRequestModel().count_awaiting_review(
117 118 pull_request.target_repo)
118 119 assert pr_count == 1
119 120
120 121 def test_get_awaiting_my_review(self, pull_request):
121 122 PullRequestModel().update_reviewers(
122 123 pull_request, [(pull_request.author, ['author'], False, [])],
123 124 pull_request.author)
124 125 prs = PullRequestModel().get_awaiting_my_review(
125 126 pull_request.target_repo, user_id=pull_request.author.user_id)
126 127 assert isinstance(prs, list)
127 128 assert len(prs) == 1
128 129
129 130 def test_count_awaiting_my_review(self, pull_request):
130 131 PullRequestModel().update_reviewers(
131 132 pull_request, [(pull_request.author, ['author'], False, [])],
132 133 pull_request.author)
133 134 pr_count = PullRequestModel().count_awaiting_my_review(
134 135 pull_request.target_repo, user_id=pull_request.author.user_id)
135 136 assert pr_count == 1
136 137
137 138 def test_delete_calls_cleanup_merge(self, pull_request):
139 repo_id = pull_request.target_repo.repo_id
138 140 PullRequestModel().delete(pull_request, pull_request.author)
139 141
140 142 self.workspace_remove_mock.assert_called_once_with(
141 self.workspace_id)
143 repo_id, self.workspace_id)
142 144
143 145 def test_close_calls_cleanup_and_hook(self, pull_request):
144 146 PullRequestModel().close_pull_request(
145 147 pull_request, pull_request.author)
148 repo_id = pull_request.target_repo.repo_id
146 149
147 150 self.workspace_remove_mock.assert_called_once_with(
148 self.workspace_id)
151 repo_id, self.workspace_id)
149 152 self.hook_mock.assert_called_with(
150 153 self.pull_request, self.pull_request.author, 'close')
151 154
152 155 def test_merge_status(self, pull_request):
153 156 self.merge_mock.return_value = MergeResponse(
154 157 True, False, None, MergeFailureReason.NONE)
155 158
156 159 assert pull_request._last_merge_source_rev is None
157 160 assert pull_request._last_merge_target_rev is None
158 161 assert pull_request.last_merge_status is None
159 162
160 163 status, msg = PullRequestModel().merge_status(pull_request)
161 164 assert status is True
162 165 assert msg.eval() == 'This pull request can be automatically merged.'
163 166 self.merge_mock.assert_called_with(
167 self.repo_id, self.workspace_id,
164 168 pull_request.target_ref_parts,
165 169 pull_request.source_repo.scm_instance(),
166 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
170 pull_request.source_ref_parts, dry_run=True,
167 171 use_rebase=False, close_branch=False)
168 172
169 173 assert pull_request._last_merge_source_rev == self.source_commit
170 174 assert pull_request._last_merge_target_rev == self.target_commit
171 175 assert pull_request.last_merge_status is MergeFailureReason.NONE
172 176
173 177 self.merge_mock.reset_mock()
174 178 status, msg = PullRequestModel().merge_status(pull_request)
175 179 assert status is True
176 180 assert msg.eval() == 'This pull request can be automatically merged.'
177 181 assert self.merge_mock.called is False
178 182
179 183 def test_merge_status_known_failure(self, pull_request):
180 184 self.merge_mock.return_value = MergeResponse(
181 185 False, False, None, MergeFailureReason.MERGE_FAILED)
182 186
183 187 assert pull_request._last_merge_source_rev is None
184 188 assert pull_request._last_merge_target_rev is None
185 189 assert pull_request.last_merge_status is None
186 190
187 191 status, msg = PullRequestModel().merge_status(pull_request)
188 192 assert status is False
189 193 assert (
190 194 msg.eval() ==
191 195 'This pull request cannot be merged because of merge conflicts.')
192 196 self.merge_mock.assert_called_with(
197 self.repo_id, self.workspace_id,
193 198 pull_request.target_ref_parts,
194 199 pull_request.source_repo.scm_instance(),
195 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
200 pull_request.source_ref_parts, dry_run=True,
196 201 use_rebase=False, close_branch=False)
197 202
198 203 assert pull_request._last_merge_source_rev == self.source_commit
199 204 assert pull_request._last_merge_target_rev == self.target_commit
200 205 assert (
201 206 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
202 207
203 208 self.merge_mock.reset_mock()
204 209 status, msg = PullRequestModel().merge_status(pull_request)
205 210 assert status is False
206 211 assert (
207 212 msg.eval() ==
208 213 'This pull request cannot be merged because of merge conflicts.')
209 214 assert self.merge_mock.called is False
210 215
211 216 def test_merge_status_unknown_failure(self, pull_request):
212 217 self.merge_mock.return_value = MergeResponse(
213 218 False, False, None, MergeFailureReason.UNKNOWN)
214 219
215 220 assert pull_request._last_merge_source_rev is None
216 221 assert pull_request._last_merge_target_rev is None
217 222 assert pull_request.last_merge_status is None
218 223
219 224 status, msg = PullRequestModel().merge_status(pull_request)
220 225 assert status is False
221 226 assert msg.eval() == (
222 227 'This pull request cannot be merged because of an unhandled'
223 228 ' exception.')
224 229 self.merge_mock.assert_called_with(
230 self.repo_id, self.workspace_id,
225 231 pull_request.target_ref_parts,
226 232 pull_request.source_repo.scm_instance(),
227 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
233 pull_request.source_ref_parts, dry_run=True,
228 234 use_rebase=False, close_branch=False)
229 235
230 236 assert pull_request._last_merge_source_rev is None
231 237 assert pull_request._last_merge_target_rev is None
232 238 assert pull_request.last_merge_status is None
233 239
234 240 self.merge_mock.reset_mock()
235 241 status, msg = PullRequestModel().merge_status(pull_request)
236 242 assert status is False
237 243 assert msg.eval() == (
238 244 'This pull request cannot be merged because of an unhandled'
239 245 ' exception.')
240 246 assert self.merge_mock.called is True
241 247
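The three merge_status tests above pin down a caching rule: a successful or known-failed dry-run result is recorded against the current source and target revisions and reused, while an UNKNOWN failure is never cached, so the next status check retries the merge. A hedged sketch of that rule follows; the field names mirror the tests, but the real PullRequestModel logic may differ.

    NONE, MERGE_FAILED, UNKNOWN = range(3)  # stand-ins for MergeFailureReason

    class FakePullRequest(object):
        def __init__(self):
            self.source_rev, self.target_rev = 'a' * 40, 'b' * 40
            self._last_merge_source_rev = None
            self._last_merge_target_rev = None
            self.last_merge_status = None

    def merge_status(pr, dry_run_merge):
        cached = (pr._last_merge_source_rev == pr.source_rev and
                  pr._last_merge_target_rev == pr.target_rev and
                  pr.last_merge_status is not None)
        if cached:
            return pr.last_merge_status
        status = dry_run_merge()
        if status != UNKNOWN:  # unknown failures are never cached
            pr._last_merge_source_rev = pr.source_rev
            pr._last_merge_target_rev = pr.target_rev
            pr.last_merge_status = status
        return status

    pr, calls = FakePullRequest(), []
    def probe():
        calls.append(1)
        return NONE
    assert merge_status(pr, probe) == NONE
    assert merge_status(pr, probe) == NONE
    assert len(calls) == 1  # the second check was served from the cache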
242 248 def test_merge_status_when_target_is_locked(self, pull_request):
243 249 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
244 250 status, msg = PullRequestModel().merge_status(pull_request)
245 251 assert status is False
246 252 assert msg.eval() == (
247 253 'This pull request cannot be merged because the target repository'
248 254 ' is locked.')
249 255
250 256 def test_merge_status_requirements_check_target(self, pull_request):
251 257
252 258 def has_largefiles(self, repo):
253 259 return repo == pull_request.source_repo
254 260
255 261 patcher = mock.patch.object(
256 262 PullRequestModel, '_has_largefiles', has_largefiles)
257 263 with patcher:
258 264 status, msg = PullRequestModel().merge_status(pull_request)
259 265
260 266 assert status is False
261 267 assert msg == 'Target repository large files support is disabled.'
262 268
263 269 def test_merge_status_requirements_check_source(self, pull_request):
264 270
265 271 def has_largefiles(self, repo):
266 272 return repo == pull_request.target_repo
267 273
268 274 patcher = mock.patch.object(
269 275 PullRequestModel, '_has_largefiles', has_largefiles)
270 276 with patcher:
271 277 status, msg = PullRequestModel().merge_status(pull_request)
272 278
273 279 assert status is False
274 280 assert msg == 'Source repository large files support is disabled.'
275 281
276 282 def test_merge(self, pull_request, merge_extras):
277 283 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
278 284 merge_ref = Reference(
279 285 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
280 286 self.merge_mock.return_value = MergeResponse(
281 287 True, True, merge_ref, MergeFailureReason.NONE)
282 288
283 289 merge_extras['repository'] = pull_request.target_repo.repo_name
284 PullRequestModel().merge(
290 PullRequestModel().merge_repo(
285 291 pull_request, pull_request.author, extras=merge_extras)
286 292
287 293 message = (
288 294 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
289 295 u'\n\n {pr_title}'.format(
290 296 pr_id=pull_request.pull_request_id,
291 297 source_repo=safe_unicode(
292 298 pull_request.source_repo.scm_instance().name),
293 299 source_ref_name=pull_request.source_ref_parts.name,
294 300 pr_title=safe_unicode(pull_request.title)
295 301 )
296 302 )
297 303 self.merge_mock.assert_called_with(
304 self.repo_id, self.workspace_id,
298 305 pull_request.target_ref_parts,
299 306 pull_request.source_repo.scm_instance(),
300 pull_request.source_ref_parts, self.workspace_id,
307 pull_request.source_ref_parts,
301 308 user_name=user.username, user_email=user.email, message=message,
302 309 use_rebase=False, close_branch=False
303 310 )
304 311 self.invalidation_mock.assert_called_once_with(
305 312 pull_request.target_repo.repo_name)
306 313
307 314 self.hook_mock.assert_called_with(
308 315 self.pull_request, self.pull_request.author, 'merge')
309 316
310 317 pull_request = PullRequest.get(pull_request.pull_request_id)
311 318 assert (
312 319 pull_request.merge_rev ==
313 320 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
314 321
315 322 def test_merge_failed(self, pull_request, merge_extras):
316 323 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
317 324 merge_ref = Reference(
318 325 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
319 326 self.merge_mock.return_value = MergeResponse(
320 327 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
321 328
322 329 merge_extras['repository'] = pull_request.target_repo.repo_name
323 PullRequestModel().merge(
330 PullRequestModel().merge_repo(
324 331 pull_request, pull_request.author, extras=merge_extras)
325 332
326 333 message = (
327 334 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
328 335 u'\n\n {pr_title}'.format(
329 336 pr_id=pull_request.pull_request_id,
330 337 source_repo=safe_unicode(
331 338 pull_request.source_repo.scm_instance().name),
332 339 source_ref_name=pull_request.source_ref_parts.name,
333 340 pr_title=safe_unicode(pull_request.title)
334 341 )
335 342 )
336 343 self.merge_mock.assert_called_with(
344 self.repo_id, self.workspace_id,
337 345 pull_request.target_ref_parts,
338 346 pull_request.source_repo.scm_instance(),
339 pull_request.source_ref_parts, self.workspace_id,
347 pull_request.source_ref_parts,
340 348 user_name=user.username, user_email=user.email, message=message,
341 349 use_rebase=False, close_branch=False
342 350 )
343 351
344 352 pull_request = PullRequest.get(pull_request.pull_request_id)
345 353 assert self.invalidation_mock.called is False
346 354 assert pull_request.merge_rev is None
347 355
348 356 def test_get_commit_ids(self, pull_request):
349 357 # The PR has not been merged yet, so expect an exception
350 358 with pytest.raises(ValueError):
351 359 PullRequestModel()._get_commit_ids(pull_request)
352 360
353 361 # Merge revision is in the revisions list
354 362 pull_request.merge_rev = pull_request.revisions[0]
355 363 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
356 364 assert commit_ids == pull_request.revisions
357 365
358 366 # Merge revision is not in the revisions list
359 367 pull_request.merge_rev = 'f000' * 10
360 368 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
361 369 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
362 370
363 371 def test_get_diff_from_pr_version(self, pull_request):
364 372 source_repo = pull_request.source_repo
365 373 source_ref_id = pull_request.source_ref_parts.commit_id
366 374 target_ref_id = pull_request.target_ref_parts.commit_id
367 375 diff = PullRequestModel()._get_diff_from_pr_or_version(
368 376 source_repo, source_ref_id, target_ref_id, context=6)
369 377 assert 'file_1' in diff.raw
370 378
371 379 def test_generate_title_returns_unicode(self):
372 380 title = PullRequestModel().generate_pullrequest_title(
373 381 source='source-dummy',
374 382 source_ref='source-ref-dummy',
375 383 target='target-dummy',
376 384 )
377 385 assert type(title) == unicode
378 386
379 387
380 388 @pytest.mark.usefixtures('config_stub')
381 389 class TestIntegrationMerge(object):
382 390 @pytest.mark.parametrize('extra_config', (
383 391 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
384 392 ))
385 393 def test_merge_triggers_push_hooks(
386 394 self, pr_util, user_admin, capture_rcextensions, merge_extras,
387 395 extra_config):
388 396 pull_request = pr_util.create_pull_request(
389 397 approved=True, mergeable=True)
390 398 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
391 399 merge_extras['repository'] = pull_request.target_repo.repo_name
392 400 Session().commit()
393 401
394 402 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
395 merge_state = PullRequestModel().merge(
403 merge_state = PullRequestModel().merge_repo(
396 404 pull_request, user_admin, extras=merge_extras)
397 405
398 406 assert merge_state.executed
399 407 assert 'pre_push' in capture_rcextensions
400 408 assert 'post_push' in capture_rcextensions
401 409
402 410 def test_merge_can_be_rejected_by_pre_push_hook(
403 411 self, pr_util, user_admin, capture_rcextensions, merge_extras):
404 412 pull_request = pr_util.create_pull_request(
405 413 approved=True, mergeable=True)
406 414 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
407 415 merge_extras['repository'] = pull_request.target_repo.repo_name
408 416 Session().commit()
409 417
410 418 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
411 419 pre_pull.side_effect = RepositoryError("Disallow push!")
412 merge_status = PullRequestModel().merge(
420 merge_status = PullRequestModel().merge_repo(
413 421 pull_request, user_admin, extras=merge_extras)
414 422
415 423 assert not merge_status.executed
416 424 assert 'pre_push' not in capture_rcextensions
417 425 assert 'post_push' not in capture_rcextensions
418 426
419 427 def test_merge_fails_if_target_is_locked(
420 428 self, pr_util, user_regular, merge_extras):
421 429 pull_request = pr_util.create_pull_request(
422 430 approved=True, mergeable=True)
423 431 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
424 432 pull_request.target_repo.locked = locked_by
425 433 # TODO: johbo: Check if this can work based on the database, currently
426 434 # all data is pre-computed, that's why just updating the DB is not
427 435 # enough.
428 436 merge_extras['locked_by'] = locked_by
429 437 merge_extras['repository'] = pull_request.target_repo.repo_name
430 438 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
431 439 Session().commit()
432 merge_status = PullRequestModel().merge(
440 merge_status = PullRequestModel().merge_repo(
433 441 pull_request, user_regular, extras=merge_extras)
434 442 assert not merge_status.executed
435 443
436 444
437 445 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
438 446 (False, 1, 0),
439 447 (True, 0, 1),
440 448 ])
441 449 def test_outdated_comments(
442 450 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
443 451 pull_request = pr_util.create_pull_request()
444 452 pr_util.create_inline_comment(file_path='not_in_updated_diff')
445 453
446 454 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
447 455 pr_util.add_one_commit()
448 456 assert_inline_comments(
449 457 pull_request, visible=inlines_count, outdated=outdated_count)
450 458 outdated_comment_mock.assert_called_with(pull_request)
451 459
452 460
453 461 @pytest.fixture
454 462 def merge_extras(user_regular):
455 463 """
456 464 Context for the vcs operation when running a merge.
457 465 """
458 466 extras = {
459 467 'ip': '127.0.0.1',
460 468 'username': user_regular.username,
461 469 'user_id': user_regular.user_id,
462 470 'action': 'push',
463 471 'repository': 'fake_target_repo_name',
464 472 'scm': 'git',
465 473 'config': 'fake_config_ini_path',
466 474 'make_lock': None,
467 475 'locked_by': [None, None, None],
468 476 'server_url': 'http://test.example.com:5000',
469 477 'hooks': ['push', 'pull'],
470 478 'is_shadow_repo': False,
471 479 }
472 480 return extras
473 481
474 482
475 483 @pytest.mark.usefixtures('config_stub')
476 484 class TestUpdateCommentHandling(object):
477 485
478 486 @pytest.fixture(autouse=True, scope='class')
479 487 def enable_outdated_comments(self, request, baseapp):
480 488 config_patch = mock.patch.dict(
481 489 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
482 490 config_patch.start()
483 491
484 492 @request.addfinalizer
485 493 def cleanup():
486 494 config_patch.stop()
487 495
488 496 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
489 497 commits = [
490 498 {'message': 'a'},
491 499 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
492 500 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
493 501 ]
494 502 pull_request = pr_util.create_pull_request(
495 503 commits=commits, target_head='a', source_head='b', revisions=['b'])
496 504 pr_util.create_inline_comment(file_path='file_b')
497 505 pr_util.add_one_commit(head='c')
498 506
499 507 assert_inline_comments(pull_request, visible=1, outdated=0)
500 508
501 509 def test_comment_stays_unflagged_on_change_above(self, pr_util):
502 510 original_content = ''.join(
503 511 ['line {}\n'.format(x) for x in range(1, 11)])
504 512 updated_content = 'new_line_at_top\n' + original_content
505 513 commits = [
506 514 {'message': 'a'},
507 515 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
508 516 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
509 517 ]
510 518 pull_request = pr_util.create_pull_request(
511 519 commits=commits, target_head='a', source_head='b', revisions=['b'])
512 520
513 521 with outdated_comments_patcher():
514 522 comment = pr_util.create_inline_comment(
515 523 line_no=u'n8', file_path='file_b')
516 524 pr_util.add_one_commit(head='c')
517 525
518 526 assert_inline_comments(pull_request, visible=1, outdated=0)
519 527 assert comment.line_no == u'n9'
520 528
521 529 def test_comment_stays_unflagged_on_change_below(self, pr_util):
522 530 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
523 531 updated_content = original_content + 'new_line_at_end\n'
524 532 commits = [
525 533 {'message': 'a'},
526 534 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
527 535 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
528 536 ]
529 537 pull_request = pr_util.create_pull_request(
530 538 commits=commits, target_head='a', source_head='b', revisions=['b'])
531 539 pr_util.create_inline_comment(file_path='file_b')
532 540 pr_util.add_one_commit(head='c')
533 541
534 542 assert_inline_comments(pull_request, visible=1, outdated=0)
535 543
536 544 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
537 545 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
538 546 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
539 547 change_lines = list(base_lines)
540 548 change_lines.insert(6, 'line 6a added\n')
541 549
542 550 # Change the first and last lines of the visible context
543 551 update_lines = list(change_lines)
544 552 update_lines[0] = 'line 1 changed\n'
545 553 update_lines[-1] = 'line 12 changed\n'
546 554
547 555 def file_b(lines):
548 556 return FileNode('file_b', ''.join(lines))
549 557
550 558 commits = [
551 559 {'message': 'a', 'added': [file_b(base_lines)]},
552 560 {'message': 'b', 'changed': [file_b(change_lines)]},
553 561 {'message': 'c', 'changed': [file_b(update_lines)]},
554 562 ]
555 563
556 564 pull_request = pr_util.create_pull_request(
557 565 commits=commits, target_head='a', source_head='b', revisions=['b'])
558 566 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
559 567
560 568 with outdated_comments_patcher():
561 569 pr_util.add_one_commit(head='c')
562 570 assert_inline_comments(pull_request, visible=0, outdated=1)
563 571
564 572 @pytest.mark.parametrize("change, content", [
565 573 ('changed', 'changed\n'),
566 574 ('removed', ''),
567 575 ], ids=['changed', 'removed'])
568 576 def test_comment_flagged_on_change(self, pr_util, change, content):
569 577 commits = [
570 578 {'message': 'a'},
571 579 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
572 580 {'message': 'c', change: [FileNode('file_b', content)]},
573 581 ]
574 582 pull_request = pr_util.create_pull_request(
575 583 commits=commits, target_head='a', source_head='b', revisions=['b'])
576 584 pr_util.create_inline_comment(file_path='file_b')
577 585
578 586 with outdated_comments_patcher():
579 587 pr_util.add_one_commit(head='c')
580 588 assert_inline_comments(pull_request, visible=0, outdated=1)
581 589
582 590
583 591 @pytest.mark.usefixtures('config_stub')
584 592 class TestUpdateChangedFiles(object):
585 593
586 594 def test_no_changes_on_unchanged_diff(self, pr_util):
587 595 commits = [
588 596 {'message': 'a'},
589 597 {'message': 'b',
590 598 'added': [FileNode('file_b', 'test_content b\n')]},
591 599 {'message': 'c',
592 600 'added': [FileNode('file_c', 'test_content c\n')]},
593 601 ]
594 602 # open a PR from a to b, adding file_b
595 603 pull_request = pr_util.create_pull_request(
596 604 commits=commits, target_head='a', source_head='b', revisions=['b'],
597 605 name_suffix='per-file-review')
598 606
599 607 # modify PR adding new file file_c
600 608 pr_util.add_one_commit(head='c')
601 609
602 610 assert_pr_file_changes(
603 611 pull_request,
604 612 added=['file_c'],
605 613 modified=[],
606 614 removed=[])
607 615
608 616 def test_modify_and_undo_modification_diff(self, pr_util):
609 617 commits = [
610 618 {'message': 'a'},
611 619 {'message': 'b',
612 620 'added': [FileNode('file_b', 'test_content b\n')]},
613 621 {'message': 'c',
614 622 'changed': [FileNode('file_b', 'test_content b modified\n')]},
615 623 {'message': 'd',
616 624 'changed': [FileNode('file_b', 'test_content b\n')]},
617 625 ]
618 626 # open a PR from a to b, adding file_b
619 627 pull_request = pr_util.create_pull_request(
620 628 commits=commits, target_head='a', source_head='b', revisions=['b'],
621 629 name_suffix='per-file-review')
622 630
623 631 # modify PR modifying file file_b
624 632 pr_util.add_one_commit(head='c')
625 633
626 634 assert_pr_file_changes(
627 635 pull_request,
628 636 added=[],
629 637 modified=['file_b'],
630 638 removed=[])
631 639
632 640 # move the head again to d, which rolls back the change,
633 641 # meaning we should indicate no changes
634 642 pr_util.add_one_commit(head='d')
635 643
636 644 assert_pr_file_changes(
637 645 pull_request,
638 646 added=[],
639 647 modified=[],
640 648 removed=[])
641 649
642 650 def test_updated_all_files_in_pr(self, pr_util):
643 651 commits = [
644 652 {'message': 'a'},
645 653 {'message': 'b', 'added': [
646 654 FileNode('file_a', 'test_content a\n'),
647 655 FileNode('file_b', 'test_content b\n'),
648 656 FileNode('file_c', 'test_content c\n')]},
649 657 {'message': 'c', 'changed': [
650 658 FileNode('file_a', 'test_content a changed\n'),
651 659 FileNode('file_b', 'test_content b changed\n'),
652 660 FileNode('file_c', 'test_content c changed\n')]},
653 661 ]
654 662 # open a PR from a to b, changing 3 files
655 663 pull_request = pr_util.create_pull_request(
656 664 commits=commits, target_head='a', source_head='b', revisions=['b'],
657 665 name_suffix='per-file-review')
658 666
659 667 pr_util.add_one_commit(head='c')
660 668
661 669 assert_pr_file_changes(
662 670 pull_request,
663 671 added=[],
664 672 modified=['file_a', 'file_b', 'file_c'],
665 673 removed=[])
666 674
667 675 def test_updated_and_removed_all_files_in_pr(self, pr_util):
668 676 commits = [
669 677 {'message': 'a'},
670 678 {'message': 'b', 'added': [
671 679 FileNode('file_a', 'test_content a\n'),
672 680 FileNode('file_b', 'test_content b\n'),
673 681 FileNode('file_c', 'test_content c\n')]},
674 682 {'message': 'c', 'removed': [
675 683 FileNode('file_a', 'test_content a changed\n'),
676 684 FileNode('file_b', 'test_content b changed\n'),
677 685 FileNode('file_c', 'test_content c changed\n')]},
678 686 ]
679 687 # open a PR from a to b, removing 3 files
680 688 pull_request = pr_util.create_pull_request(
681 689 commits=commits, target_head='a', source_head='b', revisions=['b'],
682 690 name_suffix='per-file-review')
683 691
684 692 pr_util.add_one_commit(head='c')
685 693
686 694 assert_pr_file_changes(
687 695 pull_request,
688 696 added=[],
689 697 modified=[],
690 698 removed=['file_a', 'file_b', 'file_c'])
691 699
692 700
693 701 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
694 702 model = PullRequestModel()
695 703 pull_request = pr_util.create_pull_request()
696 704 pr_util.update_source_repository()
697 705
698 706 model.update_commits(pull_request)
699 707
700 708 # Expect that it has a version entry now
701 709 assert len(model.get_versions(pull_request)) == 1
702 710
703 711
704 712 def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
705 713 pull_request = pr_util.create_pull_request()
706 714 model = PullRequestModel()
707 715 model.update_commits(pull_request)
708 716
709 717 # Expect that it still has no versions
710 718 assert len(model.get_versions(pull_request)) == 0
711 719
712 720
713 721 def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
714 722 model = PullRequestModel()
715 723 pull_request = pr_util.create_pull_request()
716 724 comment = pr_util.create_comment()
717 725 pr_util.update_source_repository()
718 726
719 727 model.update_commits(pull_request)
720 728
721 729 # Expect that the comment is linked to the pr version now
722 730 assert comment.pull_request_version == model.get_versions(pull_request)[0]
723 731
724 732
725 733 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
726 734 model = PullRequestModel()
727 735 pull_request = pr_util.create_pull_request()
728 736 pr_util.update_source_repository()
729 737 pr_util.update_source_repository()
730 738
731 739 model.update_commits(pull_request)
732 740
733 741 # Expect to find a new comment about the change
734 742 expected_message = textwrap.dedent(
735 743 """\
736 744 Pull request updated. Auto status change to |under_review|
737 745
738 746 .. role:: added
739 747 .. role:: removed
740 748 .. parsed-literal::
741 749
742 750 Changed commits:
743 751 * :added:`1 added`
744 752 * :removed:`0 removed`
745 753
746 754 Changed files:
747 755 * `A file_2 <#a_c--92ed3b5f07b4>`_
748 756
749 757 .. |under_review| replace:: *"Under Review"*"""
750 758 )
751 759 pull_request_comments = sorted(
752 760 pull_request.comments, key=lambda c: c.modified_at)
753 761 update_comment = pull_request_comments[-1]
754 762 assert update_comment.text == expected_message
755 763
756 764
757 765 def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
758 766 pull_request = pr_util.create_pull_request()
759 767
760 768 # Avoiding default values
761 769 pull_request.status = PullRequest.STATUS_CLOSED
762 770 pull_request._last_merge_source_rev = "0" * 40
763 771 pull_request._last_merge_target_rev = "1" * 40
764 772 pull_request.last_merge_status = 1
765 773 pull_request.merge_rev = "2" * 40
766 774
767 775 # Remember automatic values
768 776 created_on = pull_request.created_on
769 777 updated_on = pull_request.updated_on
770 778
771 779 # Create a new version of the pull request
772 780 version = PullRequestModel()._create_version_from_snapshot(pull_request)
773 781
774 782 # Check attributes
775 783 assert version.title == pr_util.create_parameters['title']
776 784 assert version.description == pr_util.create_parameters['description']
777 785 assert version.status == PullRequest.STATUS_CLOSED
778 786
779 787 # versions get updated created_on
780 788 assert version.created_on != created_on
781 789
782 790 assert version.updated_on == updated_on
783 791 assert version.user_id == pull_request.user_id
784 792 assert version.revisions == pr_util.create_parameters['revisions']
785 793 assert version.source_repo == pr_util.source_repository
786 794 assert version.source_ref == pr_util.create_parameters['source_ref']
787 795 assert version.target_repo == pr_util.target_repository
788 796 assert version.target_ref == pr_util.create_parameters['target_ref']
789 797 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
790 798 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
791 799 assert version.last_merge_status == pull_request.last_merge_status
792 800 assert version.merge_rev == pull_request.merge_rev
793 801 assert version.pull_request == pull_request
794 802
795 803
796 804 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
797 805 version1 = pr_util.create_version_of_pull_request()
798 806 comment_linked = pr_util.create_comment(linked_to=version1)
799 807 comment_unlinked = pr_util.create_comment()
800 808 version2 = pr_util.create_version_of_pull_request()
801 809
802 810 PullRequestModel()._link_comments_to_version(version2)
803 811
804 812 # Expect that only the new comment is linked to version2
805 813 assert (
806 814 comment_unlinked.pull_request_version_id ==
807 815 version2.pull_request_version_id)
808 816 assert (
809 817 comment_linked.pull_request_version_id ==
810 818 version1.pull_request_version_id)
811 819 assert (
812 820 comment_unlinked.pull_request_version_id !=
813 821 comment_linked.pull_request_version_id)
814 822
815 823
816 824 def test_calculate_commits():
817 825 old_ids = [1, 2, 3]
818 826 new_ids = [1, 3, 4, 5]
819 827 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
820 828 assert change.added == [4, 5]
821 829 assert change.common == [1, 3]
822 830 assert change.removed == [2]
823 831 assert change.total == [1, 3, 4, 5]
824 832
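test_calculate_commits fixes the expected semantics exactly, so a minimal implementation can be read straight off the assertions. The namedtuple container below is an assumption, not necessarily the model's actual return type.

    import collections

    ChangeTuple = collections.namedtuple(
        'ChangeTuple', ['added', 'common', 'removed', 'total'])

    def calculate_commit_id_changes(old_ids, new_ids):
        added = [c for c in new_ids if c not in old_ids]
        common = [c for c in old_ids if c in new_ids]
        removed = [c for c in old_ids if c not in new_ids]
        return ChangeTuple(added, common, removed, new_ids)

    change = calculate_commit_id_changes([1, 2, 3], [1, 3, 4, 5])
    assert change.added == [4, 5]
    assert change.common == [1, 3]
    assert change.removed == [2]
    assert change.total == [1, 3, 4, 5]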
825 833
826 834 def assert_inline_comments(pull_request, visible=None, outdated=None):
827 835 if visible is not None:
828 836 inline_comments = CommentsModel().get_inline_comments(
829 837 pull_request.target_repo.repo_id, pull_request=pull_request)
830 838 inline_cnt = CommentsModel().get_inline_comments_count(
831 839 inline_comments)
832 840 assert inline_cnt == visible
833 841 if outdated is not None:
834 842 outdated_comments = CommentsModel().get_outdated_comments(
835 843 pull_request.target_repo.repo_id, pull_request)
836 844 assert len(outdated_comments) == outdated
837 845
838 846
839 847 def assert_pr_file_changes(
840 848 pull_request, added=None, modified=None, removed=None):
841 849 pr_versions = PullRequestModel().get_versions(pull_request)
842 850 # always use the first version, i.e. the original PR, to calculate changes
843 851 pull_request_version = pr_versions[0]
844 852 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
845 853 pull_request, pull_request_version)
846 854 file_changes = PullRequestModel()._calculate_file_changes(
847 855 old_diff_data, new_diff_data)
848 856
849 857 assert added == file_changes.added, \
850 858 'expected added:%s vs value:%s' % (added, file_changes.added)
851 859 assert modified == file_changes.modified, \
852 860 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
853 861 assert removed == file_changes.removed, \
854 862 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
855 863
856 864
857 865 def outdated_comments_patcher(use_outdated=True):
858 866 return mock.patch.object(
859 867 CommentsModel, 'use_outdated_comments',
860 868 return_value=use_outdated)
@@ -1,1289 +1,1289 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import datetime
22 22 import mock
23 23 import os
24 24 import sys
25 25 import shutil
26 26
27 27 import pytest
28 28
29 29 from rhodecode.lib.utils import make_db_config
30 30 from rhodecode.lib.vcs.backends.base import Reference
31 31 from rhodecode.lib.vcs.backends.git import (
32 32 GitRepository, GitCommit, discover_git_version)
33 33 from rhodecode.lib.vcs.exceptions import (
34 34 RepositoryError, VCSError, NodeDoesNotExistError)
35 35 from rhodecode.lib.vcs.nodes import (
36 36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
37 37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
38 38 from rhodecode.tests.vcs.conftest import BackendTestMixin
39 39
40 40
41 41 pytestmark = pytest.mark.backends("git")
42 42
43 43
44 44 def repo_path_generator():
45 45 """
46 46 Return a different path to be used for cloning repos.
47 47 """
48 48 i = 0
49 49 while True:
50 50 i += 1
51 51 yield '%s-%d' % (TEST_GIT_REPO_CLONE, i)
52 52
53 53
54 54 REPO_PATH_GENERATOR = repo_path_generator()
55 55
56 56
57 57 class TestGitRepository:
58 58
59 59 # pylint: disable=protected-access
60 60
61 61 def __check_for_existing_repo(self):
62 62 if os.path.exists(TEST_GIT_REPO_CLONE):
63 63 self.fail('Cannot test git clone repo as location %s already '
64 64 'exists. You should manually remove it first.'
65 65 % TEST_GIT_REPO_CLONE)
66 66
67 67 @pytest.fixture(autouse=True)
68 68 def prepare(self, request, baseapp):
69 69 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
70 70
71 71 def get_clone_repo(self):
72 72 """
73 73 Return a non-bare clone of the base repo.
74 74 """
75 75 clone_path = next(REPO_PATH_GENERATOR)
76 76 repo_clone = GitRepository(
77 77 clone_path, create=True, src_url=self.repo.path, bare=False)
78 78
79 79 return repo_clone
80 80
81 81 def get_empty_repo(self, bare=False):
82 82 """
83 83 Return an empty repo, non-bare by default.
84 84 """
85 85 return GitRepository(next(REPO_PATH_GENERATOR), create=True, bare=bare)
86 86
87 87 def test_wrong_repo_path(self):
88 88 wrong_repo_path = '/tmp/errorrepo_git'
89 89 with pytest.raises(RepositoryError):
90 90 GitRepository(wrong_repo_path)
91 91
92 92 def test_repo_clone(self):
93 93 self.__check_for_existing_repo()
94 94 repo = GitRepository(TEST_GIT_REPO)
95 95 repo_clone = GitRepository(
96 96 TEST_GIT_REPO_CLONE,
97 97 src_url=TEST_GIT_REPO, create=True, update_after_clone=True)
98 98 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
99 99 # Checking hashes of commits should be enough
100 100 for commit in repo.get_commits():
101 101 raw_id = commit.raw_id
102 102 assert raw_id == repo_clone.get_commit(raw_id).raw_id
103 103
104 104 def test_repo_clone_without_create(self):
105 105 with pytest.raises(RepositoryError):
106 106 GitRepository(
107 107 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
108 108
109 109 def test_repo_clone_with_update(self):
110 110 repo = GitRepository(TEST_GIT_REPO)
111 111 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
112 112 repo_clone = GitRepository(
113 113 clone_path,
114 114 create=True, src_url=TEST_GIT_REPO, update_after_clone=True)
115 115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
116 116
117 117 # check if current workdir was updated
118 118 fpath = os.path.join(clone_path, 'MANIFEST.in')
119 119 assert os.path.isfile(fpath)
120 120
121 121 def test_repo_clone_without_update(self):
122 122 repo = GitRepository(TEST_GIT_REPO)
123 123 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
124 124 repo_clone = GitRepository(
125 125 clone_path,
126 126 create=True, src_url=TEST_GIT_REPO, update_after_clone=False)
127 127 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
128 128 # check if current workdir was *NOT* updated
129 129 fpath = os.path.join(clone_path, 'MANIFEST.in')
130 130 # Make sure it's not a bare repo
131 131 assert not repo_clone.bare
132 132 assert not os.path.isfile(fpath)
133 133
134 134 def test_repo_clone_into_bare_repo(self):
135 135 repo = GitRepository(TEST_GIT_REPO)
136 136 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
137 137 repo_clone = GitRepository(
138 138 clone_path, create=True, src_url=repo.path, bare=True)
139 139 assert repo_clone.bare
140 140
141 141 def test_create_repo_is_not_bare_by_default(self):
142 142 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
143 143 assert not repo.bare
144 144
145 145 def test_create_bare_repo(self):
146 146 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
147 147 assert repo.bare
148 148
149 149 def test_update_server_info(self):
150 150 self.repo._update_server_info()
151 151
152 152 def test_fetch(self, vcsbackend_git):
153 153 # Note: This is a git specific part of the API, it's only implemented
154 154 # by the git backend.
155 155 source_repo = vcsbackend_git.repo
156 156 target_repo = vcsbackend_git.create_repo()
157 157 target_repo.fetch(source_repo.path)
158 158 # Note: get a fresh instance to avoid caching trouble
159 159 target_repo = vcsbackend_git.backend(target_repo.path)
160 160 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
161 161
162 162 def test_commit_ids(self):
163 163 # there are 112 commits (at the time of writing)
164 164 # so we can assume these will remain available from now on
165 165 subset = set([
166 166 'c1214f7e79e02fc37156ff215cd71275450cffc3',
167 167 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
168 168 'fa6600f6848800641328adbf7811fd2372c02ab2',
169 169 '102607b09cdd60e2793929c4f90478be29f85a17',
170 170 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
171 171 '2d1028c054665b962fa3d307adfc923ddd528038',
172 172 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
173 173 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
174 174 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
175 175 '8430a588b43b5d6da365400117c89400326e7992',
176 176 'd955cd312c17b02143c04fa1099a352b04368118',
177 177 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
178 178 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
179 179 'f298fe1189f1b69779a4423f40b48edf92a703fc',
180 180 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
181 181 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
182 182 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
183 183 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
184 184 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
185 185 '45223f8f114c64bf4d6f853e3c35a369a6305520',
186 186 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
187 187 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
188 188 '27d48942240f5b91dfda77accd2caac94708cc7d',
189 189 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
190 190 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'])
191 191 assert subset.issubset(set(self.repo.commit_ids))
192 192
193 193 def test_slicing(self):
194 194 # expected slice sizes: 4, 1, 5, 10, 95
195 195 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
196 196 (10, 20, 10), (5, 100, 95)]:
197 197 commit_ids = list(self.repo[sfrom:sto])
198 198 assert len(commit_ids) == size
199 199 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
200 200 assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)
201 201
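test_slicing relies on the repository object supporting slice indexing that yields commit objects. A self-contained sketch of how such __getitem__ support can be wired; FakeRepo is illustrative, not the real backend base class.

    class FakeRepo(object):
        def __init__(self, commit_ids):
            self.commit_ids = commit_ids

        def get_commit(self, commit_idx):
            return 'commit-%s' % self.commit_ids[commit_idx]

        def __getitem__(self, key):
            if isinstance(key, slice):
                # slice.indices normalises negative/None bounds for us
                return (self.get_commit(i)
                        for i in range(*key.indices(len(self.commit_ids))))
            return self.get_commit(key)

    repo = FakeRepo(list('abcdefghij'))
    assert list(repo[1:4]) == ['commit-b', 'commit-c', 'commit-d']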
202 202 def test_branches(self):
203 203 # TODO: Need more tests here
204 204 # Removed (those are 'remotes' branches for cloned repo)
205 205 # assert 'master' in self.repo.branches
206 206 # assert 'gittree' in self.repo.branches
207 207 # assert 'web-branch' in self.repo.branches
208 208 for __, commit_id in self.repo.branches.items():
209 209 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
210 210
211 211 def test_tags(self):
212 212 # TODO: Need more tests here
213 213 assert 'v0.1.1' in self.repo.tags
214 214 assert 'v0.1.2' in self.repo.tags
215 215 for __, commit_id in self.repo.tags.items():
216 216 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
217 217
218 218 def _test_single_commit_cache(self, commit_id):
219 219 commit = self.repo.get_commit(commit_id)
220 220 assert commit_id in self.repo.commits
221 221 assert commit is self.repo.commits[commit_id]
222 222
223 223 def test_initial_commit(self):
224 224 commit_id = self.repo.commit_ids[0]
225 225 init_commit = self.repo.get_commit(commit_id)
226 226 init_author = init_commit.author
227 227
228 228 assert init_commit.message == 'initial import\n'
229 229 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
230 230 assert init_author == init_commit.committer
231 231 for path in ('vcs/__init__.py',
232 232 'vcs/backends/BaseRepository.py',
233 233 'vcs/backends/__init__.py'):
234 234 assert isinstance(init_commit.get_node(path), FileNode)
235 235 for path in ('', 'vcs', 'vcs/backends'):
236 236 assert isinstance(init_commit.get_node(path), DirNode)
237 237
238 238 with pytest.raises(NodeDoesNotExistError):
239 239 init_commit.get_node(path='foobar')
240 240
241 241 node = init_commit.get_node('vcs/')
242 242 assert hasattr(node, 'kind')
243 243 assert node.kind == NodeKind.DIR
244 244
245 245 node = init_commit.get_node('vcs')
246 246 assert hasattr(node, 'kind')
247 247 assert node.kind == NodeKind.DIR
248 248
249 249 node = init_commit.get_node('vcs/__init__.py')
250 250 assert hasattr(node, 'kind')
251 251 assert node.kind == NodeKind.FILE
252 252
253 253 def test_not_existing_commit(self):
254 254 with pytest.raises(RepositoryError):
255 255 self.repo.get_commit('f' * 40)
256 256
257 257 def test_commit10(self):
258 258
259 259 commit10 = self.repo.get_commit(self.repo.commit_ids[9])
260 260 README = """===
261 261 VCS
262 262 ===
263 263
264 264 Various Version Control System management abstraction layer for Python.
265 265
266 266 Introduction
267 267 ------------
268 268
269 269 TODO: To be written...
270 270
271 271 """
272 272 node = commit10.get_node('README.rst')
273 273 assert node.kind == NodeKind.FILE
274 274 assert node.content == README
275 275
276 276 def test_head(self):
277 277 assert self.repo.head == self.repo.get_commit().raw_id
278 278
279 279 def test_checkout_with_create(self):
280 280 repo_clone = self.get_clone_repo()
281 281
282 282 new_branch = 'new_branch'
283 283 assert repo_clone._current_branch() == 'master'
284 284 assert set(repo_clone.branches) == set(('master',))
285 285 repo_clone._checkout(new_branch, create=True)
286 286
287 287 # Branches is a lazy property, so we need to recreate the Repo object.
288 288 repo_clone = GitRepository(repo_clone.path)
289 289 assert set(repo_clone.branches) == set(('master', new_branch))
290 290 assert repo_clone._current_branch() == new_branch
291 291
292 292 def test_checkout(self):
293 293 repo_clone = self.get_clone_repo()
294 294
295 295 repo_clone._checkout('new_branch', create=True)
296 296 repo_clone._checkout('master')
297 297
298 298 assert repo_clone._current_branch() == 'master'
299 299
300 300 def test_checkout_same_branch(self):
301 301 repo_clone = self.get_clone_repo()
302 302
303 303 repo_clone._checkout('master')
304 304 assert repo_clone._current_branch() == 'master'
305 305
306 306 def test_checkout_branch_already_exists(self):
307 307 repo_clone = self.get_clone_repo()
308 308
309 309 with pytest.raises(RepositoryError):
310 310 repo_clone._checkout('master', create=True)
311 311
312 312 def test_checkout_bare_repo(self):
313 313 with pytest.raises(RepositoryError):
314 314 self.repo._checkout('master')
315 315
316 316 def test_current_branch_bare_repo(self):
317 317 with pytest.raises(RepositoryError):
318 318 self.repo._current_branch()
319 319
320 320 def test_current_branch_empty_repo(self):
321 321 repo = self.get_empty_repo()
322 322 assert repo._current_branch() is None
323 323
324 324 def test_local_clone(self):
325 325 clone_path = next(REPO_PATH_GENERATOR)
326 326 self.repo._local_clone(clone_path, 'master')
327 327 repo_clone = GitRepository(clone_path)
328 328
329 329 assert self.repo.commit_ids == repo_clone.commit_ids
330 330
331 331 def test_local_clone_with_specific_branch(self):
332 332 source_repo = self.get_clone_repo()
333 333
334 334 # Create a new branch in source repo
335 335 new_branch_commit = source_repo.commit_ids[-3]
336 336 source_repo._checkout(new_branch_commit)
337 337 source_repo._checkout('new_branch', create=True)
338 338
339 339 clone_path = next(REPO_PATH_GENERATOR)
340 340 source_repo._local_clone(clone_path, 'new_branch')
341 341 repo_clone = GitRepository(clone_path)
342 342
343 343 assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids
344 344
345 345 clone_path = next(REPO_PATH_GENERATOR)
346 346 source_repo._local_clone(clone_path, 'master')
347 347 repo_clone = GitRepository(clone_path)
348 348
349 349 assert source_repo.commit_ids == repo_clone.commit_ids
350 350
351 351 def test_local_clone_fails_if_target_exists(self):
352 352 with pytest.raises(RepositoryError):
353 353 self.repo._local_clone(self.repo.path, 'master')
354 354
355 355 def test_local_fetch(self):
356 356 target_repo = self.get_empty_repo()
357 357 source_repo = self.get_clone_repo()
358 358
359 359 # Create a new branch in source repo
360 360 master_commit = source_repo.commit_ids[-1]
361 361 new_branch_commit = source_repo.commit_ids[-3]
362 362 source_repo._checkout(new_branch_commit)
363 363 source_repo._checkout('new_branch', create=True)
364 364
365 365 target_repo._local_fetch(source_repo.path, 'new_branch')
366 366 assert target_repo._last_fetch_heads() == [new_branch_commit]
367 367
368 368 target_repo._local_fetch(source_repo.path, 'master')
369 369 assert target_repo._last_fetch_heads() == [master_commit]
370 370
371 371 def test_local_fetch_from_bare_repo(self):
372 372 target_repo = self.get_empty_repo()
373 373 target_repo._local_fetch(self.repo.path, 'master')
374 374
375 375 master_commit = self.repo.commit_ids[-1]
376 376 assert target_repo._last_fetch_heads() == [master_commit]
377 377
378 378 def test_local_fetch_from_same_repo(self):
379 379 with pytest.raises(ValueError):
380 380 self.repo._local_fetch(self.repo.path, 'master')
381 381
382 382 def test_local_fetch_branch_does_not_exist(self):
383 383 target_repo = self.get_empty_repo()
384 384
385 385 with pytest.raises(RepositoryError):
386 386 target_repo._local_fetch(self.repo.path, 'new_branch')
387 387
388 388 def test_local_pull(self):
389 389 target_repo = self.get_empty_repo()
390 390 source_repo = self.get_clone_repo()
391 391
392 392 # Create a new branch in source repo
393 393 master_commit = source_repo.commit_ids[-1]
394 394 new_branch_commit = source_repo.commit_ids[-3]
395 395 source_repo._checkout(new_branch_commit)
396 396 source_repo._checkout('new_branch', create=True)
397 397
398 398 target_repo._local_pull(source_repo.path, 'new_branch')
399 399 target_repo = GitRepository(target_repo.path)
400 400 assert target_repo.head == new_branch_commit
401 401
402 402 target_repo._local_pull(source_repo.path, 'master')
403 403 target_repo = GitRepository(target_repo.path)
404 404 assert target_repo.head == master_commit
405 405
406 406 def test_local_pull_in_bare_repo(self):
407 407 with pytest.raises(RepositoryError):
408 408 self.repo._local_pull(self.repo.path, 'master')
409 409
410 410 def test_local_merge(self):
411 411 target_repo = self.get_empty_repo()
412 412 source_repo = self.get_clone_repo()
413 413
414 414 # Create a new branch in source repo
415 415 master_commit = source_repo.commit_ids[-1]
416 416 new_branch_commit = source_repo.commit_ids[-3]
417 417 source_repo._checkout(new_branch_commit)
418 418 source_repo._checkout('new_branch', create=True)
419 419
420 420 # This is required, as one cannot do a --ff-only merge in an empty repo.
421 421 target_repo._local_pull(source_repo.path, 'new_branch')
422 422
423 423 target_repo._local_fetch(source_repo.path, 'master')
424 424 merge_message = 'Merge message\n\nDescription:...'
425 425 user_name = 'Albert Einstein'
426 426 user_email = 'albert@einstein.com'
427 427 target_repo._local_merge(merge_message, user_name, user_email,
428 428 target_repo._last_fetch_heads())
429 429
430 430 target_repo = GitRepository(target_repo.path)
431 431 assert target_repo.commit_ids[-2] == master_commit
432 432 last_commit = target_repo.get_commit(target_repo.head)
433 433 assert last_commit.message.strip() == merge_message
434 434 assert last_commit.author == '%s <%s>' % (user_name, user_email)
435 435
436 436 assert not os.path.exists(
437 437 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
438 438
439 439 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
440 440 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
441 441 vcsbackend_git.ensure_file('README', 'I will conflict with you!!!')
442 442
443 443 target_repo._local_fetch(self.repo.path, 'master')
444 444 with pytest.raises(RepositoryError):
445 445 target_repo._local_merge(
446 446 'merge_message', 'user name', 'user@name.com',
447 447 target_repo._last_fetch_heads())
448 448
449 449 # Check we are not left in an intermediate merge state
450 450 assert not os.path.exists(
451 451 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
452 452
453 453 def test_local_merge_into_empty_repo(self):
454 454 target_repo = self.get_empty_repo()
455 455
456 456 # This is required, as one cannot do a --ff-only merge in an empty repo.
457 457 target_repo._local_fetch(self.repo.path, 'master')
458 458 with pytest.raises(RepositoryError):
459 459 target_repo._local_merge(
460 460 'merge_message', 'user name', 'user@name.com',
461 461 target_repo._last_fetch_heads())
462 462
463 463 def test_local_merge_in_bare_repo(self):
464 464 with pytest.raises(RepositoryError):
465 465 self.repo._local_merge(
466 466 'merge_message', 'user name', 'user@name.com', None)
467 467
468 468 def test_local_push_non_bare(self):
469 469 target_repo = self.get_empty_repo()
470 470
471 471 pushed_branch = 'pushed_branch'
472 472 self.repo._local_push('master', target_repo.path, pushed_branch)
473 473 # Fix the HEAD of the target repo; otherwise GitRepository won't
474 474 # report any branches.
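# A non-bare repo keeps HEAD at .git/HEAD as a symbolic ref such as
# 'ref: refs/heads/master'; writing it by hand below points the repo
# at the freshly pushed branch.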
475 475 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
476 476 f.write('ref: refs/heads/%s' % pushed_branch)
477 477
478 478 target_repo = GitRepository(target_repo.path)
479 479
480 480 assert (target_repo.branches[pushed_branch] ==
481 481 self.repo.branches['master'])
482 482
483 483 def test_local_push_bare(self):
484 484 target_repo = self.get_empty_repo(bare=True)
485 485
486 486 pushed_branch = 'pushed_branch'
487 487 self.repo._local_push('master', target_repo.path, pushed_branch)
488 488 # Fix the HEAD of the target repo; otherwise GitRepository won't
489 489 # report any branches.
490 490 with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f:
491 491 f.write('ref: refs/heads/%s' % pushed_branch)
492 492
493 493 target_repo = GitRepository(target_repo.path)
494 494
495 495 assert (target_repo.branches[pushed_branch] ==
496 496 self.repo.branches['master'])
497 497
498 498 def test_local_push_non_bare_target_branch_is_checked_out(self):
499 499 target_repo = self.get_clone_repo()
500 500
501 501 pushed_branch = 'pushed_branch'
502 502 # Create a new branch in source repo
503 503 new_branch_commit = target_repo.commit_ids[-3]
504 504 target_repo._checkout(new_branch_commit)
505 505 target_repo._checkout(pushed_branch, create=True)
506 506
507 507 self.repo._local_push('master', target_repo.path, pushed_branch)
508 508
509 509 target_repo = GitRepository(target_repo.path)
510 510
511 511 assert (target_repo.branches[pushed_branch] ==
512 512 self.repo.branches['master'])
513 513
514 514 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
515 515 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
516 516 with pytest.raises(RepositoryError):
517 517 self.repo._local_push('master', target_repo.path, 'master')
518 518
519 519 def test_hooks_can_be_enabled_via_env_variable_for_local_push(self):
520 520 target_repo = self.get_empty_repo(bare=True)
521 521
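# With enable_hooks=True the implementation is expected to leave the
# RC_SKIP_HOOKS marker out of the extra environment passed to git; the
# mock below captures that environment so the assertion can verify it.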
522 522 with mock.patch.object(self.repo, 'run_git_command') as run_mock:
523 523 self.repo._local_push(
524 524 'master', target_repo.path, 'master', enable_hooks=True)
525 525 env = run_mock.call_args[1]['extra_env']
526 526 assert 'RC_SKIP_HOOKS' not in env
527 527
528 528 def _add_failing_hook(self, repo_path, hook_name, bare=False):
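# Helper: install a hook script that exits 0 when RC_SKIP_HOOKS is set
# in the environment and fails with exit code 1 otherwise, so the tests
# below can detect whether a local push actually executed the hooks.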
529 529 path_components = (
530 530 ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name])
531 531 hook_path = os.path.join(repo_path, *path_components)
532 532 with open(hook_path, 'w') as f:
533 533 script_lines = [
534 534 '#!%s' % sys.executable,
535 535 'import os',
536 536 'import sys',
537 537 'if os.environ.get("RC_SKIP_HOOKS"):',
538 538 ' sys.exit(0)',
539 539 'sys.exit(1)',
540 540 ]
541 541 f.write('\n'.join(script_lines))
542 542 os.chmod(hook_path, 0755)
543 543
544 544 def test_local_push_does_not_execute_hook(self):
545 545 target_repo = self.get_empty_repo()
546 546
547 547 pushed_branch = 'pushed_branch'
548 548 self._add_failing_hook(target_repo.path, 'pre-receive')
549 549 self.repo._local_push('master', target_repo.path, pushed_branch)
550 550 # Fix the HEAD of the target repo; otherwise GitRepository won't
551 551 # report any branches.
552 552 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
553 553 f.write('ref: refs/heads/%s' % pushed_branch)
554 554
555 555 target_repo = GitRepository(target_repo.path)
556 556
557 557 assert (target_repo.branches[pushed_branch] ==
558 558 self.repo.branches['master'])
559 559
560 560 def test_local_push_executes_hook(self):
561 561 target_repo = self.get_empty_repo(bare=True)
562 562 self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
563 563 with pytest.raises(RepositoryError):
564 564 self.repo._local_push(
565 565 'master', target_repo.path, 'master', enable_hooks=True)
566 566
567 567 def test_maybe_prepare_merge_workspace(self):
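# Note: the workspace helpers now take the numeric repo id as the first
# argument, roughly _maybe_prepare_merge_workspace(repo_id, workspace_id,
# target_ref, source_ref) (the parameter names are illustrative,
# inferred from the call sites below).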
568 568 workspace = self.repo._maybe_prepare_merge_workspace(
569 'pr2', Reference('branch', 'master', 'unused'),
569 2, 'pr2', Reference('branch', 'master', 'unused'),
570 570 Reference('branch', 'master', 'unused'))
571 571
572 572 assert os.path.isdir(workspace)
573 573 workspace_repo = GitRepository(workspace)
574 574 assert workspace_repo.branches == self.repo.branches
575 575
576 576 # Calling it a second time should also succeed
577 577 workspace = self.repo._maybe_prepare_merge_workspace(
578 'pr2', Reference('branch', 'master', 'unused'),
578 2, 'pr2', Reference('branch', 'master', 'unused'),
579 579 Reference('branch', 'master', 'unused'))
580 580 assert os.path.isdir(workspace)
581 581
582 582 def test_maybe_prepare_merge_workspace_different_refs(self):
583 583 workspace = self.repo._maybe_prepare_merge_workspace(
584 'pr2', Reference('branch', 'master', 'unused'),
584 2, 'pr2', Reference('branch', 'master', 'unused'),
585 585 Reference('branch', 'develop', 'unused'))
586 586
587 587 assert os.path.isdir(workspace)
588 588 workspace_repo = GitRepository(workspace)
589 589 assert workspace_repo.branches == self.repo.branches
590 590
591 591 # Calling it a second time should also succeed
592 592 workspace = self.repo._maybe_prepare_merge_workspace(
593 'pr2', Reference('branch', 'master', 'unused'),
593 2, 'pr2', Reference('branch', 'master', 'unused'),
594 594 Reference('branch', 'develop', 'unused'))
595 595 assert os.path.isdir(workspace)
596 596
597 597 def test_cleanup_merge_workspace(self):
598 598 workspace = self.repo._maybe_prepare_merge_workspace(
599 'pr3', Reference('branch', 'master', 'unused'),
599 2, 'pr3', Reference('branch', 'master', 'unused'),
600 600 Reference('branch', 'master', 'unused'))
601 self.repo.cleanup_merge_workspace('pr3')
601 self.repo.cleanup_merge_workspace(2, 'pr3')
602 602
603 603 assert not os.path.exists(workspace)
604 604
605 605 def test_cleanup_merge_workspace_invalid_workspace_id(self):
606 606 # No assert needed: in case of a nonexistent workspace this function
607 607 # should still succeed.
608 self.repo.cleanup_merge_workspace('pr4')
608 self.repo.cleanup_merge_workspace(1, 'pr4')
609 609
610 610 def test_set_refs(self):
611 611 test_ref = 'refs/test-refs/abcde'
612 612 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
613 613
614 614 self.repo.set_refs(test_ref, test_commit_id)
615 615 stdout, _ = self.repo.run_git_command(['show-ref'])
616 616 assert test_ref in stdout
617 617 assert test_commit_id in stdout
618 618
619 619 def test_remove_ref(self):
620 620 test_ref = 'refs/test-refs/abcde'
621 621 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
622 622 self.repo.set_refs(test_ref, test_commit_id)
623 623 stdout, _ = self.repo.run_git_command(['show-ref'])
624 624 assert test_ref in stdout
625 625 assert test_commit_id in stdout
626 626
627 627 self.repo.remove_ref(test_ref)
628 628 stdout, _ = self.repo.run_git_command(['show-ref'])
629 629 assert test_ref not in stdout
630 630 assert test_commit_id not in stdout
631 631
632 632
633 633 class TestGitCommit(object):
634 634
635 635 @pytest.fixture(autouse=True)
636 636 def prepare(self):
637 637 self.repo = GitRepository(TEST_GIT_REPO)
638 638
639 639 def test_default_commit(self):
640 640 tip = self.repo.get_commit()
641 641 assert tip == self.repo.get_commit(None)
642 642 assert tip == self.repo.get_commit('tip')
643 643
644 644 def test_root_node(self):
645 645 tip = self.repo.get_commit()
646 646 assert tip.root is tip.get_node('')
647 647
648 648 def test_lazy_fetch(self):
649 649 """
650 650 Test that a commit's nodes expand and are cached as we walk through
651 651 the commit. This test is somewhat hard to write, as the order of
652 652 operations is key here. Written by running command after command in a shell.
653 653 """
654 654 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
655 655 assert commit_id in self.repo.commit_ids
656 656 commit = self.repo.get_commit(commit_id)
657 657 assert len(commit.nodes) == 0
658 658 root = commit.root
659 659 assert len(commit.nodes) == 1
660 660 assert len(root.nodes) == 8
661 661 # accessing root.nodes updates commit.nodes
662 662 assert len(commit.nodes) == 9
663 663
664 664 docs = root.get_node('docs')
665 665 # we haven't yet accessed anything new, as the docs dir was already cached
666 666 assert len(commit.nodes) == 9
667 667 assert len(docs.nodes) == 8
668 668 # accessing docs.nodes updates commit.nodes
669 669 assert len(commit.nodes) == 17
670 670
671 671 assert docs is commit.get_node('docs')
672 672 assert docs is root.nodes[0]
673 673 assert docs is root.dirs[0]
674 674 assert docs is commit.get_node('docs')
675 675
676 676 def test_nodes_with_commit(self):
677 677 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
678 678 commit = self.repo.get_commit(commit_id)
679 679 root = commit.root
680 680 docs = root.get_node('docs')
681 681 assert docs is commit.get_node('docs')
682 682 api = docs.get_node('api')
683 683 assert api is commit.get_node('docs/api')
684 684 index = api.get_node('index.rst')
685 685 assert index is commit.get_node('docs/api/index.rst')
686 686 assert index is commit.get_node('docs')\
687 687 .get_node('api')\
688 688 .get_node('index.rst')
689 689
690 690 def test_branch_and_tags(self):
691 691 """
692 692 rev0 = self.repo.commit_ids[0]
693 693 commit0 = self.repo.get_commit(rev0)
694 694 assert commit0.branch == 'master'
695 695 assert commit0.tags == []
696 696
697 697 rev10 = self.repo.commit_ids[10]
698 698 commit10 = self.repo.get_commit(rev10)
699 699 assert commit10.branch == 'master'
700 700 assert commit10.tags == []
701 701
702 702 rev44 = self.repo.commit_ids[44]
703 703 commit44 = self.repo.get_commit(rev44)
704 704 assert commit44.branch == 'web-branch'
705 705
706 706 tip = self.repo.get_commit('tip')
707 707 assert 'tip' in tip.tags
708 708 """
709 709 # Those tests would fail - branches are now going
710 710 # to be changed in the main API in order to support the git backend
711 711 pass
712 712
713 713 def test_file_size(self):
714 714 to_check = (
715 715 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
716 716 'vcs/backends/BaseRepository.py', 502),
717 717 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
718 718 'vcs/backends/hg.py', 854),
719 719 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
720 720 'setup.py', 1068),
721 721
722 722 ('d955cd312c17b02143c04fa1099a352b04368118',
723 723 'vcs/backends/base.py', 2921),
724 724 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
725 725 'vcs/backends/base.py', 3936),
726 726 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
727 727 'vcs/backends/base.py', 6189),
728 728 )
729 729 for commit_id, path, size in to_check:
730 730 node = self.repo.get_commit(commit_id).get_node(path)
731 731 assert node.is_file()
732 732 assert node.size == size
733 733
734 734 def test_file_history_from_commits(self):
735 735 node = self.repo[10].get_node('setup.py')
736 736 commit_ids = [commit.raw_id for commit in node.history]
737 737 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
738 738
739 739 node = self.repo[20].get_node('setup.py')
740 740 node_ids = [commit.raw_id for commit in node.history]
741 741 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
742 742 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
743 743
744 744 # special case: we check history from a commit that has this particular
745 745 # file changed; this means we check if it's included as well
746 746 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
747 747 .get_node('setup.py')
748 748 node_ids = [commit.raw_id for commit in node.history]
749 749 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
750 750 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
751 751
752 752 def test_file_history(self):
753 753 # we can only check that those commits are present in the history,
754 754 # as we cannot update this test every time the file is changed
755 755 files = {
756 756 'setup.py': [
757 757 '54386793436c938cff89326944d4c2702340037d',
758 758 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
759 759 '998ed409c795fec2012b1c0ca054d99888b22090',
760 760 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
761 761 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
762 762 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
763 763 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
764 764 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
765 765 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
766 766 ],
767 767 'vcs/nodes.py': [
768 768 '33fa3223355104431402a888fa77a4e9956feb3e',
769 769 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
770 770 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
771 771 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
772 772 'c877b68d18e792a66b7f4c529ea02c8f80801542',
773 773 '4313566d2e417cb382948f8d9d7c765330356054',
774 774 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
775 775 '54386793436c938cff89326944d4c2702340037d',
776 776 '54000345d2e78b03a99d561399e8e548de3f3203',
777 777 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
778 778 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
779 779 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
780 780 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
781 781 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
782 782 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
783 783 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
784 784 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
785 785 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
786 786 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
787 787 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
788 788 'f15c21f97864b4f071cddfbf2750ec2e23859414',
789 789 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
790 790 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
791 791 '84dec09632a4458f79f50ddbbd155506c460b4f9',
792 792 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
793 793 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
794 794 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
795 795 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
796 796 '6970b057cffe4aab0a792aa634c89f4bebf01441',
797 797 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
798 798 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
799 799 ],
800 800 'vcs/backends/git.py': [
801 801 '4cf116ad5a457530381135e2f4c453e68a1b0105',
802 802 '9a751d84d8e9408e736329767387f41b36935153',
803 803 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
804 804 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
805 805 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
806 806 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
807 807 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
808 808 '54000345d2e78b03a99d561399e8e548de3f3203',
809 809 ],
810 810 }
811 811 for path, commit_ids in files.items():
812 812 node = self.repo.get_commit(commit_ids[0]).get_node(path)
813 813 node_ids = [commit.raw_id for commit in node.history]
814 814 assert set(commit_ids).issubset(set(node_ids)), (
815 815 "We assumed that %s is subset of commit_ids for which file %s "
816 816 "has been changed, and history of that node returned: %s"
817 817 % (commit_ids, path, node_ids))
818 818
819 819 def test_file_annotate(self):
820 820 files = {
821 821 'vcs/backends/__init__.py': {
822 822 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
823 823 'lines_no': 1,
824 824 'commits': [
825 825 'c1214f7e79e02fc37156ff215cd71275450cffc3',
826 826 ],
827 827 },
828 828 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
829 829 'lines_no': 21,
830 830 'commits': [
831 831 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
832 832 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
833 833 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
834 834 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
835 835 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
836 836 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
837 837 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
838 838 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
839 839 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
840 840 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
841 841 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
842 842 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
843 843 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
844 844 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
845 845 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
846 846 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
847 847 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
848 848 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
849 849 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
850 850 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
851 851 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
852 852 ],
853 853 },
854 854 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
855 855 'lines_no': 32,
856 856 'commits': [
857 857 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
858 858 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
859 859 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
860 860 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
861 861 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
862 862 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
863 863 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
864 864 '54000345d2e78b03a99d561399e8e548de3f3203',
865 865 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
866 866 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
867 867 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
868 868 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
869 869 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
870 870 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
871 871 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
872 872 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
873 873 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
874 874 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
875 875 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
876 876 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
877 877 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
878 878 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
879 879 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
880 880 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
881 881 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
882 882 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
883 883 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
884 884 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
885 885 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
886 886 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
887 887 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
888 888 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
889 889 ],
890 890 },
891 891 },
892 892 }
893 893
894 894 for fname, commit_dict in files.items():
895 895 for commit_id, __ in commit_dict.items():
896 896 commit = self.repo.get_commit(commit_id)
897 897
898 898 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
899 899 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
900 900 assert l1_1 == l1_2
901 901 l1 = l1_1
902 902 l2 = files[fname][commit_id]['commits']
903 903 assert l1 == l2, (
904 904 "The lists of commit_ids for %s@commit_id %s"
905 905 "from annotation list should match each other, "
906 906 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
907 907
908 908 def test_files_state(self):
909 909 """
910 910 Tests state of FileNodes.
911 911 """
912 912 node = self.repo\
913 913 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
914 914 .get_node('vcs/utils/diffs.py')
915 915 assert node.state == NodeState.ADDED
916 916 assert node.added
917 917 assert not node.changed
918 918 assert not node.not_changed
919 919 assert not node.removed
920 920
921 921 node = self.repo\
922 922 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
923 923 .get_node('.hgignore')
924 924 assert node.state == NodeState.CHANGED
925 925 assert not node.added
926 926 assert node.changed
927 927 assert not node.not_changed
928 928 assert not node.removed
929 929
930 930 node = self.repo\
931 931 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
932 932 .get_node('setup.py')
933 933 assert node.state == NodeState.NOT_CHANGED
934 934 assert not node.added
935 935 assert not node.changed
936 936 assert node.not_changed
937 937 assert not node.removed
938 938
939 939 # If a node has REMOVED state, then trying to fetch it raises a
940 940 # NodeDoesNotExistError exception
941 941 commit = self.repo.get_commit(
942 942 'fa6600f6848800641328adbf7811fd2372c02ab2')
943 943 path = 'vcs/backends/BaseRepository.py'
944 944 with pytest.raises(NodeDoesNotExistError):
945 945 commit.get_node(path)
946 946 # but it would be one of ``removed`` (commit's attribute)
947 947 assert path in [rf.path for rf in commit.removed]
948 948
949 949 commit = self.repo.get_commit(
950 950 '54386793436c938cff89326944d4c2702340037d')
951 951 changed = [
952 952 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
953 953 'vcs/nodes.py']
954 954 assert set(changed) == set([f.path for f in commit.changed])
955 955
956 956 def test_unicode_branch_refs(self):
957 957 unicode_branches = {
958 958 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
959 959 u'refs/heads/uniçö∂e': 'ürl',
960 960 }
961 961 with mock.patch(
962 962 ("rhodecode.lib.vcs.backends.git.repository"
963 963 ".GitRepository._refs"),
964 964 unicode_branches):
965 965 branches = self.repo.branches
966 966
967 967 assert 'unicode' in branches
968 968 assert u'uniçö∂e' in branches
969 969
970 970 def test_unicode_tag_refs(self):
971 971 unicode_tags = {
972 972 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
973 973 u'refs/tags/uniçö∂e': '6c0ce52b229aa978889e91b38777f800e85f330b',
974 974 }
975 975 with mock.patch(
976 976 ("rhodecode.lib.vcs.backends.git.repository"
977 977 ".GitRepository._refs"),
978 978 unicode_tags):
979 979 tags = self.repo.tags
980 980
981 981 assert 'unicode' in tags
982 982 assert u'uniçö∂e' in tags
983 983
984 984 def test_commit_message_is_unicode(self):
985 985 for commit in self.repo:
986 986 assert type(commit.message) == unicode
987 987
988 988 def test_commit_author_is_unicode(self):
989 989 for commit in self.repo:
990 990 assert type(commit.author) == unicode
991 991
992 992 def test_repo_files_content_is_unicode(self):
993 993 commit = self.repo.get_commit()
994 994 for node in commit.get_node('/'):
995 995 if node.is_file():
996 996 assert type(node.content) == unicode
997 997
998 998 def test_wrong_path(self):
999 999 # There is 'setup.py' in the root dir but not there:
1000 1000 path = 'foo/bar/setup.py'
1001 1001 tip = self.repo.get_commit()
1002 1002 with pytest.raises(VCSError):
1003 1003 tip.get_node(path)
1004 1004
1005 1005 @pytest.mark.parametrize("author_email, commit_id", [
1006 1006 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1007 1007 ('lukasz.balcerzak@python-center.pl',
1008 1008 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1009 1009 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
1010 1010 ])
1011 1011 def test_author_email(self, author_email, commit_id):
1012 1012 commit = self.repo.get_commit(commit_id)
1013 1013 assert author_email == commit.author_email
1014 1014
1015 1015 @pytest.mark.parametrize("author, commit_id", [
1016 1016 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1017 1017 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1018 1018 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1019 1019 ])
1020 1020 def test_author_username(self, author, commit_id):
1021 1021 commit = self.repo.get_commit(commit_id)
1022 1022 assert author == commit.author_name
1023 1023
1024 1024
1025 1025 class TestLargeFileRepo(object):
1026 1026
1027 1027 def test_large_file(self, backend_git):
1028 1028 conf = make_db_config()
1029 1029 repo = backend_git.create_test_repo('largefiles', conf)
1030 1030
1031 1031 tip = repo.scm_instance().get_commit()
1032 1032
1033 1033 # copy the stored LFS object from the repo's lfs_store into the configured store location
1034 1034 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
1035 1035
1036 1036 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
1037 1037 oid_path = os.path.join(lfs_store, oid)
1038 1038 oid_destination = os.path.join(
1039 1039 conf.get('vcs_git_lfs', 'store_location'), oid)
1040 1040 shutil.copy(oid_path, oid_destination)
1041 1041
1042 1042 node = tip.get_node('1MB.zip')
1043 1043
1044 1044 lf_node = node.get_largefile_node()
1045 1045
1046 1046 assert lf_node.is_largefile() is True
1047 1047 assert lf_node.size == 1024000
1048 1048 assert lf_node.name == '1MB.zip'
1049 1049
1050 1050
1051 1051 @pytest.mark.usefixtures("vcs_repository_support")
1052 1052 class TestGitSpecificWithRepo(BackendTestMixin):
1053 1053
1054 1054 @classmethod
1055 1055 def _get_commits(cls):
1056 1056 return [
1057 1057 {
1058 1058 'message': 'Initial',
1059 1059 'author': 'Joe Doe <joe.doe@example.com>',
1060 1060 'date': datetime.datetime(2010, 1, 1, 20),
1061 1061 'added': [
1062 1062 FileNode('foobar/static/js/admin/base.js', content='base'),
1063 1063 FileNode(
1064 1064 'foobar/static/admin', content='admin',
1065 1065 mode=0120000), # this is a link
1066 1066 FileNode('foo', content='foo'),
1067 1067 ],
1068 1068 },
1069 1069 {
1070 1070 'message': 'Second',
1071 1071 'author': 'Joe Doe <joe.doe@example.com>',
1072 1072 'date': datetime.datetime(2010, 1, 1, 22),
1073 1073 'added': [
1074 1074 FileNode('foo2', content='foo2'),
1075 1075 ],
1076 1076 },
1077 1077 ]
1078 1078
1079 1079 def test_paths_slow_traversing(self):
1080 1080 commit = self.repo.get_commit()
1081 1081 assert commit.get_node('foobar').get_node('static').get_node('js')\
1082 1082 .get_node('admin').get_node('base.js').content == 'base'
1083 1083
1084 1084 def test_paths_fast_traversing(self):
1085 1085 commit = self.repo.get_commit()
1086 1086 assert (
1087 1087 commit.get_node('foobar/static/js/admin/base.js').content ==
1088 1088 'base')
1089 1089
1090 1090 def test_get_diff_runs_git_command_with_hashes(self):
1091 1091 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1092 1092 self.repo.get_diff(self.repo[0], self.repo[1])
1093 1093 self.repo.run_git_command.assert_called_once_with(
1094 1094 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1095 1095 '--abbrev=40', self.repo._get_commit_id(0),
1096 1096 self.repo._get_commit_id(1)])
1097 1097
1098 1098 def test_get_diff_runs_git_command_with_str_hashes(self):
1099 1099 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1100 1100 self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[1])
1101 1101 self.repo.run_git_command.assert_called_once_with(
1102 1102 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1103 1103 '--abbrev=40', self.repo._get_commit_id(1)])
1104 1104
1105 1105 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1106 1106 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1107 1107 self.repo.get_diff(self.repo[0], self.repo[1], 'foo')
1108 1108 self.repo.run_git_command.assert_called_once_with(
1109 1109 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1110 1110 '--abbrev=40', self.repo._get_commit_id(0),
1111 1111 self.repo._get_commit_id(1), '--', 'foo'])
1112 1112
1113 1113
1114 1114 @pytest.mark.usefixtures("vcs_repository_support")
1115 1115 class TestGitRegression(BackendTestMixin):
1116 1116
1117 1117 @classmethod
1118 1118 def _get_commits(cls):
1119 1119 return [
1120 1120 {
1121 1121 'message': 'Initial',
1122 1122 'author': 'Joe Doe <joe.doe@example.com>',
1123 1123 'date': datetime.datetime(2010, 1, 1, 20),
1124 1124 'added': [
1125 1125 FileNode('bot/__init__.py', content='base'),
1126 1126 FileNode('bot/templates/404.html', content='base'),
1127 1127 FileNode('bot/templates/500.html', content='base'),
1128 1128 ],
1129 1129 },
1130 1130 {
1131 1131 'message': 'Second',
1132 1132 'author': 'Joe Doe <joe.doe@example.com>',
1133 1133 'date': datetime.datetime(2010, 1, 1, 22),
1134 1134 'added': [
1135 1135 FileNode('bot/build/migrations/1.py', content='foo2'),
1136 1136 FileNode('bot/build/migrations/2.py', content='foo2'),
1137 1137 FileNode(
1138 1138 'bot/build/static/templates/f.html', content='foo2'),
1139 1139 FileNode(
1140 1140 'bot/build/static/templates/f1.html', content='foo2'),
1141 1141 FileNode('bot/build/templates/err.html', content='foo2'),
1142 1142 FileNode('bot/build/templates/err2.html', content='foo2'),
1143 1143 ],
1144 1144 },
1145 1145 ]
1146 1146
1147 1147 @pytest.mark.parametrize("path, expected_paths", [
1148 1148 ('bot', [
1149 1149 'bot/build',
1150 1150 'bot/templates',
1151 1151 'bot/__init__.py']),
1152 1152 ('bot/build', [
1153 1153 'bot/build/migrations',
1154 1154 'bot/build/static',
1155 1155 'bot/build/templates']),
1156 1156 ('bot/build/static', [
1157 1157 'bot/build/static/templates']),
1158 1158 ('bot/build/static/templates', [
1159 1159 'bot/build/static/templates/f.html',
1160 1160 'bot/build/static/templates/f1.html']),
1161 1161 ('bot/build/templates', [
1162 1162 'bot/build/templates/err.html',
1163 1163 'bot/build/templates/err2.html']),
1164 1164 ('bot/templates/', [
1165 1165 'bot/templates/404.html',
1166 1166 'bot/templates/500.html']),
1167 1167 ])
1168 1168 def test_similar_paths(self, path, expected_paths):
1169 1169 commit = self.repo.get_commit()
1170 1170 paths = [n.path for n in commit.get_nodes(path)]
1171 1171 assert paths == expected_paths
1172 1172
1173 1173
1174 1174 class TestDiscoverGitVersion:
1175 1175
1176 1176 def test_returns_git_version(self, baseapp):
1177 1177 version = discover_git_version()
1178 1178 assert version
1179 1179
1180 1180 def test_returns_empty_string_without_vcsserver(self):
1181 1181 mock_connection = mock.Mock()
1182 1182 mock_connection.discover_git_version = mock.Mock(
1183 1183 side_effect=Exception)
1184 1184 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1185 1185 version = discover_git_version()
1186 1186 assert version == ''
1187 1187
1188 1188
1189 1189 class TestGetSubmoduleUrl(object):
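# These tests exercise .gitmodules parsing: a minimal entry, as used in
# the fixtures below, looks like
#   [submodule "subrepo1"]
#       path = subrepo1
#       url = https://code.rhodecode.com/dulwich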
1190 1190 def test_submodules_file_found(self):
1191 1191 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1192 1192 node = mock.Mock()
1193 1193 with mock.patch.object(
1194 1194 commit, 'get_node', return_value=node) as get_node_mock:
1195 1195 node.content = (
1196 1196 '[submodule "subrepo1"]\n'
1197 1197 '\tpath = subrepo1\n'
1198 1198 '\turl = https://code.rhodecode.com/dulwich\n'
1199 1199 )
1200 1200 result = commit._get_submodule_url('subrepo1')
1201 1201 get_node_mock.assert_called_once_with('.gitmodules')
1202 1202 assert result == 'https://code.rhodecode.com/dulwich'
1203 1203
1204 1204 def test_complex_submodule_path(self):
1205 1205 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1206 1206 node = mock.Mock()
1207 1207 with mock.patch.object(
1208 1208 commit, 'get_node', return_value=node) as get_node_mock:
1209 1209 node.content = (
1210 1210 '[submodule "complex/subrepo/path"]\n'
1211 1211 '\tpath = complex/subrepo/path\n'
1212 1212 '\turl = https://code.rhodecode.com/dulwich\n'
1213 1213 )
1214 1214 result = commit._get_submodule_url('complex/subrepo/path')
1215 1215 get_node_mock.assert_called_once_with('.gitmodules')
1216 1216 assert result == 'https://code.rhodecode.com/dulwich'
1217 1217
1218 1218 def test_submodules_file_not_found(self):
1219 1219 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1220 1220 with mock.patch.object(
1221 1221 commit, 'get_node', side_effect=NodeDoesNotExistError):
1222 1222 result = commit._get_submodule_url('complex/subrepo/path')
1223 1223 assert result is None
1224 1224
1225 1225 def test_path_not_found(self):
1226 1226 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1227 1227 node = mock.Mock()
1228 1228 with mock.patch.object(
1229 1229 commit, 'get_node', return_value=node) as get_node_mock:
1230 1230 node.content = (
1231 1231 '[submodule "subrepo1"]\n'
1232 1232 '\tpath = subrepo1\n'
1233 1233 '\turl = https://code.rhodecode.com/dulwich\n'
1234 1234 )
1235 1235 result = commit._get_submodule_url('subrepo2')
1236 1236 get_node_mock.assert_called_once_with('.gitmodules')
1237 1237 assert result is None
1238 1238
1239 1239 def test_returns_cached_values(self):
1240 1240 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1241 1241 node = mock.Mock()
1242 1242 with mock.patch.object(
1243 1243 commit, 'get_node', return_value=node) as get_node_mock:
1244 1244 node.content = (
1245 1245 '[submodule "subrepo1"]\n'
1246 1246 '\tpath = subrepo1\n'
1247 1247 '\turl = https://code.rhodecode.com/dulwich\n'
1248 1248 )
1249 1249 for _ in range(3):
1250 1250 commit._get_submodule_url('subrepo1')
1251 1251 get_node_mock.assert_called_once_with('.gitmodules')
1252 1252
1253 1253 def test_get_node_returns_a_link(self):
1254 1254 repository = mock.Mock()
1255 1255 repository.alias = 'git'
1256 1256 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1257 1257 submodule_url = 'https://code.rhodecode.com/dulwich'
1258 1258 get_id_patch = mock.patch.object(
1259 1259 commit, '_get_id_for_path', return_value=(1, 'link'))
1260 1260 get_submodule_patch = mock.patch.object(
1261 1261 commit, '_get_submodule_url', return_value=submodule_url)
1262 1262
1263 1263 with get_id_patch, get_submodule_patch as submodule_mock:
1264 1264 node = commit.get_node('/abcde')
1265 1265
1266 1266 submodule_mock.assert_called_once_with('/abcde')
1267 1267 assert type(node) == SubModuleNode
1268 1268 assert node.url == submodule_url
1269 1269
1270 1270 def test_get_nodes_returns_links(self):
1271 1271 repository = mock.MagicMock()
1272 1272 repository.alias = 'git'
1273 1273 repository._remote.tree_items.return_value = [
1274 1274 ('subrepo', 'stat', 1, 'link')
1275 1275 ]
1276 1276 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1277 1277 submodule_url = 'https://code.rhodecode.com/dulwich'
1278 1278 get_id_patch = mock.patch.object(
1279 1279 commit, '_get_id_for_path', return_value=(1, 'tree'))
1280 1280 get_submodule_patch = mock.patch.object(
1281 1281 commit, '_get_submodule_url', return_value=submodule_url)
1282 1282
1283 1283 with get_id_patch, get_submodule_patch as submodule_mock:
1284 1284 nodes = commit.get_nodes('/abcde')
1285 1285
1286 1286 submodule_mock.assert_called_once_with('/abcde/subrepo')
1287 1287 assert len(nodes) == 1
1288 1288 assert type(nodes[0]) == SubModuleNode
1289 1289 assert nodes[0].url == submodule_url
@@ -1,1183 +1,1186 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22
23 23 import mock
24 24 import pytest
25 25
26 26 from rhodecode.lib.utils import make_db_config
27 27 from rhodecode.lib.vcs import backends
28 28 from rhodecode.lib.vcs.backends.base import (
29 29 Reference, MergeResponse, MergeFailureReason)
30 30 from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit
31 31 from rhodecode.lib.vcs.exceptions import (
32 32 RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError)
33 33 from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState
34 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE
34 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE, repo_id_generator
35 35
36 36
37 37 pytestmark = pytest.mark.backends("hg")
38 38
39 39
40 40 def repo_path_generator():
41 41 """
42 42 Return a different path to be used for cloning repos.
43 43 """
44 44 i = 0
45 45 while True:
46 46 i += 1
47 47 yield '%s-%d' % (TEST_HG_REPO_CLONE, i)
48 48
49
50 49 REPO_PATH_GENERATOR = repo_path_generator()
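# Each next(REPO_PATH_GENERATOR) call yields a fresh clone location,
# e.g. '<TEST_HG_REPO_CLONE>-1', then '<TEST_HG_REPO_CLONE>-2', so every
# test gets its own path.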
51 50
52 51
53 52 @pytest.fixture(scope='class', autouse=True)
54 53 def repo(request, baseapp):
55 54 repo = MercurialRepository(TEST_HG_REPO)
56 55 if request.cls:
57 56 request.cls.repo = repo
58 57 return repo
59 58
60 59
61 60 class TestMercurialRepository:
62 61
63 62 # pylint: disable=protected-access
64 63
65 64 def get_clone_repo(self):
66 65 """
67 66 Return a clone of the base repo.
68 67 """
69 68 clone_path = next(REPO_PATH_GENERATOR)
70 69 repo_clone = MercurialRepository(
71 70 clone_path, create=True, src_url=self.repo.path)
72 71
73 72 return repo_clone
74 73
75 74 def get_empty_repo(self):
76 75 """
77 76 Return an empty repo.
78 77 """
79 78 return MercurialRepository(next(REPO_PATH_GENERATOR), create=True)
80 79
81 80 def test_wrong_repo_path(self):
82 81 wrong_repo_path = '/tmp/errorrepo_hg'
83 82 with pytest.raises(RepositoryError):
84 83 MercurialRepository(wrong_repo_path)
85 84
86 85 def test_unicode_path_repo(self):
87 86 with pytest.raises(VCSError):
88 87 MercurialRepository(u'iShouldFail')
89 88
90 89 def test_unicode_commit_id(self):
91 90 with pytest.raises(CommitDoesNotExistError):
92 91 self.repo.get_commit(u'unicode-commit-id')
93 92 with pytest.raises(CommitDoesNotExistError):
94 93 self.repo.get_commit(u'unícøde-spéçial-chäråcter-commit-id')
95 94
96 95 def test_unicode_bookmark(self):
97 96 self.repo.bookmark(u'unicode-bookmark')
98 97 self.repo.bookmark(u'unícøde-spéçial-chäråcter-bookmark')
99 98
100 99 def test_unicode_branch(self):
101 100 with pytest.raises(KeyError):
102 101 self.repo.branches[u'unicode-branch']
103 102 with pytest.raises(KeyError):
104 103 self.repo.branches[u'unícøde-spéçial-chäråcter-branch']
105 104
106 105 def test_repo_clone(self):
107 106 if os.path.exists(TEST_HG_REPO_CLONE):
108 107 self.fail(
109 108 'Cannot test mercurial clone repo as location %s already '
110 109 'exists. You should manually remove it first.'
111 110 % TEST_HG_REPO_CLONE)
112 111
113 112 repo = MercurialRepository(TEST_HG_REPO)
114 113 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
115 114 src_url=TEST_HG_REPO)
116 115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
117 116 # Checking hashes of commits should be enough
118 117 for commit in repo.get_commits():
119 118 raw_id = commit.raw_id
120 119 assert raw_id == repo_clone.get_commit(raw_id).raw_id
121 120
122 121 def test_repo_clone_with_update(self):
123 122 repo = MercurialRepository(TEST_HG_REPO)
124 123 repo_clone = MercurialRepository(
125 124 TEST_HG_REPO_CLONE + '_w_update',
126 125 src_url=TEST_HG_REPO, update_after_clone=True)
127 126 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
128 127
129 128 # check if current workdir was updated
130 129 assert os.path.isfile(
131 130 os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'))
132 131
133 132 def test_repo_clone_without_update(self):
134 133 repo = MercurialRepository(TEST_HG_REPO)
135 134 repo_clone = MercurialRepository(
136 135 TEST_HG_REPO_CLONE + '_wo_update',
137 136 src_url=TEST_HG_REPO, update_after_clone=False)
138 137 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
139 138 assert not os.path.isfile(
140 139 os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'))
141 140
142 141 def test_commit_ids(self):
143 142 # there are 21 commits on Bitbucket now
144 143 # so we can assume they will be available from now on
145 144 subset = set([
146 145 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
147 146 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
148 147 '6cba7170863a2411822803fa77a0a264f1310b35',
149 148 '56349e29c2af3ac913b28bde9a2c6154436e615b',
150 149 '2dda4e345facb0ccff1a191052dd1606dba6781d',
151 150 '6fff84722075f1607a30f436523403845f84cd9e',
152 151 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
153 152 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
154 153 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
155 154 'be90031137367893f1c406e0a8683010fd115b79',
156 155 'db8e58be770518cbb2b1cdfa69146e47cd481481',
157 156 '84478366594b424af694a6c784cb991a16b87c21',
158 157 '17f8e105dddb9f339600389c6dc7175d395a535c',
159 158 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
160 159 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
161 160 '786facd2c61deb9cf91e9534735124fb8fc11842',
162 161 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
163 162 'aa6a0de05b7612707db567078e130a6cd114a9a7',
164 163 'eada5a770da98ab0dd7325e29d00e0714f228d09'
165 164 ])
166 165 assert subset.issubset(set(self.repo.commit_ids))
167 166
168 167 # check if we have the proper order of commits
169 168 org = [
170 169 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
171 170 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
172 171 '6cba7170863a2411822803fa77a0a264f1310b35',
173 172 '56349e29c2af3ac913b28bde9a2c6154436e615b',
174 173 '2dda4e345facb0ccff1a191052dd1606dba6781d',
175 174 '6fff84722075f1607a30f436523403845f84cd9e',
176 175 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
177 176 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
178 177 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
179 178 'be90031137367893f1c406e0a8683010fd115b79',
180 179 'db8e58be770518cbb2b1cdfa69146e47cd481481',
181 180 '84478366594b424af694a6c784cb991a16b87c21',
182 181 '17f8e105dddb9f339600389c6dc7175d395a535c',
183 182 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
184 183 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
185 184 '786facd2c61deb9cf91e9534735124fb8fc11842',
186 185 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
187 186 'aa6a0de05b7612707db567078e130a6cd114a9a7',
188 187 'eada5a770da98ab0dd7325e29d00e0714f228d09',
189 188 '2c1885c735575ca478bf9e17b0029dca68824458',
190 189 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
191 190 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
192 191 '4fb8326d78e5120da2c7468dcf7098997be385da',
193 192 '62b4a097164940bd66030c4db51687f3ec035eed',
194 193 '536c1a19428381cfea92ac44985304f6a8049569',
195 194 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
196 195 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
197 196 'f8940bcb890a98c4702319fbe36db75ea309b475',
198 197 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
199 198 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
200 199 'ee87846a61c12153b51543bf860e1026c6d3dcba',
201 200 ]
202 201 assert org == self.repo.commit_ids[:31]
203 202
204 203 def test_iter_slice(self):
205 204 sliced = list(self.repo[:10])
206 205 itered = list(self.repo)[:10]
207 206 assert sliced == itered
208 207
209 208 def test_slicing(self):
210 209 # expected slice sizes: 4, 1, 5, 10, 95
211 210 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
212 211 (10, 20, 10), (5, 100, 95)]:
213 212 indexes = list(self.repo[sfrom:sto])
214 213 assert len(indexes) == size
215 214 assert indexes[0] == self.repo.get_commit(commit_idx=sfrom)
216 215 assert indexes[-1] == self.repo.get_commit(commit_idx=sto - 1)
217 216
218 217 def test_branches(self):
219 218 # TODO: Need more tests here
220 219
221 220 # active branches
222 221 assert 'default' in self.repo.branches
223 222 assert 'stable' in self.repo.branches
224 223
225 224 # closed
226 225 assert 'git' in self.repo._get_branches(closed=True)
227 226 assert 'web' in self.repo._get_branches(closed=True)
228 227
229 228 for name, id in self.repo.branches.items():
230 229 assert isinstance(self.repo.get_commit(id), MercurialCommit)
231 230
232 231 def test_tip_in_tags(self):
233 232 # tip is always a tag
234 233 assert 'tip' in self.repo.tags
235 234
236 235 def test_tip_commit_in_tags(self):
237 236 tip = self.repo.get_commit()
238 237 assert self.repo.tags['tip'] == tip.raw_id
239 238
240 239 def test_initial_commit(self):
241 240 init_commit = self.repo.get_commit(commit_idx=0)
242 241 init_author = init_commit.author
243 242
244 243 assert init_commit.message == 'initial import'
245 244 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
246 245 assert init_author == init_commit.committer
247 246 assert sorted(init_commit._file_paths) == sorted([
248 247 'vcs/__init__.py',
249 248 'vcs/backends/BaseRepository.py',
250 249 'vcs/backends/__init__.py',
251 250 ])
252 251 assert sorted(init_commit._dir_paths) == sorted(
253 252 ['', 'vcs', 'vcs/backends'])
254 253
255 254 assert init_commit._dir_paths + init_commit._file_paths == \
256 255 init_commit._paths
257 256
258 257 with pytest.raises(NodeDoesNotExistError):
259 258 init_commit.get_node(path='foobar')
260 259
261 260 node = init_commit.get_node('vcs/')
262 261 assert hasattr(node, 'kind')
263 262 assert node.kind == NodeKind.DIR
264 263
265 264 node = init_commit.get_node('vcs')
266 265 assert hasattr(node, 'kind')
267 266 assert node.kind == NodeKind.DIR
268 267
269 268 node = init_commit.get_node('vcs/__init__.py')
270 269 assert hasattr(node, 'kind')
271 270 assert node.kind == NodeKind.FILE
272 271
273 272 def test_not_existing_commit(self):
274 273 # rawid
275 274 with pytest.raises(RepositoryError):
276 275 self.repo.get_commit('abcd' * 10)
277 276 # shortid
278 277 with pytest.raises(RepositoryError):
279 278 self.repo.get_commit('erro' * 4)
280 279 # numeric
281 280 with pytest.raises(RepositoryError):
282 281 self.repo.get_commit(commit_idx=self.repo.count() + 1)
283 282
284 283 # Small chance we ever get to this one
285 284 idx = pow(2, 30)
286 285 with pytest.raises(RepositoryError):
287 286 self.repo.get_commit(commit_idx=idx)
288 287
289 288 def test_commit10(self):
290 289 commit10 = self.repo.get_commit(commit_idx=10)
291 290 README = """===
292 291 VCS
293 292 ===
294 293
295 294 Various Version Control System management abstraction layer for Python.
296 295
297 296 Introduction
298 297 ------------
299 298
300 299 TODO: To be written...
301 300
302 301 """
303 302 node = commit10.get_node('README.rst')
304 303 assert node.kind == NodeKind.FILE
305 304 assert node.content == README
306 305
307 306 def test_local_clone(self):
308 307 clone_path = next(REPO_PATH_GENERATOR)
309 308 self.repo._local_clone(clone_path)
310 309 repo_clone = MercurialRepository(clone_path)
311 310
312 311 assert self.repo.commit_ids == repo_clone.commit_ids
313 312
314 313 def test_local_clone_fails_if_target_exists(self):
315 314 with pytest.raises(RepositoryError):
316 315 self.repo._local_clone(self.repo.path)
317 316
318 317 def test_update(self):
319 318 repo_clone = self.get_clone_repo()
320 319 branches = repo_clone.branches
321 320
322 321 repo_clone._update('default')
323 322 assert branches['default'] == repo_clone._identify()
324 323 repo_clone._update('stable')
325 324 assert branches['stable'] == repo_clone._identify()
326 325
327 326 def test_local_pull_branch(self):
328 327 target_repo = self.get_empty_repo()
329 328 source_repo = self.get_clone_repo()
330 329
331 330 default = Reference(
332 331 'branch', 'default', source_repo.branches['default'])
333 332 target_repo._local_pull(source_repo.path, default)
334 333 target_repo = MercurialRepository(target_repo.path)
335 334 assert (target_repo.branches['default'] ==
336 335 source_repo.branches['default'])
337 336
338 337 stable = Reference('branch', 'stable', source_repo.branches['stable'])
339 338 target_repo._local_pull(source_repo.path, stable)
340 339 target_repo = MercurialRepository(target_repo.path)
341 340 assert target_repo.branches['stable'] == source_repo.branches['stable']
342 341
343 342 def test_local_pull_bookmark(self):
344 343 target_repo = self.get_empty_repo()
345 344 source_repo = self.get_clone_repo()
346 345
347 346 commits = list(source_repo.get_commits(branch_name='default'))
348 347 foo1_id = commits[-5].raw_id
349 348 foo1 = Reference('book', 'foo1', foo1_id)
350 349 source_repo._update(foo1_id)
351 350 source_repo.bookmark('foo1')
352 351
353 352 foo2_id = commits[-3].raw_id
354 353 foo2 = Reference('book', 'foo2', foo2_id)
355 354 source_repo._update(foo2_id)
356 355 source_repo.bookmark('foo2')
357 356
358 357 target_repo._local_pull(source_repo.path, foo1)
359 358 target_repo = MercurialRepository(target_repo.path)
360 359 assert target_repo.branches['default'] == commits[-5].raw_id
361 360
362 361 target_repo._local_pull(source_repo.path, foo2)
363 362 target_repo = MercurialRepository(target_repo.path)
364 363 assert target_repo.branches['default'] == commits[-3].raw_id
365 364
366 365 def test_local_pull_commit(self):
367 366 target_repo = self.get_empty_repo()
368 367 source_repo = self.get_clone_repo()
369 368
370 369 commits = list(source_repo.get_commits(branch_name='default'))
371 370 commit_id = commits[-5].raw_id
372 371 commit = Reference('rev', commit_id, commit_id)
373 372 target_repo._local_pull(source_repo.path, commit)
374 373 target_repo = MercurialRepository(target_repo.path)
375 374 assert target_repo.branches['default'] == commit_id
376 375
377 376 commit_id = commits[-3].raw_id
378 377 commit = Reference('rev', commit_id, commit_id)
379 378 target_repo._local_pull(source_repo.path, commit)
380 379 target_repo = MercurialRepository(target_repo.path)
381 380 assert target_repo.branches['default'] == commit_id
382 381
383 382 def test_local_pull_from_same_repo(self):
384 383 reference = Reference('branch', 'default', None)
385 384 with pytest.raises(ValueError):
386 385 self.repo._local_pull(self.repo.path, reference)
387 386
388 387 def test_validate_pull_reference_raises_on_missing_reference(
389 388 self, vcsbackend_hg):
390 389 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
391 390 reference = Reference(
392 391 'book', 'invalid_reference', 'a' * 40)
393 392
394 393 with pytest.raises(CommitDoesNotExistError):
395 394 target_repo._validate_pull_reference(reference)
396 395
397 396 def test_heads(self):
398 397 assert set(self.repo._heads()) == set(self.repo.branches.values())
399 398
400 399 def test_ancestor(self):
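# On a linear stretch of history the older commit is the common
# ancestor, regardless of argument order.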
401 400 commits = [
402 401 c.raw_id for c in self.repo.get_commits(branch_name='default')]
403 402 assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5]
404 403 assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5]
405 404
406 405 def test_local_push(self):
407 406 target_repo = self.get_empty_repo()
408 407
409 408 revisions = list(self.repo.get_commits(branch_name='default'))
410 409 revision = revisions[-5].raw_id
411 410 self.repo._local_push(revision, target_repo.path)
412 411
413 412 target_repo = MercurialRepository(target_repo.path)
414 413
415 414 assert target_repo.branches['default'] == revision
416 415
417 416 def test_hooks_can_be_enabled_for_local_push(self):
418 417 revision = 'deadbeef'
419 418 repo_path = 'test_group/test_repo'
420 419 with mock.patch.object(self.repo, '_remote') as remote_mock:
421 420 self.repo._local_push(revision, repo_path, enable_hooks=True)
422 421 remote_mock.push.assert_called_once_with(
423 422 [revision], repo_path, hooks=True, push_branches=False)
424 423
425 424 def test_local_merge(self, vcsbackend_hg):
426 425 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
427 426 source_repo = vcsbackend_hg.clone_repo(target_repo)
428 427 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
429 428 target_repo = MercurialRepository(target_repo.path)
430 429 target_rev = target_repo.branches['default']
431 430 target_ref = Reference(
432 431 type='branch', name='default', commit_id=target_rev)
433 432 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
434 433 source_repo = MercurialRepository(source_repo.path)
435 434 source_rev = source_repo.branches['default']
436 435 source_ref = Reference(
437 436 type='branch', name='default', commit_id=source_rev)
438 437
439 438 target_repo._local_pull(source_repo.path, source_ref)
440 439
441 440 merge_message = 'Merge message\n\nDescription:...'
442 441 user_name = 'Albert Einstein'
443 442 user_email = 'albert@einstein.com'
444 443 merge_commit_id, needs_push = target_repo._local_merge(
445 444 target_ref, merge_message, user_name, user_email, source_ref)
446 445 assert needs_push
447 446
448 447 target_repo = MercurialRepository(target_repo.path)
449 448 assert target_repo.commit_ids[-3] == target_rev
450 449 assert target_repo.commit_ids[-2] == source_rev
451 450 last_commit = target_repo.get_commit(merge_commit_id)
452 451 assert last_commit.message.strip() == merge_message
453 452 assert last_commit.author == '%s <%s>' % (user_name, user_email)
454 453
455 454 assert not os.path.exists(
456 455 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
457 456
458 457 def test_local_merge_source_is_fast_forward(self, vcsbackend_hg):
459 458 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
460 459 source_repo = vcsbackend_hg.clone_repo(target_repo)
461 460 target_rev = target_repo.branches['default']
462 461 target_ref = Reference(
463 462 type='branch', name='default', commit_id=target_rev)
464 463 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
465 464 source_repo = MercurialRepository(source_repo.path)
466 465 source_rev = source_repo.branches['default']
467 466 source_ref = Reference(
468 467 type='branch', name='default', commit_id=source_rev)
469 468
470 469 target_repo._local_pull(source_repo.path, source_ref)
471 470
472 471 merge_message = 'Merge message\n\nDescription:...'
473 472 user_name = 'Albert Einstein'
474 473 user_email = 'albert@einstein.com'
475 474 merge_commit_id, needs_push = target_repo._local_merge(
476 475 target_ref, merge_message, user_name, user_email, source_ref)
477 476 assert merge_commit_id == source_rev
478 477 assert needs_push
479 478
480 479 target_repo = MercurialRepository(target_repo.path)
481 480 assert target_repo.commit_ids[-2] == target_rev
482 481 assert target_repo.commit_ids[-1] == source_rev
483 482
484 483 assert not os.path.exists(
485 484 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
486 485
487 486 def test_local_merge_source_is_integrated(self, vcsbackend_hg):
488 487 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
489 488 target_rev = target_repo.branches['default']
490 489 target_ref = Reference(
491 490 type='branch', name='default', commit_id=target_rev)
492 491
493 492 merge_message = 'Merge message\n\nDescription:...'
494 493 user_name = 'Albert Einstein'
495 494 user_email = 'albert@einstein.com'
496 495 merge_commit_id, needs_push = target_repo._local_merge(
497 496 target_ref, merge_message, user_name, user_email, target_ref)
498 497 assert merge_commit_id == target_rev
499 498 assert not needs_push
500 499
501 500 target_repo = MercurialRepository(target_repo.path)
502 501 assert target_repo.commit_ids[-1] == target_rev
503 502
504 503 assert not os.path.exists(
505 504 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
506 505
507 506 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg):
508 507 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
509 508 source_repo = vcsbackend_hg.clone_repo(target_repo)
510 509 vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1')
511 510 target_repo = MercurialRepository(target_repo.path)
512 511 target_rev = target_repo.branches['default']
513 512 target_ref = Reference(
514 513 type='branch', name='default', commit_id=target_rev)
515 514 vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2')
516 515 source_repo = MercurialRepository(source_repo.path)
517 516 source_rev = source_repo.branches['default']
518 517 source_ref = Reference(
519 518 type='branch', name='default', commit_id=source_rev)
520 519
521 520 target_repo._local_pull(source_repo.path, source_ref)
522 521 with pytest.raises(RepositoryError):
523 522 target_repo._local_merge(
524 523 target_ref, 'merge_message', 'user name', 'user@name.com',
525 524 source_ref)
526 525
527 526 # Check we are not left in an intermediate merge state
528 527 assert not os.path.exists(
529 528 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
530 529
531 530 def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg):
532 531 commits = [
533 532 {'message': 'a'},
534 533 {'message': 'b', 'branch': 'b'},
535 534 ]
536 535 repo = backend_hg.create_repo(commits)
537 536 commit_ids = backend_hg.commit_ids
538 537 target_ref = Reference(
539 538 type='branch', name='default', commit_id=commit_ids['a'])
540 539 source_ref = Reference(
541 540 type='branch', name='b', commit_id=commit_ids['b'])
542 541 merge_message = 'Merge message\n\nDescription:...'
543 542 user_name = 'Albert Einstein'
544 543 user_email = 'albert@einstein.com'
545 544 vcs_repo = repo.scm_instance()
546 545 merge_commit_id, needs_push = vcs_repo._local_merge(
547 546 target_ref, merge_message, user_name, user_email, source_ref)
548 547 assert merge_commit_id != source_ref.commit_id
549 548 assert needs_push is True
550 549 commit = vcs_repo.get_commit(merge_commit_id)
551 550 assert commit.merge is True
552 551 assert commit.message == merge_message
553 552
554 553 def test_maybe_prepare_merge_workspace(self):
555 554 workspace = self.repo._maybe_prepare_merge_workspace(
556 'pr2', 'unused', 'unused2')
555 1, 'pr2', 'unused', 'unused2')
557 556
558 557 assert os.path.isdir(workspace)
559 558 workspace_repo = MercurialRepository(workspace)
560 559 assert workspace_repo.branches == self.repo.branches
561 560
562 561 # Calling it a second time should also succeed
563 562 workspace = self.repo._maybe_prepare_merge_workspace(
564 'pr2', 'unused', 'unused2')
563 1, 'pr2', 'unused', 'unused2')
565 564 assert os.path.isdir(workspace)
566 565
567 566 def test_cleanup_merge_workspace(self):
568 567 workspace = self.repo._maybe_prepare_merge_workspace(
569 'pr3', 'unused', 'unused2')
570 self.repo.cleanup_merge_workspace('pr3')
568 1, 'pr3', 'unused', 'unused2')
569
570 assert os.path.isdir(workspace)
571 self.repo.cleanup_merge_workspace(1, 'pr3')
571 572
572 573 assert not os.path.exists(workspace)
573 574
574 575 def test_cleanup_merge_workspace_invalid_workspace_id(self):
575 576         # No assert: in case of a nonexistent workspace this function
576 577         # should still succeed.
577 self.repo.cleanup_merge_workspace('pr4')
578 self.repo.cleanup_merge_workspace(1, 'pr4')
578 579
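The two workspace tests above exercise the new calling convention in which a numeric repo id is passed ahead of the workspace id. The point of keying on the numeric id is that it stays stable even if the repository is renamed; the helper below is a rough sketch of such a path scheme and is an illustrative assumption, not the actual RhodeCode implementation:

.. code-block:: python

    import os

    def shadow_repo_path(repo_path, repo_id, workspace_id):
        # Hypothetical layout: place the shadow repo next to the target
        # repo, keyed by the numeric repo id plus the merge workspace id.
        return os.path.join(
            os.path.dirname(repo_path),
            '.__shadow_repo_%s_%s' % (repo_id, workspace_id))

Because the id is deterministic, calling _maybe_prepare_merge_workspace twice with the same arguments resolves to the same directory, which is exactly what the repeated call in test_maybe_prepare_merge_workspace relies on.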
579 580 def test_merge_target_is_bookmark(self, vcsbackend_hg):
580 581 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
581 582 source_repo = vcsbackend_hg.clone_repo(target_repo)
582 583 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
583 584 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
584 585 imc = source_repo.in_memory_commit
585 586 imc.add(FileNode('file_x', content=source_repo.name))
586 587 imc.commit(
587 588 message=u'Automatic commit from repo merge test',
588 589 author=u'Automatic')
589 590 target_commit = target_repo.get_commit()
590 591 source_commit = source_repo.get_commit()
591 592 default_branch = target_repo.DEFAULT_BRANCH_NAME
592 593 bookmark_name = 'bookmark'
593 594 target_repo._update(default_branch)
594 595 target_repo.bookmark(bookmark_name)
595 596 target_ref = Reference('book', bookmark_name, target_commit.raw_id)
596 597 source_ref = Reference('branch', default_branch, source_commit.raw_id)
597 workspace = 'test-merge'
598
598 workspace_id = 'test-merge'
599 repo_id = repo_id_generator(target_repo.path)
599 600 merge_response = target_repo.merge(
600 target_ref, source_repo, source_ref, workspace,
601 repo_id, workspace_id, target_ref, source_repo, source_ref,
601 602 'test user', 'test@rhodecode.com', 'merge message 1',
602 603 dry_run=False)
603 604 expected_merge_response = MergeResponse(
604 605 True, True, merge_response.merge_ref,
605 606 MergeFailureReason.NONE)
606 607 assert merge_response == expected_merge_response
607 608
608 609 target_repo = backends.get_backend(vcsbackend_hg.alias)(
609 610 target_repo.path)
610 611 target_commits = list(target_repo.get_commits())
611 612 commit_ids = [c.raw_id for c in target_commits[:-1]]
612 613 assert source_ref.commit_id in commit_ids
613 614 assert target_ref.commit_id in commit_ids
614 615
615 616 merge_commit = target_commits[-1]
616 617 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
617 618 assert merge_commit.message.strip() == 'merge message 1'
618 619 assert merge_commit.author == 'test user <test@rhodecode.com>'
619 620
620 621 # Check the bookmark was updated in the target repo
621 622 assert (
622 623 target_repo.bookmarks[bookmark_name] ==
623 624 merge_response.merge_ref.commit_id)
624 625
625 626 def test_merge_source_is_bookmark(self, vcsbackend_hg):
626 627 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
627 628 source_repo = vcsbackend_hg.clone_repo(target_repo)
628 629 imc = source_repo.in_memory_commit
629 630 imc.add(FileNode('file_x', content=source_repo.name))
630 631 imc.commit(
631 632 message=u'Automatic commit from repo merge test',
632 633 author=u'Automatic')
633 634 target_commit = target_repo.get_commit()
634 635 source_commit = source_repo.get_commit()
635 636 default_branch = target_repo.DEFAULT_BRANCH_NAME
636 637 bookmark_name = 'bookmark'
637 638 target_ref = Reference('branch', default_branch, target_commit.raw_id)
638 639 source_repo._update(default_branch)
639 640 source_repo.bookmark(bookmark_name)
640 641 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
641 workspace = 'test-merge'
642
642 workspace_id = 'test-merge'
643 repo_id = repo_id_generator(target_repo.path)
643 644 merge_response = target_repo.merge(
644 target_ref, source_repo, source_ref, workspace,
645 repo_id, workspace_id, target_ref, source_repo, source_ref,
645 646 'test user', 'test@rhodecode.com', 'merge message 1',
646 647 dry_run=False)
647 648 expected_merge_response = MergeResponse(
648 649 True, True, merge_response.merge_ref,
649 650 MergeFailureReason.NONE)
650 651 assert merge_response == expected_merge_response
651 652
652 653 target_repo = backends.get_backend(vcsbackend_hg.alias)(
653 654 target_repo.path)
654 655 target_commits = list(target_repo.get_commits())
655 656 commit_ids = [c.raw_id for c in target_commits]
656 657 assert source_ref.commit_id == commit_ids[-1]
657 658 assert target_ref.commit_id == commit_ids[-2]
658 659
659 660 def test_merge_target_has_multiple_heads(self, vcsbackend_hg):
660 661 target_repo = vcsbackend_hg.create_repo(number_of_commits=2)
661 662 source_repo = vcsbackend_hg.clone_repo(target_repo)
662 663 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
663 664 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
664 665
665 666 # add an extra head to the target repo
666 667 imc = target_repo.in_memory_commit
667 668 imc.add(FileNode('file_x', content='foo'))
668 669 commits = list(target_repo.get_commits())
669 670 imc.commit(
670 671 message=u'Automatic commit from repo merge test',
671 672 author=u'Automatic', parents=commits[0:1])
672 673
673 674 target_commit = target_repo.get_commit()
674 675 source_commit = source_repo.get_commit()
675 676 default_branch = target_repo.DEFAULT_BRANCH_NAME
676 677 target_repo._update(default_branch)
677 678
678 679 target_ref = Reference('branch', default_branch, target_commit.raw_id)
679 680 source_ref = Reference('branch', default_branch, source_commit.raw_id)
680 workspace = 'test-merge'
681 workspace_id = 'test-merge'
681 682
682 683 assert len(target_repo._heads(branch='default')) == 2
683 684 expected_merge_response = MergeResponse(
684 685 False, False, None,
685 686 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
687 repo_id = repo_id_generator(target_repo.path)
686 688 merge_response = target_repo.merge(
687 target_ref, source_repo, source_ref, workspace,
689 repo_id, workspace_id, target_ref, source_repo, source_ref,
688 690 'test user', 'test@rhodecode.com', 'merge message 1',
689 691 dry_run=False)
690 692 assert merge_response == expected_merge_response
691 693
692 694 def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg):
693 695 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
694 696 source_repo = vcsbackend_hg.clone_repo(target_repo)
695 697 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
696 698 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
697 699 imc = source_repo.in_memory_commit
698 700 imc.add(FileNode('file_x', content=source_repo.name))
699 701 imc.commit(
700 702 message=u'Automatic commit from repo merge test',
701 703 author=u'Automatic')
702 704 target_commit = target_repo.get_commit()
703 705 source_commit = source_repo.get_commit()
704 706
705 707 vcsbackend_hg.add_file(source_repo, 'LICENSE', 'LICENSE Info')
706 708
707 709 default_branch = target_repo.DEFAULT_BRANCH_NAME
708 710 bookmark_name = 'bookmark'
709 711 source_repo._update(default_branch)
710 712 source_repo.bookmark(bookmark_name)
711 713
712 714 target_ref = Reference('branch', default_branch, target_commit.raw_id)
713 715 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
714 workspace = 'test-merge'
716 repo_id = repo_id_generator(target_repo.path)
717 workspace_id = 'test-merge'
715 718
716 719 merge_response = target_repo.merge(
717 target_ref, source_repo, source_ref, workspace,
720 repo_id, workspace_id, target_ref, source_repo, source_ref,
718 721 'test user', 'test@rhodecode.com', 'merge message 1',
719 722 dry_run=False, use_rebase=True)
720 723
721 724 expected_merge_response = MergeResponse(
722 725 True, True, merge_response.merge_ref,
723 726 MergeFailureReason.NONE)
724 727 assert merge_response == expected_merge_response
725 728
726 729 target_repo = backends.get_backend(vcsbackend_hg.alias)(
727 730 target_repo.path)
728 731 last_commit = target_repo.get_commit()
729 732 assert last_commit.message == source_commit.message
730 733 assert last_commit.author == source_commit.author
731 734         # This checks that we effectively did a rebase: a new parent means a new hash
732 735 assert last_commit.raw_id != source_commit.raw_id
733 736
734 737 # Check the target has only 4 commits: 2 were already in target and
735 738 # only two should have been added
736 739 assert len(target_repo.commit_ids) == 2 + 2
737 740
738 741
739 742 class TestGetShadowInstance(object):
740 743
741 744 @pytest.fixture
742 745 def repo(self, vcsbackend_hg, monkeypatch):
743 746 repo = vcsbackend_hg.repo
744 747 monkeypatch.setattr(repo, 'config', mock.Mock())
745 748 monkeypatch.setattr('rhodecode.lib.vcs.connection.Hg', mock.Mock())
746 749 return repo
747 750
748 751 def test_passes_config(self, repo):
749 752 shadow = repo._get_shadow_instance(repo.path)
750 753 assert shadow.config == repo.config.copy()
751 754
752 755 def test_disables_hooks(self, repo):
753 756 shadow = repo._get_shadow_instance(repo.path)
754 757 shadow.config.clear_section.assert_called_once_with('hooks')
755 758
756 759 def test_allows_to_keep_hooks(self, repo):
757 760 shadow = repo._get_shadow_instance(repo.path, enable_hooks=True)
758 761 assert not shadow.config.clear_section.called
759 762
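The three tests above pin down the contract of _get_shadow_instance: the shadow repository gets its own copy of the parent's config, and the hooks section is stripped from that copy unless explicitly kept. A minimal sketch consistent with those assertions (the MercurialRepository constructor signature here is an assumption for illustration):

.. code-block:: python

    def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
        # Copy the config so clearing the hooks section cannot leak
        # back into the parent repository's configuration.
        config = self.config.copy()
        if not enable_hooks:
            config.clear_section('hooks')
        return MercurialRepository(shadow_repository_path, config=config)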
760 763
761 764 class TestMercurialCommit(object):
762 765
763 766 def _test_equality(self, commit):
764 767 idx = commit.idx
765 768 assert commit == self.repo.get_commit(commit_idx=idx)
766 769
767 770 def test_equality(self):
768 771 indexes = [0, 10, 20]
769 772 commits = [self.repo.get_commit(commit_idx=idx) for idx in indexes]
770 773 for commit in commits:
771 774 self._test_equality(commit)
772 775
773 776 def test_default_commit(self):
774 777 tip = self.repo.get_commit('tip')
775 778 assert tip == self.repo.get_commit()
776 779 assert tip == self.repo.get_commit(commit_id=None)
777 780 assert tip == self.repo.get_commit(commit_idx=None)
778 781 assert tip == list(self.repo[-1:])[0]
779 782
780 783 def test_root_node(self):
781 784 tip = self.repo.get_commit('tip')
782 785 assert tip.root is tip.get_node('')
783 786
784 787 def test_lazy_fetch(self):
785 788 """
786 789         Test that a commit's nodes expand and are cached as we walk through
787 790         the commit. This test is somewhat hard to write, as the order of
788 791         operations is key here. Written by running command after command in a shell.
789 792 """
790 793 commit = self.repo.get_commit(commit_idx=45)
791 794 assert len(commit.nodes) == 0
792 795 root = commit.root
793 796 assert len(commit.nodes) == 1
794 797 assert len(root.nodes) == 8
795 798 # accessing root.nodes updates commit.nodes
796 799 assert len(commit.nodes) == 9
797 800
798 801 docs = root.get_node('docs')
799 802 # we haven't yet accessed anything new as docs dir was already cached
800 803 assert len(commit.nodes) == 9
801 804 assert len(docs.nodes) == 8
802 805 # accessing docs.nodes updates commit.nodes
803 806 assert len(commit.nodes) == 17
804 807
805 808 assert docs is commit.get_node('docs')
806 809 assert docs is root.nodes[0]
807 810 assert docs is root.dirs[0]
808 811 assert docs is commit.get_node('docs')
809 812
810 813 def test_nodes_with_commit(self):
811 814 commit = self.repo.get_commit(commit_idx=45)
812 815 root = commit.root
813 816 docs = root.get_node('docs')
814 817 assert docs is commit.get_node('docs')
815 818 api = docs.get_node('api')
816 819 assert api is commit.get_node('docs/api')
817 820 index = api.get_node('index.rst')
818 821 assert index is commit.get_node('docs/api/index.rst')
819 822 assert index is commit.get_node(
820 823 'docs').get_node('api').get_node('index.rst')
821 824
822 825 def test_branch_and_tags(self):
823 826 commit0 = self.repo.get_commit(commit_idx=0)
824 827 assert commit0.branch == 'default'
825 828 assert commit0.tags == []
826 829
827 830 commit10 = self.repo.get_commit(commit_idx=10)
828 831 assert commit10.branch == 'default'
829 832 assert commit10.tags == []
830 833
831 834 commit44 = self.repo.get_commit(commit_idx=44)
832 835 assert commit44.branch == 'web'
833 836
834 837 tip = self.repo.get_commit('tip')
835 838 assert 'tip' in tip.tags
836 839
837 840 def test_bookmarks(self):
838 841 commit0 = self.repo.get_commit(commit_idx=0)
839 842 assert commit0.bookmarks == []
840 843
841 844 def _test_file_size(self, idx, path, size):
842 845 node = self.repo.get_commit(commit_idx=idx).get_node(path)
843 846 assert node.is_file()
844 847 assert node.size == size
845 848
846 849 def test_file_size(self):
847 850 to_check = (
848 851 (10, 'setup.py', 1068),
849 852 (20, 'setup.py', 1106),
850 853 (60, 'setup.py', 1074),
851 854
852 855 (10, 'vcs/backends/base.py', 2921),
853 856 (20, 'vcs/backends/base.py', 3936),
854 857 (60, 'vcs/backends/base.py', 6189),
855 858 )
856 859 for idx, path, size in to_check:
857 860 self._test_file_size(idx, path, size)
858 861
859 862 def test_file_history_from_commits(self):
860 863 node = self.repo[10].get_node('setup.py')
861 864 commit_ids = [commit.raw_id for commit in node.history]
862 865 assert ['3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == commit_ids
863 866
864 867 node = self.repo[20].get_node('setup.py')
865 868 node_ids = [commit.raw_id for commit in node.history]
866 869 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
867 870 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
868 871
869 872         # Special case: we check history from a commit that has this particular
870 873         # file changed; this means we check whether it's included as well
871 874 node = self.repo.get_commit('eada5a770da98ab0dd7325e29d00e0714f228d09')\
872 875 .get_node('setup.py')
873 876 node_ids = [commit.raw_id for commit in node.history]
874 877 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
875 878 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
876 879
877 880 def test_file_history(self):
878 881         # we can only check if those commits are present in the history,
879 882         # as we cannot update this test every time the file is changed
880 883 files = {
881 884 'setup.py': [7, 18, 45, 46, 47, 69, 77],
882 885 'vcs/nodes.py': [
883 886 7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76],
884 887 'vcs/backends/hg.py': [
885 888 4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, 26, 27, 28, 30,
886 889 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 48, 49, 53, 54,
887 890 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, 82],
888 891 }
889 892 for path, indexes in files.items():
890 893 tip = self.repo.get_commit(commit_idx=indexes[-1])
891 894 node = tip.get_node(path)
892 895 node_indexes = [commit.idx for commit in node.history]
893 896 assert set(indexes).issubset(set(node_indexes)), (
894 897                 "We assumed that %s is a subset of commits for which file %s "
895 898                 "has been changed, and the history of that node returned: %s"
896 899 % (indexes, path, node_indexes))
897 900
898 901 def test_file_annotate(self):
899 902 files = {
900 903 'vcs/backends/__init__.py': {
901 904 89: {
902 905 'lines_no': 31,
903 906 'commits': [
904 907 32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
905 908 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
906 909 32, 32, 32, 32, 37, 32, 37, 37, 32,
907 910 32, 32
908 911 ]
909 912 },
910 913 20: {
911 914 'lines_no': 1,
912 915 'commits': [4]
913 916 },
914 917 55: {
915 918 'lines_no': 31,
916 919 'commits': [
917 920 32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
918 921 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
919 922 32, 32, 32, 32, 37, 32, 37, 37, 32,
920 923 32, 32
921 924 ]
922 925 }
923 926 },
924 927 'vcs/exceptions.py': {
925 928 89: {
926 929 'lines_no': 18,
927 930 'commits': [
928 931 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
929 932 16, 16, 17, 16, 16, 18, 18, 18
930 933 ]
931 934 },
932 935 20: {
933 936 'lines_no': 18,
934 937 'commits': [
935 938 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
936 939 16, 16, 17, 16, 16, 18, 18, 18
937 940 ]
938 941 },
939 942 55: {
940 943 'lines_no': 18,
941 944 'commits': [
942 945 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
943 946 17, 16, 16, 18, 18, 18
944 947 ]
945 948 }
946 949 },
947 950 'MANIFEST.in': {
948 951 89: {
949 952 'lines_no': 5,
950 953 'commits': [7, 7, 7, 71, 71]
951 954 },
952 955 20: {
953 956 'lines_no': 3,
954 957 'commits': [7, 7, 7]
955 958 },
956 959 55: {
957 960 'lines_no': 3,
958 961 'commits': [7, 7, 7]
959 962 }
960 963 }
961 964 }
962 965
963 966 for fname, commit_dict in files.items():
964 967 for idx, __ in commit_dict.items():
965 968 commit = self.repo.get_commit(commit_idx=idx)
966 969 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
967 970 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
968 971 assert l1_1 == l1_2
969 972 l1 = l1_2 = [
970 973 x[2]().idx for x in commit.get_file_annotate(fname)]
971 974 l2 = files[fname][idx]['commits']
972 975 assert l1 == l2, (
973 976                     "The lists of commits for %s@commit_id %s "
974 977                     "from the annotation list should match each other, "
975 978 "got \n%s \nvs \n%s " % (fname, idx, l1, l2))
976 979
977 980 def test_commit_state(self):
978 981 """
979 982         Tests which files have been added/changed/removed at a particular commit
980 983 """
981 984
982 985 # commit_id 46ad32a4f974:
983 986 # hg st --rev 46ad32a4f974
984 987 # changed: 13
985 988 # added: 20
986 989 # removed: 1
987 990 changed = set([
988 991 '.hgignore', 'README.rst', 'docs/conf.py', 'docs/index.rst',
989 992 'setup.py', 'tests/test_hg.py', 'tests/test_nodes.py',
990 993 'vcs/__init__.py', 'vcs/backends/__init__.py',
991 994 'vcs/backends/base.py', 'vcs/backends/hg.py', 'vcs/nodes.py',
992 995 'vcs/utils/__init__.py'])
993 996
994 997 added = set([
995 998 'docs/api/backends/hg.rst', 'docs/api/backends/index.rst',
996 999 'docs/api/index.rst', 'docs/api/nodes.rst',
997 1000 'docs/api/web/index.rst', 'docs/api/web/simplevcs.rst',
998 1001 'docs/installation.rst', 'docs/quickstart.rst', 'setup.cfg',
999 1002 'vcs/utils/baseui_config.py', 'vcs/utils/web.py',
1000 1003 'vcs/web/__init__.py', 'vcs/web/exceptions.py',
1001 1004 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py',
1002 1005 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py',
1003 1006 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py',
1004 1007 'vcs/web/simplevcs/views.py'])
1005 1008
1006 1009 removed = set(['docs/api.rst'])
1007 1010
1008 1011 commit64 = self.repo.get_commit('46ad32a4f974')
1009 1012 assert set((node.path for node in commit64.added)) == added
1010 1013 assert set((node.path for node in commit64.changed)) == changed
1011 1014 assert set((node.path for node in commit64.removed)) == removed
1012 1015
1013 1016 # commit_id b090f22d27d6:
1014 1017 # hg st --rev b090f22d27d6
1015 1018         # changed: 1
1016 1019         # added: 0
1017 1020         # removed: 0
1018 1021 commit88 = self.repo.get_commit('b090f22d27d6')
1019 1022 assert set((node.path for node in commit88.added)) == set()
1020 1023 assert set((node.path for node in commit88.changed)) == \
1021 1024 set(['.hgignore'])
1022 1025 assert set((node.path for node in commit88.removed)) == set()
1023 1026
1024 1027 #
1025 1028 # 85:
1026 1029 # added: 2 [
1027 1030 # 'vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
1028 1031 # changed: 4 ['vcs/web/simplevcs/models.py', ...]
1029 1032 # removed: 1 ['vcs/utils/web.py']
1030 1033 commit85 = self.repo.get_commit(commit_idx=85)
1031 1034 assert set((node.path for node in commit85.added)) == set([
1032 1035 'vcs/utils/diffs.py',
1033 1036 'vcs/web/simplevcs/views/diffs.py'])
1034 1037 assert set((node.path for node in commit85.changed)) == set([
1035 1038 'vcs/web/simplevcs/models.py',
1036 1039 'vcs/web/simplevcs/utils.py',
1037 1040 'vcs/web/simplevcs/views/__init__.py',
1038 1041 'vcs/web/simplevcs/views/repository.py',
1039 1042 ])
1040 1043 assert set((node.path for node in commit85.removed)) == \
1041 1044 set(['vcs/utils/web.py'])
1042 1045
1043 1046 def test_files_state(self):
1044 1047 """
1045 1048 Tests state of FileNodes.
1046 1049 """
1047 1050 commit = self.repo.get_commit(commit_idx=85)
1048 1051 node = commit.get_node('vcs/utils/diffs.py')
1049 1052 assert node.state, NodeState.ADDED
1050 1053 assert node.added
1051 1054 assert not node.changed
1052 1055 assert not node.not_changed
1053 1056 assert not node.removed
1054 1057
1055 1058 commit = self.repo.get_commit(commit_idx=88)
1056 1059 node = commit.get_node('.hgignore')
1057 1060         assert node.state == NodeState.CHANGED
1058 1061 assert not node.added
1059 1062 assert node.changed
1060 1063 assert not node.not_changed
1061 1064 assert not node.removed
1062 1065
1063 1066 commit = self.repo.get_commit(commit_idx=85)
1064 1067 node = commit.get_node('setup.py')
1065 1068         assert node.state == NodeState.NOT_CHANGED
1066 1069 assert not node.added
1067 1070 assert not node.changed
1068 1071 assert node.not_changed
1069 1072 assert not node.removed
1070 1073
1071 1074         # If a node has REMOVED state then trying to fetch it raises a
1072 1075         # NodeDoesNotExistError exception
1073 1076 commit = self.repo.get_commit(commit_idx=2)
1074 1077 path = 'vcs/backends/BaseRepository.py'
1075 1078 with pytest.raises(NodeDoesNotExistError):
1076 1079 commit.get_node(path)
1077 1080         # but it is listed in the commit's ``removed`` attribute
1078 1081 assert path in [rf.path for rf in commit.removed]
1079 1082
1080 1083 def test_commit_message_is_unicode(self):
1081 1084 for cm in self.repo:
1082 1085 assert type(cm.message) == unicode
1083 1086
1084 1087 def test_commit_author_is_unicode(self):
1085 1088 for cm in self.repo:
1086 1089 assert type(cm.author) == unicode
1087 1090
1088 1091 def test_repo_files_content_is_unicode(self):
1089 1092 test_commit = self.repo.get_commit(commit_idx=100)
1090 1093 for node in test_commit.get_node('/'):
1091 1094 if node.is_file():
1092 1095 assert type(node.content) == unicode
1093 1096
1094 1097 def test_wrong_path(self):
1095 1098 # There is 'setup.py' in the root dir but not there:
1096 1099 path = 'foo/bar/setup.py'
1097 1100 with pytest.raises(VCSError):
1098 1101 self.repo.get_commit().get_node(path)
1099 1102
1100 1103 def test_author_email(self):
1101 1104 assert 'marcin@python-blog.com' == \
1102 1105 self.repo.get_commit('b986218ba1c9').author_email
1103 1106 assert 'lukasz.balcerzak@python-center.pl' == \
1104 1107 self.repo.get_commit('3803844fdbd3').author_email
1105 1108 assert '' == self.repo.get_commit('84478366594b').author_email
1106 1109
1107 1110 def test_author_username(self):
1108 1111 assert 'Marcin Kuzminski' == \
1109 1112 self.repo.get_commit('b986218ba1c9').author_name
1110 1113 assert 'Lukasz Balcerzak' == \
1111 1114 self.repo.get_commit('3803844fdbd3').author_name
1112 1115 assert 'marcink' == \
1113 1116 self.repo.get_commit('84478366594b').author_name
1114 1117
1115 1118
1116 1119 class TestLargeFileRepo(object):
1117 1120
1118 1121 def test_large_file(self, backend_hg):
1119 1122 repo = backend_hg.create_test_repo('largefiles', make_db_config())
1120 1123
1121 1124 tip = repo.scm_instance().get_commit()
1122 1125 node = tip.get_node('.hglf/thisfileislarge')
1123 1126
1124 1127 lf_node = node.get_largefile_node()
1125 1128
1126 1129 assert lf_node.is_largefile() is True
1127 1130 assert lf_node.size == 1024000
1128 1131 assert lf_node.name == '.hglf/thisfileislarge'
1129 1132
1130 1133
1131 1134 class TestGetBranchName(object):
1132 1135 def test_returns_ref_name_when_type_is_branch(self):
1133 1136 ref = self._create_ref('branch', 'fake-name')
1134 1137 result = self.repo._get_branch_name(ref)
1135 1138 assert result == ref.name
1136 1139
1137 1140 @pytest.mark.parametrize("type_", ("book", "tag"))
1138 1141 def test_queries_remote_when_type_is_not_branch(self, type_):
1139 1142 ref = self._create_ref(type_, 'wrong-fake-name')
1140 1143 with mock.patch.object(self.repo, "_remote") as remote_mock:
1141 1144 remote_mock.ctx_branch.return_value = "fake-name"
1142 1145 result = self.repo._get_branch_name(ref)
1143 1146 assert result == "fake-name"
1144 1147 remote_mock.ctx_branch.assert_called_once_with(ref.commit_id)
1145 1148
1146 1149 def _create_ref(self, type_, name):
1147 1150 ref = mock.Mock()
1148 1151 ref.type = type_
1149 1152         ref.name = name
1150 1153 ref.commit_id = "deadbeef"
1151 1154 return ref
1152 1155
1153 1156
1154 1157 class TestIsTheSameBranch(object):
1155 1158 def test_returns_true_when_branches_are_equal(self):
1156 1159 source_ref = mock.Mock(name="source-ref")
1157 1160 target_ref = mock.Mock(name="target-ref")
1158 1161 branch_name_patcher = mock.patch.object(
1159 1162 self.repo, "_get_branch_name", return_value="default")
1160 1163 with branch_name_patcher as branch_name_mock:
1161 1164 result = self.repo._is_the_same_branch(source_ref, target_ref)
1162 1165
1163 1166 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1164 1167 assert branch_name_mock.call_args_list == expected_calls
1165 1168 assert result is True
1166 1169
1167 1170 def test_returns_false_when_branches_are_not_equal(self):
1168 1171 source_ref = mock.Mock(name="source-ref")
1169 1172 source_ref.name = "source-branch"
1170 1173 target_ref = mock.Mock(name="target-ref")
1171 1174         target_ref.name = "target-branch"
1172 1175
1173 1176 def side_effect(ref):
1174 1177 return ref.name
1175 1178
1176 1179 branch_name_patcher = mock.patch.object(
1177 1180 self.repo, "_get_branch_name", side_effect=side_effect)
1178 1181 with branch_name_patcher as branch_name_mock:
1179 1182 result = self.repo._is_the_same_branch(source_ref, target_ref)
1180 1183
1181 1184 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1182 1185 assert branch_name_mock.call_args_list == expected_calls
1183 1186 assert result is False
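The side_effect used in the last test is the standard mock idiom for per-argument answers: where return_value pins a single reply for every call, side_effect is invoked with the call's arguments and computes the reply. A small self-contained illustration:

.. code-block:: python

    import mock

    class Ref(object):
        def __init__(self, name):
            self.name = name

    # Each call answers based on the ref it was given.
    branch_of = mock.Mock(side_effect=lambda ref: ref.name)

    assert branch_of(Ref('source-branch')) == 'source-branch'
    assert branch_of(Ref('target-branch')) == 'target-branch'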
@@ -1,183 +1,183 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from mock import call, patch
24 24
25 25 from rhodecode.lib.vcs.backends.base import Reference
26 26
27 27
28 28 class TestMercurialRemoteRepoInvalidation(object):
29 29 """
30 30     If the VCSServer is running with multiple processes and/or instances,
31 31     operations on repositories are potentially handled by different processes
32 32     in a random fashion. The mercurial repository objects used in the VCSServer
33 33     cache the commits of the repo. Therefore we have to invalidate the
34 34     VCSServer's caching of these objects after a writing operation.
35 35 """
36 36
37 37 # Default reference used as a dummy during tests.
38 38 default_ref = Reference('branch', 'default', None)
39 39
40 40 # Methods of vcsserver.hg.HgRemote that are "writing" operations.
41 41 writing_methods = [
42 42 'bookmark',
43 43 'commit',
44 44 'merge',
45 45 'pull',
46 46 'pull_cmd',
47 47 'rebase',
48 48 'strip',
49 49 'tag',
50 50 ]
51 51
52 52 @pytest.mark.parametrize('method_name, method_args', [
53 53 ('_local_merge', [default_ref, None, None, None, default_ref]),
54 54 ('_local_pull', ['', default_ref]),
55 55 ('bookmark', [None]),
56 56 ('pull', ['', default_ref]),
57 57 ('remove_tag', ['mytag', None]),
58 58 ('strip', [None]),
59 59 ('tag', ['newtag', None]),
60 60 ])
61 61 def test_method_invokes_invalidate_on_remote_repo(
62 62 self, method_name, method_args, backend_hg):
63 63 """
64 64 Check that the listed methods are invalidating the VCSServer cache
65 65 after invoking a writing method of their remote repository object.
66 66 """
67 67 tags = {'mytag': 'mytag-id'}
68 68
69 69 def add_tag(name, raw_id, *args, **kwds):
70 70 tags[name] = raw_id
71 71
72 72 repo = backend_hg.repo.scm_instance()
73 73 with patch.object(repo, '_remote') as remote:
74 74 remote.lookup.return_value = ('commit-id', 'commit-idx')
75 75 remote.tags.return_value = tags
76 76 remote._get_tags.return_value = tags
77 77 remote.tag.side_effect = add_tag
78 78
79 79 # Invoke method.
80 80 method = getattr(repo, method_name)
81 81 method(*method_args)
82 82
83 83 # Assert that every "writing" method is followed by an invocation
84 84 # of the cache invalidation method.
85 85 for counter, method_call in enumerate(remote.method_calls):
86 86 call_name = method_call[0]
87 87 if call_name in self.writing_methods:
88 88 next_call = remote.method_calls[counter + 1]
89 89 assert next_call == call.invalidate_vcs_cache()
90 90
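The loop above encodes the invariant this suite cares about: every writing call on the remote repository object must be immediately followed by invalidate_vcs_cache(), so no stale commit cache survives a mutation. A minimal sketch of enforcing that invariant with a decorator (the decorator itself is an illustrative assumption; as the assertions show, the code under test calls invalidate_vcs_cache inline after each write):

.. code-block:: python

    import functools

    def invalidates_vcs_cache(method):
        # Run the writing operation, then drop the VCSServer-side cache
        # so subsequent reads observe the new repository state.
        @functools.wraps(method)
        def wrapper(self, *args, **kwargs):
            result = method(self, *args, **kwargs)
            self._remote.invalidate_vcs_cache()
            return result
        return wrapper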
91 91 def _prepare_shadow_repo(self, pull_request):
92 92 """
93 93 Helper that creates a shadow repo that can be used to reproduce the
94 94 CommitDoesNotExistError when pulling in from target and source
95 95 references.
96 96 """
97 97 from rhodecode.model.pull_request import PullRequestModel
98
98 repo_id = pull_request.target_repo.repo_id
99 99 target_vcs = pull_request.target_repo.scm_instance()
100 100 target_ref = pull_request.target_ref_parts
101 101 source_ref = pull_request.source_ref_parts
102 102
103 103 # Create shadow repository.
104 104 pr = PullRequestModel()
105 105 workspace_id = pr._workspace_id(pull_request)
106 106 shadow_repository_path = target_vcs._maybe_prepare_merge_workspace(
107 workspace_id, target_ref, source_ref)
107 repo_id, workspace_id, target_ref, source_ref)
108 108 shadow_repo = target_vcs._get_shadow_instance(shadow_repository_path)
109 109
110 110 # This will populate the cache of the mercurial repository object
111 111 # inside of the VCSServer.
112 112 shadow_repo.get_commit()
113 113
114 114 return shadow_repo, source_ref, target_ref
115 115
116 116 @pytest.mark.backends('hg')
117 117 def test_commit_does_not_exist_error_happens(self, pr_util, app):
118 118 """
119 119 This test is somewhat special. It does not really test the system
120 120 instead it is more or less a precondition for the
121 121 "test_commit_does_not_exist_error_does_not_happen". It deactivates the
122 122 cache invalidation and asserts that the error occurs.
123 123 """
124 124 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
125 125
126 126 pull_request = pr_util.create_pull_request()
127 127 target_vcs = pull_request.target_repo.scm_instance()
128 128 source_vcs = pull_request.source_repo.scm_instance()
129 129 shadow_repo, source_ref, target_ref = self._prepare_shadow_repo(
130 130 pull_request)
131 131
132 132 # Pull from target and source references but without invalidation of
133 133 # RemoteRepo objects and without VCSServer caching of mercurial
134 134 # repository objects.
135 135 with patch.object(shadow_repo._remote, 'invalidate_vcs_cache'):
136 136 # NOTE: Do not use patch.dict() to disable the cache because it
137 137 # restores the WHOLE dict and not only the patched keys.
138 138 shadow_repo._remote._wire['cache'] = False
139 139 shadow_repo._local_pull(target_vcs.path, target_ref)
140 140 shadow_repo._local_pull(source_vcs.path, source_ref)
141 141 shadow_repo._remote._wire.pop('cache')
142 142
143 143         # Try to look up the target_ref in the shadow repo. This should work
144 144         # because the shadow repo is a clone of the target and always contains
145 145         # all of its commits in the initial cache.
146 146 shadow_repo.get_commit(target_ref.commit_id)
147 147
148 148         # If we try to look up the source_ref it should fail because the shadow
149 149 # repo commit cache doesn't get invalidated. (Due to patched
150 150 # invalidation and caching above).
151 151 with pytest.raises(CommitDoesNotExistError):
152 152 shadow_repo.get_commit(source_ref.commit_id)
153 153
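The NOTE about patch.dict() above is worth spelling out: patch.dict() snapshots the whole dictionary on entry and restores that snapshot on exit, so keys the code under test legitimately added or mutated while the patch was active are silently rolled back as well, not just the patched ones. Manually setting and popping the 'cache' key avoids that. A small demonstration of the pitfall:

.. code-block:: python

    from mock import patch

    wire = {'cache': True}
    with patch.dict(wire, {'cache': False}):
        wire['added-inside'] = 'value'
    # The exit handler restores the snapshot, wiping the new key too:
    assert 'added-inside' not in wire
    assert wire == {'cache': True}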
154 154 @pytest.mark.backends('hg')
155 155 def test_commit_does_not_exist_error_does_not_happen(self, pr_util, app):
156 156 """
157 157 This test simulates a pull request merge in which the pull operations
158 158 are handled by a different VCSServer process than all other operations.
159 159 Without correct cache invalidation this leads to an error when
160 160 retrieving the pulled commits afterwards.
161 161 """
162 162
163 163 pull_request = pr_util.create_pull_request()
164 164 target_vcs = pull_request.target_repo.scm_instance()
165 165 source_vcs = pull_request.source_repo.scm_instance()
166 166 shadow_repo, source_ref, target_ref = self._prepare_shadow_repo(
167 167 pull_request)
168 168
169 169         # Pull from target and source references without VCSServer
170 170 # caching of mercurial repository objects but with active invalidation
171 171 # of RemoteRepo objects.
172 172 # NOTE: Do not use patch.dict() to disable the cache because it
173 173 # restores the WHOLE dict and not only the patched keys.
174 174 shadow_repo._remote._wire['cache'] = False
175 175 shadow_repo._local_pull(target_vcs.path, target_ref)
176 176 shadow_repo._local_pull(source_vcs.path, source_ref)
177 177 shadow_repo._remote._wire.pop('cache')
178 178
179 179         # Try to look up the target and source references in the shadow repo. This
180 180 # should work because the RemoteRepo object gets invalidated during the
181 181 # above pull operations.
182 182 shadow_repo.get_commit(target_ref.commit_id)
183 183 shadow_repo.get_commit(source_ref.commit_id)
@@ -1,537 +1,553 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import datetime
22 22 from urllib2 import URLError
23 23
24 24 import mock
25 25 import pytest
26 26
27 27 from rhodecode.lib.vcs import backends
28 28 from rhodecode.lib.vcs.backends.base import (
29 29 Config, BaseInMemoryCommit, Reference, MergeResponse, MergeFailureReason)
30 30 from rhodecode.lib.vcs.exceptions import VCSError, RepositoryError
31 31 from rhodecode.lib.vcs.nodes import FileNode
32 32 from rhodecode.tests.vcs.conftest import BackendTestMixin
33 from rhodecode.tests import repo_id_generator
33 34
34 35
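These repository tests import repo_id_generator from rhodecode.tests to fabricate a numeric repo id for repositories that exist only on disk and therefore have no database row. Its exact implementation is not shown in this changeset; any deterministic mapping from the path to an integer would satisfy the tests, for example this assumed stand-in:

.. code-block:: python

    import zlib

    def repo_id_generator(name):
        # Assumed stand-in: derive a stable non-negative integer from the
        # repository path so repeated calls for the same repo agree.
        return zlib.crc32(name) & 0xffffffff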
35 36 @pytest.mark.usefixtures("vcs_repository_support")
36 37 class TestRepositoryBase(BackendTestMixin):
37 38 recreate_repo_per_test = False
38 39
39 40 def test_init_accepts_unicode_path(self, tmpdir):
40 41 path = unicode(tmpdir.join(u'unicode ä'))
41 42 self.Backend(path, create=True)
42 43
43 44 def test_init_accepts_str_path(self, tmpdir):
44 45 path = str(tmpdir.join('str ä'))
45 46 self.Backend(path, create=True)
46 47
47 48 def test_init_fails_if_path_does_not_exist(self, tmpdir):
48 49 path = unicode(tmpdir.join('i-do-not-exist'))
49 50 with pytest.raises(VCSError):
50 51 self.Backend(path)
51 52
52 53 def test_init_fails_if_path_is_not_a_valid_repository(self, tmpdir):
53 54 path = unicode(tmpdir.mkdir(u'unicode ä'))
54 55 with pytest.raises(VCSError):
55 56 self.Backend(path)
56 57
57 58 def test_has_commits_attribute(self):
58 59 self.repo.commit_ids
59 60
60 61 def test_name(self):
61 62 assert self.repo.name.startswith('vcs-test')
62 63
63 64 @pytest.mark.backends("hg", "git")
64 65 def test_has_default_branch_name(self):
65 66 assert self.repo.DEFAULT_BRANCH_NAME is not None
66 67
67 68 @pytest.mark.backends("svn")
68 69 def test_has_no_default_branch_name(self):
69 70 assert self.repo.DEFAULT_BRANCH_NAME is None
70 71
71 72 def test_has_empty_commit(self):
72 73 assert self.repo.EMPTY_COMMIT_ID is not None
73 74 assert self.repo.EMPTY_COMMIT is not None
74 75
75 76 def test_empty_changeset_is_deprecated(self):
76 77 def get_empty_changeset(repo):
77 78 return repo.EMPTY_CHANGESET
78 79 pytest.deprecated_call(get_empty_changeset, self.repo)
79 80
80 81 def test_bookmarks(self):
81 82 assert len(self.repo.bookmarks) == 0
82 83
83 84 # TODO: Cover two cases: Local repo path, remote URL
84 85 def test_check_url(self):
85 86 config = Config()
86 87 assert self.Backend.check_url(self.repo.path, config)
87 88
88 89 def test_check_url_invalid(self):
89 90 config = Config()
90 91 with pytest.raises(URLError):
91 92 self.Backend.check_url(self.repo.path + "invalid", config)
92 93
93 94 def test_get_contact(self):
94 95 assert self.repo.contact
95 96
96 97 def test_get_description(self):
97 98 assert self.repo.description
98 99
99 100 def test_get_hook_location(self):
100 101 assert len(self.repo.get_hook_location()) != 0
101 102
102 103 def test_last_change(self, local_dt_to_utc):
103 104 assert self.repo.last_change >= local_dt_to_utc(
104 105 datetime.datetime(2010, 1, 1, 21, 0))
105 106
106 107 def test_last_change_in_empty_repository(self, vcsbackend, local_dt_to_utc):
107 108 delta = datetime.timedelta(seconds=1)
108 109
109 110 start = local_dt_to_utc(datetime.datetime.now())
110 111 empty_repo = vcsbackend.create_repo()
111 112 now = local_dt_to_utc(datetime.datetime.now())
112 113 assert empty_repo.last_change >= start - delta
113 114 assert empty_repo.last_change <= now + delta
114 115
115 116 def test_repo_equality(self):
116 117 assert self.repo == self.repo
117 118
118 119 def test_repo_equality_broken_object(self):
119 120 import copy
120 121 _repo = copy.copy(self.repo)
121 122 delattr(_repo, 'path')
122 123 assert self.repo != _repo
123 124
124 125 def test_repo_equality_other_object(self):
125 126 class dummy(object):
126 127 path = self.repo.path
127 128 assert self.repo != dummy()
128 129
129 130 def test_get_commit_is_implemented(self):
130 131 self.repo.get_commit()
131 132
132 133 def test_get_commits_is_implemented(self):
133 134 commit_iter = iter(self.repo.get_commits())
134 135 commit = next(commit_iter)
135 136 assert commit.idx == 0
136 137
137 138 def test_supports_iteration(self):
138 139 repo_iter = iter(self.repo)
139 140 commit = next(repo_iter)
140 141 assert commit.idx == 0
141 142
142 143 def test_in_memory_commit(self):
143 144 imc = self.repo.in_memory_commit
144 145 assert isinstance(imc, BaseInMemoryCommit)
145 146
146 147 @pytest.mark.backends("hg")
147 148 def test__get_url_unicode(self):
148 149 url = u'/home/repos/malmö'
149 150 assert self.repo._get_url(url)
150 151
151 152
152 153 @pytest.mark.usefixtures("vcs_repository_support")
153 154 class TestDeprecatedRepositoryAPI(BackendTestMixin):
154 155 recreate_repo_per_test = False
155 156
156 157 def test_revisions_is_deprecated(self):
157 158 def get_revisions(repo):
158 159 return repo.revisions
159 160 pytest.deprecated_call(get_revisions, self.repo)
160 161
161 162 def test_get_changeset_is_deprecated(self):
162 163 pytest.deprecated_call(self.repo.get_changeset)
163 164
164 165 def test_get_changesets_is_deprecated(self):
165 166 pytest.deprecated_call(self.repo.get_changesets)
166 167
167 168 def test_in_memory_changeset_is_deprecated(self):
168 169 def get_imc(repo):
169 170 return repo.in_memory_changeset
170 171 pytest.deprecated_call(get_imc, self.repo)
171 172
172 173
173 174 # TODO: these tests are incomplete, must check the resulting compare output
174 175 # for correctness
175 176 class TestRepositoryCompare:
176 177
177 178 @pytest.mark.parametrize('merge', [True, False])
178 179 def test_compare_commits_of_same_repository(self, vcsbackend, merge):
179 180 target_repo = vcsbackend.create_repo(number_of_commits=5)
180 181 target_repo.compare(
181 182 target_repo[1].raw_id, target_repo[3].raw_id, target_repo,
182 183 merge=merge)
183 184
184 185 @pytest.mark.xfail_backends('svn')
185 186 @pytest.mark.parametrize('merge', [True, False])
186 187 def test_compare_cloned_repositories(self, vcsbackend, merge):
187 188 target_repo = vcsbackend.create_repo(number_of_commits=5)
188 189 source_repo = vcsbackend.clone_repo(target_repo)
189 190 assert target_repo != source_repo
190 191
191 192 vcsbackend.add_file(source_repo, 'newfile', 'somecontent')
192 193 source_commit = source_repo.get_commit()
193 194
194 195 target_repo.compare(
195 196 target_repo[1].raw_id, source_repo[3].raw_id, source_repo,
196 197 merge=merge)
197 198
198 199 @pytest.mark.xfail_backends('svn')
199 200 @pytest.mark.parametrize('merge', [True, False])
200 201 def test_compare_unrelated_repositories(self, vcsbackend, merge):
201 202 orig = vcsbackend.create_repo(number_of_commits=5)
202 203 unrelated = vcsbackend.create_repo(number_of_commits=5)
203 204 assert orig != unrelated
204 205
205 206 orig.compare(
206 207 orig[1].raw_id, unrelated[3].raw_id, unrelated, merge=merge)
207 208
208 209
209 210 class TestRepositoryGetCommonAncestor:
210 211
211 212 def test_get_common_ancestor_from_same_repo_existing(self, vcsbackend):
212 213 target_repo = vcsbackend.create_repo(number_of_commits=5)
213 214
214 215 expected_ancestor = target_repo[2].raw_id
215 216
216 217 assert target_repo.get_common_ancestor(
217 218 commit_id1=target_repo[2].raw_id,
218 219 commit_id2=target_repo[4].raw_id,
219 220 repo2=target_repo
220 221 ) == expected_ancestor
221 222
222 223 assert target_repo.get_common_ancestor(
223 224 commit_id1=target_repo[4].raw_id,
224 225 commit_id2=target_repo[2].raw_id,
225 226 repo2=target_repo
226 227 ) == expected_ancestor
227 228
228 229 @pytest.mark.xfail_backends("svn")
229 230 def test_get_common_ancestor_from_cloned_repo_existing(self, vcsbackend):
230 231 target_repo = vcsbackend.create_repo(number_of_commits=5)
231 232 source_repo = vcsbackend.clone_repo(target_repo)
232 233 assert target_repo != source_repo
233 234
234 235 vcsbackend.add_file(source_repo, 'newfile', 'somecontent')
235 236 source_commit = source_repo.get_commit()
236 237
237 238 expected_ancestor = target_repo[4].raw_id
238 239
239 240 assert target_repo.get_common_ancestor(
240 241 commit_id1=target_repo[4].raw_id,
241 242 commit_id2=source_commit.raw_id,
242 243 repo2=source_repo
243 244 ) == expected_ancestor
244 245
245 246 assert target_repo.get_common_ancestor(
246 247 commit_id1=source_commit.raw_id,
247 248 commit_id2=target_repo[4].raw_id,
248 249 repo2=target_repo
249 250 ) == expected_ancestor
250 251
251 252 @pytest.mark.xfail_backends("svn")
252 253 def test_get_common_ancestor_from_unrelated_repo_missing(self, vcsbackend):
253 254 original = vcsbackend.create_repo(number_of_commits=5)
254 255 unrelated = vcsbackend.create_repo(number_of_commits=5)
255 256 assert original != unrelated
256 257
257 258 assert original.get_common_ancestor(
258 259 commit_id1=original[0].raw_id,
259 260 commit_id2=unrelated[0].raw_id,
260 261 repo2=unrelated
261 262         ) is None
262 263
263 264 assert original.get_common_ancestor(
264 265 commit_id1=original[-1].raw_id,
265 266 commit_id2=unrelated[-1].raw_id,
266 267 repo2=unrelated
267 268 ) == None
268 269
269 270
270 271 @pytest.mark.backends("git", "hg")
271 class TestRepositoryMerge:
272 class TestRepositoryMerge(object):
272 273 def prepare_for_success(self, vcsbackend):
273 274 self.target_repo = vcsbackend.create_repo(number_of_commits=1)
274 275 self.source_repo = vcsbackend.clone_repo(self.target_repo)
275 276 vcsbackend.add_file(self.target_repo, 'README_MERGE1', 'Version 1')
276 277 vcsbackend.add_file(self.source_repo, 'README_MERGE2', 'Version 2')
277 278 imc = self.source_repo.in_memory_commit
278 279 imc.add(FileNode('file_x', content=self.source_repo.name))
279 280 imc.commit(
280 281 message=u'Automatic commit from repo merge test',
281 282 author=u'Automatic')
282 283 self.target_commit = self.target_repo.get_commit()
283 284 self.source_commit = self.source_repo.get_commit()
284 285 # This only works for Git and Mercurial
285 286 default_branch = self.target_repo.DEFAULT_BRANCH_NAME
286 287 self.target_ref = Reference(
287 288 'branch', default_branch, self.target_commit.raw_id)
288 289 self.source_ref = Reference(
289 290 'branch', default_branch, self.source_commit.raw_id)
290 self.workspace = 'test-merge'
291 self.workspace_id = 'test-merge'
292 self.repo_id = repo_id_generator(self.target_repo.path)
291 293
292 294 def prepare_for_conflict(self, vcsbackend):
293 295 self.target_repo = vcsbackend.create_repo(number_of_commits=1)
294 296 self.source_repo = vcsbackend.clone_repo(self.target_repo)
295 297 vcsbackend.add_file(self.target_repo, 'README_MERGE', 'Version 1')
296 298 vcsbackend.add_file(self.source_repo, 'README_MERGE', 'Version 2')
297 299 self.target_commit = self.target_repo.get_commit()
298 300 self.source_commit = self.source_repo.get_commit()
299 301 # This only works for Git and Mercurial
300 302 default_branch = self.target_repo.DEFAULT_BRANCH_NAME
301 303 self.target_ref = Reference(
302 304 'branch', default_branch, self.target_commit.raw_id)
303 305 self.source_ref = Reference(
304 306 'branch', default_branch, self.source_commit.raw_id)
305 self.workspace = 'test-merge'
307 self.workspace_id = 'test-merge'
308 self.repo_id = repo_id_generator(self.target_repo.path)
306 309
307 310 def test_merge_success(self, vcsbackend):
308 311 self.prepare_for_success(vcsbackend)
309 312
310 313 merge_response = self.target_repo.merge(
311 self.target_ref, self.source_repo, self.source_ref, self.workspace,
314 self.repo_id, self.workspace_id, self.target_ref, self.source_repo,
315 self.source_ref,
312 316 'test user', 'test@rhodecode.com', 'merge message 1',
313 317 dry_run=False)
314 318 expected_merge_response = MergeResponse(
315 319 True, True, merge_response.merge_ref,
316 320 MergeFailureReason.NONE)
317 321 assert merge_response == expected_merge_response
318 322
319 323 target_repo = backends.get_backend(vcsbackend.alias)(
320 324 self.target_repo.path)
321 325 target_commits = list(target_repo.get_commits())
322 326 commit_ids = [c.raw_id for c in target_commits[:-1]]
323 327 assert self.source_ref.commit_id in commit_ids
324 328 assert self.target_ref.commit_id in commit_ids
325 329
326 330 merge_commit = target_commits[-1]
327 331 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
328 332 assert merge_commit.message.strip() == 'merge message 1'
329 333 assert merge_commit.author == 'test user <test@rhodecode.com>'
330 334
331 335         # We call it twice to make sure we can handle updates
332 336 target_ref = Reference(
333 337 self.target_ref.type, self.target_ref.name,
334 338 merge_response.merge_ref.commit_id)
335 339
336 340 merge_response = target_repo.merge(
337 target_ref, self.source_repo, self.source_ref, self.workspace,
341 self.repo_id, self.workspace_id, target_ref, self.source_repo, self.source_ref,
338 342 'test user', 'test@rhodecode.com', 'merge message 2',
339 343 dry_run=False)
340 344 expected_merge_response = MergeResponse(
341 345 True, True, merge_response.merge_ref,
342 346 MergeFailureReason.NONE)
343 347 assert merge_response == expected_merge_response
344 348
345 349 target_repo = backends.get_backend(
346 350 vcsbackend.alias)(self.target_repo.path)
347 351 merge_commit = target_repo.get_commit(
348 352 merge_response.merge_ref.commit_id)
349 353 assert merge_commit.message.strip() == 'merge message 1'
350 354 assert merge_commit.author == 'test user <test@rhodecode.com>'
351 355
352 356 def test_merge_success_dry_run(self, vcsbackend):
353 357 self.prepare_for_success(vcsbackend)
354 358
355 359 merge_response = self.target_repo.merge(
356 self.target_ref, self.source_repo, self.source_ref, self.workspace,
357 dry_run=True)
360 self.repo_id, self.workspace_id, self.target_ref, self.source_repo,
361 self.source_ref, dry_run=True)
358 362
359 363         # We call it twice to make sure we can handle updates
360 364 merge_response_update = self.target_repo.merge(
361 self.target_ref, self.source_repo, self.source_ref, self.workspace,
362 dry_run=True)
365 self.repo_id, self.workspace_id, self.target_ref, self.source_repo,
366 self.source_ref, dry_run=True)
363 367
364 368 # Multiple merges may differ in their commit id. Therefore we set the
365 369 # commit id to `None` before comparing the merge responses.
366 370 merge_response = merge_response._replace(
367 371 merge_ref=merge_response.merge_ref._replace(commit_id=None))
368 372 merge_response_update = merge_response_update._replace(
369 373 merge_ref=merge_response_update.merge_ref._replace(commit_id=None))
370 374
371 375 assert merge_response == merge_response_update
372 376 assert merge_response.possible is True
373 377 assert merge_response.executed is False
374 378 assert merge_response.merge_ref
375 379 assert merge_response.failure_reason is MergeFailureReason.NONE
376 380
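The _replace calls above work because MergeResponse and its merge_ref behave like collections.namedtuple instances: _replace returns a copy with the named fields swapped out and leaves the original untouched, which makes it easy to blank the volatile commit_id before comparing the rest. The same trick in isolation (the field names here are assumptions for illustration):

.. code-block:: python

    from collections import namedtuple

    MergeRef = namedtuple('MergeRef', ['type', 'name', 'commit_id'])

    ref_a = MergeRef('branch', 'default', 'deadbeef')
    ref_b = MergeRef('branch', 'default', 'cafebabe')

    # Blank out the volatile field, then compare the stable remainder.
    assert ref_a._replace(commit_id=None) == ref_b._replace(commit_id=None)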
377 381 @pytest.mark.parametrize('dry_run', [True, False])
378 382 def test_merge_conflict(self, vcsbackend, dry_run):
379 383 self.prepare_for_conflict(vcsbackend)
380 384 expected_merge_response = MergeResponse(
381 385 False, False, None, MergeFailureReason.MERGE_FAILED)
382 386
383 387 merge_response = self.target_repo.merge(
384 self.target_ref, self.source_repo, self.source_ref, self.workspace,
388 self.repo_id, self.workspace_id, self.target_ref,
389 self.source_repo, self.source_ref,
385 390 'test_user', 'test@rhodecode.com', 'test message', dry_run=dry_run)
386 391 assert merge_response == expected_merge_response
387 392
388 393         # We call it twice to make sure we can handle updates
389 394 merge_response = self.target_repo.merge(
390 self.target_ref, self.source_repo, self.source_ref, self.workspace,
395 self.repo_id, self.workspace_id, self.target_ref, self.source_repo,
396 self.source_ref,
391 397 'test_user', 'test@rhodecode.com', 'test message', dry_run=dry_run)
392 398 assert merge_response == expected_merge_response
393 399
394 400 def test_merge_target_is_not_head(self, vcsbackend):
395 401 self.prepare_for_success(vcsbackend)
396 402 expected_merge_response = MergeResponse(
397 403 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
398 404
399 405 target_ref = Reference(
400 406 self.target_ref.type, self.target_ref.name, '0' * 40)
401 407
402 408 merge_response = self.target_repo.merge(
403 target_ref, self.source_repo, self.source_ref, self.workspace,
404 dry_run=True)
409 self.repo_id, self.workspace_id, target_ref, self.source_repo,
410 self.source_ref, dry_run=True)
405 411
406 412 assert merge_response == expected_merge_response
407 413
408 414 def test_merge_missing_source_reference(self, vcsbackend):
409 415 self.prepare_for_success(vcsbackend)
410 416 expected_merge_response = MergeResponse(
411 417 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
412 418
413 419 source_ref = Reference(
414 420 self.source_ref.type, 'not_existing', self.source_ref.commit_id)
415 421
416 422 merge_response = self.target_repo.merge(
417 self.target_ref, self.source_repo, source_ref, self.workspace,
423 self.repo_id, self.workspace_id, self.target_ref,
424 self.source_repo, source_ref,
418 425 dry_run=True)
419 426
420 427 assert merge_response == expected_merge_response
421 428
422 429 def test_merge_raises_exception(self, vcsbackend):
423 430 self.prepare_for_success(vcsbackend)
424 431 expected_merge_response = MergeResponse(
425 432 False, False, None, MergeFailureReason.UNKNOWN)
426 433
427 434 with mock.patch.object(self.target_repo, '_merge_repo',
428 435 side_effect=RepositoryError()):
429 436 merge_response = self.target_repo.merge(
430 self.target_ref, self.source_repo, self.source_ref,
431 self.workspace, dry_run=True)
437 self.repo_id, self.workspace_id, self.target_ref,
438 self.source_repo, self.source_ref,
439 dry_run=True)
432 440
433 441 assert merge_response == expected_merge_response
434 442
435 443 def test_merge_invalid_user_name(self, vcsbackend):
436 444 repo = vcsbackend.create_repo(number_of_commits=1)
437 445 ref = Reference('branch', 'master', 'not_used')
446 workspace_id = 'test-errors-in-merge'
447 repo_id = repo_id_generator(workspace_id)
438 448 with pytest.raises(ValueError):
439 repo.merge(ref, self, ref, 'workspace_id')
449 repo.merge(repo_id, workspace_id, ref, self, ref)
440 450
441 451 def test_merge_invalid_user_email(self, vcsbackend):
442 452 repo = vcsbackend.create_repo(number_of_commits=1)
443 453 ref = Reference('branch', 'master', 'not_used')
454 workspace_id = 'test-errors-in-merge'
455 repo_id = repo_id_generator(workspace_id)
444 456 with pytest.raises(ValueError):
445 repo.merge(ref, self, ref, 'workspace_id', 'user name')
457 repo.merge(
458 repo_id, workspace_id, ref, self, ref, 'user name')
446 459
447 460 def test_merge_invalid_message(self, vcsbackend):
448 461 repo = vcsbackend.create_repo(number_of_commits=1)
449 462 ref = Reference('branch', 'master', 'not_used')
463 workspace_id = 'test-errors-in-merge'
464 repo_id = repo_id_generator(workspace_id)
450 465 with pytest.raises(ValueError):
451 466 repo.merge(
452 ref, self, ref, 'workspace_id', 'user name', 'user@email.com')
467 repo_id, workspace_id, ref, self, ref,
468 'user name', 'user@email.com')
453 469
454 470
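A minimal sketch of the reworked merge() call order that the tests above exercise: the shadow-repo identifiers (repo_id, workspace_id) now lead the argument list, ahead of the refs and the source repository. The helper name check_mergeable and the import path are assumptions for illustration, not part of the diff.

from rhodecode.lib.vcs.backends.base import MergeFailureReason

def check_mergeable(target_repo, source_repo, target_ref, source_ref,
                    repo_id, workspace_id):
    # dry_run=True reports mergeability without creating a merge commit,
    # matching the `possible is True, executed is False` assertions above
    response = target_repo.merge(
        repo_id, workspace_id, target_ref, source_repo, source_ref,
        'test_user', 'test@rhodecode.com', 'test message', dry_run=True)
    return response.possible and (
        response.failure_reason is MergeFailureReason.NONE)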
455 471 @pytest.mark.usefixtures("vcs_repository_support")
456 472 class TestRepositoryStrip(BackendTestMixin):
457 473 recreate_repo_per_test = True
458 474
459 475 @classmethod
460 476 def _get_commits(cls):
461 477 commits = [
462 478 {
463 479 'message': 'Initial commit',
464 480 'author': 'Joe Doe <joe.doe@example.com>',
465 481 'date': datetime.datetime(2010, 1, 1, 20),
466 482 'branch': 'master',
467 483 'added': [
468 484 FileNode('foobar', content='foobar'),
469 485 FileNode('foobar2', content='foobar2'),
470 486 ],
471 487 },
472 488 ]
473 489 for x in xrange(10):
474 490 commit_data = {
475 491 'message': 'Changed foobar - commit%s' % x,
476 492 'author': 'Jane Doe <jane.doe@example.com>',
477 493 'date': datetime.datetime(2010, 1, 1, 21, x),
478 494 'branch': 'master',
479 495 'changed': [
480 496 FileNode('foobar', 'FOOBAR - %s' % x),
481 497 ],
482 498 }
483 499 commits.append(commit_data)
484 500 return commits
485 501
486 502 @pytest.mark.backends("git", "hg")
487 503 def test_strip_commit(self):
488 504 tip = self.repo.get_commit()
489 505 assert tip.idx == 10
490 506 self.repo.strip(tip.raw_id, self.repo.DEFAULT_BRANCH_NAME)
491 507
492 508 tip = self.repo.get_commit()
493 509 assert tip.idx == 9
494 510
495 511 @pytest.mark.backends("git", "hg")
496 512 def test_strip_multiple_commits(self):
497 513 tip = self.repo.get_commit()
498 514 assert tip.idx == 10
499 515
500 516 old = self.repo.get_commit(commit_idx=5)
501 517 self.repo.strip(old.raw_id, self.repo.DEFAULT_BRANCH_NAME)
502 518
503 519 tip = self.repo.get_commit()
504 520 assert tip.idx == 4
505 521
506 522
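A small sketch of the strip() behaviour verified above: stripping the commit at a given index removes it and every later commit, so the preceding index becomes the new tip. The helper name strip_to_index is an assumption; repo stands in for any hg/git backend repository from these tests.

def strip_to_index(repo, idx):
    # resolve the commit to strip by its index, then strip from it onwards
    commit = repo.get_commit(commit_idx=idx)
    repo.strip(commit.raw_id, repo.DEFAULT_BRANCH_NAME)
    # the commit just before the stripped one is now the tip
    return repo.get_commit().idx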
507 523 @pytest.mark.backends('hg', 'git')
508 class TestRepositoryPull:
524 class TestRepositoryPull(object):
509 525
510 526 def test_pull(self, vcsbackend):
511 527 source_repo = vcsbackend.repo
512 528 target_repo = vcsbackend.create_repo()
513 529 assert len(source_repo.commit_ids) > len(target_repo.commit_ids)
514 530
515 531 target_repo.pull(source_repo.path)
516 532 # Note: get a fresh instance to avoid caching issues
517 533 target_repo = vcsbackend.backend(target_repo.path)
518 534 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
519 535
520 536 def test_pull_wrong_path(self, vcsbackend):
521 537 target_repo = vcsbackend.create_repo()
522 538 with pytest.raises(RepositoryError):
523 539 target_repo.pull(target_repo.path + "wrong")
524 540
525 541 def test_pull_specific_commits(self, vcsbackend):
526 542 source_repo = vcsbackend.repo
527 543 target_repo = vcsbackend.create_repo()
528 544
529 545 second_commit = source_repo[1].raw_id
530 546 if vcsbackend.alias == 'git':
531 547 second_commit_ref = 'refs/test-refs/a'
532 548 source_repo.set_refs(second_commit_ref, second_commit)
533 549
534 550 target_repo.pull(source_repo.path, commit_ids=[second_commit])
535 551 target_repo = vcsbackend.backend(target_repo.path)
536 552 assert 2 == len(target_repo.commit_ids)
537 553 assert second_commit == target_repo.get_commit().raw_id
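A sketch of the selective-pull pattern from the last test: on git, a commit can only be fetched if it is reachable from a ref, so the test publishes one first; hg needs no such step. The helper pull_single_commit and its arguments are illustrative assumptions built from the calls shown above.

def pull_single_commit(source_repo, target_repo, commit_id, alias):
    if alias == 'git':
        # git fetch only transfers commits reachable from some ref
        source_repo.set_refs('refs/test-refs/a', commit_id)
    target_repo.pull(source_repo.path, commit_ids=[commit_id])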