pull-requests: use close action with proper --close-commit solution....
marcink - r2056:678ed378 default
@@ -1,1045 +1,1047 b''
1 1 .. _repo-methods-ref:
2 2
3 3 repo methods
4 4 ============
5 5
6 6 add_field_to_repo
7 7 -----------------
8 8
9 9 .. py:function:: add_field_to_repo(apiuser, repoid, key, label=<Optional:''>, description=<Optional:''>)
10 10
11 11 Adds an extra field to a repository.
12 12
13 13 This command can only be run using an |authtoken| with at least
14 14 write permissions to the |repo|.
15 15
16 16 :param apiuser: This is filled automatically from the |authtoken|.
17 17 :type apiuser: AuthUser
18 18 :param repoid: Set the repository name or repository id.
19 19 :type repoid: str or int
20 20 :param key: Create a unique field key for this repository.
21 21 :type key: str
22 22 :param label: Optional label to display for the field.
23 23 :type label: Optional(str)
24 24 :param description: Optional description of the field.
25 25 :type description: Optional(str)
26 26
27 27
28 28 comment_commit
29 29 --------------
30 30
31 31 .. py:function:: comment_commit(apiuser, repoid, commit_id, message, status=<Optional:None>, comment_type=<Optional:u'note'>, resolves_comment_id=<Optional:None>, userid=<Optional:<OptionalAttr:apiuser>>)
32 32
33 33 Set a commit comment, and optionally change the status of the commit.
34 34
35 35 :param apiuser: This is filled automatically from the |authtoken|.
36 36 :type apiuser: AuthUser
37 37 :param repoid: Set the repository name or repository ID.
38 38 :type repoid: str or int
39 39 :param commit_id: Specify the commit_id for which to set a comment.
40 40 :type commit_id: str
41 41 :param message: The comment text.
42 42 :type message: str
43 43 :param status: (**Optional**) status of commit, one of: 'not_reviewed',
44 44 'approved', 'rejected', 'under_review'
45 45 :type status: str
46 46 :param comment_type: Comment type, one of: 'note', 'todo'
47 47 :type comment_type: Optional(str), default: 'note'
48 48 :param userid: Set the user name of the comment creator.
49 49 :type userid: Optional(str or int)
50 50
51 51 Example output:
52 52
53 53 .. code-block:: bash
54 54
55 55 {
56 56 "id" : <id_given_in_input>,
57 57 "result" : {
58 58 "msg": "Commented on commit `<commit_id>` for repository `<repoid>`",
59 59 "status_change": null or <status>,
60 60 "success": true
61 61 },
62 62 "error" : null
63 63 }
64 64
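
A minimal sketch of calling this method over the JSON-RPC API, assuming the standard ``/_admin/api`` endpoint and the ``requests`` library; the host, token, repository, and commit id below are placeholders:

.. code-block:: python

    import requests

    API_URL = 'https://rhodecode.example.com/_admin/api'  # placeholder host
    payload = {
        'id': 1,
        'auth_token': '<your-auth-token>',
        'method': 'comment_commit',
        'args': {
            'repoid': 'my-repo',
            'commit_id': '<commit_id>',
            'message': 'Looks good to me.',
            'status': 'approved',  # optional status change
        },
    }
    response = requests.post(API_URL, json=payload)
    print(response.json()['result'])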
65 65
66 66 create_repo
67 67 -----------
68 68
69 69 .. py:function:: create_repo(apiuser, repo_name, repo_type, owner=<Optional:<OptionalAttr:apiuser>>, description=<Optional:''>, private=<Optional:False>, clone_uri=<Optional:None>, landing_rev=<Optional:'rev:tip'>, enable_statistics=<Optional:False>, enable_locking=<Optional:False>, enable_downloads=<Optional:False>, copy_permissions=<Optional:False>)
70 70
71 71 Creates a repository.
72 72
73 73 * If the repository name contains "/", the repository will be created inside
74 74 a repository group or nested repository groups.
75 75
76 76 For example, "foo/bar/repo1" will create a |repo| called "repo1" inside
77 77 the group "foo/bar". You must have permission to access and write to
78 78 the last repository group ("bar" in this example).
79 79
80 80 This command can only be run using an |authtoken| with at least
81 81 permissions to create repositories, or write permissions to
82 82 parent repository groups.
83 83
84 84 :param apiuser: This is filled automatically from the |authtoken|.
85 85 :type apiuser: AuthUser
86 86 :param repo_name: Set the repository name.
87 87 :type repo_name: str
88 88 :param repo_type: Set the repository type; 'hg','git', or 'svn'.
89 89 :type repo_type: str
90 90 :param owner: user_id or username
91 91 :type owner: Optional(str)
92 92 :param description: Set the repository description.
93 93 :type description: Optional(str)
94 94 :param private: set repository as private
95 95 :type private: bool
96 96 :param clone_uri: set clone_uri
97 97 :type clone_uri: str
98 98 :param landing_rev: <rev_type>:<rev>
99 99 :type landing_rev: str
100 100 :param enable_locking:
101 101 :type enable_locking: bool
102 102 :param enable_downloads:
103 103 :type enable_downloads: bool
104 104 :param enable_statistics:
105 105 :type enable_statistics: bool
106 106 :param copy_permissions: Copy permission from group in which the
107 107 repository is being created.
108 108 :type copy_permissions: bool
109 109
110 110
111 111 Example output:
112 112
113 113 .. code-block:: bash
114 114
115 115 id : <id_given_in_input>
116 116 result: {
117 117 "msg": "Created new repository `<reponame>`",
118 118 "success": true,
119 119 "task": "<celery task id or None if done sync>"
120 120 }
121 121 error: null
122 122
123 123
124 124 Example error output:
125 125
126 126 .. code-block:: bash
127 127
128 128 id : <id_given_in_input>
129 129 result : null
130 130 error : {
131 131 'failed to create repository `<repo_name>`'
132 132 }
133 133
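
The nested-group behaviour described above could be exercised with a call like the following sketch, assuming the same JSON-RPC conventions as in the ``comment_commit`` example (endpoint, token, and names are placeholders):

.. code-block:: python

    import requests

    # Creates "repo1" inside the existing group "foo/bar" (placeholder names).
    resp = requests.post(
        'https://rhodecode.example.com/_admin/api',
        json={'id': 2, 'auth_token': '<your-auth-token>',
              'method': 'create_repo',
              'args': {'repo_name': 'foo/bar/repo1',
                       'repo_type': 'hg',
                       'description': 'demo repository',
                       'copy_permissions': True}})
    print(resp.json())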
134 134
135 135 delete_repo
136 136 -----------
137 137
138 138 .. py:function:: delete_repo(apiuser, repoid, forks=<Optional:''>)
139 139
140 140 Deletes a repository.
141 141
142 142 * When the `forks` parameter is set, it's possible to detach or delete
143 143 forks of the deleted repository.
144 144
145 145 This command can only be run using an |authtoken| with admin
146 146 permissions on the |repo|.
147 147
148 148 :param apiuser: This is filled automatically from the |authtoken|.
149 149 :type apiuser: AuthUser
150 150 :param repoid: Set the repository name or repository ID.
151 151 :type repoid: str or int
152 152 :param forks: Set to `detach` or `delete` forks from the |repo|.
153 153 :type forks: Optional(str)
154 154
155 155 Example output:
156 156
157 157 .. code-block:: bash
158 158
159 159 id : <id_given_in_input>
160 160 result: {
161 161 "msg": "Deleted repository `<reponame>`",
162 162 "success": true
163 163 }
164 164 error: null
165 165
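
As a hedged example, deleting a repository while detaching its forks might look like this (placeholder endpoint, token, and names):

.. code-block:: python

    import requests

    resp = requests.post(
        'https://rhodecode.example.com/_admin/api',
        json={'id': 3, 'auth_token': '<your-auth-token>',
              'method': 'delete_repo',
              'args': {'repoid': 'foo/bar/repo1',
                       'forks': 'detach'}})  # or 'delete' to remove forks too
    print(resp.json())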
166 166
167 167 fork_repo
168 168 ---------
169 169
170 170 .. py:function:: fork_repo(apiuser, repoid, fork_name, owner=<Optional:<OptionalAttr:apiuser>>, description=<Optional:''>, private=<Optional:False>, clone_uri=<Optional:None>, landing_rev=<Optional:'rev:tip'>, copy_permissions=<Optional:False>)
171 171
172 172 Creates a fork of the specified |repo|.
173 173
174 174 * If the fork_name contains "/", the fork will be created inside
175 175 a repository group or nested repository groups.
176 176
177 177 For example, "foo/bar/fork-repo" will create a fork called "fork-repo"
178 178 inside the group "foo/bar". You must have permission to access and
179 179 write to the last repository group ("bar" in this example).
180 180
181 181 This command can only be run using an |authtoken| with at least read
182 182 permissions on the forked repo and fork creation permission for the user.
183 183
184 184 :param apiuser: This is filled automatically from the |authtoken|.
185 185 :type apiuser: AuthUser
186 186 :param repoid: Set repository name or repository ID.
187 187 :type repoid: str or int
188 188 :param fork_name: Set the fork name, including its repository group membership.
189 189 :type fork_name: str
190 190 :param owner: Set the fork owner.
191 191 :type owner: str
192 192 :param description: Set the fork description.
193 193 :type description: str
194 194 :param copy_permissions: Copy permissions from parent |repo|. The
195 195 default is False.
196 196 :type copy_permissions: bool
197 197 :param private: Make the fork private. The default is False.
198 198 :type private: bool
199 199 :param landing_rev: Set the landing revision. The default is tip.
200 200
201 201 Example input:
202 202
203 203 .. code-block:: bash
204 204
205 205 id : <id_for_response>
206 206 api_key : "<api_key>"
207 207 args: {
208 208 "repoid" : "<reponame or repo_id>",
209 209 "fork_name": "<forkname>",
210 210 "owner": "<username or user_id = Optional(=apiuser)>",
211 211 "description": "<description>",
212 212 "copy_permissions": "<bool>",
213 213 "private": "<bool>",
214 214 "landing_rev": "<landing_rev>"
215 215 }
216 216
217 217 Example output:
218 218
219 219 .. code-block:: bash
220 220
221 221 id : <id_given_in_input>
222 222 result: {
223 223 "msg": "Created fork of `<reponame>` as `<forkname>`",
224 224 "success": true,
225 225 "task": "<celery task id or None if done sync>"
226 226 }
227 227 error: null
228 228
229 229
230 230 get_repo
231 231 --------
232 232
233 233 .. py:function:: get_repo(apiuser, repoid, cache=<Optional:True>)
234 234
235 235 Gets an existing repository by its name or repository_id.
236 236
237 237 The members section of the output returns the user groups and users
238 238 associated with that repository.
239 239
240 240 This command can only be run using an |authtoken| with admin rights,
241 241 or users with at least read rights to the |repo|.
242 242
243 243 :param apiuser: This is filled automatically from the |authtoken|.
244 244 :type apiuser: AuthUser
245 245 :param repoid: The repository name or repository id.
246 246 :type repoid: str or int
247 247 :param cache: use the cached value for last changeset
248 248 :type cache: Optional(bool)
249 249
250 250 Example output:
251 251
252 252 .. code-block:: bash
253 253
254 254 {
255 255 "error": null,
256 256 "id": <repo_id>,
257 257 "result": {
258 258 "clone_uri": null,
259 259 "created_on": "timestamp",
260 260 "description": "repo description",
261 261 "enable_downloads": false,
262 262 "enable_locking": false,
263 263 "enable_statistics": false,
264 264 "followers": [
265 265 {
266 266 "active": true,
267 267 "admin": false,
268 268 "api_key": "****************************************",
269 269 "api_keys": [
270 270 "****************************************"
271 271 ],
272 272 "email": "user@example.com",
273 273 "emails": [
274 274 "user@example.com"
275 275 ],
276 276 "extern_name": "rhodecode",
277 277 "extern_type": "rhodecode",
278 278 "firstname": "username",
279 279 "ip_addresses": [],
280 280 "language": null,
281 281 "last_login": "2015-09-16T17:16:35.854",
282 282 "lastname": "surname",
283 283 "user_id": <user_id>,
284 284 "username": "name"
285 285 }
286 286 ],
287 287 "fork_of": "parent-repo",
288 288 "landing_rev": [
289 289 "rev",
290 290 "tip"
291 291 ],
292 292 "last_changeset": {
293 293 "author": "User <user@example.com>",
294 294 "branch": "default",
295 295 "date": "timestamp",
296 296 "message": "last commit message",
297 297 "parents": [
298 298 {
299 299 "raw_id": "commit-id"
300 300 }
301 301 ],
302 302 "raw_id": "commit-id",
303 303 "revision": <revision number>,
304 304 "short_id": "short id"
305 305 },
306 306 "lock_reason": null,
307 307 "locked_by": null,
308 308 "locked_date": null,
309 309 "members": [
310 310 {
311 311 "name": "super-admin-name",
312 312 "origin": "super-admin",
313 313 "permission": "repository.admin",
314 314 "type": "user"
315 315 },
316 316 {
317 317 "name": "owner-name",
318 318 "origin": "owner",
319 319 "permission": "repository.admin",
320 320 "type": "user"
321 321 },
322 322 {
323 323 "name": "user-group-name",
324 324 "origin": "permission",
325 325 "permission": "repository.write",
326 326 "type": "user_group"
327 327 }
328 328 ],
329 329 "owner": "owner-name",
330 330 "permissions": [
331 331 {
332 332 "name": "super-admin-name",
333 333 "origin": "super-admin",
334 334 "permission": "repository.admin",
335 335 "type": "user"
336 336 },
337 337 {
338 338 "name": "owner-name",
339 339 "origin": "owner",
340 340 "permission": "repository.admin",
341 341 "type": "user"
342 342 },
343 343 {
344 344 "name": "user-group-name",
345 345 "origin": "permission",
346 346 "permission": "repository.write",
347 347 "type": "user_group"
348 348 }
349 349 ],
350 350 "private": true,
351 351 "repo_id": 676,
352 352 "repo_name": "user-group/repo-name",
353 353 "repo_type": "hg"
354 354 }
355 355 }
356 356
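
A short sketch of fetching a repository and inspecting the permissions section of the result (placeholder endpoint, token, and repository name):

.. code-block:: python

    import requests

    resp = requests.post(
        'https://rhodecode.example.com/_admin/api',
        json={'id': 4, 'auth_token': '<your-auth-token>',
              'method': 'get_repo',
              'args': {'repoid': 'user-group/repo-name', 'cache': False}})
    repo = resp.json()['result']
    for entry in repo['permissions']:
        print(entry['type'], entry['name'], entry['permission'])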
357 357
358 358 get_repo_changeset
359 359 ------------------
360 360
361 361 .. py:function:: get_repo_changeset(apiuser, repoid, revision, details=<Optional:'basic'>)
362 362
363 363 Returns information about a changeset.
364 364
365 365 Additional parameters define the amount of detail returned by
366 366 this function.
367 367
368 368 This command can only be run using an |authtoken| with admin rights,
369 369 or users with at least read rights to the |repo|.
370 370
371 371 :param apiuser: This is filled automatically from the |authtoken|.
372 372 :type apiuser: AuthUser
373 373 :param repoid: The repository name or repository id
374 374 :type repoid: str or int
375 375 :param revision: revision for which listing should be done
376 376 :type revision: str
377 377 :param details: One of 'basic', 'extended' or 'full'. The 'full' option
378 378 also returns diff details such as the diff itself and the number of changed files.
379 379 :type details: Optional(str)
380 380
381 381
382 382 get_repo_changesets
383 383 -------------------
384 384
385 385 .. py:function:: get_repo_changesets(apiuser, repoid, start_rev, limit, details=<Optional:'basic'>)
386 386
387 387 Returns a set of commits limited by the number starting
388 388 from the `start_rev` option.
389 389
390 390 Additional parameters define the amount of details returned by this
391 391 function.
392 392
393 393 This command can only be run using an |authtoken| with admin rights,
394 394 or users with at least read rights to |repos|.
395 395
396 396 :param apiuser: This is filled automatically from the |authtoken|.
397 397 :type apiuser: AuthUser
398 398 :param repoid: The repository name or repository ID.
399 399 :type repoid: str or int
400 400 :param start_rev: The starting revision from where to get changesets.
401 401 :type start_rev: str
402 402 :param limit: Limit the number of commits to this amount
403 403 :type limit: str or int
404 404 :param details: Set the level of detail returned. Valid options are:
405 405 ``basic``, ``extended`` and ``full``.
406 406 :type details: Optional(str)
407 407
408 408 .. note::
409 409
410 410 Setting the parameter `details` to the value ``full`` is extensive
411 411 and returns details like the diff itself, and the number
412 412 of changed files.
413 413
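
For example, fetching ten commits with full details starting from a given revision could look like this sketch (placeholder endpoint, token, and values):

.. code-block:: python

    import requests

    resp = requests.post(
        'https://rhodecode.example.com/_admin/api',
        json={'id': 5, 'auth_token': '<your-auth-token>',
              'method': 'get_repo_changesets',
              'args': {'repoid': 'my-repo',
                       'start_rev': '<commit_id>',
                       'limit': 10,
                       'details': 'full'}})  # 'full' includes the diff itself
    print(resp.json()['result'])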
414 414
415 415 get_repo_nodes
416 416 --------------
417 417
418 418 .. py:function:: get_repo_nodes(apiuser, repoid, revision, root_path, ret_type=<Optional:'all'>, details=<Optional:'basic'>, max_file_bytes=<Optional:None>)
419 419
420 420 Returns a list of nodes and children in a flat list for a given
421 421 path at given revision.
422 422
423 423 It's possible to specify ret_type to show only `files` or `dirs`.
424 424
425 425 This command can only be run using an |authtoken| with admin rights,
426 426 or users with at least read rights to |repos|.
427 427
428 428 :param apiuser: This is filled automatically from the |authtoken|.
429 429 :type apiuser: AuthUser
430 430 :param repoid: The repository name or repository ID.
431 431 :type repoid: str or int
432 432 :param revision: The revision for which listing should be done.
433 433 :type revision: str
434 434 :param root_path: The path from which to start displaying.
435 435 :type root_path: str
436 436 :param ret_type: Set the return type. Valid options are
437 437 ``all`` (default), ``files`` and ``dirs``.
438 438 :type ret_type: Optional(str)
439 439 :param details: Returns extended information about nodes, such as
440 440 md5, binary, and/or content. The valid options are ``basic`` and
441 441 ``full``.
442 442 :type details: Optional(str)
443 443 :param max_file_bytes: Only return file content for files under this size, in bytes.
444 444 :type max_file_bytes: Optional(int)
445 445
446 446 Example output:
447 447
448 448 .. code-block:: bash
449 449
450 450 id : <id_given_in_input>
451 451 result: [
452 452 {
453 453 "name" : "<name>"
454 454 "type" : "<type>",
455 455 "binary": "<true|false>" (only in extended mode)
456 456 "md5" : "<md5 of file content>" (only in extended mode)
457 457 },
458 458 ...
459 459 ]
460 460 error: null
461 461
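
A sketch of listing only files under a path, with extended details and a size cap on returned content (placeholder endpoint, token, and values):

.. code-block:: python

    import requests

    resp = requests.post(
        'https://rhodecode.example.com/_admin/api',
        json={'id': 6, 'auth_token': '<your-auth-token>',
              'method': 'get_repo_nodes',
              'args': {'repoid': 'my-repo',
                       'revision': 'tip',
                       'root_path': 'docs/',
                       'ret_type': 'files',
                       'details': 'full',
                       'max_file_bytes': 1024 * 1024}})
    for node in resp.json()['result']:
        print(node['type'], node['name'])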
462 462
463 463 get_repo_refs
464 464 -------------
465 465
466 466 .. py:function:: get_repo_refs(apiuser, repoid)
467 467
468 468 Returns a dictionary of current references. It returns
469 469 bookmarks, branches, closed_branches, and tags for the given repository.
470 470
473 473 This command can only be run using an |authtoken| with admin rights,
474 474 or users with at least read rights to |repos|.
475 475
476 476 :param apiuser: This is filled automatically from the |authtoken|.
477 477 :type apiuser: AuthUser
478 478 :param repoid: The repository name or repository ID.
479 479 :type repoid: str or int
480 480
481 481 Example output:
482 482
483 483 .. code-block:: bash
484 484
485 485 id : <id_given_in_input>
486 486 "result": {
487 487 "bookmarks": {
488 488 "dev": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
489 489 "master": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
490 490 },
491 491 "branches": {
492 492 "default": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
493 493 "stable": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
494 494 },
495 495 "branches_closed": {},
496 496 "tags": {
497 497 "tip": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
498 498 "v4.4.0": "1232313f9e6adac5ce5399c2a891dc1e72b79022",
499 499 "v4.4.1": "cbb9f1d329ae5768379cdec55a62ebdd546c4e27",
500 500 "v4.4.2": "24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17"
501 501 }
502 502 }
503 503 error: null
504 504
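
The returned dictionary can be consumed directly, for example to list branch heads and tags (placeholder endpoint, token, and repository name):

.. code-block:: python

    import requests

    resp = requests.post(
        'https://rhodecode.example.com/_admin/api',
        json={'id': 7, 'auth_token': '<your-auth-token>',
              'method': 'get_repo_refs',
              'args': {'repoid': 'my-repo'}})
    refs = resp.json()['result']
    print('branches:', refs['branches'])
    print('tags:', refs['tags'])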
505 505
506 506 get_repo_settings
507 507 -----------------
508 508
509 509 .. py:function:: get_repo_settings(apiuser, repoid, key=<Optional:None>)
510 510
511 511 Returns all settings for a repository. If a key is given, only the
512 512 setting identified by that key is returned, or null.
513 513
514 514 :param apiuser: This is filled automatically from the |authtoken|.
515 515 :type apiuser: AuthUser
516 516 :param repoid: The repository name or repository id.
517 517 :type repoid: str or int
518 518 :param key: Key of the setting to return.
519 519 :type key: Optional(str)
520 520
521 521 Example output:
522 522
523 523 .. code-block:: bash
524 524
525 525 {
526 526 "error": null,
527 527 "id": 237,
528 528 "result": {
529 529 "extensions_largefiles": true,
530 530 "extensions_evolve": true,
531 531 "hooks_changegroup_push_logger": true,
532 532 "hooks_changegroup_repo_size": false,
533 533 "hooks_outgoing_pull_logger": true,
534 534 "phases_publish": "True",
535 535 "rhodecode_hg_use_rebase_for_merging": true,
536 536 "rhodecode_hg_close_branch_before_merging": false,
537 "rhodecode_git_use_rebase_for_merging": true,
538 "rhodecode_git_close_branch_before_merging": false,
537 539 "rhodecode_pr_merge_enabled": true,
538 540 "rhodecode_use_outdated_comments": true
539 541 }
540 542 }
541 543
542 544
543 545 get_repos
544 546 ---------
545 547
546 548 .. py:function:: get_repos(apiuser, root=<Optional:None>, traverse=<Optional:True>)
547 549
548 550 Lists all existing repositories.
549 551
550 552 This command can only be run using an |authtoken| with admin rights,
551 553 or users with at least read rights to |repos|.
552 554
553 555 :param apiuser: This is filled automatically from the |authtoken|.
554 556 :type apiuser: AuthUser
555 557 :param root: Specify the root repository group from which to fetch
556 558 repositories; filters the returned repositories to members of the given root group.
557 559 :type root: Optional(None)
558 560 :param traverse: Traverse the given root into subrepositories. With this flag
559 561 set to False, only top-level repositories from `root` are returned.
560 562 If root is empty, only top-level repositories are returned.
561 563 :type traverse: Optional(True)
562 564
563 565
564 566 Example output:
565 567
566 568 .. code-block:: bash
567 569
568 570 id : <id_given_in_input>
569 571 result: [
570 572 {
571 573 "repo_id" : "<repo_id>",
572 574 "repo_name" : "<reponame>",
573 575 "repo_type" : "<repo_type>",
574 576 "clone_uri" : "<clone_uri>",
575 577 "private" : "<bool>",
576 578 "created_on" : "<datetimecreated>",
577 579 "description" : "<description>",
578 580 "landing_rev": "<landing_rev>",
579 581 "owner": "<repo_owner>",
580 582 "fork_of": "<name_of_fork_parent>",
581 583 "enable_downloads": "<bool>",
582 584 "enable_locking": "<bool>",
583 585 "enable_statistics": "<bool>",
584 586 },
585 587 ...
586 588 ]
587 589 error: null
588 590
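
For instance, listing only the top-level repositories of a given root group could look like this sketch (placeholder endpoint, token, and group name):

.. code-block:: python

    import requests

    resp = requests.post(
        'https://rhodecode.example.com/_admin/api',
        json={'id': 8, 'auth_token': '<your-auth-token>',
              'method': 'get_repos',
              'args': {'root': 'foo/bar', 'traverse': False}})
    for repo in resp.json()['result']:
        print(repo['repo_name'], repo['repo_type'])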
589 591
590 592 grant_user_group_permission
591 593 ---------------------------
592 594
593 595 .. py:function:: grant_user_group_permission(apiuser, repoid, usergroupid, perm)
594 596
595 597 Grant permission for a user group on the specified repository,
596 598 or update existing permissions.
597 599
598 600 This command can only be run using an |authtoken| with admin
599 601 permissions on the |repo|.
600 602
601 603 :param apiuser: This is filled automatically from the |authtoken|.
602 604 :type apiuser: AuthUser
603 605 :param repoid: Set the repository name or repository ID.
604 606 :type repoid: str or int
605 607 :param usergroupid: Specify the ID of the user group.
606 608 :type usergroupid: str or int
607 609 :param perm: Set the user group permissions using the following
608 610 format: (repository.(none|read|write|admin))
609 611 :type perm: str
610 612
611 613 Example output:
612 614
613 615 .. code-block:: bash
614 616
615 617 id : <id_given_in_input>
616 618 result : {
617 619 "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
618 620 "success": true
619 621
620 622 }
621 623 error : null
622 624
623 625 Example error output:
624 626
625 627 .. code-block:: bash
626 628
627 629 id : <id_given_in_input>
628 630 result : null
629 631 error : {
630 632 'failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
631 633 }
632 634
633 635
634 636 grant_user_permission
635 637 ---------------------
636 638
637 639 .. py:function:: grant_user_permission(apiuser, repoid, userid, perm)
638 640
639 641 Grant permissions for the specified user on the given repository,
640 642 or update existing permissions if found.
641 643
642 644 This command can only be run using an |authtoken| with admin
643 645 permissions on the |repo|.
644 646
645 647 :param apiuser: This is filled automatically from the |authtoken|.
646 648 :type apiuser: AuthUser
647 649 :param repoid: Set the repository name or repository ID.
648 650 :type repoid: str or int
649 651 :param userid: Set the user name.
650 652 :type userid: str
651 653 :param perm: Set the user permissions, using the following format
652 654 ``(repository.(none|read|write|admin))``
653 655 :type perm: str
654 656
655 657 Example output:
656 658
657 659 .. code-block:: bash
658 660
659 661 id : <id_given_in_input>
660 662 result: {
661 663 "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`",
662 664 "success": true
663 665 }
664 666 error: null
665 667
666 668
667 669 invalidate_cache
668 670 ----------------
669 671
670 672 .. py:function:: invalidate_cache(apiuser, repoid, delete_keys=<Optional:False>)
671 673
672 674 Invalidates the cache for the specified repository.
673 675
674 676 This command can only be run using an |authtoken| with admin rights to
675 677 the specified repository.
676 678
677 679 This command takes the following options:
678 680
679 681 :param apiuser: This is filled automatically from |authtoken|.
680 682 :type apiuser: AuthUser
681 683 :param repoid: Sets the repository name or repository ID.
682 684 :type repoid: str or int
683 685 :param delete_keys: This deletes the invalidated keys instead of
684 686 just flagging them.
685 687 :type delete_keys: Optional(``True`` | ``False``)
686 688
687 689 Example output:
688 690
689 691 .. code-block:: bash
690 692
691 693 id : <id_given_in_input>
692 694 result : {
693 695 'msg': Cache for repository `<repository name>` was invalidated,
694 696 'repository': <repository name>
695 697 }
696 698 error : null
697 699
698 700 Example error output:
699 701
700 702 .. code-block:: bash
701 703
702 704 id : <id_given_in_input>
703 705 result : null
704 706 error : {
705 707 'Error occurred during cache invalidation action'
706 708 }
707 709
708 710
709 711 lock
710 712 ----
711 713
712 714 .. py:function:: lock(apiuser, repoid, locked=<Optional:None>, userid=<Optional:<OptionalAttr:apiuser>>)
713 715
714 716 Sets the lock state of the specified |repo| by the given user.
715 717 For more information, see :ref:`repo-locking`.
716 718
717 719 * If the ``userid`` option is not set, the repository is locked to the
718 720 user who called the method.
719 721 * If the ``locked`` parameter is not set, the current lock state of the
720 722 repository is displayed.
721 723
722 724 This command can only be run using an |authtoken| with admin rights to
723 725 the specified repository.
724 726
725 727 This command takes the following options:
726 728
727 729 :param apiuser: This is filled automatically from the |authtoken|.
728 730 :type apiuser: AuthUser
729 731 :param repoid: Sets the repository name or repository ID.
730 732 :type repoid: str or int
731 733 :param locked: Sets the lock state.
732 734 :type locked: Optional(``True`` | ``False``)
733 735 :param userid: Set the repository lock to this user.
734 736 :type userid: Optional(str or int)
735 737
736 738 Example output:
737 739
738 740 .. code-block:: bash
739 741
740 742 id : <id_given_in_input>
741 743 result : {
742 744 'repo': '<reponame>',
743 745 'locked': <bool: lock state>,
744 746 'locked_since': <int: lock timestamp>,
745 747 'locked_by': <username of person who made the lock>,
746 748 'lock_reason': <str: reason for locking>,
747 749 'lock_state_changed': <bool: True if lock state has been changed in this request>,
748 750 'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.'
749 751 or
750 752 'msg': 'Repo `<repository name>` not locked.'
751 753 or
752 754 'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`'
753 755 }
754 756 error : null
755 757
756 758 Example error output:
757 759
758 760 .. code-block:: bash
759 761
760 762 id : <id_given_in_input>
761 763 result : null
762 764 error : {
763 765 'Error occurred locking repository `<reponame>`'
764 766 }
765 767
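
A sketch of first querying and then setting the lock state (placeholder endpoint, token, and repository name): omitting ``locked`` returns the current state, while ``locked=True`` acquires the lock.

.. code-block:: python

    import requests

    API_URL = 'https://rhodecode.example.com/_admin/api'  # placeholder
    TOKEN = '<your-auth-token>'

    def lock_call(call_id, args):
        # Minimal JSON-RPC helper for this sketch only.
        return requests.post(API_URL, json={
            'id': call_id, 'auth_token': TOKEN,
            'method': 'lock', 'args': args}).json()['result']

    print(lock_call(9, {'repoid': 'my-repo'}))                   # show state
    print(lock_call(10, {'repoid': 'my-repo', 'locked': True}))  # set lock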
766 768
767 769 maintenance
768 770 -----------
769 771
770 772 .. py:function:: maintenance(apiuser, repoid)
771 773
772 774 Triggers maintenance on the given repository.
773 775
774 776 This command can only be run using an |authtoken| with admin
775 777 rights to the specified repository. For more information,
776 778 see :ref:`config-token-ref`.
777 779
778 780 This command takes the following options:
779 781
780 782 :param apiuser: This is filled automatically from the |authtoken|.
781 783 :type apiuser: AuthUser
782 784 :param repoid: The repository name or repository ID.
783 785 :type repoid: str or int
784 786
785 787 Example output:
786 788
787 789 .. code-block:: bash
788 790
789 791 id : <id_given_in_input>
790 792 result : {
791 793 "msg": "executed maintenance command",
792 794 "executed_actions": [
793 795 <action_message>, <action_message2>...
794 796 ],
795 797 "repository": "<repository name>"
796 798 }
797 799 error : null
798 800
799 801 Example error output:
800 802
801 803 .. code-block:: bash
802 804
803 805 id : <id_given_in_input>
804 806 result : null
805 807 error : {
806 808 "Unable to execute maintenance on `<reponame>`"
807 809 }
808 810
809 811
810 812 pull
811 813 ----
812 814
813 815 .. py:function:: pull(apiuser, repoid)
814 816
815 817 Triggers a pull on the given repository from a remote location. You
816 818 can use this to keep remote repositories up-to-date.
817 819
818 820 This command can only be run using an |authtoken| with admin
819 821 rights to the specified repository. For more information,
820 822 see :ref:`config-token-ref`.
821 823
822 824 This command takes the following options:
823 825
824 826 :param apiuser: This is filled automatically from the |authtoken|.
825 827 :type apiuser: AuthUser
826 828 :param repoid: The repository name or repository ID.
827 829 :type repoid: str or int
828 830
829 831 Example output:
830 832
831 833 .. code-block:: bash
832 834
833 835 id : <id_given_in_input>
834 836 result : {
835 837 "msg": "Pulled from `<repository name>`"
836 838 "repository": "<repository name>"
837 839 }
838 840 error : null
839 841
840 842 Example error output:
841 843
842 844 .. code-block:: bash
843 845
844 846 id : <id_given_in_input>
845 847 result : null
846 848 error : {
847 849 "Unable to pull changes from `<reponame>`"
848 850 }
849 851
850 852
851 853 remove_field_from_repo
852 854 ----------------------
853 855
854 856 .. py:function:: remove_field_from_repo(apiuser, repoid, key)
855 857
856 858 Removes an extra field from a repository.
857 859
858 860 This command can only be run using an |authtoken| with at least
859 861 write permissions to the |repo|.
860 862
861 863 :param apiuser: This is filled automatically from the |authtoken|.
862 864 :type apiuser: AuthUser
863 865 :param repoid: Set the repository name or repository ID.
864 866 :type repoid: str or int
865 867 :param key: Set the unique field key for this repository.
866 868 :type key: str
867 869
868 870
869 871 revoke_user_group_permission
870 872 ----------------------------
871 873
872 874 .. py:function:: revoke_user_group_permission(apiuser, repoid, usergroupid)
873 875
874 876 Revoke the permissions of a user group on a given repository.
875 877
876 878 This command can only be run using an |authtoken| with admin
877 879 permissions on the |repo|.
878 880
879 881 :param apiuser: This is filled automatically from the |authtoken|.
880 882 :type apiuser: AuthUser
881 883 :param repoid: Set the repository name or repository ID.
882 884 :type repoid: str or int
883 885 :param usergroupid: Specify the user group ID.
884 886 :type usergroupid: str or int
885 887
886 888 Example output:
887 889
888 890 .. code-block:: bash
889 891
890 892 id : <id_given_in_input>
891 893 result: {
892 894 "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
893 895 "success": true
894 896 }
895 897 error: null
896 898
897 899
898 900 revoke_user_permission
899 901 ----------------------
900 902
901 903 .. py:function:: revoke_user_permission(apiuser, repoid, userid)
902 904
903 905 Revoke permission for a user on the specified repository.
904 906
905 907 This command can only be run using an |authtoken| with admin
906 908 permissions on the |repo|.
907 909
908 910 :param apiuser: This is filled automatically from the |authtoken|.
909 911 :type apiuser: AuthUser
910 912 :param repoid: Set the repository name or repository ID.
911 913 :type repoid: str or int
912 914 :param userid: Set the user name of the revoked user.
913 915 :type userid: str or int
914 916
915 917 Example output:
916 918
917 919 .. code-block:: bash
918 920
919 921 id : <id_given_in_input>
920 922 result: {
921 923 "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
922 924 "success": true
923 925 }
924 926 error: null
925 927
926 928
927 929 set_repo_settings
928 930 -----------------
929 931
930 932 .. py:function:: set_repo_settings(apiuser, repoid, settings)
931 933
932 934 Update repository settings. Returns true on success.
933 935
934 936 :param apiuser: This is filled automatically from the |authtoken|.
935 937 :type apiuser: AuthUser
936 938 :param repoid: The repository name or repository id.
937 939 :type repoid: str or int
938 940 :param settings: The new settings for the repository.
939 941 :type: settings: dict
940 942
941 943 Example output:
942 944
943 945 .. code-block:: bash
944 946
945 947 {
946 948 "error": null,
947 949 "id": 237,
948 950 "result": true
949 951 }
950 952
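
Settings are passed as a dict, so a read-modify-write cycle with ``get_repo_settings`` is a reasonable pattern, as in this sketch (placeholder endpoint, token, and repository name):

.. code-block:: python

    import requests

    API_URL = 'https://rhodecode.example.com/_admin/api'  # placeholder
    TOKEN = '<your-auth-token>'

    def api(call_id, method, args):
        # Minimal JSON-RPC helper for this sketch only.
        return requests.post(API_URL, json={
            'id': call_id, 'auth_token': TOKEN,
            'method': method, 'args': args}).json()['result']

    settings = api(11, 'get_repo_settings', {'repoid': 'my-repo'})
    settings['rhodecode_pr_merge_enabled'] = True
    print(api(12, 'set_repo_settings',
              {'repoid': 'my-repo', 'settings': settings}))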
951 953
952 954 strip
953 955 -----
954 956
955 957 .. py:function:: strip(apiuser, repoid, revision, branch)
956 958
957 959 Strips the given revision from the specified repository.
958 960
959 961 * This will remove the revision and all of its descendants.
960 962
961 963 This command can only be run using an |authtoken| with admin rights to
962 964 the specified repository.
963 965
964 966 This command takes the following options:
965 967
966 968 :param apiuser: This is filled automatically from the |authtoken|.
967 969 :type apiuser: AuthUser
968 970 :param repoid: The repository name or repository ID.
969 971 :type repoid: str or int
970 972 :param revision: The revision you wish to strip.
971 973 :type revision: str
972 974 :param branch: The branch from which to strip the revision.
973 975 :type branch: str
974 976
975 977 Example output:
976 978
977 979 .. code-block:: bash
978 980
979 981 id : <id_given_in_input>
980 982 result : {
981 983 "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
982 984 "repository": "<repository name>"
983 985 }
984 986 error : null
985 987
986 988 Example error output:
987 989
988 990 .. code-block:: bash
989 991
990 992 id : <id_given_in_input>
991 993 result : null
992 994 error : {
993 995 "Unable to strip commit <commit_hash> from repo `<repository name>`"
994 996 }
995 997
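
Because stripping is destructive, a call like the sketch below should only be run against a revision you are certain about (placeholder endpoint, token, and values):

.. code-block:: python

    import requests

    resp = requests.post(
        'https://rhodecode.example.com/_admin/api',
        json={'id': 13, 'auth_token': '<your-auth-token>',
              'method': 'strip',
              'args': {'repoid': 'my-repo',
                       'revision': '<commit_hash>',
                       'branch': 'default'}})
    print(resp.json())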
996 998
997 999 update_repo
998 1000 -----------
999 1001
1000 1002 .. py:function:: update_repo(apiuser, repoid, repo_name=<Optional:None>, owner=<Optional:<OptionalAttr:apiuser>>, description=<Optional:''>, private=<Optional:False>, clone_uri=<Optional:None>, landing_rev=<Optional:'rev:tip'>, fork_of=<Optional:None>, enable_statistics=<Optional:False>, enable_locking=<Optional:False>, enable_downloads=<Optional:False>, fields=<Optional:''>)
1001 1003
1002 1004 Updates a repository with the given information.
1003 1005
1004 1006 This command can only be run using an |authtoken| with at least
1005 1007 admin permissions to the |repo|.
1006 1008
1007 1009 * If the repository name contains "/", the repository will be updated
1008 1010 and placed inside the matching repository group or nested repository groups.
1009 1011
1010 1012 For example, repoid=repo-test name="foo/bar/repo-test" will update the |repo|
1011 1013 called "repo-test" and place it inside the group "foo/bar".
1012 1014 You must have permission to access and write to the last repository
1013 1015 group ("bar" in this example).
1014 1016
1015 1017 :param apiuser: This is filled automatically from the |authtoken|.
1016 1018 :type apiuser: AuthUser
1017 1019 :param repoid: repository name or repository ID.
1018 1020 :type repoid: str or int
1019 1021 :param repo_name: Update the |repo| name, including the
1020 1022 repository group it's in.
1021 1023 :type repo_name: str
1022 1024 :param owner: Set the |repo| owner.
1023 1025 :type owner: str
1024 1026 :param fork_of: Set the |repo| as fork of another |repo|.
1025 1027 :type fork_of: str
1026 1028 :param description: Update the |repo| description.
1027 1029 :type description: str
1028 1030 :param private: Set the |repo| as private. (True | False)
1029 1031 :type private: bool
1030 1032 :param clone_uri: Update the |repo| clone URI.
1031 1033 :type clone_uri: str
1032 1034 :param landing_rev: Set the |repo| landing revision. Default is ``rev:tip``.
1033 1035 :type landing_rev: str
1034 1036 :param enable_statistics: Enable statistics on the |repo|, (True | False).
1035 1037 :type enable_statistics: bool
1036 1038 :param enable_locking: Enable |repo| locking.
1037 1039 :type enable_locking: bool
1038 1040 :param enable_downloads: Enable downloads from the |repo|, (True | False).
1039 1041 :type enable_downloads: bool
1040 1042 :param fields: Add extra fields to the |repo|. Use the following
1041 1043 example format: ``field_key=field_val,field_key2=fieldval2``.
1042 1044 Escape a literal ',' with ``\,``.
1043 1045 :type fields: str
1044 1046
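
A sketch of updating a few attributes at once, including extra fields in the ``key=val`` format described above (placeholder endpoint, token, and values):

.. code-block:: python

    import requests

    resp = requests.post(
        'https://rhodecode.example.com/_admin/api',
        json={'id': 14, 'auth_token': '<your-auth-token>',
              'method': 'update_repo',
              'args': {'repoid': 'repo-test',
                       'repo_name': 'foo/bar/repo-test',
                       'description': 'updated description',
                       'fields': 'ticket_system=JIRA,contact=ops-team'}})
    print(resp.json())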
1045 1047
@@ -1,855 +1,875 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24
25 25 import logging
26 26 import binascii
27 27 import os
28 28 import shutil
29 29 import urllib
30 30
31 31 from zope.cachedescriptors.property import Lazy as LazyProperty
32 32
33 33 from rhodecode.lib.compat import OrderedDict
34 34 from rhodecode.lib.datelib import (
35 35 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate,
36 36 date_astimestamp)
37 37 from rhodecode.lib.utils import safe_unicode, safe_str
38 38 from rhodecode.lib.vcs import connection
39 39 from rhodecode.lib.vcs.backends.base import (
40 40 BaseRepository, CollectionGenerator, Config, MergeResponse,
41 41 MergeFailureReason, Reference)
42 42 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
43 43 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
44 44 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
47 47 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
48 48
49 49 hexlify = binascii.hexlify
50 50 nullid = "\0" * 20
51 51
52 52 log = logging.getLogger(__name__)
53 53
54 54
55 55 class MercurialRepository(BaseRepository):
56 56 """
57 57 Mercurial repository backend
58 58 """
59 59 DEFAULT_BRANCH_NAME = 'default'
60 60
61 61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 62 update_after_clone=False, with_wire=None):
63 63 """
64 64 Raises RepositoryError if the repository could not be found at the given
65 65 ``repo_path``.
66 66
67 67 :param repo_path: local path of the repository
68 68 :param config: config object containing the repo configuration
69 69 :param create=False: if set to True, try to create the repository if
70 70 it does not exist rather than raising an exception
71 71 :param src_url=None: if given, try to clone the repository from this location
72 72 :param update_after_clone=False: update the working copy after
73 73 making a clone
74 74 """
75 75 self.path = safe_str(os.path.abspath(repo_path))
76 76 self.config = config if config else Config()
77 77 self._remote = connection.Hg(
78 78 self.path, self.config, with_wire=with_wire)
79 79
80 80 self._init_repo(create, src_url, update_after_clone)
81 81
82 82 # caches
83 83 self._commit_ids = {}
84 84
85 85 @LazyProperty
86 86 def commit_ids(self):
87 87 """
88 88 Returns a list of commit ids, in ascending order. Being a lazy
89 89 attribute allows external tools to inject shas from a cache.
90 90 """
91 91 commit_ids = self._get_all_commit_ids()
92 92 self._rebuild_cache(commit_ids)
93 93 return commit_ids
94 94
95 95 def _rebuild_cache(self, commit_ids):
96 96 self._commit_ids = dict((commit_id, index)
97 97 for index, commit_id in enumerate(commit_ids))
98 98
99 99 @LazyProperty
100 100 def branches(self):
101 101 return self._get_branches()
102 102
103 103 @LazyProperty
104 104 def branches_closed(self):
105 105 return self._get_branches(active=False, closed=True)
106 106
107 107 @LazyProperty
108 108 def branches_all(self):
109 109 all_branches = {}
110 110 all_branches.update(self.branches)
111 111 all_branches.update(self.branches_closed)
112 112 return all_branches
113 113
114 114 def _get_branches(self, active=True, closed=False):
115 115 """
116 116 Gets branches for this repository.
117 117 Returns only active, non-closed branches by default.
118 118
119 119 :param active: also return active branches
120 120 :param closed: also return closed branches
121 121
122 122 """
123 123 if self.is_empty():
124 124 return {}
125 125
126 126 def get_name(ctx):
127 127 return ctx[0]
128 128
129 129 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
130 130 self._remote.branches(active, closed).items()]
131 131
132 132 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
133 133
134 134 @LazyProperty
135 135 def tags(self):
136 136 """
137 137 Gets tags for this repository
138 138 """
139 139 return self._get_tags()
140 140
141 141 def _get_tags(self):
142 142 if self.is_empty():
143 143 return {}
144 144
145 145 def get_name(ctx):
146 146 return ctx[0]
147 147
148 148 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
149 149 self._remote.tags().items()]
150 150
151 151 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
152 152
153 153 def tag(self, name, user, commit_id=None, message=None, date=None,
154 154 **kwargs):
155 155 """
156 156 Creates and returns a tag for the given ``commit_id``.
157 157
158 158 :param name: name for new tag
159 159 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
160 160 :param commit_id: commit id for which new tag would be created
161 161 :param message: message of the tag's commit
162 162 :param date: date of tag's commit
163 163
164 164 :raises TagAlreadyExistError: if tag with same name already exists
165 165 """
166 166 if name in self.tags:
167 167 raise TagAlreadyExistError("Tag %s already exists" % name)
168 168 commit = self.get_commit(commit_id=commit_id)
169 169 local = kwargs.setdefault('local', False)
170 170
171 171 if message is None:
172 172 message = "Added tag %s for commit %s" % (name, commit.short_id)
173 173
174 174 date, tz = date_to_timestamp_plus_offset(date)
175 175
176 176 self._remote.tag(
177 177 name, commit.raw_id, message, local, user, date, tz)
178 178 self._remote.invalidate_vcs_cache()
179 179
180 180 # Reinitialize tags
181 181 self.tags = self._get_tags()
182 182 tag_id = self.tags[name]
183 183
184 184 return self.get_commit(commit_id=tag_id)
185 185
186 186 def remove_tag(self, name, user, message=None, date=None):
187 187 """
188 188 Removes tag with the given `name`.
189 189
190 190 :param name: name of the tag to be removed
191 191 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
192 192 :param message: message of the tag's removal commit
193 193 :param date: date of tag's removal commit
194 194
195 195 :raises TagDoesNotExistError: if tag with given name does not exist
196 196 """
197 197 if name not in self.tags:
198 198 raise TagDoesNotExistError("Tag %s does not exist" % name)
199 199 if message is None:
200 200 message = "Removed tag %s" % name
201 201 local = False
202 202
203 203 date, tz = date_to_timestamp_plus_offset(date)
204 204
205 205 self._remote.tag(name, nullid, message, local, user, date, tz)
206 206 self._remote.invalidate_vcs_cache()
207 207 self.tags = self._get_tags()
208 208
209 209 @LazyProperty
210 210 def bookmarks(self):
211 211 """
212 212 Gets bookmarks for this repository
213 213 """
214 214 return self._get_bookmarks()
215 215
216 216 def _get_bookmarks(self):
217 217 if self.is_empty():
218 218 return {}
219 219
220 220 def get_name(ctx):
221 221 return ctx[0]
222 222
223 223 _bookmarks = [
224 224 (safe_unicode(n), hexlify(h)) for n, h in
225 225 self._remote.bookmarks().items()]
226 226
227 227 return OrderedDict(sorted(_bookmarks, key=get_name))
228 228
229 229 def _get_all_commit_ids(self):
230 230 return self._remote.get_all_commit_ids('visible')
231 231
232 232 def get_diff(
233 233 self, commit1, commit2, path='', ignore_whitespace=False,
234 234 context=3, path1=None):
235 235 """
236 236 Returns (git like) *diff*, as plain text. Shows changes introduced by
237 237 `commit2` since `commit1`.
238 238
239 239 :param commit1: Entry point from which diff is shown. Can be
240 240 ``self.EMPTY_COMMIT`` - in this case, patch showing all
241 241 the changes since empty state of the repository until `commit2`
242 242 :param commit2: Until which commit changes should be shown.
243 243 :param ignore_whitespace: If set to ``True``, would not show whitespace
244 244 changes. Defaults to ``False``.
245 245 :param context: How many lines before/after changed lines should be
246 246 shown. Defaults to ``3``.
247 247 """
248 248 self._validate_diff_commits(commit1, commit2)
249 249 if path1 is not None and path1 != path:
250 250 raise ValueError("Diff of two different paths not supported.")
251 251
252 252 if path:
253 253 file_filter = [self.path, path]
254 254 else:
255 255 file_filter = None
256 256
257 257 diff = self._remote.diff(
258 258 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
259 259 opt_git=True, opt_ignorews=ignore_whitespace,
260 260 context=context)
261 261 return MercurialDiff(diff)
262 262
263 263 def strip(self, commit_id, branch=None):
264 264 self._remote.strip(commit_id, update=False, backup="none")
265 265
266 266 self._remote.invalidate_vcs_cache()
267 267 self.commit_ids = self._get_all_commit_ids()
268 268 self._rebuild_cache(self.commit_ids)
269 269
270 270 def verify(self):
271 271 verify = self._remote.verify()
272 272
273 273 self._remote.invalidate_vcs_cache()
274 274 return verify
275 275
276 276 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
277 277 if commit_id1 == commit_id2:
278 278 return commit_id1
279 279
280 280 ancestors = self._remote.revs_from_revspec(
281 281 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
282 282 other_path=repo2.path)
283 283 return repo2[ancestors[0]].raw_id if ancestors else None
284 284
285 285 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
286 286 if commit_id1 == commit_id2:
287 287 commits = []
288 288 else:
289 289 if merge:
290 290 indexes = self._remote.revs_from_revspec(
291 291 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
292 292 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
293 293 else:
294 294 indexes = self._remote.revs_from_revspec(
295 295 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
296 296 commit_id1, other_path=repo2.path)
297 297
298 298 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
299 299 for idx in indexes]
300 300
301 301 return commits
302 302
303 303 @staticmethod
304 304 def check_url(url, config):
305 305 """
306 306 Checks the given url and tries to verify that it is a valid
307 307 link. Sometimes it may happen that mercurial issues a basic
308 308 auth request, which can cause the whole API to hang when used from python
309 309 or other external calls.
310 310
311 311 On failure it raises urllib2.HTTPError; the exception is also raised
312 312 when the return code is not 200.
313 313 """
314 314 # check first if it's not a local url
315 315 if os.path.isdir(url) or url.startswith('file:'):
316 316 return True
317 317
318 318 # Request the _remote to verify the url
319 319 return connection.Hg.check_url(url, config.serialize())
320 320
321 321 @staticmethod
322 322 def is_valid_repository(path):
323 323 return os.path.isdir(os.path.join(path, '.hg'))
324 324
325 325 def _init_repo(self, create, src_url=None, update_after_clone=False):
326 326 """
327 327 Checks for a mercurial repository in the given path. If there
328 328 is no repository in that path it will raise an exception unless
329 329 the `create` parameter is set to True - in that case the repository
330 330 will be created.
331 331
332 332 If `src_url` is given, it will try to clone the repository from that
333 333 location. Additionally, it will update the
334 334 working copy according to the `update_after_clone` flag.
335 335 """
336 336 if create and os.path.exists(self.path):
337 337 raise RepositoryError(
338 338 "Cannot create repository at %s, location already exists"
339 339 % self.path)
340 340
341 341 if src_url:
342 342 url = str(self._get_url(src_url))
343 343 MercurialRepository.check_url(url, self.config)
344 344
345 345 self._remote.clone(url, self.path, update_after_clone)
346 346
347 347 # Don't try to create if we've already cloned repo
348 348 create = False
349 349
350 350 if create:
351 351 os.makedirs(self.path, mode=0755)
352 352
353 353 self._remote.localrepository(create)
354 354
355 355 @LazyProperty
356 356 def in_memory_commit(self):
357 357 return MercurialInMemoryCommit(self)
358 358
359 359 @LazyProperty
360 360 def description(self):
361 361 description = self._remote.get_config_value(
362 362 'web', 'description', untrusted=True)
363 363 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
364 364
365 365 @LazyProperty
366 366 def contact(self):
367 367 contact = (
368 368 self._remote.get_config_value("web", "contact") or
369 369 self._remote.get_config_value("ui", "username"))
370 370 return safe_unicode(contact or self.DEFAULT_CONTACT)
371 371
372 372 @LazyProperty
373 373 def last_change(self):
374 374 """
375 375 Returns last change made on this repository as
376 376 `datetime.datetime` object.
377 377 """
378 378 try:
379 379 return self.get_commit().date
380 380 except RepositoryError:
381 381 tzoffset = makedate()[1]
382 382 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
383 383
384 384 def _get_fs_mtime(self):
385 385 # fallback to filesystem
386 386 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
387 387 st_path = os.path.join(self.path, '.hg', "store")
388 388 if os.path.exists(cl_path):
389 389 return os.stat(cl_path).st_mtime
390 390 else:
391 391 return os.stat(st_path).st_mtime
392 392
393 393 def _sanitize_commit_idx(self, idx):
394 394 # Note: Mercurial has ``int(-1)`` reserved as not existing id_or_idx
395 395 # number. A `long` is treated in the correct way though. So we convert
396 396 # `int` to `long` here to make sure it is handled correctly.
397 397 if isinstance(idx, int):
398 398 return long(idx)
399 399 return idx
400 400
401 401 def _get_url(self, url):
402 402 """
403 403 Returns normalized url. If no scheme is given, it falls back
404 404 to the filesystem
405 405 (``file:///``) scheme.
406 406 """
407 407 url = url.encode('utf8')
408 408 if url != 'default' and '://' not in url:
409 409 url = "file:" + urllib.pathname2url(url)
410 410 return url
411 411
412 412 def get_hook_location(self):
413 413 """
414 414 returns absolute path to location where hooks are stored
415 415 """
416 416 return os.path.join(self.path, '.hg', '.hgrc')
417 417
418 418 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
419 419 """
420 420 Returns ``MercurialCommit`` object representing repository's
421 421 commit at the given `commit_id` or `commit_idx`.
422 422 """
423 423 if self.is_empty():
424 424 raise EmptyRepositoryError("There are no commits yet")
425 425
426 426 if commit_id is not None:
427 427 self._validate_commit_id(commit_id)
428 428 try:
429 429 idx = self._commit_ids[commit_id]
430 430 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
431 431 except KeyError:
432 432 pass
433 433 elif commit_idx is not None:
434 434 self._validate_commit_idx(commit_idx)
435 435 commit_idx = self._sanitize_commit_idx(commit_idx)
436 436 try:
437 437 id_ = self.commit_ids[commit_idx]
438 438 if commit_idx < 0:
439 439 commit_idx += len(self.commit_ids)
440 440 return MercurialCommit(
441 441 self, id_, commit_idx, pre_load=pre_load)
442 442 except IndexError:
443 443 commit_id = commit_idx
444 444 else:
445 445 commit_id = "tip"
446 446
447 447 # TODO Paris: Ugly hack to "serialize" long for msgpack
448 448 if isinstance(commit_id, long):
449 449 commit_id = float(commit_id)
450 450
451 451 if isinstance(commit_id, unicode):
452 452 commit_id = safe_str(commit_id)
453 453
454 454 try:
455 455 raw_id, idx = self._remote.lookup(commit_id, both=True)
456 456 except CommitDoesNotExistError:
457 457 msg = "Commit %s does not exist for %s" % (
458 458 commit_id, self)
459 459 raise CommitDoesNotExistError(msg)
460 460
461 461 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
462 462
463 463 def get_commits(
464 464 self, start_id=None, end_id=None, start_date=None, end_date=None,
465 465 branch_name=None, pre_load=None):
466 466 """
467 467 Returns generator of ``MercurialCommit`` objects from start to end
468 468 (both are inclusive)
469 469
470 470 :param start_id: None, str(commit_id)
471 471 :param end_id: None, str(commit_id)
472 472 :param start_date: if specified, commits with commit date less than
473 473 ``start_date`` would be filtered out from returned set
474 474 :param end_date: if specified, commits with commit date greater than
475 475 ``end_date`` would be filtered out from returned set
476 476 :param branch_name: if specified, commits not reachable from given
477 477 branch would be filtered out from returned set
478 478
479 479 :raise BranchDoesNotExistError: If given ``branch_name`` does not
480 480 exist.
481 481 :raise CommitDoesNotExistError: If commit for given ``start`` or
482 482 ``end`` could not be found.
483 483 """
484 484 # actually we should check now if it's not an empty repo
485 485 branch_ancestors = False
486 486 if self.is_empty():
487 487 raise EmptyRepositoryError("There are no commits yet")
488 488 self._validate_branch_name(branch_name)
489 489
490 490 if start_id is not None:
491 491 self._validate_commit_id(start_id)
492 492 c_start = self.get_commit(commit_id=start_id)
493 493 start_pos = self._commit_ids[c_start.raw_id]
494 494 else:
495 495 start_pos = None
496 496
497 497 if end_id is not None:
498 498 self._validate_commit_id(end_id)
499 499 c_end = self.get_commit(commit_id=end_id)
500 500 end_pos = max(0, self._commit_ids[c_end.raw_id])
501 501 else:
502 502 end_pos = None
503 503
504 504 if None not in [start_id, end_id] and start_pos > end_pos:
505 505 raise RepositoryError(
506 506 "Start commit '%s' cannot be after end commit '%s'" %
507 507 (start_id, end_id))
508 508
509 509 if end_pos is not None:
510 510 end_pos += 1
511 511
512 512 commit_filter = []
513 513 if branch_name and not branch_ancestors:
514 514 commit_filter.append('branch("%s")' % branch_name)
515 515 elif branch_name and branch_ancestors:
516 516 commit_filter.append('ancestors(branch("%s"))' % branch_name)
517 517 if start_date and not end_date:
518 518 commit_filter.append('date(">%s")' % start_date)
519 519 if end_date and not start_date:
520 520 commit_filter.append('date("<%s")' % end_date)
521 521 if start_date and end_date:
522 522 commit_filter.append(
523 523 'date(">%s") and date("<%s")' % (start_date, end_date))
524 524
525 525 # TODO: johbo: Figure out a simpler way for this solution
526 526 collection_generator = CollectionGenerator
527 527 if commit_filter:
528 528 commit_filter = map(safe_str, commit_filter)
529 529 revisions = self._remote.rev_range(commit_filter)
530 530 collection_generator = MercurialIndexBasedCollectionGenerator
531 531 else:
532 532 revisions = self.commit_ids
533 533
534 534 if start_pos or end_pos:
535 535 revisions = revisions[start_pos:end_pos]
536 536
537 537 return collection_generator(self, revisions, pre_load=pre_load)
538 538
539 539 def pull(self, url, commit_ids=None):
540 540 """
541 541 Tries to pull changes from external location.
542 542
543 543 :param commit_ids: Optional. Can be set to a list of commit ids
544 544 which shall be pulled from the other repository.
545 545 """
546 546 url = self._get_url(url)
547 547 self._remote.pull(url, commit_ids=commit_ids)
548 548 self._remote.invalidate_vcs_cache()
549 549
550 550 def _local_clone(self, clone_path):
551 551 """
552 552 Create a local clone of the current repo.
553 553 """
554 554 self._remote.clone(self.path, clone_path, update_after_clone=True,
555 555 hooks=False)
556 556
557 557 def _update(self, revision, clean=False):
558 558 """
559 559 Update the working copy to the specified revision.
560 560 """
561 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
561 562 self._remote.update(revision, clean=clean)
562 563
563 564 def _identify(self):
564 565 """
565 566 Return the current state of the working directory.
566 567 """
567 568 return self._remote.identify().strip().rstrip('+')
568 569
569 570 def _heads(self, branch=None):
570 571 """
571 572 Return the commit ids of the repository heads.
572 573 """
573 574 return self._remote.heads(branch=branch).strip().split(' ')
574 575
575 576 def _ancestor(self, revision1, revision2):
576 577 """
577 578 Return the common ancestor of the two revisions.
578 579 """
579 580 return self._remote.ancestor(revision1, revision2)
580 581
581 582 def _local_push(
582 583 self, revision, repository_path, push_branches=False,
583 584 enable_hooks=False):
584 585 """
585 586 Push the given revision to the specified repository.
586 587
587 588 :param push_branches: allow creating branches in the target repo.
588 589 """
589 590 self._remote.push(
590 591 [revision], repository_path, hooks=enable_hooks,
591 592 push_branches=push_branches)
592 593
593 594 def _local_merge(self, target_ref, merge_message, user_name, user_email,
594 595 source_ref, use_rebase=False):
595 596 """
596 597 Merge the given source_revision into the checked out revision.
597 598
598 599 Returns the commit id of the merge and a boolean indicating if the
599 600 commit needs to be pushed.
600 601 """
601 602 self._update(target_ref.commit_id)
602 603
603 604 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
604 605 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
605 606
606 607 if ancestor == source_ref.commit_id:
607 608 # Nothing to do, the changes were already integrated
608 609 return target_ref.commit_id, False
609 610
610 611 elif ancestor == target_ref.commit_id and is_the_same_branch:
611 612 # In this case we should force a commit message
612 613 return source_ref.commit_id, True
613 614
614 615 if use_rebase:
615 616 try:
616 617 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
617 618 target_ref.commit_id)
618 619 self.bookmark(bookmark_name, revision=source_ref.commit_id)
619 620 self._remote.rebase(
620 621 source=source_ref.commit_id, dest=target_ref.commit_id)
621 622 self._remote.invalidate_vcs_cache()
622 623 self._update(bookmark_name)
623 624 return self._identify(), True
624 625 except RepositoryError:
625 626 # The rebase-abort may raise another exception which 'hides'
626 627 # the original one, therefore we log it here.
627 628 log.exception('Error while rebasing shadow repo during merge.')
628 629
629 630 # Cleanup any rebase leftovers
630 631 self._remote.invalidate_vcs_cache()
631 632 self._remote.rebase(abort=True)
632 633 self._remote.invalidate_vcs_cache()
633 634 self._remote.update(clean=True)
634 635 raise
635 636 else:
636 637 try:
637 638 self._remote.merge(source_ref.commit_id)
638 639 self._remote.invalidate_vcs_cache()
639 640 self._remote.commit(
640 641 message=safe_str(merge_message),
641 642 username=safe_str('%s <%s>' % (user_name, user_email)))
642 643 self._remote.invalidate_vcs_cache()
643 644 return self._identify(), True
644 645 except RepositoryError:
645 646 # Cleanup any merge leftovers
646 647 self._remote.update(clean=True)
647 648 raise
648 649
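To make the branching above easier to follow, here is the same fast-forward/merge/rebase decision reduced to a standalone sketch; the helper function and commit ids are illustrative only, not part of this module.

.. code-block:: python

    def decide_merge_action(ancestor, source_id, target_id,
                            same_branch, use_rebase):
        # Source changes already contained in target: nothing to do.
        if ancestor == source_id:
            return 'noop'
        # Target is an ancestor of source on the same branch: fast-forward.
        if ancestor == target_id and same_branch:
            return 'fast-forward'
        # Otherwise a real merge (or rebase) in the shadow repo is needed.
        return 'rebase' if use_rebase else 'merge-commit'

    assert decide_merge_action('c1', 'c1', 'c2', True, False) == 'noop'
    assert decide_merge_action('c2', 'c1', 'c2', True, False) == 'fast-forward'
    assert decide_merge_action('c0', 'c1', 'c2', False, True) == 'rebase'
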
649 650 def _local_close(self, target_ref, user_name, user_email,
650 651 source_ref, close_message=''):
651 652 """
652 653 Close the branch of the given source_revision
653 654
654 655 Returns the commit id of the close and a boolean indicating if the
655 656 commit needs to be pushed.
656 657 """
657 self._update(target_ref.commit_id)
658 message = close_message or "Closing branch"
658 self._update(source_ref.commit_id)
659 message = close_message or "Closing branch: `{}`".format(source_ref.name)
659 660 try:
660 661 self._remote.commit(
661 662 message=safe_str(message),
662 663 username=safe_str('%s <%s>' % (user_name, user_email)),
663 664 close_branch=True)
664 665 self._remote.invalidate_vcs_cache()
665 666 return self._identify(), True
666 667 except RepositoryError:
667 668 # Cleanup any commit leftovers
668 669 self._remote.update(clean=True)
669 670 raise
670 671
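A small sketch of the close-commit message built above, with a hypothetical branch name; the commit itself is roughly what a Mercurial close-branch commit produces on the updated source head.

.. code-block:: python

    # Hypothetical values, for illustration only.
    source_branch_name = 'feature/login'
    close_message = ''   # no custom message supplied by the caller

    message = close_message or "Closing branch: `{}`".format(source_branch_name)
    print(message)   # Closing branch: `feature/login`
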
671 672 def _is_the_same_branch(self, target_ref, source_ref):
672 673 return (
673 674 self._get_branch_name(target_ref) ==
674 675 self._get_branch_name(source_ref))
675 676
676 677 def _get_branch_name(self, ref):
677 678 if ref.type == 'branch':
678 679 return ref.name
679 680 return self._remote.ctx_branch(ref.commit_id)
680 681
681 682 def _get_shadow_repository_path(self, workspace_id):
682 683 # The name of the shadow repository must start with '.', so it is
683 684 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
684 685 return os.path.join(
685 686 os.path.dirname(self.path),
686 687 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
687 688
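For illustration, the shadow repository path produced by the helper above, using a hypothetical repository path and workspace id.

.. code-block:: python

    import os

    repo_path = '/srv/repos/group/myrepo'   # hypothetical
    workspace_id = 'pr-42'                  # hypothetical

    shadow_path = os.path.join(
        os.path.dirname(repo_path),
        '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
    print(shadow_path)   # /srv/repos/group/.__shadow_myrepo_pr-42
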
688 689 def _maybe_prepare_merge_workspace(self, workspace_id, unused_target_ref):
689 690 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
690 691 if not os.path.exists(shadow_repository_path):
691 692 self._local_clone(shadow_repository_path)
692 693 log.debug(
693 694 'Prepared shadow repository in %s', shadow_repository_path)
694 695
695 696 return shadow_repository_path
696 697
697 698 def cleanup_merge_workspace(self, workspace_id):
698 699 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
699 700 shutil.rmtree(shadow_repository_path, ignore_errors=True)
700 701
701 702 def _merge_repo(self, shadow_repository_path, target_ref,
702 703 source_repo, source_ref, merge_message,
703 704 merger_name, merger_email, dry_run=False,
704 705 use_rebase=False, close_branch=False):
705 706 if target_ref.commit_id not in self._heads():
706 707 return MergeResponse(
707 708 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
708 709
709 710 try:
710 711 if (target_ref.type == 'branch' and
711 712 len(self._heads(target_ref.name)) != 1):
712 713 return MergeResponse(
713 714 False, False, None,
714 715 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
715 716 except CommitDoesNotExistError as e:
716 717 log.exception('Failure when looking up branch heads on hg target')
717 718 return MergeResponse(
718 719 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
719 720
720 721 shadow_repo = self._get_shadow_instance(shadow_repository_path)
721 722
722 723 log.debug('Pulling in target reference %s', target_ref)
723 724 self._validate_pull_reference(target_ref)
724 725 shadow_repo._local_pull(self.path, target_ref)
725 726 try:
726 727 log.debug('Pulling in source reference %s', source_ref)
727 728 source_repo._validate_pull_reference(source_ref)
728 729 shadow_repo._local_pull(source_repo.path, source_ref)
729 730 except CommitDoesNotExistError:
730 731 log.exception('Failure when doing local pull on hg shadow repo')
731 732 return MergeResponse(
732 733 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
733 734
734 735 merge_ref = None
736 merge_commit_id = None
737 close_commit_id = None
735 738 merge_failure_reason = MergeFailureReason.NONE
736 739
737 if close_branch and not use_rebase:
740 # enforce that the close-branch action is used only when the source
741 # is an actual branch
742 close_branch = close_branch and source_ref.type == 'branch'
743
744 # don't allow closing the branch if source and target are the same
745 close_branch = close_branch and source_ref.name != target_ref.name
746
747 needs_push_on_close = False
748 if close_branch and not use_rebase and not dry_run:
738 749 try:
739 close_commit_id, needs_push = shadow_repo._local_close(
750 close_commit_id, needs_push_on_close = shadow_repo._local_close(
740 751 target_ref, merger_name, merger_email, source_ref)
741 target_ref.commit_id = close_commit_id
742 752 merge_possible = True
743 753 except RepositoryError:
744 log.exception('Failure when doing close branch on hg shadow repo')
754 log.exception(
755 'Failure when doing close branch on hg shadow repo')
745 756 merge_possible = False
746 757 merge_failure_reason = MergeFailureReason.MERGE_FAILED
747 758 else:
748 759 merge_possible = True
749 760
750 761 if merge_possible:
751 762 try:
752 763 merge_commit_id, needs_push = shadow_repo._local_merge(
753 764 target_ref, merge_message, merger_name, merger_email,
754 765 source_ref, use_rebase=use_rebase)
755 766 merge_possible = True
756 767
757 # Set a bookmark pointing to the merge commit. This bookmark may be
758 # used to easily identify the last successful merge commit in the
759 # shadow repository.
768 # take the state of the close action into account; it
769 # may also require a push
770 needs_push = needs_push or needs_push_on_close
771
772 # Set a bookmark pointing to the merge commit. This bookmark
773 # may be used to easily identify the last successful merge
774 # commit in the shadow repository.
760 775 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
761 776 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
762 777 except SubrepoMergeError:
763 778 log.exception(
764 779 'Subrepo merge error during local merge on hg shadow repo.')
765 780 merge_possible = False
766 781 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
767 782 except RepositoryError:
768 783 log.exception('Failure when doing local merge on hg shadow repo')
769 784 merge_possible = False
770 785 merge_failure_reason = MergeFailureReason.MERGE_FAILED
771 786
772 787 if merge_possible and not dry_run:
773 788 if needs_push:
774 789 # In case the target is a bookmark, update it, so after pushing
775 790 # the bookmark is also updated in the target.
776 791 if target_ref.type == 'book':
777 792 shadow_repo.bookmark(
778 793 target_ref.name, revision=merge_commit_id)
779
780 794 try:
781 795 shadow_repo_with_hooks = self._get_shadow_instance(
782 796 shadow_repository_path,
783 797 enable_hooks=True)
784 798 # Note: the push_branches option will push any new branch
785 799 # defined in the source repository to the target. This may
786 800 # be dangerous as branches are permanent in Mercurial.
787 801 # This feature was requested in issue #441.
788 802 shadow_repo_with_hooks._local_push(
789 803 merge_commit_id, self.path, push_branches=True,
790 804 enable_hooks=True)
805
806 # maybe we also need to push the close_commit_id
807 if close_commit_id:
808 shadow_repo_with_hooks._local_push(
809 close_commit_id, self.path, push_branches=True,
810 enable_hooks=True)
791 811 merge_succeeded = True
792 812 except RepositoryError:
793 813 log.exception(
794 814 'Failure when doing local push from the shadow '
795 815 'repository to the target repository.')
796 816 merge_succeeded = False
797 817 merge_failure_reason = MergeFailureReason.PUSH_FAILED
798 818 else:
799 819 merge_succeeded = True
800 820 else:
801 821 merge_succeeded = False
802 822
803 823 return MergeResponse(
804 824 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
805 825
806 826 def _get_shadow_instance(
807 827 self, shadow_repository_path, enable_hooks=False):
808 828 config = self.config.copy()
809 829 if not enable_hooks:
810 830 config.clear_section('hooks')
811 831 return MercurialRepository(shadow_repository_path, config)
812 832
813 833 def _validate_pull_reference(self, reference):
814 834 if not (reference.name in self.bookmarks or
815 835 reference.name in self.branches or
816 836 self.get_commit(reference.commit_id)):
817 837 raise CommitDoesNotExistError(
818 838 'Unknown branch, bookmark or commit id')
819 839
820 840 def _local_pull(self, repository_path, reference):
821 841 """
822 842 Fetch a branch, bookmark or commit from a local repository.
823 843 """
824 844 repository_path = os.path.abspath(repository_path)
825 845 if repository_path == self.path:
826 846 raise ValueError('Cannot pull from the same repository')
827 847
828 848 reference_type_to_option_name = {
829 849 'book': 'bookmark',
830 850 'branch': 'branch',
831 851 }
832 852 option_name = reference_type_to_option_name.get(
833 853 reference.type, 'revision')
834 854
835 855 if option_name == 'revision':
836 856 ref = reference.commit_id
837 857 else:
838 858 ref = reference.name
839 859
840 860 options = {option_name: [ref]}
841 861 self._remote.pull_cmd(repository_path, hooks=False, **options)
842 862 self._remote.invalidate_vcs_cache()
843 863
844 864 def bookmark(self, bookmark, revision=None):
845 865 if isinstance(bookmark, unicode):
846 866 bookmark = safe_str(bookmark)
847 867 self._remote.bookmark(bookmark, revision=revision)
848 868 self._remote.invalidate_vcs_cache()
849 869
850 870
851 871 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
852 872
853 873 def _commit_factory(self, commit_id):
854 874 return self.repo.get_commit(
855 875 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,567 +1,569 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 these are the form validation classes
23 23 http://formencode.org/module-formencode.validators.html
24 24 has a list of all available validators
25 25
26 26 we can create our own validators
27 27
28 28 The table below outlines the options which can be used in a schema in addition to the validators themselves
29 29 pre_validators [] These validators will be applied before the schema
30 30 chained_validators [] These validators will be applied after the schema
31 31 allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present
32 32 filter_extra_fields False If True, then keys that aren't associated with a validator are removed
33 33 if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class meaning that no default value has been specified and therefore missing keys shouldn't take a default value.
34 34 ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already
35 35
36 36
37 37 <name> = formencode.validators.<name of validator>
38 38 <name> must equal form name
39 39 list=[1,2,3,4,5]
40 40 for SELECT use formencode.All(OneOf(list), Int())
41 41
42 42 """
43 43
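A minimal sketch of the schema options described above, using plain formencode; the field names are made up for illustration.

.. code-block:: python

    import formencode
    from formencode import validators

    class _ExampleForm(formencode.Schema):
        allow_extra_fields = True    # unknown keys are not an error...
        filter_extra_fields = True   # ...and are dropped from the result
        username = validators.UnicodeString(strip=True, min=1, not_empty=True)
        remember = validators.StringBoolean(if_missing=False)

    result = _ExampleForm().to_python({'username': ' bob ', 'junk': 'ignored'})
    print(result)   # {'username': u'bob', 'remember': False}  (key order may vary)
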
44 44 import deform
45 45 import logging
46 46 import formencode
47 47
48 48 from pkg_resources import resource_filename
49 49 from formencode import All, Pipe
50 50
51 51 from pylons.i18n.translation import _
52 52 from pyramid.threadlocal import get_current_request
53 53
54 54 from rhodecode import BACKENDS
55 55 from rhodecode.lib import helpers
56 56 from rhodecode.model import validators as v
57 57
58 58 log = logging.getLogger(__name__)
59 59
60 60
61 61 deform_templates = resource_filename('deform', 'templates')
62 62 rhodecode_templates = resource_filename('rhodecode', 'templates/forms')
63 63 search_path = (rhodecode_templates, deform_templates)
64 64
65 65
66 66 class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory):
67 67 """ Subclass of ZPTRendererFactory to add rhodecode context variables """
68 68 def __call__(self, template_name, **kw):
69 69 kw['h'] = helpers
70 70 kw['request'] = get_current_request()
71 71 return self.load(template_name)(**kw)
72 72
73 73
74 74 form_renderer = RhodecodeFormZPTRendererFactory(search_path)
75 75 deform.Form.set_default_renderer(form_renderer)
76 76
77 77
78 78 def LoginForm():
79 79 class _LoginForm(formencode.Schema):
80 80 allow_extra_fields = True
81 81 filter_extra_fields = True
82 82 username = v.UnicodeString(
83 83 strip=True,
84 84 min=1,
85 85 not_empty=True,
86 86 messages={
87 87 'empty': _(u'Please enter a login'),
88 88 'tooShort': _(u'Enter a value %(min)i characters long or more')
89 89 }
90 90 )
91 91
92 92 password = v.UnicodeString(
93 93 strip=False,
94 94 min=3,
95 95 not_empty=True,
96 96 messages={
97 97 'empty': _(u'Please enter a password'),
98 98 'tooShort': _(u'Enter %(min)i characters or more')}
99 99 )
100 100
101 101 remember = v.StringBoolean(if_missing=False)
102 102
103 103 chained_validators = [v.ValidAuth()]
104 104 return _LoginForm
105 105
106 106
107 107 def UserForm(edit=False, available_languages=[], old_data={}):
108 108 class _UserForm(formencode.Schema):
109 109 allow_extra_fields = True
110 110 filter_extra_fields = True
111 111 username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
112 112 v.ValidUsername(edit, old_data))
113 113 if edit:
114 114 new_password = All(
115 115 v.ValidPassword(),
116 116 v.UnicodeString(strip=False, min=6, not_empty=False)
117 117 )
118 118 password_confirmation = All(
119 119 v.ValidPassword(),
120 120 v.UnicodeString(strip=False, min=6, not_empty=False),
121 121 )
122 122 admin = v.StringBoolean(if_missing=False)
123 123 else:
124 124 password = All(
125 125 v.ValidPassword(),
126 126 v.UnicodeString(strip=False, min=6, not_empty=True)
127 127 )
128 128 password_confirmation = All(
129 129 v.ValidPassword(),
130 130 v.UnicodeString(strip=False, min=6, not_empty=False)
131 131 )
132 132
133 133 password_change = v.StringBoolean(if_missing=False)
134 134 create_repo_group = v.StringBoolean(if_missing=False)
135 135
136 136 active = v.StringBoolean(if_missing=False)
137 137 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
138 138 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
139 139 email = All(v.Email(not_empty=True), v.UniqSystemEmail(old_data))
140 140 extern_name = v.UnicodeString(strip=True)
141 141 extern_type = v.UnicodeString(strip=True)
142 142 language = v.OneOf(available_languages, hideList=False,
143 143 testValueList=True, if_missing=None)
144 144 chained_validators = [v.ValidPasswordsMatch()]
145 145 return _UserForm
146 146
147 147
148 148 def UserGroupForm(edit=False, old_data=None, allow_disabled=False):
149 149 old_data = old_data or {}
150 150
151 151 class _UserGroupForm(formencode.Schema):
152 152 allow_extra_fields = True
153 153 filter_extra_fields = True
154 154
155 155 users_group_name = All(
156 156 v.UnicodeString(strip=True, min=1, not_empty=True),
157 157 v.ValidUserGroup(edit, old_data)
158 158 )
159 159 user_group_description = v.UnicodeString(strip=True, min=1,
160 160 not_empty=False)
161 161
162 162 users_group_active = v.StringBoolean(if_missing=False)
163 163
164 164 if edit:
165 165 # this is user group owner
166 166 user = All(
167 167 v.UnicodeString(not_empty=True),
168 168 v.ValidRepoUser(allow_disabled))
169 169 return _UserGroupForm
170 170
171 171
172 172 def RepoGroupForm(edit=False, old_data=None, available_groups=None,
173 173 can_create_in_root=False, allow_disabled=False):
174 174 old_data = old_data or {}
175 175 available_groups = available_groups or []
176 176
177 177 class _RepoGroupForm(formencode.Schema):
178 178 allow_extra_fields = True
179 179 filter_extra_fields = False
180 180
181 181 group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
182 182 v.SlugifyName(),)
183 183 group_description = v.UnicodeString(strip=True, min=1,
184 184 not_empty=False)
185 185 group_copy_permissions = v.StringBoolean(if_missing=False)
186 186
187 187 group_parent_id = v.OneOf(available_groups, hideList=False,
188 188 testValueList=True, not_empty=True)
189 189 enable_locking = v.StringBoolean(if_missing=False)
190 190 chained_validators = [
191 191 v.ValidRepoGroup(edit, old_data, can_create_in_root)]
192 192
193 193 if edit:
194 194 # this is repo group owner
195 195 user = All(
196 196 v.UnicodeString(not_empty=True),
197 197 v.ValidRepoUser(allow_disabled))
198 198
199 199 return _RepoGroupForm
200 200
201 201
202 202 def RegisterForm(edit=False, old_data={}):
203 203 class _RegisterForm(formencode.Schema):
204 204 allow_extra_fields = True
205 205 filter_extra_fields = True
206 206 username = All(
207 207 v.ValidUsername(edit, old_data),
208 208 v.UnicodeString(strip=True, min=1, not_empty=True)
209 209 )
210 210 password = All(
211 211 v.ValidPassword(),
212 212 v.UnicodeString(strip=False, min=6, not_empty=True)
213 213 )
214 214 password_confirmation = All(
215 215 v.ValidPassword(),
216 216 v.UnicodeString(strip=False, min=6, not_empty=True)
217 217 )
218 218 active = v.StringBoolean(if_missing=False)
219 219 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
220 220 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
221 221 email = All(v.Email(not_empty=True), v.UniqSystemEmail(old_data))
222 222
223 223 chained_validators = [v.ValidPasswordsMatch()]
224 224
225 225 return _RegisterForm
226 226
227 227
228 228 def PasswordResetForm():
229 229 class _PasswordResetForm(formencode.Schema):
230 230 allow_extra_fields = True
231 231 filter_extra_fields = True
232 232 email = All(v.ValidSystemEmail(), v.Email(not_empty=True))
233 233 return _PasswordResetForm
234 234
235 235
236 236 def RepoForm(edit=False, old_data=None, repo_groups=None, landing_revs=None,
237 237 allow_disabled=False):
238 238 old_data = old_data or {}
239 239 repo_groups = repo_groups or []
240 240 landing_revs = landing_revs or []
241 241 supported_backends = BACKENDS.keys()
242 242
243 243 class _RepoForm(formencode.Schema):
244 244 allow_extra_fields = True
245 245 filter_extra_fields = False
246 246 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
247 247 v.SlugifyName(), v.CannotHaveGitSuffix())
248 248 repo_group = All(v.CanWriteGroup(old_data),
249 249 v.OneOf(repo_groups, hideList=True))
250 250 repo_type = v.OneOf(supported_backends, required=False,
251 251 if_missing=old_data.get('repo_type'))
252 252 repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
253 253 repo_private = v.StringBoolean(if_missing=False)
254 254 repo_landing_rev = v.OneOf(landing_revs, hideList=True)
255 255 repo_copy_permissions = v.StringBoolean(if_missing=False)
256 256 clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))
257 257
258 258 repo_enable_statistics = v.StringBoolean(if_missing=False)
259 259 repo_enable_downloads = v.StringBoolean(if_missing=False)
260 260 repo_enable_locking = v.StringBoolean(if_missing=False)
261 261
262 262 if edit:
263 263 # this is repo owner
264 264 user = All(
265 265 v.UnicodeString(not_empty=True),
266 266 v.ValidRepoUser(allow_disabled))
267 267 clone_uri_change = v.UnicodeString(
268 268 not_empty=False, if_missing=v.Missing)
269 269
270 270 chained_validators = [v.ValidCloneUri(),
271 271 v.ValidRepoName(edit, old_data)]
272 272 return _RepoForm
273 273
274 274
275 275 def RepoPermsForm():
276 276 class _RepoPermsForm(formencode.Schema):
277 277 allow_extra_fields = True
278 278 filter_extra_fields = False
279 279 chained_validators = [v.ValidPerms(type_='repo')]
280 280 return _RepoPermsForm
281 281
282 282
283 283 def RepoGroupPermsForm(valid_recursive_choices):
284 284 class _RepoGroupPermsForm(formencode.Schema):
285 285 allow_extra_fields = True
286 286 filter_extra_fields = False
287 287 recursive = v.OneOf(valid_recursive_choices)
288 288 chained_validators = [v.ValidPerms(type_='repo_group')]
289 289 return _RepoGroupPermsForm
290 290
291 291
292 292 def UserGroupPermsForm():
293 293 class _UserPermsForm(formencode.Schema):
294 294 allow_extra_fields = True
295 295 filter_extra_fields = False
296 296 chained_validators = [v.ValidPerms(type_='user_group')]
297 297 return _UserPermsForm
298 298
299 299
300 300 def RepoFieldForm():
301 301 class _RepoFieldForm(formencode.Schema):
302 302 filter_extra_fields = True
303 303 allow_extra_fields = True
304 304
305 305 new_field_key = All(v.FieldKey(),
306 306 v.UnicodeString(strip=True, min=3, not_empty=True))
307 307 new_field_value = v.UnicodeString(not_empty=False, if_missing=u'')
308 308 new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
309 309 if_missing='str')
310 310 new_field_label = v.UnicodeString(not_empty=False)
311 311 new_field_desc = v.UnicodeString(not_empty=False)
312 312
313 313 return _RepoFieldForm
314 314
315 315
316 316 def RepoForkForm(edit=False, old_data={}, supported_backends=BACKENDS.keys(),
317 317 repo_groups=[], landing_revs=[]):
318 318 class _RepoForkForm(formencode.Schema):
319 319 allow_extra_fields = True
320 320 filter_extra_fields = False
321 321 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
322 322 v.SlugifyName())
323 323 repo_group = All(v.CanWriteGroup(),
324 324 v.OneOf(repo_groups, hideList=True))
325 325 repo_type = All(v.ValidForkType(old_data), v.OneOf(supported_backends))
326 326 description = v.UnicodeString(strip=True, min=1, not_empty=True)
327 327 private = v.StringBoolean(if_missing=False)
328 328 copy_permissions = v.StringBoolean(if_missing=False)
329 329 fork_parent_id = v.UnicodeString()
330 330 chained_validators = [v.ValidForkName(edit, old_data)]
331 331 landing_rev = v.OneOf(landing_revs, hideList=True)
332 332
333 333 return _RepoForkForm
334 334
335 335
336 336 def ApplicationSettingsForm():
337 337 class _ApplicationSettingsForm(formencode.Schema):
338 338 allow_extra_fields = True
339 339 filter_extra_fields = False
340 340 rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False)
341 341 rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True)
342 342 rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False)
343 343 rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False)
344 344 rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
345 345 rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
346 346 rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False)
347 347 rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False)
348 348
349 349 return _ApplicationSettingsForm
350 350
351 351
352 352 def ApplicationVisualisationForm():
353 353 class _ApplicationVisualisationForm(formencode.Schema):
354 354 allow_extra_fields = True
355 355 filter_extra_fields = False
356 356 rhodecode_show_public_icon = v.StringBoolean(if_missing=False)
357 357 rhodecode_show_private_icon = v.StringBoolean(if_missing=False)
358 358 rhodecode_stylify_metatags = v.StringBoolean(if_missing=False)
359 359
360 360 rhodecode_repository_fields = v.StringBoolean(if_missing=False)
361 361 rhodecode_lightweight_journal = v.StringBoolean(if_missing=False)
362 362 rhodecode_dashboard_items = v.Int(min=5, not_empty=True)
363 363 rhodecode_admin_grid_items = v.Int(min=5, not_empty=True)
364 364 rhodecode_show_version = v.StringBoolean(if_missing=False)
365 365 rhodecode_use_gravatar = v.StringBoolean(if_missing=False)
366 366 rhodecode_markup_renderer = v.OneOf(['markdown', 'rst'])
367 367 rhodecode_gravatar_url = v.UnicodeString(min=3)
368 368 rhodecode_clone_uri_tmpl = v.UnicodeString(min=3)
369 369 rhodecode_support_url = v.UnicodeString()
370 370 rhodecode_show_revision_number = v.StringBoolean(if_missing=False)
371 371 rhodecode_show_sha_length = v.Int(min=4, not_empty=True)
372 372
373 373 return _ApplicationVisualisationForm
374 374
375 375
376 376 class _BaseVcsSettingsForm(formencode.Schema):
377 377 allow_extra_fields = True
378 378 filter_extra_fields = False
379 379 hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
380 380 hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
381 381 hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)
382 382
383 383 # PR/Code-review
384 384 rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False)
385 385 rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False)
386 386
387 387 # hg
388 388 extensions_largefiles = v.StringBoolean(if_missing=False)
389 389 extensions_evolve = v.StringBoolean(if_missing=False)
390 390 phases_publish = v.StringBoolean(if_missing=False)
391 391
392 392 rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False)
393 393 rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False)
394 394
395 395 # git
396 396 vcs_git_lfs_enabled = v.StringBoolean(if_missing=False)
397 rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
398 rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)
397 399
398 400 # svn
399 401 vcs_svn_proxy_http_requests_enabled = v.StringBoolean(if_missing=False)
400 402 vcs_svn_proxy_http_server_url = v.UnicodeString(strip=True, if_missing=None)
401 403
402 404
403 405 def ApplicationUiSettingsForm():
404 406 class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
405 407 web_push_ssl = v.StringBoolean(if_missing=False)
406 408 paths_root_path = All(
407 409 v.ValidPath(),
408 410 v.UnicodeString(strip=True, min=1, not_empty=True)
409 411 )
410 412 largefiles_usercache = All(
411 413 v.ValidPath(),
412 414 v.UnicodeString(strip=True, min=2, not_empty=True))
413 415 vcs_git_lfs_store_location = All(
414 416 v.ValidPath(),
415 417 v.UnicodeString(strip=True, min=2, not_empty=True))
416 418 extensions_hgsubversion = v.StringBoolean(if_missing=False)
417 419 extensions_hggit = v.StringBoolean(if_missing=False)
418 420 new_svn_branch = v.ValidSvnPattern(section='vcs_svn_branch')
419 421 new_svn_tag = v.ValidSvnPattern(section='vcs_svn_tag')
420 422
421 423 return _ApplicationUiSettingsForm
422 424
423 425
424 426 def RepoVcsSettingsForm(repo_name):
425 427 class _RepoVcsSettingsForm(_BaseVcsSettingsForm):
426 428 inherit_global_settings = v.StringBoolean(if_missing=False)
427 429 new_svn_branch = v.ValidSvnPattern(
428 430 section='vcs_svn_branch', repo_name=repo_name)
429 431 new_svn_tag = v.ValidSvnPattern(
430 432 section='vcs_svn_tag', repo_name=repo_name)
431 433
432 434 return _RepoVcsSettingsForm
433 435
434 436
435 437 def LabsSettingsForm():
436 438 class _LabSettingsForm(formencode.Schema):
437 439 allow_extra_fields = True
438 440 filter_extra_fields = False
439 441
440 442 return _LabSettingsForm
441 443
442 444
443 445 def ApplicationPermissionsForm(
444 446 register_choices, password_reset_choices, extern_activate_choices):
445 447 class _DefaultPermissionsForm(formencode.Schema):
446 448 allow_extra_fields = True
447 449 filter_extra_fields = True
448 450
449 451 anonymous = v.StringBoolean(if_missing=False)
450 452 default_register = v.OneOf(register_choices)
451 453 default_register_message = v.UnicodeString()
452 454 default_password_reset = v.OneOf(password_reset_choices)
453 455 default_extern_activate = v.OneOf(extern_activate_choices)
454 456
455 457 return _DefaultPermissionsForm
456 458
457 459
458 460 def ObjectPermissionsForm(repo_perms_choices, group_perms_choices,
459 461 user_group_perms_choices):
460 462 class _ObjectPermissionsForm(formencode.Schema):
461 463 allow_extra_fields = True
462 464 filter_extra_fields = True
463 465 overwrite_default_repo = v.StringBoolean(if_missing=False)
464 466 overwrite_default_group = v.StringBoolean(if_missing=False)
465 467 overwrite_default_user_group = v.StringBoolean(if_missing=False)
466 468 default_repo_perm = v.OneOf(repo_perms_choices)
467 469 default_group_perm = v.OneOf(group_perms_choices)
468 470 default_user_group_perm = v.OneOf(user_group_perms_choices)
469 471
470 472 return _ObjectPermissionsForm
471 473
472 474
473 475 def UserPermissionsForm(create_choices, create_on_write_choices,
474 476 repo_group_create_choices, user_group_create_choices,
475 477 fork_choices, inherit_default_permissions_choices):
476 478 class _DefaultPermissionsForm(formencode.Schema):
477 479 allow_extra_fields = True
478 480 filter_extra_fields = True
479 481
480 482 anonymous = v.StringBoolean(if_missing=False)
481 483
482 484 default_repo_create = v.OneOf(create_choices)
483 485 default_repo_create_on_write = v.OneOf(create_on_write_choices)
484 486 default_user_group_create = v.OneOf(user_group_create_choices)
485 487 default_repo_group_create = v.OneOf(repo_group_create_choices)
486 488 default_fork_create = v.OneOf(fork_choices)
487 489 default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices)
488 490
489 491 return _DefaultPermissionsForm
490 492
491 493
492 494 def UserIndividualPermissionsForm():
493 495 class _DefaultPermissionsForm(formencode.Schema):
494 496 allow_extra_fields = True
495 497 filter_extra_fields = True
496 498
497 499 inherit_default_permissions = v.StringBoolean(if_missing=False)
498 500
499 501 return _DefaultPermissionsForm
500 502
501 503
502 504 def DefaultsForm(edit=False, old_data={}, supported_backends=BACKENDS.keys()):
503 505 class _DefaultsForm(formencode.Schema):
504 506 allow_extra_fields = True
505 507 filter_extra_fields = True
506 508 default_repo_type = v.OneOf(supported_backends)
507 509 default_repo_private = v.StringBoolean(if_missing=False)
508 510 default_repo_enable_statistics = v.StringBoolean(if_missing=False)
509 511 default_repo_enable_downloads = v.StringBoolean(if_missing=False)
510 512 default_repo_enable_locking = v.StringBoolean(if_missing=False)
511 513
512 514 return _DefaultsForm
513 515
514 516
515 517 def AuthSettingsForm():
516 518 class _AuthSettingsForm(formencode.Schema):
517 519 allow_extra_fields = True
518 520 filter_extra_fields = True
519 521 auth_plugins = All(v.ValidAuthPlugins(),
520 522 v.UniqueListFromString()(not_empty=True))
521 523
522 524 return _AuthSettingsForm
523 525
524 526
525 527 def UserExtraEmailForm():
526 528 class _UserExtraEmailForm(formencode.Schema):
527 529 email = All(v.UniqSystemEmail(), v.Email(not_empty=True))
528 530 return _UserExtraEmailForm
529 531
530 532
531 533 def UserExtraIpForm():
532 534 class _UserExtraIpForm(formencode.Schema):
533 535 ip = v.ValidIp()(not_empty=True)
534 536 return _UserExtraIpForm
535 537
536 538
537 539
538 540 def PullRequestForm(repo_id):
539 541 class ReviewerForm(formencode.Schema):
540 542 user_id = v.Int(not_empty=True)
541 543 reasons = All()
542 544 mandatory = v.StringBoolean()
543 545
544 546 class _PullRequestForm(formencode.Schema):
545 547 allow_extra_fields = True
546 548 filter_extra_fields = True
547 549
548 550 common_ancestor = v.UnicodeString(strip=True, required=True)
549 551 source_repo = v.UnicodeString(strip=True, required=True)
550 552 source_ref = v.UnicodeString(strip=True, required=True)
551 553 target_repo = v.UnicodeString(strip=True, required=True)
552 554 target_ref = v.UnicodeString(strip=True, required=True)
553 555 revisions = All(#v.NotReviewedRevisions(repo_id)(),
554 556 v.UniqueList()(not_empty=True))
555 557 review_members = formencode.ForEach(ReviewerForm())
556 558 pullrequest_title = v.UnicodeString(strip=True, required=True)
557 559 pullrequest_desc = v.UnicodeString(strip=True, required=False)
558 560
559 561 return _PullRequestForm
560 562
561 563
562 564 def IssueTrackerPatternsForm():
563 565 class _IssueTrackerPatternsForm(formencode.Schema):
564 566 allow_extra_fields = True
565 567 filter_extra_fields = False
566 568 chained_validators = [v.ValidPattern()]
567 569 return _IssueTrackerPatternsForm
@@ -1,1595 +1,1609 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26 from collections import namedtuple
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31
32 32 from pylons.i18n.translation import _
33 33 from pylons.i18n.translation import lazy_ugettext
34 34 from pyramid.threadlocal import get_current_request
35 35 from sqlalchemy import or_
36 36
37 37 from rhodecode import events
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from rhodecode.lib.compat import OrderedDict
41 41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 42 from rhodecode.lib.markup_renderer import (
43 43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 45 from rhodecode.lib.vcs.backends.base import (
46 46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 48 from rhodecode.lib.vcs.exceptions import (
49 49 CommitDoesNotExistError, EmptyRepositoryError)
50 50 from rhodecode.model import BaseModel
51 51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 52 from rhodecode.model.comment import CommentsModel
53 53 from rhodecode.model.db import (
54 54 PullRequest, PullRequestReviewers, ChangesetStatus,
55 55 PullRequestVersion, ChangesetComment, Repository)
56 56 from rhodecode.model.meta import Session
57 57 from rhodecode.model.notification import NotificationModel, \
58 58 EmailNotificationModel
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.settings import VcsSettingsModel
61 61
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
66 66 # Data structure to hold the response data when updating commits during a pull
67 67 # request update.
68 68 UpdateResponse = namedtuple('UpdateResponse', [
69 69 'executed', 'reason', 'new', 'old', 'changes',
70 70 'source_changed', 'target_changed'])
71 71
72 72
73 73 class PullRequestModel(BaseModel):
74 74
75 75 cls = PullRequest
76 76
77 77 DIFF_CONTEXT = 3
78 78
79 79 MERGE_STATUS_MESSAGES = {
80 80 MergeFailureReason.NONE: lazy_ugettext(
81 81 'This pull request can be automatically merged.'),
82 82 MergeFailureReason.UNKNOWN: lazy_ugettext(
83 83 'This pull request cannot be merged because of an unhandled'
84 84 ' exception.'),
85 85 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
86 86 'This pull request cannot be merged because of merge conflicts.'),
87 87 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
88 88 'This pull request could not be merged because push to target'
89 89 ' failed.'),
90 90 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
91 91 'This pull request cannot be merged because the target is not a'
92 92 ' head.'),
93 93 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
94 94 'This pull request cannot be merged because the source contains'
95 95 ' more branches than the target.'),
96 96 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
97 97 'This pull request cannot be merged because the target has'
98 98 ' multiple heads.'),
99 99 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
100 100 'This pull request cannot be merged because the target repository'
101 101 ' is locked.'),
102 102 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
103 103 'This pull request cannot be merged because the target or the '
104 104 'source reference is missing.'),
105 105 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
106 106 'This pull request cannot be merged because the target '
107 107 'reference is missing.'),
108 108 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
109 109 'This pull request cannot be merged because the source '
110 110 'reference is missing.'),
111 111 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
112 112 'This pull request cannot be merged because of conflicts related '
113 113 'to sub repositories.'),
114 114 }
115 115
116 116 UPDATE_STATUS_MESSAGES = {
117 117 UpdateFailureReason.NONE: lazy_ugettext(
118 118 'Pull request update successful.'),
119 119 UpdateFailureReason.UNKNOWN: lazy_ugettext(
120 120 'Pull request update failed because of an unknown error.'),
121 121 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
122 122 'No update needed because the source and target have not changed.'),
123 123 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
124 124 'Pull request cannot be updated because the reference type is '
125 125 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
126 126 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
127 127 'This pull request cannot be updated because the target '
128 128 'reference is missing.'),
129 129 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
130 130 'This pull request cannot be updated because the source '
131 131 'reference is missing.'),
132 132 }
133 133
134 134 def __get_pull_request(self, pull_request):
135 135 return self._get_instance((
136 136 PullRequest, PullRequestVersion), pull_request)
137 137
138 138 def _check_perms(self, perms, pull_request, user, api=False):
139 139 if not api:
140 140 return h.HasRepoPermissionAny(*perms)(
141 141 user=user, repo_name=pull_request.target_repo.repo_name)
142 142 else:
143 143 return h.HasRepoPermissionAnyApi(*perms)(
144 144 user=user, repo_name=pull_request.target_repo.repo_name)
145 145
146 146 def check_user_read(self, pull_request, user, api=False):
147 147 _perms = ('repository.admin', 'repository.write', 'repository.read',)
148 148 return self._check_perms(_perms, pull_request, user, api)
149 149
150 150 def check_user_merge(self, pull_request, user, api=False):
151 151 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
152 152 return self._check_perms(_perms, pull_request, user, api)
153 153
154 154 def check_user_update(self, pull_request, user, api=False):
155 155 owner = user.user_id == pull_request.user_id
156 156 return self.check_user_merge(pull_request, user, api) or owner
157 157
158 158 def check_user_delete(self, pull_request, user):
159 159 owner = user.user_id == pull_request.user_id
160 160 _perms = ('repository.admin',)
161 161 return self._check_perms(_perms, pull_request, user) or owner
162 162
163 163 def check_user_change_status(self, pull_request, user, api=False):
164 164 reviewer = user.user_id in [x.user_id for x in
165 165 pull_request.reviewers]
166 166 return self.check_user_update(pull_request, user, api) or reviewer
167 167
168 168 def get(self, pull_request):
169 169 return self.__get_pull_request(pull_request)
170 170
171 171 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
172 172 opened_by=None, order_by=None,
173 173 order_dir='desc'):
174 174 repo = None
175 175 if repo_name:
176 176 repo = self._get_repo(repo_name)
177 177
178 178 q = PullRequest.query()
179 179
180 180 # source or target
181 181 if repo and source:
182 182 q = q.filter(PullRequest.source_repo == repo)
183 183 elif repo:
184 184 q = q.filter(PullRequest.target_repo == repo)
185 185
186 186 # closed,opened
187 187 if statuses:
188 188 q = q.filter(PullRequest.status.in_(statuses))
189 189
190 190 # opened by filter
191 191 if opened_by:
192 192 q = q.filter(PullRequest.user_id.in_(opened_by))
193 193
194 194 if order_by:
195 195 order_map = {
196 196 'name_raw': PullRequest.pull_request_id,
197 197 'title': PullRequest.title,
198 198 'updated_on_raw': PullRequest.updated_on,
199 199 'target_repo': PullRequest.target_repo_id
200 200 }
201 201 if order_dir == 'asc':
202 202 q = q.order_by(order_map[order_by].asc())
203 203 else:
204 204 q = q.order_by(order_map[order_by].desc())
205 205
206 206 return q
207 207
208 208 def count_all(self, repo_name, source=False, statuses=None,
209 209 opened_by=None):
210 210 """
211 211 Count the number of pull requests for a specific repository.
212 212
213 213 :param repo_name: target or source repo
214 214 :param source: boolean flag to specify if repo_name refers to source
215 215 :param statuses: list of pull request statuses
216 216 :param opened_by: author user of the pull request
217 217 :returns: int number of pull requests
218 218 """
219 219 q = self._prepare_get_all_query(
220 220 repo_name, source=source, statuses=statuses, opened_by=opened_by)
221 221
222 222 return q.count()
223 223
224 224 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
225 225 offset=0, length=None, order_by=None, order_dir='desc'):
226 226 """
227 227 Get all pull requests for a specific repository.
228 228
229 229 :param repo_name: target or source repo
230 230 :param source: boolean flag to specify if repo_name refers to source
231 231 :param statuses: list of pull request statuses
232 232 :param opened_by: author user of the pull request
233 233 :param offset: pagination offset
234 234 :param length: length of returned list
235 235 :param order_by: order of the returned list
236 236 :param order_dir: 'asc' or 'desc' ordering direction
237 237 :returns: list of pull requests
238 238 """
239 239 q = self._prepare_get_all_query(
240 240 repo_name, source=source, statuses=statuses, opened_by=opened_by,
241 241 order_by=order_by, order_dir=order_dir)
242 242
243 243 if length:
244 244 pull_requests = q.limit(length).offset(offset).all()
245 245 else:
246 246 pull_requests = q.all()
247 247
248 248 return pull_requests
249 249
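A hedged usage sketch of ``get_all`` based on the signature above; the repository name and status values are assumptions for illustration, not taken from this changeset.

.. code-block:: python

    # Assumes a configured database session, as in a running RhodeCode instance.
    model = PullRequestModel()
    prs = model.get_all(
        'group/myrepo', statuses=['new', 'open'], opened_by=None,
        offset=0, length=20, order_by='updated_on_raw', order_dir='desc')
    for pr in prs:
        print(pr.pull_request_id, pr.title)
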
250 250 def count_awaiting_review(self, repo_name, source=False, statuses=None,
251 251 opened_by=None):
252 252 """
253 253 Count the number of pull requests for a specific repository that are
254 254 awaiting review.
255 255
256 256 :param repo_name: target or source repo
257 257 :param source: boolean flag to specify if repo_name refers to source
258 258 :param statuses: list of pull request statuses
259 259 :param opened_by: author user of the pull request
260 260 :returns: int number of pull requests
261 261 """
262 262 pull_requests = self.get_awaiting_review(
263 263 repo_name, source=source, statuses=statuses, opened_by=opened_by)
264 264
265 265 return len(pull_requests)
266 266
267 267 def get_awaiting_review(self, repo_name, source=False, statuses=None,
268 268 opened_by=None, offset=0, length=None,
269 269 order_by=None, order_dir='desc'):
270 270 """
271 271 Get all pull requests for a specific repository that are awaiting
272 272 review.
273 273
274 274 :param repo_name: target or source repo
275 275 :param source: boolean flag to specify if repo_name refers to source
276 276 :param statuses: list of pull request statuses
277 277 :param opened_by: author user of the pull request
278 278 :param offset: pagination offset
279 279 :param length: length of returned list
280 280 :param order_by: order of the returned list
281 281 :param order_dir: 'asc' or 'desc' ordering direction
282 282 :returns: list of pull requests
283 283 """
284 284 pull_requests = self.get_all(
285 285 repo_name, source=source, statuses=statuses, opened_by=opened_by,
286 286 order_by=order_by, order_dir=order_dir)
287 287
288 288 _filtered_pull_requests = []
289 289 for pr in pull_requests:
290 290 status = pr.calculated_review_status()
291 291 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
292 292 ChangesetStatus.STATUS_UNDER_REVIEW]:
293 293 _filtered_pull_requests.append(pr)
294 294 if length:
295 295 return _filtered_pull_requests[offset:offset+length]
296 296 else:
297 297 return _filtered_pull_requests
298 298
299 299 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
300 300 opened_by=None, user_id=None):
301 301 """
302 302 Count the number of pull requests for a specific repository that are
303 303 awaiting review from a specific user.
304 304
305 305 :param repo_name: target or source repo
306 306 :param source: boolean flag to specify if repo_name refers to source
307 307 :param statuses: list of pull request statuses
308 308 :param opened_by: author user of the pull request
309 309 :param user_id: reviewer user of the pull request
310 310 :returns: int number of pull requests
311 311 """
312 312 pull_requests = self.get_awaiting_my_review(
313 313 repo_name, source=source, statuses=statuses, opened_by=opened_by,
314 314 user_id=user_id)
315 315
316 316 return len(pull_requests)
317 317
318 318 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
319 319 opened_by=None, user_id=None, offset=0,
320 320 length=None, order_by=None, order_dir='desc'):
321 321 """
322 322 Get all pull requests for a specific repository that are awaiting
323 323 review from a specific user.
324 324
325 325 :param repo_name: target or source repo
326 326 :param source: boolean flag to specify if repo_name refers to source
327 327 :param statuses: list of pull request statuses
328 328 :param opened_by: author user of the pull request
329 329 :param user_id: reviewer user of the pull request
330 330 :param offset: pagination offset
331 331 :param length: length of returned list
332 332 :param order_by: order of the returned list
333 333 :param order_dir: 'asc' or 'desc' ordering direction
334 334 :returns: list of pull requests
335 335 """
336 336 pull_requests = self.get_all(
337 337 repo_name, source=source, statuses=statuses, opened_by=opened_by,
338 338 order_by=order_by, order_dir=order_dir)
339 339
340 340 _my = PullRequestModel().get_not_reviewed(user_id)
341 341 my_participation = []
342 342 for pr in pull_requests:
343 343 if pr in _my:
344 344 my_participation.append(pr)
345 345 _filtered_pull_requests = my_participation
346 346 if length:
347 347 return _filtered_pull_requests[offset:offset+length]
348 348 else:
349 349 return _filtered_pull_requests
350 350
351 351 def get_not_reviewed(self, user_id):
352 352 return [
353 353 x.pull_request for x in PullRequestReviewers.query().filter(
354 354 PullRequestReviewers.user_id == user_id).all()
355 355 ]
356 356
357 357 def _prepare_participating_query(self, user_id=None, statuses=None,
358 358 order_by=None, order_dir='desc'):
359 359 q = PullRequest.query()
360 360 if user_id:
361 361 reviewers_subquery = Session().query(
362 362 PullRequestReviewers.pull_request_id).filter(
363 363 PullRequestReviewers.user_id == user_id).subquery()
364 364 user_filter = or_(
365 365 PullRequest.user_id == user_id,
366 366 PullRequest.pull_request_id.in_(reviewers_subquery)
367 367 )
368 368 q = PullRequest.query().filter(user_filter)
369 369
370 370 # closed,opened
371 371 if statuses:
372 372 q = q.filter(PullRequest.status.in_(statuses))
373 373
374 374 if order_by:
375 375 order_map = {
376 376 'name_raw': PullRequest.pull_request_id,
377 377 'title': PullRequest.title,
378 378 'updated_on_raw': PullRequest.updated_on,
379 379 'target_repo': PullRequest.target_repo_id
380 380 }
381 381 if order_dir == 'asc':
382 382 q = q.order_by(order_map[order_by].asc())
383 383 else:
384 384 q = q.order_by(order_map[order_by].desc())
385 385
386 386 return q
387 387
388 388 def count_im_participating_in(self, user_id=None, statuses=None):
389 389 q = self._prepare_participating_query(user_id, statuses=statuses)
390 390 return q.count()
391 391
392 392 def get_im_participating_in(
393 393 self, user_id=None, statuses=None, offset=0,
394 394 length=None, order_by=None, order_dir='desc'):
395 395 """
396 396 Get all pull requests that I'm participating in or have opened
397 397 """
398 398
399 399 q = self._prepare_participating_query(
400 400 user_id, statuses=statuses, order_by=order_by,
401 401 order_dir=order_dir)
402 402
403 403 if length:
404 404 pull_requests = q.limit(length).offset(offset).all()
405 405 else:
406 406 pull_requests = q.all()
407 407
408 408 return pull_requests
409 409
410 410 def get_versions(self, pull_request):
411 411 """
412 412 returns versions of the pull request, sorted by version ID ascending
413 413 """
414 414 return PullRequestVersion.query()\
415 415 .filter(PullRequestVersion.pull_request == pull_request)\
416 416 .order_by(PullRequestVersion.pull_request_version_id.asc())\
417 417 .all()
418 418
419 419 def create(self, created_by, source_repo, source_ref, target_repo,
420 420 target_ref, revisions, reviewers, title, description=None,
421 421 reviewer_data=None):
422 422
423 423 created_by_user = self._get_user(created_by)
424 424 source_repo = self._get_repo(source_repo)
425 425 target_repo = self._get_repo(target_repo)
426 426
427 427 pull_request = PullRequest()
428 428 pull_request.source_repo = source_repo
429 429 pull_request.source_ref = source_ref
430 430 pull_request.target_repo = target_repo
431 431 pull_request.target_ref = target_ref
432 432 pull_request.revisions = revisions
433 433 pull_request.title = title
434 434 pull_request.description = description
435 435 pull_request.author = created_by_user
436 436 pull_request.reviewer_data = reviewer_data
437 437
438 438 Session().add(pull_request)
439 439 Session().flush()
440 440
441 441 reviewer_ids = set()
442 442 # members / reviewers
443 443 for reviewer_object in reviewers:
444 444 user_id, reasons, mandatory = reviewer_object
445 445 user = self._get_user(user_id)
446 446
447 447 # skip duplicates
448 448 if user.user_id in reviewer_ids:
449 449 continue
450 450
451 451 reviewer_ids.add(user.user_id)
452 452
453 453 reviewer = PullRequestReviewers()
454 454 reviewer.user = user
455 455 reviewer.pull_request = pull_request
456 456 reviewer.reasons = reasons
457 457 reviewer.mandatory = mandatory
458 458 Session().add(reviewer)
459 459
460 460 # Set approval status to "Under Review" for all commits which are
461 461 # part of this pull request.
462 462 ChangesetStatusModel().set_status(
463 463 repo=target_repo,
464 464 status=ChangesetStatus.STATUS_UNDER_REVIEW,
465 465 user=created_by_user,
466 466 pull_request=pull_request
467 467 )
468 468
469 469 self.notify_reviewers(pull_request, reviewer_ids)
470 470 self._trigger_pull_request_hook(
471 471 pull_request, created_by_user, 'create')
472 472
473 473 creation_data = pull_request.get_api_data(with_merge_state=False)
474 474 self._log_audit_action(
475 475 'repo.pull_request.create', {'data': creation_data},
476 476 created_by_user, pull_request)
477 477
478 478 return pull_request
479 479
480 480 def _trigger_pull_request_hook(self, pull_request, user, action):
481 481 pull_request = self.__get_pull_request(pull_request)
482 482 target_scm = pull_request.target_repo.scm_instance()
483 483 if action == 'create':
484 484 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
485 485 elif action == 'merge':
486 486 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
487 487 elif action == 'close':
488 488 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
489 489 elif action == 'review_status_change':
490 490 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
491 491 elif action == 'update':
492 492 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
493 493 else:
494 494 return
495 495
496 496 trigger_hook(
497 497 username=user.username,
498 498 repo_name=pull_request.target_repo.repo_name,
499 499 repo_alias=target_scm.alias,
500 500 pull_request=pull_request)
501 501
502 502 def _get_commit_ids(self, pull_request):
503 503 """
504 504 Return the commit ids of the merged pull request.
505 505
506 506 This method does not yet deal correctly with the lack of autoupdates
507 507 or with implicit target updates.
508 508 For example: if a commit in the source repo is already in the target, it
509 509 will be reported anyway.
510 510 """
511 511 merge_rev = pull_request.merge_rev
512 512 if merge_rev is None:
513 513 raise ValueError('This pull request was not merged yet')
514 514
515 515 commit_ids = list(pull_request.revisions)
516 516 if merge_rev not in commit_ids:
517 517 commit_ids.append(merge_rev)
518 518
519 519 return commit_ids
520 520
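A short worked example of the commit-id assembly above, with hypothetical ids; the merge revision is appended only when it is not already part of the pull request revisions.

.. code-block:: python

    # Hypothetical ids, for illustration only.
    revisions = ['aaa111', 'bbb222']
    merge_rev = 'ccc333'

    commit_ids = list(revisions)
    if merge_rev not in commit_ids:
        commit_ids.append(merge_rev)
    print(commit_ids)   # ['aaa111', 'bbb222', 'ccc333']
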
521 521 def merge(self, pull_request, user, extras):
522 522 log.debug("Merging pull request %s", pull_request.pull_request_id)
523 523 merge_state = self._merge_pull_request(pull_request, user, extras)
524 524 if merge_state.executed:
525 525 log.debug(
526 526 "Merge was successful, updating the pull request comments.")
527 527 self._comment_and_close_pr(pull_request, user, merge_state)
528 528
529 529 self._log_audit_action(
530 530 'repo.pull_request.merge',
531 531 {'merge_state': merge_state.__dict__},
532 532 user, pull_request)
533 533
534 534 else:
535 535 log.warn("Merge failed, not updating the pull request.")
536 536 return merge_state
537 537
538 538 def _merge_pull_request(self, pull_request, user, extras):
539 539 target_vcs = pull_request.target_repo.scm_instance()
540 540 source_vcs = pull_request.source_repo.scm_instance()
541 541 target_ref = self._refresh_reference(
542 542 pull_request.target_ref_parts, target_vcs)
543 543
544 544 message = _(
545 545 'Merge pull request #%(pr_id)s from '
546 546 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
547 547 'pr_id': pull_request.pull_request_id,
548 548 'source_repo': source_vcs.name,
549 549 'source_ref_name': pull_request.source_ref_parts.name,
550 550 'pr_title': pull_request.title
551 551 }
552 552
553 553 workspace_id = self._workspace_id(pull_request)
554 554 use_rebase = self._use_rebase_for_merging(pull_request)
555 555 close_branch = self._close_branch_before_merging(pull_request)
556 556
557 557 callback_daemon, extras = prepare_callback_daemon(
558 558 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
559 559 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
560 560
561 561 with callback_daemon:
562 562 # TODO: johbo: Implement a clean way to run a config_override
563 563 # for a single call.
564 564 target_vcs.config.set(
565 565 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
566 566 merge_state = target_vcs.merge(
567 567 target_ref, source_vcs, pull_request.source_ref_parts,
568 568 workspace_id, user_name=user.username,
569 569 user_email=user.email, message=message, use_rebase=use_rebase,
570 570 close_branch=close_branch)
571 571 return merge_state
572 572
573 573 def _comment_and_close_pr(self, pull_request, user, merge_state):
574 574 pull_request.merge_rev = merge_state.merge_ref.commit_id
575 575 pull_request.updated_on = datetime.datetime.now()
576 576
577 577 CommentsModel().create(
578 578 text=unicode(_('Pull request merged and closed')),
579 579 repo=pull_request.target_repo.repo_id,
580 580 user=user.user_id,
581 581 pull_request=pull_request.pull_request_id,
582 582 f_path=None,
583 583 line_no=None,
584 584 closing_pr=True
585 585 )
586 586
587 587 Session().add(pull_request)
588 588 Session().flush()
589 589 # TODO: paris: replace invalidation with less radical solution
590 590 ScmModel().mark_for_invalidation(
591 591 pull_request.target_repo.repo_name)
592 592 self._trigger_pull_request_hook(pull_request, user, 'merge')
593 593
594 594 def has_valid_update_type(self, pull_request):
595 595 source_ref_type = pull_request.source_ref_parts.type
596 596 return source_ref_type in ['book', 'branch', 'tag']
597 597
598 598 def update_commits(self, pull_request):
599 599 """
600 600 Get the updated list of commits for the pull request
601 601 and return the new pull request version and the list
602 602 of commits processed by this update action
603 603 """
604 604 pull_request = self.__get_pull_request(pull_request)
605 605 source_ref_type = pull_request.source_ref_parts.type
606 606 source_ref_name = pull_request.source_ref_parts.name
607 607 source_ref_id = pull_request.source_ref_parts.commit_id
608 608
609 609 target_ref_type = pull_request.target_ref_parts.type
610 610 target_ref_name = pull_request.target_ref_parts.name
611 611 target_ref_id = pull_request.target_ref_parts.commit_id
612 612
613 613 if not self.has_valid_update_type(pull_request):
614 614 log.debug(
615 615 "Skipping update of pull request %s due to ref type: %s",
616 616 pull_request, source_ref_type)
617 617 return UpdateResponse(
618 618 executed=False,
619 619 reason=UpdateFailureReason.WRONG_REF_TYPE,
620 620 old=pull_request, new=None, changes=None,
621 621 source_changed=False, target_changed=False)
622 622
623 623 # source repo
624 624 source_repo = pull_request.source_repo.scm_instance()
625 625 try:
626 626 source_commit = source_repo.get_commit(commit_id=source_ref_name)
627 627 except CommitDoesNotExistError:
628 628 return UpdateResponse(
629 629 executed=False,
630 630 reason=UpdateFailureReason.MISSING_SOURCE_REF,
631 631 old=pull_request, new=None, changes=None,
632 632 source_changed=False, target_changed=False)
633 633
634 634 source_changed = source_ref_id != source_commit.raw_id
635 635
636 636 # target repo
637 637 target_repo = pull_request.target_repo.scm_instance()
638 638 try:
639 639 target_commit = target_repo.get_commit(commit_id=target_ref_name)
640 640 except CommitDoesNotExistError:
641 641 return UpdateResponse(
642 642 executed=False,
643 643 reason=UpdateFailureReason.MISSING_TARGET_REF,
644 644 old=pull_request, new=None, changes=None,
645 645 source_changed=False, target_changed=False)
646 646 target_changed = target_ref_id != target_commit.raw_id
647 647
648 648 if not (source_changed or target_changed):
649 649 log.debug("Nothing changed in pull request %s", pull_request)
650 650 return UpdateResponse(
651 651 executed=False,
652 652 reason=UpdateFailureReason.NO_CHANGE,
653 653 old=pull_request, new=None, changes=None,
654 654 source_changed=source_changed, target_changed=target_changed)
655 655
656 656 change_in_found = 'target repo' if target_changed else 'source repo'
657 657 log.debug('Updating pull request because of change in %s detected',
658 658 change_in_found)
659 659
660 660 # Finally, an update is needed: if the source changed we create a new
661 661 # version, otherwise we just update the existing pull request in place
662 662 if source_changed:
663 663 pull_request_version = self._create_version_from_snapshot(pull_request)
664 664 self._link_comments_to_version(pull_request_version)
665 665 else:
666 666 try:
667 667 ver = pull_request.versions[-1]
668 668 except IndexError:
669 669 ver = None
670 670
671 671 pull_request.pull_request_version_id = \
672 672 ver.pull_request_version_id if ver else None
673 673 pull_request_version = pull_request
674 674
675 675 try:
676 676 if target_ref_type in ('tag', 'branch', 'book'):
677 677 target_commit = target_repo.get_commit(target_ref_name)
678 678 else:
679 679 target_commit = target_repo.get_commit(target_ref_id)
680 680 except CommitDoesNotExistError:
681 681 return UpdateResponse(
682 682 executed=False,
683 683 reason=UpdateFailureReason.MISSING_TARGET_REF,
684 684 old=pull_request, new=None, changes=None,
685 685 source_changed=source_changed, target_changed=target_changed)
686 686
687 687 # re-compute commit ids
688 688 old_commit_ids = pull_request.revisions
689 689 pre_load = ["author", "branch", "date", "message"]
690 690 commit_ranges = target_repo.compare(
691 691 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
692 692 pre_load=pre_load)
693 693
694 694 ancestor = target_repo.get_common_ancestor(
695 695 target_commit.raw_id, source_commit.raw_id, source_repo)
696 696
697 697 pull_request.source_ref = '%s:%s:%s' % (
698 698 source_ref_type, source_ref_name, source_commit.raw_id)
699 699 pull_request.target_ref = '%s:%s:%s' % (
700 700 target_ref_type, target_ref_name, ancestor)
701 701
702 702 pull_request.revisions = [
703 703 commit.raw_id for commit in reversed(commit_ranges)]
704 704 pull_request.updated_on = datetime.datetime.now()
705 705 Session().add(pull_request)
706 706 new_commit_ids = pull_request.revisions
707 707
708 708 old_diff_data, new_diff_data = self._generate_update_diffs(
709 709 pull_request, pull_request_version)
710 710
711 711 # calculate commit and file changes
712 712 changes = self._calculate_commit_id_changes(
713 713 old_commit_ids, new_commit_ids)
714 714 file_changes = self._calculate_file_changes(
715 715 old_diff_data, new_diff_data)
716 716
717 717 # set comments as outdated if DIFFS changed
718 718 CommentsModel().outdate_comments(
719 719 pull_request, old_diff_data=old_diff_data,
720 720 new_diff_data=new_diff_data)
721 721
722 722 commit_changes = (changes.added or changes.removed)
723 723 file_node_changes = (
724 724 file_changes.added or file_changes.modified or file_changes.removed)
725 725 pr_has_changes = commit_changes or file_node_changes
726 726
727 727 # Add an automatic comment to the pull request if anything
728 728 # has changed
729 729 if pr_has_changes:
730 730 update_comment = CommentsModel().create(
731 731 text=self._render_update_message(changes, file_changes),
732 732 repo=pull_request.target_repo,
733 733 user=pull_request.author,
734 734 pull_request=pull_request,
735 735 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
736 736
737 737 # Update status to "Under Review" for added commits
738 738 for commit_id in changes.added:
739 739 ChangesetStatusModel().set_status(
740 740 repo=pull_request.source_repo,
741 741 status=ChangesetStatus.STATUS_UNDER_REVIEW,
742 742 comment=update_comment,
743 743 user=pull_request.author,
744 744 pull_request=pull_request,
745 745 revision=commit_id)
746 746
747 747 log.debug(
748 748 'Updated pull request %s, added_ids: %s, common_ids: %s, '
749 749 'removed_ids: %s', pull_request.pull_request_id,
750 750 changes.added, changes.common, changes.removed)
751 751 log.debug(
752 752 'Updated pull request with the following file changes: %s',
753 753 file_changes)
754 754
755 755 log.info(
756 756 "Updated pull request %s from commit %s to commit %s, "
757 757 "stored new version %s of this pull request.",
758 758 pull_request.pull_request_id, source_ref_id,
759 759 pull_request.source_ref_parts.commit_id,
760 760 pull_request_version.pull_request_version_id)
761 761 Session().commit()
762 762 self._trigger_pull_request_hook(
763 763 pull_request, pull_request.author, 'update')
764 764
765 765 return UpdateResponse(
766 766 executed=True, reason=UpdateFailureReason.NONE,
767 767 old=pull_request, new=pull_request_version, changes=changes,
768 768 source_changed=source_changed, target_changed=target_changed)
769 769
770 770 def _create_version_from_snapshot(self, pull_request):
771 771 version = PullRequestVersion()
772 772 version.title = pull_request.title
773 773 version.description = pull_request.description
774 774 version.status = pull_request.status
775 775 version.created_on = datetime.datetime.now()
776 776 version.updated_on = pull_request.updated_on
777 777 version.user_id = pull_request.user_id
778 778 version.source_repo = pull_request.source_repo
779 779 version.source_ref = pull_request.source_ref
780 780 version.target_repo = pull_request.target_repo
781 781 version.target_ref = pull_request.target_ref
782 782
783 783 version._last_merge_source_rev = pull_request._last_merge_source_rev
784 784 version._last_merge_target_rev = pull_request._last_merge_target_rev
785 785 version.last_merge_status = pull_request.last_merge_status
786 786 version.shadow_merge_ref = pull_request.shadow_merge_ref
787 787 version.merge_rev = pull_request.merge_rev
788 788 version.reviewer_data = pull_request.reviewer_data
789 789
790 790 version.revisions = pull_request.revisions
791 791 version.pull_request = pull_request
792 792 Session().add(version)
793 793 Session().flush()
794 794
795 795 return version
796 796
797 797 def _generate_update_diffs(self, pull_request, pull_request_version):
798 798
799 799 diff_context = (
800 800 self.DIFF_CONTEXT +
801 801 CommentsModel.needed_extra_diff_context())
802 802
803 803 source_repo = pull_request_version.source_repo
804 804 source_ref_id = pull_request_version.source_ref_parts.commit_id
805 805 target_ref_id = pull_request_version.target_ref_parts.commit_id
806 806 old_diff = self._get_diff_from_pr_or_version(
807 807 source_repo, source_ref_id, target_ref_id, context=diff_context)
808 808
809 809 source_repo = pull_request.source_repo
810 810 source_ref_id = pull_request.source_ref_parts.commit_id
811 811 target_ref_id = pull_request.target_ref_parts.commit_id
812 812
813 813 new_diff = self._get_diff_from_pr_or_version(
814 814 source_repo, source_ref_id, target_ref_id, context=diff_context)
815 815
816 816 old_diff_data = diffs.DiffProcessor(old_diff)
817 817 old_diff_data.prepare()
818 818 new_diff_data = diffs.DiffProcessor(new_diff)
819 819 new_diff_data.prepare()
820 820
821 821 return old_diff_data, new_diff_data
822 822
823 823 def _link_comments_to_version(self, pull_request_version):
824 824 """
825 825 Link all unlinked comments of this pull request to the given version.
826 826
827 827 :param pull_request_version: The `PullRequestVersion` to which
828 828 the comments shall be linked.
829 829
830 830 """
831 831 pull_request = pull_request_version.pull_request
832 832 comments = ChangesetComment.query()\
833 833 .filter(
834 834 # TODO: johbo: Should we query for the repo at all here?
835 835 # Pending decision on how comments of PRs are to be related
836 836 # to either the source repo, the target repo or no repo at all.
837 837 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
838 838 ChangesetComment.pull_request == pull_request,
839 839 ChangesetComment.pull_request_version == None)\
840 840 .order_by(ChangesetComment.comment_id.asc())
841 841
842 842 # TODO: johbo: Find out why this breaks if it is done in a bulk
843 843 # operation.
844 844 for comment in comments:
845 845 comment.pull_request_version_id = (
846 846 pull_request_version.pull_request_version_id)
847 847 Session().add(comment)
848 848
849 849 def _calculate_commit_id_changes(self, old_ids, new_ids):
850 850 added = [x for x in new_ids if x not in old_ids]
851 851 common = [x for x in new_ids if x in old_ids]
852 852 removed = [x for x in old_ids if x not in new_ids]
853 853 total = new_ids
854 854 return ChangeTuple(added, common, removed, total)
855 855
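For reference, the commit-id bookkeeping in `_calculate_commit_id_changes` is plain membership arithmetic over the old and new revision lists. A minimal sketch of the resulting `ChangeTuple`, assuming the model lives at `rhodecode.model.pull_request` and can be instantiated directly; the commit ids are made up:

.. code-block:: python

    from rhodecode.model.pull_request import PullRequestModel

    old_ids = ['aaa111', 'bbb222', 'ccc333']
    new_ids = ['bbb222', 'ccc333', 'ddd444']

    changes = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
    assert changes.added == ['ddd444']            # present only in the new list
    assert changes.common == ['bbb222', 'ccc333']
    assert changes.removed == ['aaa111']          # dropped from the new list
    assert changes.total == new_ids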
856 856 def _calculate_file_changes(self, old_diff_data, new_diff_data):
857 857
858 858 old_files = OrderedDict()
859 859 for diff_data in old_diff_data.parsed_diff:
860 860 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
861 861
862 862 added_files = []
863 863 modified_files = []
864 864 removed_files = []
865 865 for diff_data in new_diff_data.parsed_diff:
866 866 new_filename = diff_data['filename']
867 867 new_hash = md5_safe(diff_data['raw_diff'])
868 868
869 869 old_hash = old_files.get(new_filename)
870 870 if not old_hash:
871 871 # file is not present in old diff, means it's added
872 872 added_files.append(new_filename)
873 873 else:
874 874 if new_hash != old_hash:
875 875 modified_files.append(new_filename)
876 876 # now remove a file from old, since we have seen it already
877 877 del old_files[new_filename]
878 878
879 879 # removed files are those present in the old diff but not in the NEW one;
880 880 # since we delete old entries that also appear in the new diff, any
881 881 # left-overs are the removed files
882 882 removed_files.extend(old_files.keys())
883 883
884 884 return FileChangeTuple(added_files, modified_files, removed_files)
885 885
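The file-change classification above boils down to hashing each file's raw diff and comparing old vs. new hashes; whatever remains in the old map afterwards counts as removed. A standalone restatement of that logic, with plain dicts standing in for the `DiffProcessor` output (not the method itself):

.. code-block:: python

    import hashlib
    from collections import OrderedDict

    def _md5(text):
        # stand-in for rhodecode's md5_safe()
        return hashlib.md5(text).hexdigest()

    old_parsed = [{'filename': 'a.py', 'raw_diff': '-1\n+2'},
                  {'filename': 'b.py', 'raw_diff': '+x'}]
    new_parsed = [{'filename': 'a.py', 'raw_diff': '-1\n+3'},  # hash differs -> modified
                  {'filename': 'c.py', 'raw_diff': '+new'}]    # no old hash  -> added

    old_files = OrderedDict((d['filename'], _md5(d['raw_diff'])) for d in old_parsed)
    added, modified = [], []
    for d in new_parsed:
        old_hash = old_files.pop(d['filename'], None)
        if old_hash is None:
            added.append(d['filename'])
        elif _md5(d['raw_diff']) != old_hash:
            modified.append(d['filename'])
    removed = list(old_files)  # left-overs were only in the old diff
    assert (added, modified, removed) == (['c.py'], ['a.py'], ['b.py'])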
886 886 def _render_update_message(self, changes, file_changes):
887 887 """
888 888 Render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
889 889 so it always looks the same regardless of which default renderer
890 890 the system is using.
891 891
892 892 :param changes: changes named tuple
893 893 :param file_changes: file changes named tuple
894 894
895 895 """
896 896 new_status = ChangesetStatus.get_status_lbl(
897 897 ChangesetStatus.STATUS_UNDER_REVIEW)
898 898
899 899 changed_files = (
900 900 file_changes.added + file_changes.modified + file_changes.removed)
901 901
902 902 params = {
903 903 'under_review_label': new_status,
904 904 'added_commits': changes.added,
905 905 'removed_commits': changes.removed,
906 906 'changed_files': changed_files,
907 907 'added_files': file_changes.added,
908 908 'modified_files': file_changes.modified,
909 909 'removed_files': file_changes.removed,
910 910 }
911 911 renderer = RstTemplateRenderer()
912 912 return renderer.render('pull_request_update.mako', **params)
913 913
914 914 def edit(self, pull_request, title, description, user):
915 915 pull_request = self.__get_pull_request(pull_request)
916 916 old_data = pull_request.get_api_data(with_merge_state=False)
917 917 if pull_request.is_closed():
918 918 raise ValueError('This pull request is closed')
919 919 if title:
920 920 pull_request.title = title
921 921 pull_request.description = description
922 922 pull_request.updated_on = datetime.datetime.now()
923 923 Session().add(pull_request)
924 924 self._log_audit_action(
925 925 'repo.pull_request.edit', {'old_data': old_data},
926 926 user, pull_request)
927 927
928 928 def update_reviewers(self, pull_request, reviewer_data, user):
929 929 """
930 930 Update the reviewers in the pull request
931 931
932 932 :param pull_request: the pr to update
933 933 :param reviewer_data: list of tuples
934 934 [(user, ['reason1', 'reason2'], mandatory_flag)]
935 935 """
936 936
937 937 reviewers = {}
938 938 for user_id, reasons, mandatory in reviewer_data:
939 939 if isinstance(user_id, (int, basestring)):
940 940 user_id = self._get_user(user_id).user_id
941 941 reviewers[user_id] = {
942 942 'reasons': reasons, 'mandatory': mandatory}
943 943
944 944 reviewers_ids = set(reviewers.keys())
945 945 pull_request = self.__get_pull_request(pull_request)
946 946 current_reviewers = PullRequestReviewers.query()\
947 947 .filter(PullRequestReviewers.pull_request ==
948 948 pull_request).all()
949 949 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
950 950
951 951 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
952 952 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
953 953
954 954 log.debug("Adding %s reviewers", ids_to_add)
955 955 log.debug("Removing %s reviewers", ids_to_remove)
956 956 changed = False
957 957 for uid in ids_to_add:
958 958 changed = True
959 959 _usr = self._get_user(uid)
960 960 reviewer = PullRequestReviewers()
961 961 reviewer.user = _usr
962 962 reviewer.pull_request = pull_request
963 963 reviewer.reasons = reviewers[uid]['reasons']
964 964 # NOTE(marcink): mandatory shouldn't be changed now
965 965 # reviewer.mandatory = reviewers[uid]['mandatory']
966 966 Session().add(reviewer)
967 967 self._log_audit_action(
968 968 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
969 969 user, pull_request)
970 970
971 971 for uid in ids_to_remove:
972 972 changed = True
973 973 reviewers = PullRequestReviewers.query()\
974 974 .filter(PullRequestReviewers.user_id == uid,
975 975 PullRequestReviewers.pull_request == pull_request)\
976 976 .all()
977 977 # use .all() in case we accidentally added the same person twice
978 978 # this CAN happen due to the lack of DB checks
979 979 for obj in reviewers:
980 980 old_data = obj.get_dict()
981 981 Session().delete(obj)
982 982 self._log_audit_action(
983 983 'repo.pull_request.reviewer.delete',
984 984 {'old_data': old_data}, user, pull_request)
985 985
986 986 if changed:
987 987 pull_request.updated_on = datetime.datetime.now()
988 988 Session().add(pull_request)
989 989
990 990 self.notify_reviewers(pull_request, ids_to_add)
991 991 return ids_to_add, ids_to_remove
992 992
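`update_reviewers` takes `reviewer_data` as `(user, reasons, mandatory)` tuples, resolves ids or usernames, and diffs that set against the existing reviewer rows. A hedged usage sketch; the ids, username and `apiuser` below are placeholders:

.. code-block:: python

    reviewer_data = [
        (21, ['Default reviewer'], True),                 # user id, reasons, mandatory
        ('jane', ['Touched this code recently'], False),  # usernames resolve too
    ]
    added_ids, removed_ids = PullRequestModel().update_reviewers(
        pull_request, reviewer_data, user=apiuser)
    Session().commit()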
993 993 def get_url(self, pull_request, request=None, permalink=False):
994 994 if not request:
995 995 request = get_current_request()
996 996
997 997 if permalink:
998 998 return request.route_url(
999 999 'pull_requests_global',
1000 1000 pull_request_id=pull_request.pull_request_id,)
1001 1001 else:
1002 1002 return request.route_url('pullrequest_show',
1003 1003 repo_name=safe_str(pull_request.target_repo.repo_name),
1004 1004 pull_request_id=pull_request.pull_request_id,)
1005 1005
1006 1006 def get_shadow_clone_url(self, pull_request):
1007 1007 """
1008 1008 Returns qualified url pointing to the shadow repository. If this pull
1009 1009 request is closed there is no shadow repository and ``None`` will be
1010 1010 returned.
1011 1011 """
1012 1012 if pull_request.is_closed():
1013 1013 return None
1014 1014 else:
1015 1015 pr_url = urllib.unquote(self.get_url(pull_request))
1016 1016 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1017 1017
1018 1018 def notify_reviewers(self, pull_request, reviewers_ids):
1019 1019 # notification to reviewers
1020 1020 if not reviewers_ids:
1021 1021 return
1022 1022
1023 1023 pull_request_obj = pull_request
1024 1024 # get the current participants of this pull request
1025 1025 recipients = reviewers_ids
1026 1026 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1027 1027
1028 1028 pr_source_repo = pull_request_obj.source_repo
1029 1029 pr_target_repo = pull_request_obj.target_repo
1030 1030
1031 1031 pr_url = h.route_url('pullrequest_show',
1032 1032 repo_name=pr_target_repo.repo_name,
1033 1033 pull_request_id=pull_request_obj.pull_request_id,)
1034 1034
1035 1035 # set some variables for email notification
1036 1036 pr_target_repo_url = h.route_url(
1037 1037 'repo_summary', repo_name=pr_target_repo.repo_name)
1038 1038
1039 1039 pr_source_repo_url = h.route_url(
1040 1040 'repo_summary', repo_name=pr_source_repo.repo_name)
1041 1041
1042 1042 # pull request specifics
1043 1043 pull_request_commits = [
1044 1044 (x.raw_id, x.message)
1045 1045 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1046 1046
1047 1047 kwargs = {
1048 1048 'user': pull_request.author,
1049 1049 'pull_request': pull_request_obj,
1050 1050 'pull_request_commits': pull_request_commits,
1051 1051
1052 1052 'pull_request_target_repo': pr_target_repo,
1053 1053 'pull_request_target_repo_url': pr_target_repo_url,
1054 1054
1055 1055 'pull_request_source_repo': pr_source_repo,
1056 1056 'pull_request_source_repo_url': pr_source_repo_url,
1057 1057
1058 1058 'pull_request_url': pr_url,
1059 1059 }
1060 1060
1061 1061 # pre-generate the subject for notification itself
1062 1062 (subject,
1063 1063 _h, _e, # we don't care about those
1064 1064 body_plaintext) = EmailNotificationModel().render_email(
1065 1065 notification_type, **kwargs)
1066 1066
1067 1067 # create notification objects, and emails
1068 1068 NotificationModel().create(
1069 1069 created_by=pull_request.author,
1070 1070 notification_subject=subject,
1071 1071 notification_body=body_plaintext,
1072 1072 notification_type=notification_type,
1073 1073 recipients=recipients,
1074 1074 email_kwargs=kwargs,
1075 1075 )
1076 1076
1077 1077 def delete(self, pull_request, user):
1078 1078 pull_request = self.__get_pull_request(pull_request)
1079 1079 old_data = pull_request.get_api_data(with_merge_state=False)
1080 1080 self._cleanup_merge_workspace(pull_request)
1081 1081 self._log_audit_action(
1082 1082 'repo.pull_request.delete', {'old_data': old_data},
1083 1083 user, pull_request)
1084 1084 Session().delete(pull_request)
1085 1085
1086 1086 def close_pull_request(self, pull_request, user):
1087 1087 pull_request = self.__get_pull_request(pull_request)
1088 1088 self._cleanup_merge_workspace(pull_request)
1089 1089 pull_request.status = PullRequest.STATUS_CLOSED
1090 1090 pull_request.updated_on = datetime.datetime.now()
1091 1091 Session().add(pull_request)
1092 1092 self._trigger_pull_request_hook(
1093 1093 pull_request, pull_request.author, 'close')
1094 1094 self._log_audit_action(
1095 1095 'repo.pull_request.close', {}, user, pull_request)
1096 1096
1097 1097 def close_pull_request_with_comment(
1098 1098 self, pull_request, user, repo, message=None):
1099 1099
1100 1100 pull_request_review_status = pull_request.calculated_review_status()
1101 1101
1102 1102 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1103 1103 # approved only if we have voting consent
1104 1104 status = ChangesetStatus.STATUS_APPROVED
1105 1105 else:
1106 1106 status = ChangesetStatus.STATUS_REJECTED
1107 1107 status_lbl = ChangesetStatus.get_status_lbl(status)
1108 1108
1109 1109 default_message = (
1110 1110 _('Closing with status change {transition_icon} {status}.')
1111 1111 ).format(transition_icon='>', status=status_lbl)
1112 1112 text = message or default_message
1113 1113
1114 1114 # create a comment, and link it to new status
1115 1115 comment = CommentsModel().create(
1116 1116 text=text,
1117 1117 repo=repo.repo_id,
1118 1118 user=user.user_id,
1119 1119 pull_request=pull_request.pull_request_id,
1120 1120 status_change=status_lbl,
1121 1121 status_change_type=status,
1122 1122 closing_pr=True
1123 1123 )
1124 1124
1125 1125 # calculate old status before we change it
1126 1126 old_calculated_status = pull_request.calculated_review_status()
1127 1127 ChangesetStatusModel().set_status(
1128 1128 repo.repo_id,
1129 1129 status,
1130 1130 user.user_id,
1131 1131 comment=comment,
1132 1132 pull_request=pull_request.pull_request_id
1133 1133 )
1134 1134
1135 1135 Session().flush()
1136 1136 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1137 1137 # we now calculate the status of the pull request again, and based on
1138 1138 # that calculation trigger a status change. This can happen when a
1139 1139 # non-reviewer admin closes a pr: their vote doesn't change the status,
1140 1140 # while if they are a reviewer it might change it.
1141 1141 calculated_status = pull_request.calculated_review_status()
1142 1142 if old_calculated_status != calculated_status:
1143 1143 self._trigger_pull_request_hook(
1144 1144 pull_request, user, 'review_status_change')
1145 1145
1146 1146 # finally close the PR
1147 1147 PullRequestModel().close_pull_request(
1148 1148 pull_request.pull_request_id, user)
1149 1149
1150 1150 return comment, status
1151 1151
1152 1152 def merge_status(self, pull_request):
1153 1153 if not self._is_merge_enabled(pull_request):
1154 1154 return False, _('Server-side pull request merging is disabled.')
1155 1155 if pull_request.is_closed():
1156 1156 return False, _('This pull request is closed.')
1157 1157 merge_possible, msg = self._check_repo_requirements(
1158 1158 target=pull_request.target_repo, source=pull_request.source_repo)
1159 1159 if not merge_possible:
1160 1160 return merge_possible, msg
1161 1161
1162 1162 try:
1163 1163 resp = self._try_merge(pull_request)
1164 1164 log.debug("Merge response: %s", resp)
1165 1165 status = resp.possible, self.merge_status_message(
1166 1166 resp.failure_reason)
1167 1167 except NotImplementedError:
1168 1168 status = False, _('Pull request merging is not supported.')
1169 1169
1170 1170 return status
1171 1171
1172 1172 def _check_repo_requirements(self, target, source):
1173 1173 """
1174 1174 Check if `target` and `source` have compatible requirements.
1175 1175
1176 1176 Currently this is just checking for largefiles.
1177 1177 """
1178 1178 target_has_largefiles = self._has_largefiles(target)
1179 1179 source_has_largefiles = self._has_largefiles(source)
1180 1180 merge_possible = True
1181 1181 message = u''
1182 1182
1183 1183 if target_has_largefiles != source_has_largefiles:
1184 1184 merge_possible = False
1185 1185 if source_has_largefiles:
1186 1186 message = _(
1187 1187 'Target repository large files support is disabled.')
1188 1188 else:
1189 1189 message = _(
1190 1190 'Source repository large files support is disabled.')
1191 1191
1192 1192 return merge_possible, message
1193 1193
1194 1194 def _has_largefiles(self, repo):
1195 1195 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1196 1196 'extensions', 'largefiles')
1197 1197 return largefiles_ui and largefiles_ui[0].active
1198 1198
1199 1199 def _try_merge(self, pull_request):
1200 1200 """
1201 1201 Try to merge the pull request and return the merge status.
1202 1202 """
1203 1203 log.debug(
1204 1204 "Trying out if the pull request %s can be merged.",
1205 1205 pull_request.pull_request_id)
1206 1206 target_vcs = pull_request.target_repo.scm_instance()
1207 1207
1208 1208 # Refresh the target reference.
1209 1209 try:
1210 1210 target_ref = self._refresh_reference(
1211 1211 pull_request.target_ref_parts, target_vcs)
1212 1212 except CommitDoesNotExistError:
1213 1213 merge_state = MergeResponse(
1214 1214 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
1215 1215 return merge_state
1216 1216
1217 1217 target_locked = pull_request.target_repo.locked
1218 1218 if target_locked and target_locked[0]:
1219 1219 log.debug("The target repository is locked.")
1220 1220 merge_state = MergeResponse(
1221 1221 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
1222 1222 elif self._needs_merge_state_refresh(pull_request, target_ref):
1223 1223 log.debug("Refreshing the merge status of the repository.")
1224 1224 merge_state = self._refresh_merge_state(
1225 1225 pull_request, target_vcs, target_ref)
1226 1226 else:
1227 1227 possible = pull_request.\
1228 1228 last_merge_status == MergeFailureReason.NONE
1229 1229 merge_state = MergeResponse(
1230 1230 possible, False, None, pull_request.last_merge_status)
1231 1231
1232 1232 return merge_state
1233 1233
1234 1234 def _refresh_reference(self, reference, vcs_repository):
1235 1235 if reference.type in ('branch', 'book'):
1236 1236 name_or_id = reference.name
1237 1237 else:
1238 1238 name_or_id = reference.commit_id
1239 1239 refreshed_commit = vcs_repository.get_commit(name_or_id)
1240 1240 refreshed_reference = Reference(
1241 1241 reference.type, reference.name, refreshed_commit.raw_id)
1242 1242 return refreshed_reference
1243 1243
1244 1244 def _needs_merge_state_refresh(self, pull_request, target_reference):
1245 1245 return not(
1246 1246 pull_request.revisions and
1247 1247 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1248 1248 target_reference.commit_id == pull_request._last_merge_target_rev)
1249 1249
1250 1250 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1251 1251 workspace_id = self._workspace_id(pull_request)
1252 1252 source_vcs = pull_request.source_repo.scm_instance()
1253 1253 use_rebase = self._use_rebase_for_merging(pull_request)
1254 1254 close_branch = self._close_branch_before_merging(pull_request)
1255 1255 merge_state = target_vcs.merge(
1256 1256 target_reference, source_vcs, pull_request.source_ref_parts,
1257 1257 workspace_id, dry_run=True, use_rebase=use_rebase,
1258 1258 close_branch=close_branch)
1259 1259
1260 1260 # Do not store the response if there was an unknown error.
1261 1261 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1262 1262 pull_request._last_merge_source_rev = \
1263 1263 pull_request.source_ref_parts.commit_id
1264 1264 pull_request._last_merge_target_rev = target_reference.commit_id
1265 1265 pull_request.last_merge_status = merge_state.failure_reason
1266 1266 pull_request.shadow_merge_ref = merge_state.merge_ref
1267 1267 Session().add(pull_request)
1268 1268 Session().commit()
1269 1269
1270 1270 return merge_state
1271 1271
1272 1272 def _workspace_id(self, pull_request):
1273 1273 workspace_id = 'pr-%s' % pull_request.pull_request_id
1274 1274 return workspace_id
1275 1275
1276 1276 def merge_status_message(self, status_code):
1277 1277 """
1278 1278 Return a human friendly error message for the given merge status code.
1279 1279 """
1280 1280 return self.MERGE_STATUS_MESSAGES[status_code]
1281 1281
1282 1282 def generate_repo_data(self, repo, commit_id=None, branch=None,
1283 1283 bookmark=None):
1284 1284 all_refs, selected_ref = \
1285 1285 self._get_repo_pullrequest_sources(
1286 1286 repo.scm_instance(), commit_id=commit_id,
1287 1287 branch=branch, bookmark=bookmark)
1288 1288
1289 1289 refs_select2 = []
1290 1290 for element in all_refs:
1291 1291 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1292 1292 refs_select2.append({'text': element[1], 'children': children})
1293 1293
1294 1294 return {
1295 1295 'user': {
1296 1296 'user_id': repo.user.user_id,
1297 1297 'username': repo.user.username,
1298 1298 'firstname': repo.user.first_name,
1299 1299 'lastname': repo.user.last_name,
1300 1300 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1301 1301 },
1302 1302 'description': h.chop_at_smart(repo.description_safe, '\n'),
1303 1303 'refs': {
1304 1304 'all_refs': all_refs,
1305 1305 'selected_ref': selected_ref,
1306 1306 'select2_refs': refs_select2
1307 1307 }
1308 1308 }
1309 1309
1310 1310 def generate_pullrequest_title(self, source, source_ref, target):
1311 1311 return u'{source}#{at_ref} to {target}'.format(
1312 1312 source=source,
1313 1313 at_ref=source_ref,
1314 1314 target=target,
1315 1315 )
1316 1316
1317 1317 def _cleanup_merge_workspace(self, pull_request):
1318 1318 # Merging related cleanup
1319 1319 target_scm = pull_request.target_repo.scm_instance()
1320 1320 workspace_id = 'pr-%s' % pull_request.pull_request_id
1321 1321
1322 1322 try:
1323 1323 target_scm.cleanup_merge_workspace(workspace_id)
1324 1324 except NotImplementedError:
1325 1325 pass
1326 1326
1327 1327 def _get_repo_pullrequest_sources(
1328 1328 self, repo, commit_id=None, branch=None, bookmark=None):
1329 1329 """
1330 1330 Return a structure with the repo's interesting commits, suitable for
1331 1331 the selectors in the pull request controller
1332 1332
1333 1333 :param commit_id: a commit that must be in the list somehow
1334 1334 and selected by default
1335 1335 :param branch: a branch that must be in the list and selected
1336 1336 by default - even if closed
1337 1337 :param bookmark: a bookmark that must be in the list and selected
1338 1338 """
1339 1339
1340 1340 commit_id = safe_str(commit_id) if commit_id else None
1341 1341 branch = safe_str(branch) if branch else None
1342 1342 bookmark = safe_str(bookmark) if bookmark else None
1343 1343
1344 1344 selected = None
1345 1345
1346 1346 # order matters: first source that has commit_id in it will be selected
1347 1347 sources = []
1348 1348 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1349 1349 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1350 1350
1351 1351 if commit_id:
1352 1352 ref_commit = (h.short_id(commit_id), commit_id)
1353 1353 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1354 1354
1355 1355 sources.append(
1356 1356 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1357 1357 )
1358 1358
1359 1359 groups = []
1360 1360 for group_key, ref_list, group_name, match in sources:
1361 1361 group_refs = []
1362 1362 for ref_name, ref_id in ref_list:
1363 1363 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1364 1364 group_refs.append((ref_key, ref_name))
1365 1365
1366 1366 if not selected:
1367 1367 if set([commit_id, match]) & set([ref_id, ref_name]):
1368 1368 selected = ref_key
1369 1369
1370 1370 if group_refs:
1371 1371 groups.append((group_refs, group_name))
1372 1372
1373 1373 if not selected:
1374 1374 ref = commit_id or branch or bookmark
1375 1375 if ref:
1376 1376 raise CommitDoesNotExistError(
1377 1377 'No commit refs could be found matching: %s' % ref)
1378 1378 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1379 1379 selected = 'branch:%s:%s' % (
1380 1380 repo.DEFAULT_BRANCH_NAME,
1381 1381 repo.branches[repo.DEFAULT_BRANCH_NAME]
1382 1382 )
1383 1383 elif repo.commit_ids:
1384 1384 rev = repo.commit_ids[0]
1385 1385 selected = 'rev:%s:%s' % (rev, rev)
1386 1386 else:
1387 1387 raise EmptyRepositoryError()
1388 1388 return groups, selected
1389 1389
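The structure returned above is a list of `(group_refs, group_name)` pairs whose entries are keyed as `type:name:commit_id`, plus the key that should be pre-selected (falling back to the default branch). Roughly, for a repository with a single `default` branch; the hash and the `repo` variable are made up:

.. code-block:: python

    groups, selected = PullRequestModel()._get_repo_pullrequest_sources(
        repo.scm_instance())
    # groups   ~ [([('branch:default:1f3a9b7c...', 'default')], u'Branches')]
    # selected ~ 'branch:default:1f3a9b7c...'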
1390 1390 def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
1391 1391 return self._get_diff_from_pr_or_version(
1392 1392 source_repo, source_ref_id, target_ref_id, context=context)
1393 1393
1394 1394 def _get_diff_from_pr_or_version(
1395 1395 self, source_repo, source_ref_id, target_ref_id, context):
1396 1396 target_commit = source_repo.get_commit(
1397 1397 commit_id=safe_str(target_ref_id))
1398 1398 source_commit = source_repo.get_commit(
1399 1399 commit_id=safe_str(source_ref_id))
1400 1400 if isinstance(source_repo, Repository):
1401 1401 vcs_repo = source_repo.scm_instance()
1402 1402 else:
1403 1403 vcs_repo = source_repo
1404 1404
1405 1405 # TODO: johbo: In the context of an update, we cannot reach
1406 1406 # the old commit anymore with our normal mechanisms. It needs
1407 1407 # some sort of special support in the vcs layer to avoid this
1408 1408 # workaround.
1409 1409 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1410 1410 vcs_repo.alias == 'git'):
1411 1411 source_commit.raw_id = safe_str(source_ref_id)
1412 1412
1413 1413 log.debug('calculating diff between '
1414 1414 'source_ref:%s and target_ref:%s for repo `%s`',
1415 1415 target_ref_id, source_ref_id,
1416 1416 safe_unicode(vcs_repo.path))
1417 1417
1418 1418 vcs_diff = vcs_repo.get_diff(
1419 1419 commit1=target_commit, commit2=source_commit, context=context)
1420 1420 return vcs_diff
1421 1421
1422 1422 def _is_merge_enabled(self, pull_request):
1423 1423 return self._get_general_setting(
1424 1424 pull_request, 'rhodecode_pr_merge_enabled')
1425 1425
1426 1426 def _use_rebase_for_merging(self, pull_request):
1427 repo_type = pull_request.target_repo.repo_type
1428 if repo_type == 'hg':
1427 1429 return self._get_general_setting(
1428 1430 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1431 elif repo_type == 'git':
1432 return self._get_general_setting(
1433 pull_request, 'rhodecode_git_use_rebase_for_merging')
1434
1435 return False
1429 1436
1430 1437 def _close_branch_before_merging(self, pull_request):
1438 repo_type = pull_request.target_repo.repo_type
1439 if repo_type == 'hg':
1431 1440 return self._get_general_setting(
1432 1441 pull_request, 'rhodecode_hg_close_branch_before_merging')
1442 elif repo_type == 'git':
1443 return self._get_general_setting(
1444 pull_request, 'rhodecode_git_close_branch_before_merging')
1445
1446 return False
1433 1447
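With this change both merge-strategy helpers branch on the target repository type, so Git pull requests read their own `rhodecode_git_*` general settings instead of reusing the Mercurial ones (the matching keys are added to `GENERAL_SETTINGS` further down in this changeset). A sketch of how the lookup now resolves, assuming a bound `pull_request`:

.. code-block:: python

    model = PullRequestModel()
    # hg target  -> rhodecode_hg_use_rebase_for_merging /
    #               rhodecode_hg_close_branch_before_merging
    # git target -> rhodecode_git_use_rebase_for_merging /
    #               rhodecode_git_close_branch_before_merging
    # other types (e.g. svn) -> False for both
    use_rebase = model._use_rebase_for_merging(pull_request)
    close_branch = model._close_branch_before_merging(pull_request)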
1434 1448 def _get_general_setting(self, pull_request, settings_key, default=False):
1435 1449 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1436 1450 settings = settings_model.get_general_settings()
1437 1451 return settings.get(settings_key, default)
1438 1452
1439 1453 def _log_audit_action(self, action, action_data, user, pull_request):
1440 1454 audit_logger.store(
1441 1455 action=action,
1442 1456 action_data=action_data,
1443 1457 user=user,
1444 1458 repo=pull_request.target_repo)
1445 1459
1446 1460 def get_reviewer_functions(self):
1447 1461 """
1448 1462 Fetches the functions used for validating and fetching default reviewers.
1449 1463 If available we use the EE package, otherwise we fall back to the CE
1450 1464 package functions
1451 1465 """
1452 1466 try:
1453 1467 from rc_reviewers.utils import get_default_reviewers_data
1454 1468 from rc_reviewers.utils import validate_default_reviewers
1455 1469 except ImportError:
1456 1470 from rhodecode.apps.repository.utils import \
1457 1471 get_default_reviewers_data
1458 1472 from rhodecode.apps.repository.utils import \
1459 1473 validate_default_reviewers
1460 1474
1461 1475 return get_default_reviewers_data, validate_default_reviewers
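Callers simply unpack the two callables and use them with the same interface regardless of whether they came from the EE `rc_reviewers` package or the CE fallback; the exact signatures live in those modules and are not repeated here. A minimal sketch:

.. code-block:: python

    (get_default_reviewers_data,
     validate_default_reviewers) = PullRequestModel().get_reviewer_functions()
    # both callables expose the same interface in EE and CE, so the caller
    # does not need to know which package provided them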
1462 1476
1463 1477
1464 1478 class MergeCheck(object):
1465 1479 """
1466 1480 Perform merge checks and return a check object which stores information
1467 1481 about merge errors and merge conditions
1468 1482 """
1469 1483 TODO_CHECK = 'todo'
1470 1484 PERM_CHECK = 'perm'
1471 1485 REVIEW_CHECK = 'review'
1472 1486 MERGE_CHECK = 'merge'
1473 1487
1474 1488 def __init__(self):
1475 1489 self.review_status = None
1476 1490 self.merge_possible = None
1477 1491 self.merge_msg = ''
1478 1492 self.failed = None
1479 1493 self.errors = []
1480 1494 self.error_details = OrderedDict()
1481 1495
1482 1496 def push_error(self, error_type, message, error_key, details):
1483 1497 self.failed = True
1484 1498 self.errors.append([error_type, message])
1485 1499 self.error_details[error_key] = dict(
1486 1500 details=details,
1487 1501 error_type=error_type,
1488 1502 message=message
1489 1503 )
1490 1504
1491 1505 @classmethod
1492 1506 def validate(cls, pull_request, user, fail_early=False, translator=None):
1493 1507 # if migrated to pyramid...
1494 1508 # _ = lambda: translator or _ # use passed in translator if any
1495 1509
1496 1510 merge_check = cls()
1497 1511
1498 1512 # permissions to merge
1499 1513 user_allowed_to_merge = PullRequestModel().check_user_merge(
1500 1514 pull_request, user)
1501 1515 if not user_allowed_to_merge:
1502 1516 log.debug("MergeCheck: cannot merge, approval is pending.")
1503 1517
1504 1518 msg = _('User `{}` not allowed to perform merge.').format(user.username)
1505 1519 merge_check.push_error('error', msg, cls.PERM_CHECK, user.username)
1506 1520 if fail_early:
1507 1521 return merge_check
1508 1522
1509 1523 # review status, must always be present
1510 1524 review_status = pull_request.calculated_review_status()
1511 1525 merge_check.review_status = review_status
1512 1526
1513 1527 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1514 1528 if not status_approved:
1515 1529 log.debug("MergeCheck: cannot merge, approval is pending.")
1516 1530
1517 1531 msg = _('Pull request reviewer approval is pending.')
1518 1532
1519 1533 merge_check.push_error(
1520 1534 'warning', msg, cls.REVIEW_CHECK, review_status)
1521 1535
1522 1536 if fail_early:
1523 1537 return merge_check
1524 1538
1525 1539 # left over TODOs
1526 1540 todos = CommentsModel().get_unresolved_todos(pull_request)
1527 1541 if todos:
1528 1542 log.debug("MergeCheck: cannot merge, {} "
1529 1543 "unresolved todos left.".format(len(todos)))
1530 1544
1531 1545 if len(todos) == 1:
1532 1546 msg = _('Cannot merge, {} TODO still not resolved.').format(
1533 1547 len(todos))
1534 1548 else:
1535 1549 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1536 1550 len(todos))
1537 1551
1538 1552 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1539 1553
1540 1554 if fail_early:
1541 1555 return merge_check
1542 1556
1543 1557 # merge possible
1544 1558 merge_status, msg = PullRequestModel().merge_status(pull_request)
1545 1559 merge_check.merge_possible = merge_status
1546 1560 merge_check.merge_msg = msg
1547 1561 if not merge_status:
1548 1562 log.debug(
1549 1563 "MergeCheck: cannot merge, pull request merge not possible.")
1550 1564 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1551 1565
1552 1566 if fail_early:
1553 1567 return merge_check
1554 1568
1555 1569 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1556 1570 return merge_check
1557 1571
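Callers typically run the checks with `fail_early=True` when they only need a go/no-go answer, and walk `errors` / `error_details` otherwise. A hedged sketch; `apiuser` and `merge_extras` are placeholders for values the caller already has:

.. code-block:: python

    merge_check = MergeCheck.validate(pull_request, user=apiuser, fail_early=True)
    if merge_check.failed:
        for error_type, message in merge_check.errors:
            log.warning('merge blocked (%s): %s', error_type, message)
    else:
        PullRequestModel().merge(pull_request, apiuser, extras=merge_extras)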
1558 1572 @classmethod
1559 1573 def get_merge_conditions(cls, pull_request):
1560 1574 merge_details = {}
1561 1575
1562 1576 model = PullRequestModel()
1563 1577 use_rebase = model._use_rebase_for_merging(pull_request)
1564 1578
1565 1579 if use_rebase:
1566 1580 merge_details['merge_strategy'] = dict(
1567 1581 details={},
1568 1582 message=_('Merge strategy: rebase')
1569 1583 )
1570 1584 else:
1571 1585 merge_details['merge_strategy'] = dict(
1572 1586 details={},
1573 1587 message=_('Merge strategy: explicit merge commit')
1574 1588 )
1575 1589
1576 1590 close_branch = model._close_branch_before_merging(pull_request)
1577 1591 if close_branch:
1578 1592 repo_type = pull_request.target_repo.repo_type
1579 1593 if repo_type == 'hg':
1580 1594 close_msg = _('Source branch will be closed after merge.')
1581 1595 elif repo_type == 'git':
1582 1596 close_msg = _('Source branch will be deleted after merge.')
1583 1597
1584 1598 merge_details['close_branch'] = dict(
1585 1599 details={},
1586 1600 message=close_msg
1587 1601 )
1588 1602
1589 1603 return merge_details
1590 1604
1591 1605 ChangeTuple = namedtuple('ChangeTuple',
1592 1606 ['added', 'common', 'removed', 'total'])
1593 1607
1594 1608 FileChangeTuple = namedtuple('FileChangeTuple',
1595 1609 ['added', 'modified', 'removed'])
@@ -1,812 +1,814 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import hashlib
23 23 import logging
24 24 from collections import namedtuple
25 25 from functools import wraps
26 26
27 27 from rhodecode.lib import caches
28 28 from rhodecode.lib.utils2 import (
29 29 Optional, AttributeDict, safe_str, remove_prefix, str2bool)
30 30 from rhodecode.lib.vcs.backends import base
31 31 from rhodecode.model import BaseModel
32 32 from rhodecode.model.db import (
33 33 RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, RhodeCodeSetting)
34 34 from rhodecode.model.meta import Session
35 35
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
40 40 UiSetting = namedtuple(
41 41 'UiSetting', ['section', 'key', 'value', 'active'])
42 42
43 43 SOCIAL_PLUGINS_LIST = ['github', 'bitbucket', 'twitter', 'google']
44 44
45 45
46 46 class SettingNotFound(Exception):
47 47 def __init__(self):
48 48 super(SettingNotFound, self).__init__('Setting is not found')
49 49
50 50
51 51 class SettingsModel(BaseModel):
52 52 BUILTIN_HOOKS = (
53 53 RhodeCodeUi.HOOK_REPO_SIZE, RhodeCodeUi.HOOK_PUSH,
54 54 RhodeCodeUi.HOOK_PRE_PUSH, RhodeCodeUi.HOOK_PRETX_PUSH,
55 55 RhodeCodeUi.HOOK_PULL, RhodeCodeUi.HOOK_PRE_PULL,
56 56 RhodeCodeUi.HOOK_PUSH_KEY,)
57 57 HOOKS_SECTION = 'hooks'
58 58
59 59 def __init__(self, sa=None, repo=None):
60 60 self.repo = repo
61 61 self.UiDbModel = RepoRhodeCodeUi if repo else RhodeCodeUi
62 62 self.SettingsDbModel = (
63 63 RepoRhodeCodeSetting if repo else RhodeCodeSetting)
64 64 super(SettingsModel, self).__init__(sa)
65 65
66 66 def get_ui_by_key(self, key):
67 67 q = self.UiDbModel.query()
68 68 q = q.filter(self.UiDbModel.ui_key == key)
69 69 q = self._filter_by_repo(RepoRhodeCodeUi, q)
70 70 return q.scalar()
71 71
72 72 def get_ui_by_section(self, section):
73 73 q = self.UiDbModel.query()
74 74 q = q.filter(self.UiDbModel.ui_section == section)
75 75 q = self._filter_by_repo(RepoRhodeCodeUi, q)
76 76 return q.all()
77 77
78 78 def get_ui_by_section_and_key(self, section, key):
79 79 q = self.UiDbModel.query()
80 80 q = q.filter(self.UiDbModel.ui_section == section)
81 81 q = q.filter(self.UiDbModel.ui_key == key)
82 82 q = self._filter_by_repo(RepoRhodeCodeUi, q)
83 83 return q.scalar()
84 84
85 85 def get_ui(self, section=None, key=None):
86 86 q = self.UiDbModel.query()
87 87 q = self._filter_by_repo(RepoRhodeCodeUi, q)
88 88
89 89 if section:
90 90 q = q.filter(self.UiDbModel.ui_section == section)
91 91 if key:
92 92 q = q.filter(self.UiDbModel.ui_key == key)
93 93
94 94 # TODO: mikhail: add caching
95 95 result = [
96 96 UiSetting(
97 97 section=safe_str(r.ui_section), key=safe_str(r.ui_key),
98 98 value=safe_str(r.ui_value), active=r.ui_active
99 99 )
100 100 for r in q.all()
101 101 ]
102 102 return result
103 103
104 104 def get_builtin_hooks(self):
105 105 q = self.UiDbModel.query()
106 106 q = q.filter(self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
107 107 return self._get_hooks(q)
108 108
109 109 def get_custom_hooks(self):
110 110 q = self.UiDbModel.query()
111 111 q = q.filter(~self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
112 112 return self._get_hooks(q)
113 113
114 114 def create_ui_section_value(self, section, val, key=None, active=True):
115 115 new_ui = self.UiDbModel()
116 116 new_ui.ui_section = section
117 117 new_ui.ui_value = val
118 118 new_ui.ui_active = active
119 119
120 120 if self.repo:
121 121 repo = self._get_repo(self.repo)
122 122 repository_id = repo.repo_id
123 123 new_ui.repository_id = repository_id
124 124
125 125 if not key:
126 126 # keys must be unique, so append extra identifying info to them
127 127 if self.repo:
128 128 key = hashlib.sha1(
129 129 '{}{}{}'.format(section, val, repository_id)).hexdigest()
130 130 else:
131 131 key = hashlib.sha1('{}{}'.format(section, val)).hexdigest()
132 132
133 133 new_ui.ui_key = key
134 134
135 135 Session().add(new_ui)
136 136 return new_ui
137 137
138 138 def create_or_update_hook(self, key, value):
139 139 ui = (
140 140 self.get_ui_by_section_and_key(self.HOOKS_SECTION, key) or
141 141 self.UiDbModel())
142 142 ui.ui_section = self.HOOKS_SECTION
143 143 ui.ui_active = True
144 144 ui.ui_key = key
145 145 ui.ui_value = value
146 146
147 147 if self.repo:
148 148 repo = self._get_repo(self.repo)
149 149 repository_id = repo.repo_id
150 150 ui.repository_id = repository_id
151 151
152 152 Session().add(ui)
153 153 return ui
154 154
155 155 def delete_ui(self, id_):
156 156 ui = self.UiDbModel.get(id_)
157 157 if not ui:
158 158 raise SettingNotFound()
159 159 Session().delete(ui)
160 160
161 161 def get_setting_by_name(self, name):
162 162 q = self._get_settings_query()
163 163 q = q.filter(self.SettingsDbModel.app_settings_name == name)
164 164 return q.scalar()
165 165
166 166 def create_or_update_setting(
167 167 self, name, val=Optional(''), type_=Optional('unicode')):
168 168 """
169 169 Creates or updates a RhodeCode setting. If an update is triggered, only
170 170 parameters that are explicitly set are updated; any Optional instance
171 171 will be skipped
172 172
173 173 :param name:
174 174 :param val:
175 175 :param type_:
176 176 :return:
177 177 """
178 178
179 179 res = self.get_setting_by_name(name)
180 180 repo = self._get_repo(self.repo) if self.repo else None
181 181
182 182 if not res:
183 183 val = Optional.extract(val)
184 184 type_ = Optional.extract(type_)
185 185
186 186 args = (
187 187 (repo.repo_id, name, val, type_)
188 188 if repo else (name, val, type_))
189 189 res = self.SettingsDbModel(*args)
190 190
191 191 else:
192 192 if self.repo:
193 193 res.repository_id = repo.repo_id
194 194
195 195 res.app_settings_name = name
196 196 if not isinstance(type_, Optional):
197 197 # update if set
198 198 res.app_settings_type = type_
199 199 if not isinstance(val, Optional):
200 200 # update if set
201 201 res.app_settings_value = val
202 202
203 203 Session().add(res)
204 204 return res
205 205
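The `Optional` defaults are what make partial updates possible: any argument still wrapped in `Optional` is left untouched on update. A usage sketch with a made-up setting name, assuming an active database session:

.. code-block:: python

    model = SettingsModel()
    # create the setting with an explicit value and type
    model.create_or_update_setting('example_feature_enabled', 'True', 'bool')
    # later: update only the value; type_ stays Optional and is skipped
    model.create_or_update_setting('example_feature_enabled', 'False')
    Session().commit()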
206 206 def invalidate_settings_cache(self):
207 207 namespace = 'rhodecode_settings'
208 208 cache_manager = caches.get_cache_manager('sql_cache_short', namespace)
209 209 caches.clear_cache_manager(cache_manager)
210 210
211 211 def get_all_settings(self, cache=False):
212 212
213 213 def _compute():
214 214 q = self._get_settings_query()
215 215 if not q:
216 216 raise Exception('Could not get application settings !')
217 217
218 218 settings = {
219 219 'rhodecode_' + result.app_settings_name: result.app_settings_value
220 220 for result in q
221 221 }
222 222 return settings
223 223
224 224 if cache:
225 225 log.debug('Fetching app settings using cache')
226 226 repo = self._get_repo(self.repo) if self.repo else None
227 227 namespace = 'rhodecode_settings'
228 228 cache_manager = caches.get_cache_manager(
229 229 'sql_cache_short', namespace)
230 230 _cache_key = (
231 231 "get_repo_{}_settings".format(repo.repo_id)
232 232 if repo else "get_app_settings")
233 233
234 234 return cache_manager.get(_cache_key, createfunc=_compute)
235 235
236 236 else:
237 237 return _compute()
238 238
239 239 def get_auth_settings(self):
240 240 q = self._get_settings_query()
241 241 q = q.filter(
242 242 self.SettingsDbModel.app_settings_name.startswith('auth_'))
243 243 rows = q.all()
244 244 auth_settings = {
245 245 row.app_settings_name: row.app_settings_value for row in rows}
246 246 return auth_settings
247 247
248 248 def get_auth_plugins(self):
249 249 auth_plugins = self.get_setting_by_name("auth_plugins")
250 250 return auth_plugins.app_settings_value
251 251
252 252 def get_default_repo_settings(self, strip_prefix=False):
253 253 q = self._get_settings_query()
254 254 q = q.filter(
255 255 self.SettingsDbModel.app_settings_name.startswith('default_'))
256 256 rows = q.all()
257 257
258 258 result = {}
259 259 for row in rows:
260 260 key = row.app_settings_name
261 261 if strip_prefix:
262 262 key = remove_prefix(key, prefix='default_')
263 263 result.update({key: row.app_settings_value})
264 264 return result
265 265
266 266 def get_repo(self):
267 267 repo = self._get_repo(self.repo)
268 268 if not repo:
269 269 raise Exception(
270 270 'Repository `{}` cannot be found inside the database'.format(
271 271 self.repo))
272 272 return repo
273 273
274 274 def _filter_by_repo(self, model, query):
275 275 if self.repo:
276 276 repo = self.get_repo()
277 277 query = query.filter(model.repository_id == repo.repo_id)
278 278 return query
279 279
280 280 def _get_hooks(self, query):
281 281 query = query.filter(self.UiDbModel.ui_section == self.HOOKS_SECTION)
282 282 query = self._filter_by_repo(RepoRhodeCodeUi, query)
283 283 return query.all()
284 284
285 285 def _get_settings_query(self):
286 286 q = self.SettingsDbModel.query()
287 287 return self._filter_by_repo(RepoRhodeCodeSetting, q)
288 288
289 289 def list_enabled_social_plugins(self, settings):
290 290 enabled = []
291 291 for plug in SOCIAL_PLUGINS_LIST:
292 292 if str2bool(settings.get('rhodecode_auth_{}_enabled'.format(plug)
293 293 )):
294 294 enabled.append(plug)
295 295 return enabled
296 296
297 297
298 298 def assert_repo_settings(func):
299 299 @wraps(func)
300 300 def _wrapper(self, *args, **kwargs):
301 301 if not self.repo_settings:
302 302 raise Exception('Repository is not specified')
303 303 return func(self, *args, **kwargs)
304 304 return _wrapper
305 305
306 306
307 307 class IssueTrackerSettingsModel(object):
308 308 INHERIT_SETTINGS = 'inherit_issue_tracker_settings'
309 309 SETTINGS_PREFIX = 'issuetracker_'
310 310
311 311 def __init__(self, sa=None, repo=None):
312 312 self.global_settings = SettingsModel(sa=sa)
313 313 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
314 314
315 315 @property
316 316 def inherit_global_settings(self):
317 317 if not self.repo_settings:
318 318 return True
319 319 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
320 320 return setting.app_settings_value if setting else True
321 321
322 322 @inherit_global_settings.setter
323 323 def inherit_global_settings(self, value):
324 324 if self.repo_settings:
325 325 settings = self.repo_settings.create_or_update_setting(
326 326 self.INHERIT_SETTINGS, value, type_='bool')
327 327 Session().add(settings)
328 328
329 329 def _get_keyname(self, key, uid, prefix=''):
330 330 return '{0}{1}{2}_{3}'.format(
331 331 prefix, self.SETTINGS_PREFIX, key, uid)
332 332
333 333 def _make_dict_for_settings(self, qs):
334 334 prefix_match = self._get_keyname('pat', '', 'rhodecode_')
335 335
336 336 issuetracker_entries = {}
337 337 # create keys
338 338 for k, v in qs.items():
339 339 if k.startswith(prefix_match):
340 340 uid = k[len(prefix_match):]
341 341 issuetracker_entries[uid] = None
342 342
343 343 # populate
344 344 for uid in issuetracker_entries:
345 345 issuetracker_entries[uid] = AttributeDict({
346 346 'pat': qs.get(self._get_keyname('pat', uid, 'rhodecode_')),
347 347 'url': qs.get(self._get_keyname('url', uid, 'rhodecode_')),
348 348 'pref': qs.get(self._get_keyname('pref', uid, 'rhodecode_')),
349 349 'desc': qs.get(self._get_keyname('desc', uid, 'rhodecode_')),
350 350 })
351 351 return issuetracker_entries
352 352
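Issue tracker entries are reassembled from flat settings named `rhodecode_issuetracker_<field>_<uid>` (the product of `_get_keyname` with the `rhodecode_` prefix). Roughly, given these hypothetical rows and values:

.. code-block:: python

    qs = {
        'rhodecode_issuetracker_pat_abc123': r'#(\d+)',
        'rhodecode_issuetracker_url_abc123': 'https://bugs.example.com/',
        'rhodecode_issuetracker_pref_abc123': 'EX-',
        'rhodecode_issuetracker_desc_abc123': 'Example tracker',
    }
    entries = IssueTrackerSettingsModel()._make_dict_for_settings(qs)
    # entries == {'abc123': AttributeDict(pat=..., url=..., pref=..., desc=...)}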
353 353 def get_global_settings(self, cache=False):
354 354 """
355 355 Returns list of global issue tracker settings
356 356 """
357 357 defaults = self.global_settings.get_all_settings(cache=cache)
358 358 settings = self._make_dict_for_settings(defaults)
359 359 return settings
360 360
361 361 def get_repo_settings(self, cache=False):
362 362 """
363 363 Returns list of issue tracker settings per repository
364 364 """
365 365 if not self.repo_settings:
366 366 raise Exception('Repository is not specified')
367 367 all_settings = self.repo_settings.get_all_settings(cache=cache)
368 368 settings = self._make_dict_for_settings(all_settings)
369 369 return settings
370 370
371 371 def get_settings(self, cache=False):
372 372 if self.inherit_global_settings:
373 373 return self.get_global_settings(cache=cache)
374 374 else:
375 375 return self.get_repo_settings(cache=cache)
376 376
377 377 def delete_entries(self, uid):
378 378 if self.repo_settings:
379 379 all_patterns = self.get_repo_settings()
380 380 settings_model = self.repo_settings
381 381 else:
382 382 all_patterns = self.get_global_settings()
383 383 settings_model = self.global_settings
384 384 entries = all_patterns.get(uid)
385 385
386 386 for del_key in entries:
387 387 setting_name = self._get_keyname(del_key, uid)
388 388 entry = settings_model.get_setting_by_name(setting_name)
389 389 if entry:
390 390 Session().delete(entry)
391 391
392 392 Session().commit()
393 393
394 394 def create_or_update_setting(
395 395 self, name, val=Optional(''), type_=Optional('unicode')):
396 396 if self.repo_settings:
397 397 setting = self.repo_settings.create_or_update_setting(
398 398 name, val, type_)
399 399 else:
400 400 setting = self.global_settings.create_or_update_setting(
401 401 name, val, type_)
402 402 return setting
403 403
404 404
405 405 class VcsSettingsModel(object):
406 406
407 407 INHERIT_SETTINGS = 'inherit_vcs_settings'
408 408 GENERAL_SETTINGS = (
409 409 'use_outdated_comments',
410 410 'pr_merge_enabled',
411 411 'hg_use_rebase_for_merging',
412 'hg_close_branch_before_merging')
412 'hg_close_branch_before_merging',
413 'git_use_rebase_for_merging',
414 'git_close_branch_before_merging')
413 415
414 416 HOOKS_SETTINGS = (
415 417 ('hooks', 'changegroup.repo_size'),
416 418 ('hooks', 'changegroup.push_logger'),
417 419 ('hooks', 'outgoing.pull_logger'),)
418 420 HG_SETTINGS = (
419 421 ('extensions', 'largefiles'),
420 422 ('phases', 'publish'),
421 423 ('extensions', 'evolve'),)
422 424 GIT_SETTINGS = (
423 425 ('vcs_git_lfs', 'enabled'),)
424 426 GLOBAL_HG_SETTINGS = (
425 427 ('extensions', 'largefiles'),
426 428 ('largefiles', 'usercache'),
427 429 ('phases', 'publish'),
428 430 ('extensions', 'hgsubversion'),
429 431 ('extensions', 'evolve'),)
430 432 GLOBAL_GIT_SETTINGS = (
431 433 ('vcs_git_lfs', 'enabled'),
432 434 ('vcs_git_lfs', 'store_location'))
433 435 GLOBAL_SVN_SETTINGS = (
434 436 ('vcs_svn_proxy', 'http_requests_enabled'),
435 437 ('vcs_svn_proxy', 'http_server_url'))
436 438
437 439 SVN_BRANCH_SECTION = 'vcs_svn_branch'
438 440 SVN_TAG_SECTION = 'vcs_svn_tag'
439 441 SSL_SETTING = ('web', 'push_ssl')
440 442 PATH_SETTING = ('paths', '/')
441 443
442 444 def __init__(self, sa=None, repo=None):
443 445 self.global_settings = SettingsModel(sa=sa)
444 446 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
445 447 self._ui_settings = (
446 448 self.HG_SETTINGS + self.GIT_SETTINGS + self.HOOKS_SETTINGS)
447 449 self._svn_sections = (self.SVN_BRANCH_SECTION, self.SVN_TAG_SECTION)
448 450
449 451 @property
450 452 @assert_repo_settings
451 453 def inherit_global_settings(self):
452 454 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
453 455 return setting.app_settings_value if setting else True
454 456
455 457 @inherit_global_settings.setter
456 458 @assert_repo_settings
457 459 def inherit_global_settings(self, value):
458 460 self.repo_settings.create_or_update_setting(
459 461 self.INHERIT_SETTINGS, value, type_='bool')
460 462
461 463 def get_global_svn_branch_patterns(self):
462 464 return self.global_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
463 465
464 466 @assert_repo_settings
465 467 def get_repo_svn_branch_patterns(self):
466 468 return self.repo_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
467 469
468 470 def get_global_svn_tag_patterns(self):
469 471 return self.global_settings.get_ui_by_section(self.SVN_TAG_SECTION)
470 472
471 473 @assert_repo_settings
472 474 def get_repo_svn_tag_patterns(self):
473 475 return self.repo_settings.get_ui_by_section(self.SVN_TAG_SECTION)
474 476
475 477 def get_global_settings(self):
476 478 return self._collect_all_settings(global_=True)
477 479
478 480 @assert_repo_settings
479 481 def get_repo_settings(self):
480 482 return self._collect_all_settings(global_=False)
481 483
482 484 @assert_repo_settings
483 485 def create_or_update_repo_settings(
484 486 self, data, inherit_global_settings=False):
485 487 from rhodecode.model.scm import ScmModel
486 488
487 489 self.inherit_global_settings = inherit_global_settings
488 490
489 491 repo = self.repo_settings.get_repo()
490 492 if not inherit_global_settings:
491 493 if repo.repo_type == 'svn':
492 494 self.create_repo_svn_settings(data)
493 495 else:
494 496 self.create_or_update_repo_hook_settings(data)
495 497 self.create_or_update_repo_pr_settings(data)
496 498
497 499 if repo.repo_type == 'hg':
498 500 self.create_or_update_repo_hg_settings(data)
499 501
500 502 if repo.repo_type == 'git':
501 503 self.create_or_update_repo_git_settings(data)
502 504
503 505 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
504 506
505 507 @assert_repo_settings
506 508 def create_or_update_repo_hook_settings(self, data):
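                # for each built-in hook, copy the global value into a new repo-level entry
                # on first save; on later saves only the per-repo active flag is toggled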
507 509 for section, key in self.HOOKS_SETTINGS:
508 510 data_key = self._get_form_ui_key(section, key)
509 511 if data_key not in data:
510 512 raise ValueError(
511 513 'The given data does not contain {} key'.format(data_key))
512 514
513 515 active = data.get(data_key)
514 516 repo_setting = self.repo_settings.get_ui_by_section_and_key(
515 517 section, key)
516 518 if not repo_setting:
517 519 global_setting = self.global_settings.\
518 520 get_ui_by_section_and_key(section, key)
519 521 self.repo_settings.create_ui_section_value(
520 522 section, global_setting.ui_value, key=key, active=active)
521 523 else:
522 524 repo_setting.ui_active = active
523 525 Session().add(repo_setting)
524 526
525 527 def update_global_hook_settings(self, data):
526 528 for section, key in self.HOOKS_SETTINGS:
527 529 data_key = self._get_form_ui_key(section, key)
528 530 if data_key not in data:
529 531 raise ValueError(
530 532 'The given data does not contain {} key'.format(data_key))
531 533 active = data.get(data_key)
532 534 repo_setting = self.global_settings.get_ui_by_section_and_key(
533 535 section, key)
534 536 repo_setting.ui_active = active
535 537 Session().add(repo_setting)
536 538
537 539 @assert_repo_settings
538 540 def create_or_update_repo_pr_settings(self, data):
539 541 return self._create_or_update_general_settings(
540 542 self.repo_settings, data)
541 543
542 544 def create_or_update_global_pr_settings(self, data):
543 545 return self._create_or_update_general_settings(
544 546 self.global_settings, data)
545 547
546 548 @assert_repo_settings
547 549 def create_repo_svn_settings(self, data):
548 550 return self._create_svn_settings(self.repo_settings, data)
549 551
550 552 @assert_repo_settings
551 553 def create_or_update_repo_hg_settings(self, data):
552 554 largefiles, phases, evolve = \
553 555 self.HG_SETTINGS
554 556 largefiles_key, phases_key, evolve_key = \
555 557 self._get_settings_keys(self.HG_SETTINGS, data)
556 558
557 559 self._create_or_update_ui(
558 560 self.repo_settings, *largefiles, value='',
559 561 active=data[largefiles_key])
560 562 self._create_or_update_ui(
561 563 self.repo_settings, *evolve, value='',
562 564 active=data[evolve_key])
563 565 self._create_or_update_ui(
564 566 self.repo_settings, *phases, value=safe_str(data[phases_key]))
565 567
566 568 def create_or_update_global_hg_settings(self, data):
567 569 largefiles, largefiles_store, phases, hgsubversion, evolve \
568 570 = self.GLOBAL_HG_SETTINGS
569 571 largefiles_key, largefiles_store_key, phases_key, subversion_key, evolve_key \
570 572 = self._get_settings_keys(self.GLOBAL_HG_SETTINGS, data)
571 573
572 574 self._create_or_update_ui(
573 575 self.global_settings, *largefiles, value='',
574 576 active=data[largefiles_key])
575 577 self._create_or_update_ui(
576 578 self.global_settings, *largefiles_store,
577 579 value=data[largefiles_store_key])
578 580 self._create_or_update_ui(
579 581 self.global_settings, *phases, value=safe_str(data[phases_key]))
580 582 self._create_or_update_ui(
581 583 self.global_settings, *hgsubversion, active=data[subversion_key])
582 584 self._create_or_update_ui(
583 585 self.global_settings, *evolve, value='',
584 586 active=data[evolve_key])
585 587
586 588 def create_or_update_repo_git_settings(self, data):
587 589         # NOTE(marcink): the trailing comma makes single-element tuple unpacking work
588 590 lfs_enabled, \
589 591 = self.GIT_SETTINGS
590 592
591 593 lfs_enabled_key, \
592 594 = self._get_settings_keys(self.GIT_SETTINGS, data)
593 595
594 596 self._create_or_update_ui(
595 597 self.repo_settings, *lfs_enabled, value=data[lfs_enabled_key],
596 598 active=data[lfs_enabled_key])
597 599
598 600 def create_or_update_global_git_settings(self, data):
599 601 lfs_enabled, lfs_store_location \
600 602 = self.GLOBAL_GIT_SETTINGS
601 603 lfs_enabled_key, lfs_store_location_key \
602 604 = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data)
603 605
604 606 self._create_or_update_ui(
605 607 self.global_settings, *lfs_enabled, value=data[lfs_enabled_key],
606 608 active=data[lfs_enabled_key])
607 609 self._create_or_update_ui(
608 610 self.global_settings, *lfs_store_location,
609 611 value=data[lfs_store_location_key])
610 612
611 613 def create_or_update_global_svn_settings(self, data):
612 614 # branch/tags patterns
613 615 self._create_svn_settings(self.global_settings, data)
614 616
615 617 http_requests_enabled, http_server_url = self.GLOBAL_SVN_SETTINGS
616 618 http_requests_enabled_key, http_server_url_key = self._get_settings_keys(
617 619 self.GLOBAL_SVN_SETTINGS, data)
618 620
619 621 self._create_or_update_ui(
620 622 self.global_settings, *http_requests_enabled,
621 623 value=safe_str(data[http_requests_enabled_key]))
622 624 self._create_or_update_ui(
623 625 self.global_settings, *http_server_url,
624 626 value=data[http_server_url_key])
625 627
626 628 def update_global_ssl_setting(self, value):
627 629 self._create_or_update_ui(
628 630 self.global_settings, *self.SSL_SETTING, value=value)
629 631
630 632 def update_global_path_setting(self, value):
631 633 self._create_or_update_ui(
632 634 self.global_settings, *self.PATH_SETTING, value=value)
633 635
634 636 @assert_repo_settings
635 637 def delete_repo_svn_pattern(self, id_):
636 638 self.repo_settings.delete_ui(id_)
637 639
638 640 def delete_global_svn_pattern(self, id_):
639 641 self.global_settings.delete_ui(id_)
640 642
641 643 @assert_repo_settings
642 644 def get_repo_ui_settings(self, section=None, key=None):
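                # merge ui settings: a repo-level entry shadows the global one with the same
                # (section, key), and svn branch/tag pattern sections are taken from the repo only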
643 645 global_uis = self.global_settings.get_ui(section, key)
644 646 repo_uis = self.repo_settings.get_ui(section, key)
645 647 filtered_repo_uis = self._filter_ui_settings(repo_uis)
646 648 filtered_repo_uis_keys = [
647 649 (s.section, s.key) for s in filtered_repo_uis]
648 650
649 651 def _is_global_ui_filtered(ui):
650 652 return (
651 653 (ui.section, ui.key) in filtered_repo_uis_keys
652 654 or ui.section in self._svn_sections)
653 655
654 656 filtered_global_uis = [
655 657 ui for ui in global_uis if not _is_global_ui_filtered(ui)]
656 658
657 659 return filtered_global_uis + filtered_repo_uis
658 660
659 661 def get_global_ui_settings(self, section=None, key=None):
660 662 return self.global_settings.get_ui(section, key)
661 663
662 664 def get_ui_settings_as_config_obj(self, section=None, key=None):
663 665 config = base.Config()
664 666
665 667 ui_settings = self.get_ui_settings(section=section, key=key)
666 668
667 669 for entry in ui_settings:
668 670 config.set(entry.section, entry.key, entry.value)
669 671
670 672 return config
671 673
672 674 def get_ui_settings(self, section=None, key=None):
673 675 if not self.repo_settings or self.inherit_global_settings:
674 676 return self.get_global_ui_settings(section, key)
675 677 else:
676 678 return self.get_repo_ui_settings(section, key)
677 679
678 680 def get_svn_patterns(self, section=None):
679 681 if not self.repo_settings:
680 682 return self.get_global_ui_settings(section)
681 683 else:
682 684 return self.get_repo_ui_settings(section)
683 685
684 686 @assert_repo_settings
685 687 def get_repo_general_settings(self):
686 688 global_settings = self.global_settings.get_all_settings()
687 689 repo_settings = self.repo_settings.get_all_settings()
688 690 filtered_repo_settings = self._filter_general_settings(repo_settings)
689 691 global_settings.update(filtered_repo_settings)
690 692 return global_settings
691 693
692 694 def get_global_general_settings(self):
693 695 return self.global_settings.get_all_settings()
694 696
695 697 def get_general_settings(self):
696 698 if not self.repo_settings or self.inherit_global_settings:
697 699 return self.get_global_general_settings()
698 700 else:
699 701 return self.get_repo_general_settings()
700 702
701 703 def get_repos_location(self):
702 704 return self.global_settings.get_ui_by_key('/').ui_value
703 705
704 706 def _filter_ui_settings(self, settings):
705 707 filtered_settings = [
706 708 s for s in settings if self._should_keep_setting(s)]
707 709 return filtered_settings
708 710
709 711 def _should_keep_setting(self, setting):
710 712 keep = (
711 713 (setting.section, setting.key) in self._ui_settings or
712 714 setting.section in self._svn_sections)
713 715 return keep
714 716
715 717 def _filter_general_settings(self, settings):
716 718 keys = ['rhodecode_{}'.format(key) for key in self.GENERAL_SETTINGS]
717 719 return {
718 720 k: settings[k]
719 721 for k in settings if k in keys}
720 722
721 723 def _collect_all_settings(self, global_=False):
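                # build the flat form-data dict: hooks/extensions entries and the git lfs toggle
                # expose their active flag, other ui entries expose their raw value, and general
                # settings are added under 'rhodecode_<name>' keys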
722 724 settings = self.global_settings if global_ else self.repo_settings
723 725 result = {}
724 726
725 727 for section, key in self._ui_settings:
726 728 ui = settings.get_ui_by_section_and_key(section, key)
727 729 result_key = self._get_form_ui_key(section, key)
728 730
729 731 if ui:
730 732 if section in ('hooks', 'extensions'):
731 733 result[result_key] = ui.ui_active
732 734 elif result_key in ['vcs_git_lfs_enabled']:
733 735 result[result_key] = ui.ui_active
734 736 else:
735 737 result[result_key] = ui.ui_value
736 738
737 739 for name in self.GENERAL_SETTINGS:
738 740 setting = settings.get_setting_by_name(name)
739 741 if setting:
740 742 result_key = 'rhodecode_{}'.format(name)
741 743 result[result_key] = setting.app_settings_value
742 744
743 745 return result
744 746
745 747 def _get_form_ui_key(self, section, key):
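                # e.g. ('hooks', 'changegroup.push_logger') -> 'hooks_changegroup_push_logger'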
746 748 return '{section}_{key}'.format(
747 749 section=section, key=key.replace('.', '_'))
748 750
749 751 def _create_or_update_ui(
750 752 self, settings, section, key, value=None, active=None):
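                # create the ui entry (active by default) when it is missing; otherwise only
                # update the fields that were explicitly passed in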
751 753 ui = settings.get_ui_by_section_and_key(section, key)
752 754 if not ui:
753 755 active = True if active is None else active
754 756 settings.create_ui_section_value(
755 757 section, value, key=key, active=active)
756 758 else:
757 759 if active is not None:
758 760 ui.ui_active = active
759 761 if value is not None:
760 762 ui.ui_value = value
761 763 Session().add(ui)
762 764
763 765 def _create_svn_settings(self, settings, data):
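                # store any submitted 'new_svn_branch' / 'new_svn_tag' pattern under its svn ui section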
764 766 svn_settings = {
765 767 'new_svn_branch': self.SVN_BRANCH_SECTION,
766 768 'new_svn_tag': self.SVN_TAG_SECTION
767 769 }
768 770 for key in svn_settings:
769 771 if data.get(key):
770 772 settings.create_ui_section_value(svn_settings[key], data[key])
771 773
772 774 def _create_or_update_general_settings(self, settings, data):
773 775 for name in self.GENERAL_SETTINGS:
774 776 data_key = 'rhodecode_{}'.format(name)
775 777 if data_key not in data:
776 778 raise ValueError(
777 779 'The given data does not contain {} key'.format(data_key))
778 780 setting = settings.create_or_update_setting(
779 781 name, data[data_key], 'bool')
780 782 Session().add(setting)
781 783
782 784 def _get_settings_keys(self, settings, data):
783 785 data_keys = [self._get_form_ui_key(*s) for s in settings]
784 786 for data_key in data_keys:
785 787 if data_key not in data:
786 788 raise ValueError(
787 789 'The given data does not contain {} key'.format(data_key))
788 790 return data_keys
789 791
790 792 def create_largeobjects_dirs_if_needed(self, repo_store_path):
791 793 """
792 794 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
793 795         creates the largefiles and LFS store directories if they do not exist yet.
794 796 """
795 797
796 798 from rhodecode.lib.vcs.backends.hg import largefiles_store
797 799 from rhodecode.lib.vcs.backends.git import lfs_store
798 800
799 801 paths = [
800 802 largefiles_store(repo_store_path),
801 803 lfs_store(repo_store_path)]
802 804
803 805 for path in paths:
804 806 if os.path.isdir(path):
805 807 continue
806 808 if os.path.isfile(path):
807 809 continue
808 810             # neither a file nor a dir, so try to create it
809 811 try:
810 812 os.makedirs(path)
811 813 except Exception:
812 814                 log.warning('Failed to create largefiles/lfs dir: %s', path)
@@ -1,343 +1,370 b''
1 1 ## snippet for displaying vcs settings
2 2 ## usage:
3 3 ## <%namespace name="vcss" file="/base/vcssettings.mako"/>
4 4 ## ${vcss.vcs_settings_fields()}
5 5
6 6 <%def name="vcs_settings_fields(suffix='', svn_branch_patterns=None, svn_tag_patterns=None, repo_type=None, display_globals=False, allow_repo_location_change=False, **kwargs)">
7 7 % if display_globals:
8 8 <div class="panel panel-default">
9 9 <div class="panel-heading" id="general">
10 10             <h3 class="panel-title">${_('General')}<a class="permalink" href="#general"> ¶</a></h3>
11 11 </div>
12 12 <div class="panel-body">
13 13 <div class="field">
14 14 <div class="checkbox">
15 15 ${h.checkbox('web_push_ssl' + suffix, 'True')}
16 16 <label for="web_push_ssl${suffix}">${_('Require SSL for vcs operations')}</label>
17 17 </div>
18 18 <div class="label">
19 19                 <span class="help-block">${_('Activate to set RhodeCode to require SSL for pushing or pulling. If the SSL certificate is missing, it will return an HTTP Error 406: Not Acceptable.')}</span>
20 20 </div>
21 21 </div>
22 22 </div>
23 23 </div>
24 24 % endif
25 25
26 26 % if display_globals:
27 27 <div class="panel panel-default">
28 28 <div class="panel-heading" id="vcs-storage-options">
29 29             <h3 class="panel-title">${_('Main Storage Location')}<a class="permalink" href="#vcs-storage-options"> ¶</a></h3>
30 30 </div>
31 31 <div class="panel-body">
32 32 <div class="field">
33 33 <div class="inputx locked_input">
34 34 %if allow_repo_location_change:
35 35 ${h.text('paths_root_path',size=59,readonly="readonly", class_="disabled")}
36 36 <span id="path_unlock" class="tooltip"
37 37 title="${h.tooltip(_('Click to unlock. You must restart RhodeCode in order to make this setting take effect.'))}">
38 38 <div class="btn btn-default lock_input_button"><i id="path_unlock_icon" class="icon-lock"></i></div>
39 39 </span>
40 40 %else:
41 41                     ${_('Repository location change is disabled. You can enable this by changing the `allow_repo_location_change` setting inside the .ini file.')}
42 42 ## form still requires this but we cannot internally change it anyway
43 43 ${h.hidden('paths_root_path',size=30,readonly="readonly", class_="disabled")}
44 44 %endif
45 45 </div>
46 46 </div>
47 47 <div class="label">
48 48 <span class="help-block">${_('Filesystem location where repositories should be stored. After changing this value a restart and rescan of the repository folder are required.')}</span>
49 49 </div>
50 50 </div>
51 51 </div>
52 52 % endif
53 53
54 54 % if display_globals or repo_type in ['git', 'hg']:
55 55 <div class="panel panel-default">
56 56 <div class="panel-heading" id="vcs-hooks-options">
57 57             <h3 class="panel-title">${_('Internal Hooks')}<a class="permalink" href="#vcs-hooks-options"> ¶</a></h3>
58 58 </div>
59 59 <div class="panel-body">
60 60 <div class="field">
61 61 <div class="checkbox">
62 62 ${h.checkbox('hooks_changegroup_repo_size' + suffix, 'True', **kwargs)}
63 63 <label for="hooks_changegroup_repo_size${suffix}">${_('Show repository size after push')}</label>
64 64 </div>
65 65
66 66 <div class="label">
67 67 <span class="help-block">${_('Trigger a hook that calculates repository size after each push.')}</span>
68 68 </div>
69 69 <div class="checkbox">
70 70 ${h.checkbox('hooks_changegroup_push_logger' + suffix, 'True', **kwargs)}
71 71 <label for="hooks_changegroup_push_logger${suffix}">${_('Execute pre/post push hooks')}</label>
72 72 </div>
73 73 <div class="label">
74 74                     <span class="help-block">${_('Execute built-in pre/post push hooks. This also executes rcextensions hooks.')}</span>
75 75 </div>
76 76 <div class="checkbox">
77 77 ${h.checkbox('hooks_outgoing_pull_logger' + suffix, 'True', **kwargs)}
78 78 <label for="hooks_outgoing_pull_logger${suffix}">${_('Execute pre/post pull hooks')}</label>
79 79 </div>
80 80 <div class="label">
81 81                     <span class="help-block">${_('Execute built-in pre/post pull hooks. This also executes rcextensions hooks.')}</span>
82 82 </div>
83 83 </div>
84 84 </div>
85 85 </div>
86 86 % endif
87 87
88 88 % if display_globals or repo_type in ['hg']:
89 89 <div class="panel panel-default">
90 90 <div class="panel-heading" id="vcs-hg-options">
91 91             <h3 class="panel-title">${_('Mercurial Settings')}<a class="permalink" href="#vcs-hg-options"> ¶</a></h3>
92 92 </div>
93 93 <div class="panel-body">
94 94 <div class="checkbox">
95 95 ${h.checkbox('extensions_largefiles' + suffix, 'True', **kwargs)}
96 96 <label for="extensions_largefiles${suffix}">${_('Enable largefiles extension')}</label>
97 97 </div>
98 98 <div class="label">
99 99 % if display_globals:
100 100 <span class="help-block">${_('Enable Largefiles extensions for all repositories.')}</span>
101 101 % else:
102 102 <span class="help-block">${_('Enable Largefiles extensions for this repository.')}</span>
103 103 % endif
104 104 </div>
105 105
106 106 % if display_globals:
107 107 <div class="field">
108 108 <div class="input">
109 109 ${h.text('largefiles_usercache' + suffix, size=59)}
110 110 </div>
111 111 </div>
112 112 <div class="label">
113 113 <span class="help-block">${_('Filesystem location where Mercurial largefile objects should be stored.')}</span>
114 114 </div>
115 115 % endif
116 116
117 117 <div class="checkbox">
118 118 ${h.checkbox('phases_publish' + suffix, 'True', **kwargs)}
119 119 <label for="phases_publish${suffix}">${_('Set repositories as publishing') if display_globals else _('Set repository as publishing')}</label>
120 120 </div>
121 121 <div class="label">
122 122 <span class="help-block">${_('When this is enabled all commits in the repository are seen as public commits by clients.')}</span>
123 123 </div>
124 124 % if display_globals:
125 125 <div class="checkbox">
126 126 ${h.checkbox('extensions_hgsubversion' + suffix,'True')}
127 127 <label for="extensions_hgsubversion${suffix}">${_('Enable hgsubversion extension')}</label>
128 128 </div>
129 129 <div class="label">
130 130 <span class="help-block">${_('Requires hgsubversion library to be installed. Allows cloning remote SVN repositories and migrates them to Mercurial type.')}</span>
131 131 </div>
132 132 % endif
133 133
134 134 <div class="checkbox">
135 135 ${h.checkbox('extensions_evolve' + suffix, 'True', **kwargs)}
136 136 <label for="extensions_evolve${suffix}">${_('Enable evolve extension')}</label>
137 137 </div>
138 138 <div class="label">
139 139 % if display_globals:
140 140 <span class="help-block">${_('Enable evolve extension for all repositories.')}</span>
141 141 % else:
142 142 <span class="help-block">${_('Enable evolve extension for this repository.')}</span>
143 143 % endif
144 144 </div>
145 145
146 146 </div>
147 147 </div>
148 ## LABS for HG
149 % if c.labs_active:
150 <div class="panel panel-danger">
151 <div class="panel-heading">
152 <h3 class="panel-title">${_('Mercurial Labs Settings')} (${_('These features are considered experimental and may not work as expected.')})</h3>
153 </div>
154 <div class="panel-body">
155
156 <div class="checkbox">
157 ${h.checkbox('rhodecode_hg_use_rebase_for_merging' + suffix, 'True', **kwargs)}
158 <label for="rhodecode_hg_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
159 </div>
160 <div class="label">
161 <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
162 </div>
163
164 <div class="checkbox">
165 ${h.checkbox('rhodecode_hg_close_branch_before_merging' + suffix, 'True', **kwargs)}
166 <label for="rhodecode_hg_close_branch_before_merging{suffix}">${_('Close branch before merging it')}</label>
167 </div>
168 <div class="label">
169 <span class="help-block">${_('Close branch before merging it into destination branch. No effect when rebase strategy is use.')}</span>
170 </div>
171
172 </div>
173 </div>
174 % endif
175
176 148 % endif
177 149
178 150 % if display_globals or repo_type in ['git']:
179 151 <div class="panel panel-default">
180 152 <div class="panel-heading" id="vcs-git-options">
181 153             <h3 class="panel-title">${_('Git Settings')}<a class="permalink" href="#vcs-git-options"> ¶</a></h3>
182 154 </div>
183 155 <div class="panel-body">
184 156 <div class="checkbox">
185 157 ${h.checkbox('vcs_git_lfs_enabled' + suffix, 'True', **kwargs)}
186 158 <label for="vcs_git_lfs_enabled${suffix}">${_('Enable lfs extension')}</label>
187 159 </div>
188 160 <div class="label">
189 161 % if display_globals:
190 162 <span class="help-block">${_('Enable lfs extensions for all repositories.')}</span>
191 163 % else:
192 164 <span class="help-block">${_('Enable lfs extensions for this repository.')}</span>
193 165 % endif
194 166 </div>
195 167
196 168 % if display_globals:
197 169 <div class="field">
198 170 <div class="input">
199 171 ${h.text('vcs_git_lfs_store_location' + suffix, size=59)}
200 172 </div>
201 173 </div>
202 174 <div class="label">
203 175 <span class="help-block">${_('Filesystem location where Git lfs objects should be stored.')}</span>
204 176 </div>
205 177 % endif
206 178 </div>
207 179 </div>
208 180 % endif
209 181
210 182
211 183 % if display_globals:
212 184 <div class="panel panel-default">
213 185 <div class="panel-heading" id="vcs-global-svn-options">
214 186             <h3 class="panel-title">${_('Global Subversion Settings')}<a class="permalink" href="#vcs-global-svn-options"> ¶</a></h3>
215 187 </div>
216 188 <div class="panel-body">
217 189 <div class="field">
218 190 <div class="checkbox">
219 191 ${h.checkbox('vcs_svn_proxy_http_requests_enabled' + suffix, 'True', **kwargs)}
220 192 <label for="vcs_svn_proxy_http_requests_enabled${suffix}">${_('Proxy subversion HTTP requests')}</label>
221 193 </div>
222 194 <div class="label">
223 195 <span class="help-block">
224 196 ${_('Subversion HTTP Support. Enables communication with SVN over HTTP protocol.')}
225 197 <a href="${h.route_url('enterprise_svn_setup')}" target="_blank">${_('SVN Protocol setup Documentation')}</a>.
226 198 </span>
227 199 </div>
228 200 </div>
229 201 <div class="field">
230 202 <div class="label">
231 203 <label for="vcs_svn_proxy_http_server_url">${_('Subversion HTTP Server URL')}</label><br/>
232 204 </div>
233 205 <div class="input">
234 206 ${h.text('vcs_svn_proxy_http_server_url',size=59)}
235 207 % if c.svn_proxy_generate_config:
236 208 <span class="buttons">
237 209 <button class="btn btn-primary" id="vcs_svn_generate_cfg">${_('Generate Apache Config')}</button>
238 210 </span>
239 211 % endif
240 212 </div>
241 213 </div>
242 214 </div>
243 215 </div>
244 216 % endif
245 217
246 218 % if display_globals or repo_type in ['svn']:
247 219 <div class="panel panel-default">
248 220 <div class="panel-heading" id="vcs-svn-options">
249 221             <h3 class="panel-title">${_('Subversion Settings')}<a class="permalink" href="#vcs-svn-options"> ¶</a></h3>
250 222 </div>
251 223 <div class="panel-body">
252 224 <div class="field">
253 225 <div class="content" >
254 226 <label>${_('Repository patterns')}</label><br/>
255 227 </div>
256 228 </div>
257 229 <div class="label">
258 230                 <span class="help-block">${_('Patterns for identifying SVN branches and tags. For recursive search, use "*". E.g. "/branches/*"')}</span>
259 231 </div>
260 232
261 233 <div class="field branch_patterns">
262 234 <div class="input" >
263 235 <label>${_('Branches')}:</label><br/>
264 236 </div>
265 237 % if svn_branch_patterns:
266 238 % for branch in svn_branch_patterns:
267 239 <div class="input adjacent" id="${'id%s' % branch.ui_id}">
268 240 ${h.hidden('branch_ui_key' + suffix, branch.ui_key)}
269 241 ${h.text('branch_value_%d' % branch.ui_id + suffix, branch.ui_value, size=59, readonly="readonly", class_='disabled')}
270 242 % if kwargs.get('disabled') != 'disabled':
271 243 <span class="btn btn-x" onclick="ajaxDeletePattern(${branch.ui_id},'${'id%s' % branch.ui_id}')">
272 244 ${_('Delete')}
273 245 </span>
274 246 % endif
275 247 </div>
276 248 % endfor
277 249 %endif
278 250 </div>
279 251 % if kwargs.get('disabled') != 'disabled':
280 252 <div class="field branch_patterns">
281 253 <div class="input" >
282 254 ${h.text('new_svn_branch',size=59,placeholder='New branch pattern')}
283 255 </div>
284 256 </div>
285 257 % endif
286 258 <div class="field tag_patterns">
287 259 <div class="input" >
288 260 <label>${_('Tags')}:</label><br/>
289 261 </div>
290 262 % if svn_tag_patterns:
291 263 % for tag in svn_tag_patterns:
292 264 <div class="input" id="${'id%s' % tag.ui_id + suffix}">
293 265 ${h.hidden('tag_ui_key' + suffix, tag.ui_key)}
294 266 ${h.text('tag_ui_value_new_%d' % tag.ui_id + suffix, tag.ui_value, size=59, readonly="readonly", class_='disabled tag_input')}
295 267 % if kwargs.get('disabled') != 'disabled':
296 268 <span class="btn btn-x" onclick="ajaxDeletePattern(${tag.ui_id},'${'id%s' % tag.ui_id}')">
297 269 ${_('Delete')}
298 270 </span>
299 271 %endif
300 272 </div>
301 273 % endfor
302 274 % endif
303 275 </div>
304 276 % if kwargs.get('disabled') != 'disabled':
305 277 <div class="field tag_patterns">
306 278 <div class="input" >
307 279 ${h.text('new_svn_tag' + suffix, size=59, placeholder='New tag pattern')}
308 280 </div>
309 281 </div>
310 282 %endif
311 283 </div>
312 284 </div>
313 285 % else:
314 286 ${h.hidden('new_svn_branch' + suffix, '')}
315 287 ${h.hidden('new_svn_tag' + suffix, '')}
316 288 % endif
317 289
318 290
319 291 % if display_globals or repo_type in ['hg', 'git']:
320 292 <div class="panel panel-default">
321 293 <div class="panel-heading" id="vcs-pull-requests-options">
322 294             <h3 class="panel-title">${_('Pull Request Settings')}<a class="permalink" href="#vcs-pull-requests-options"> ¶</a></h3>
323 295 </div>
324 296 <div class="panel-body">
325 297 <div class="checkbox">
326 298 ${h.checkbox('rhodecode_pr_merge_enabled' + suffix, 'True', **kwargs)}
327 299 <label for="rhodecode_pr_merge_enabled${suffix}">${_('Enable server-side merge for pull requests')}</label>
328 300 </div>
329 301 <div class="label">
330 302 <span class="help-block">${_('Note: when this feature is enabled, it only runs hooks defined in the rcextension package. Custom hooks added on the Admin -> Settings -> Hooks page will not be run when pull requests are automatically merged from the web interface.')}</span>
331 303 </div>
332 304 <div class="checkbox">
333 305 ${h.checkbox('rhodecode_use_outdated_comments' + suffix, 'True', **kwargs)}
334 306 <label for="rhodecode_use_outdated_comments${suffix}">${_('Invalidate and relocate inline comments during update')}</label>
335 307 </div>
336 308 <div class="label">
337 309 <span class="help-block">${_('During the update of a pull request, the position of inline comments will be updated and outdated inline comments will be hidden.')}</span>
338 310 </div>
339 311 </div>
340 312 </div>
341 313 % endif
342 314
315 % if display_globals or repo_type in ['hg',]:
316 <div class="panel panel-default">
317         <div class="panel-heading" id="vcs-hg-pull-requests-options">
318         <h3 class="panel-title">${_('Mercurial Pull Request Settings')}<a class="permalink" href="#vcs-hg-pull-requests-options"> ¶</a></h3>
319 </div>
320 <div class="panel-body">
321 ## Specific HG settings
322 <div class="checkbox">
323 ${h.checkbox('rhodecode_hg_use_rebase_for_merging' + suffix, 'True', **kwargs)}
324 <label for="rhodecode_hg_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
325 </div>
326 <div class="label">
327 <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
328 </div>
329
330 <div class="checkbox">
331 ${h.checkbox('rhodecode_hg_close_branch_before_merging' + suffix, 'True', **kwargs)}
332 <label for="rhodecode_hg_close_branch_before_merging{suffix}">${_('Close branch before merging it')}</label>
333 </div>
334 <div class="label">
335                     <span class="help-block">${_('Close branch before merging it into destination branch. No effect when the rebase strategy is used.')}</span>
336 </div>
337
338
339 </div>
340 </div>
341 % endif
342
343 ## DISABLED FOR GIT FOR NOW as the rebase/close is not supported yet
344 ## % if display_globals or repo_type in ['git']:
345 ## <div class="panel panel-default">
346 ## <div class="panel-heading" id="vcs-pull-requests-options">
347 ## <h3 class="panel-title">${_('Git Pull Request Settings')}<a class="permalink" href="#vcs-git-pull-requests-options"> ¶</a></h3>
348 ## </div>
349 ## <div class="panel-body">
350 ## <div class="checkbox">
351 ## ${h.checkbox('rhodecode_git_use_rebase_for_merging' + suffix, 'True', **kwargs)}
352 ## <label for="rhodecode_git_use_rebase_for_merging${suffix}">${_('Use rebase as merge strategy')}</label>
353 ## </div>
354 ## <div class="label">
355 ## <span class="help-block">${_('Use rebase instead of creating a merge commit when merging via web interface.')}</span>
356 ## </div>
357 ##
358 ## <div class="checkbox">
359 ## ${h.checkbox('rhodecode_git_close_branch_before_merging' + suffix, 'True', **kwargs)}
360 ## <label for="rhodecode_git_close_branch_before_merging${suffix}">${_('Delete branch after merging it')}</label>
361 ## </div>
362 ## <div class="label">
363 ## <span class="help-block">${_('Delete branch after merging it into destination branch. No effect when the rebase strategy is used.')}</span>
364 ## </div>
365 ## </div>
366 ## </div>
367 ## % endif
368
369
343 370 </%def>
@@ -1,682 +1,670 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23
24 24 import rhodecode
25 25 from rhodecode.config.routing import ADMIN_PREFIX
26 26 from rhodecode.lib.utils2 import md5
27 27 from rhodecode.model.db import RhodeCodeUi
28 28 from rhodecode.model.meta import Session
29 29 from rhodecode.model.settings import SettingsModel, IssueTrackerSettingsModel
30 30 from rhodecode.tests import url, assert_session_flash
31 31 from rhodecode.tests.utils import AssertResponse
32 32
33 33
34 34 UPDATE_DATA_QUALNAME = (
35 35 'rhodecode.apps.admin.views.system_info.AdminSystemInfoSettingsView.get_update_data')
36 36
37 37
38 38 @pytest.mark.usefixtures('autologin_user', 'app')
39 39 class TestAdminSettingsController(object):
40 40
41 41 @pytest.mark.parametrize('urlname', [
42 42 'admin_settings_vcs',
43 43 'admin_settings_mapping',
44 44 'admin_settings_global',
45 45 'admin_settings_visual',
46 46 'admin_settings_email',
47 47 'admin_settings_hooks',
48 48 'admin_settings_search',
49 49 ])
50 50 def test_simple_get(self, urlname, app):
51 51 app.get(url(urlname))
52 52
53 53 def test_create_custom_hook(self, csrf_token):
54 54 response = self.app.post(
55 55 url('admin_settings_hooks'),
56 56 params={
57 57 'new_hook_ui_key': 'test_hooks_1',
58 58 'new_hook_ui_value': 'cd /tmp',
59 59 'csrf_token': csrf_token})
60 60
61 61 response = response.follow()
62 62 response.mustcontain('test_hooks_1')
63 63 response.mustcontain('cd /tmp')
64 64
65 65 def test_create_custom_hook_delete(self, csrf_token):
66 66 response = self.app.post(
67 67 url('admin_settings_hooks'),
68 68 params={
69 69 'new_hook_ui_key': 'test_hooks_2',
70 70 'new_hook_ui_value': 'cd /tmp2',
71 71 'csrf_token': csrf_token})
72 72
73 73 response = response.follow()
74 74 response.mustcontain('test_hooks_2')
75 75 response.mustcontain('cd /tmp2')
76 76
77 77 hook_id = SettingsModel().get_ui_by_key('test_hooks_2').ui_id
78 78
79 79 # delete
80 80 self.app.post(
81 81 url('admin_settings_hooks'),
82 82 params={'hook_id': hook_id, 'csrf_token': csrf_token})
83 83 response = self.app.get(url('admin_settings_hooks'))
84 84 response.mustcontain(no=['test_hooks_2'])
85 85 response.mustcontain(no=['cd /tmp2'])
86 86
87 87
88 88 @pytest.mark.usefixtures('autologin_user', 'app')
89 89 class TestAdminSettingsGlobal(object):
90 90
91 91 def test_pre_post_code_code_active(self, csrf_token):
92 92 pre_code = 'rc-pre-code-187652122'
93 93 post_code = 'rc-postcode-98165231'
94 94
95 95 response = self.post_and_verify_settings({
96 96 'rhodecode_pre_code': pre_code,
97 97 'rhodecode_post_code': post_code,
98 98 'csrf_token': csrf_token,
99 99 })
100 100
101 101 response = response.follow()
102 102 response.mustcontain(pre_code, post_code)
103 103
104 104 def test_pre_post_code_code_inactive(self, csrf_token):
105 105 pre_code = 'rc-pre-code-187652122'
106 106 post_code = 'rc-postcode-98165231'
107 107 response = self.post_and_verify_settings({
108 108 'rhodecode_pre_code': '',
109 109 'rhodecode_post_code': '',
110 110 'csrf_token': csrf_token,
111 111 })
112 112
113 113 response = response.follow()
114 114 response.mustcontain(no=[pre_code, post_code])
115 115
116 116 def test_captcha_activate(self, csrf_token):
117 117 self.post_and_verify_settings({
118 118 'rhodecode_captcha_private_key': '1234567890',
119 119 'rhodecode_captcha_public_key': '1234567890',
120 120 'csrf_token': csrf_token,
121 121 })
122 122
123 123 response = self.app.get(ADMIN_PREFIX + '/register')
124 124 response.mustcontain('captcha')
125 125
126 126 def test_captcha_deactivate(self, csrf_token):
127 127 self.post_and_verify_settings({
128 128 'rhodecode_captcha_private_key': '',
129 129 'rhodecode_captcha_public_key': '1234567890',
130 130 'csrf_token': csrf_token,
131 131 })
132 132
133 133 response = self.app.get(ADMIN_PREFIX + '/register')
134 134 response.mustcontain(no=['captcha'])
135 135
136 136 def test_title_change(self, csrf_token):
137 137 old_title = 'RhodeCode'
138 138 new_title = old_title + '_changed'
139 139
140 140         for new_title in ['Changed', 'Żółwik', old_title]:
141 141 response = self.post_and_verify_settings({
142 142 'rhodecode_title': new_title,
143 143 'csrf_token': csrf_token,
144 144 })
145 145
146 146 response = response.follow()
147 147 response.mustcontain(
148 148 """<div class="branding">- %s</div>""" % new_title)
149 149
150 150 def post_and_verify_settings(self, settings):
151 151 old_title = 'RhodeCode'
152 152 old_realm = 'RhodeCode authentication'
153 153 params = {
154 154 'rhodecode_title': old_title,
155 155 'rhodecode_realm': old_realm,
156 156 'rhodecode_pre_code': '',
157 157 'rhodecode_post_code': '',
158 158 'rhodecode_captcha_private_key': '',
159 159 'rhodecode_captcha_public_key': '',
160 160 'rhodecode_create_personal_repo_group': False,
161 161 'rhodecode_personal_repo_group_pattern': '${username}',
162 162 }
163 163 params.update(settings)
164 164 response = self.app.post(url('admin_settings_global'), params=params)
165 165
166 166 assert_session_flash(response, 'Updated application settings')
167 167 app_settings = SettingsModel().get_all_settings()
168 168 del settings['csrf_token']
169 169 for key, value in settings.iteritems():
170 170 assert app_settings[key] == value.decode('utf-8')
171 171
172 172 return response
173 173
174 174
175 175 @pytest.mark.usefixtures('autologin_user', 'app')
176 176 class TestAdminSettingsVcs(object):
177 177
178 178 def test_contains_svn_default_patterns(self, app):
179 179 response = app.get(url('admin_settings_vcs'))
180 180 expected_patterns = [
181 181 '/trunk',
182 182 '/branches/*',
183 183 '/tags/*',
184 184 ]
185 185 for pattern in expected_patterns:
186 186 response.mustcontain(pattern)
187 187
188 188 def test_add_new_svn_branch_and_tag_pattern(
189 189 self, app, backend_svn, form_defaults, disable_sql_cache,
190 190 csrf_token):
191 191 form_defaults.update({
192 192 'new_svn_branch': '/exp/branches/*',
193 193 'new_svn_tag': '/important_tags/*',
194 194 'csrf_token': csrf_token,
195 195 })
196 196
197 197 response = app.post(
198 198 url('admin_settings_vcs'), params=form_defaults, status=302)
199 199 response = response.follow()
200 200
201 201 # Expect to find the new values on the page
202 202 response.mustcontain('/exp/branches/*')
203 203 response.mustcontain('/important_tags/*')
204 204
205 205 # Expect that those patterns are used to match branches and tags now
206 206 repo = backend_svn['svn-simple-layout'].scm_instance()
207 207 assert 'exp/branches/exp-sphinx-docs' in repo.branches
208 208 assert 'important_tags/v0.5' in repo.tags
209 209
210 210 def test_add_same_svn_value_twice_shows_an_error_message(
211 211 self, app, form_defaults, csrf_token, settings_util):
212 212 settings_util.create_rhodecode_ui('vcs_svn_branch', '/test')
213 213 settings_util.create_rhodecode_ui('vcs_svn_tag', '/test')
214 214
215 215 response = app.post(
216 216 url('admin_settings_vcs'),
217 217 params={
218 218 'paths_root_path': form_defaults['paths_root_path'],
219 219 'new_svn_branch': '/test',
220 220 'new_svn_tag': '/test',
221 221 'csrf_token': csrf_token,
222 222 },
223 223 status=200)
224 224
225 225 response.mustcontain("Pattern already exists")
226 226 response.mustcontain("Some form inputs contain invalid data.")
227 227
228 228 @pytest.mark.parametrize('section', [
229 229 'vcs_svn_branch',
230 230 'vcs_svn_tag',
231 231 ])
232 232 def test_delete_svn_patterns(
233 233 self, section, app, csrf_token, settings_util):
234 234 setting = settings_util.create_rhodecode_ui(
235 235 section, '/test_delete', cleanup=False)
236 236
237 237 app.post(
238 238 url('admin_settings_vcs'),
239 239 params={
240 240 '_method': 'delete',
241 241 'delete_svn_pattern': setting.ui_id,
242 242 'csrf_token': csrf_token},
243 243 headers={'X-REQUESTED-WITH': 'XMLHttpRequest'})
244 244
245 245 @pytest.mark.parametrize('section', [
246 246 'vcs_svn_branch',
247 247 'vcs_svn_tag',
248 248 ])
249 249 def test_delete_svn_patterns_raises_400_when_no_xhr(
250 250 self, section, app, csrf_token, settings_util):
251 251 setting = settings_util.create_rhodecode_ui(section, '/test_delete')
252 252
253 253 app.post(
254 254 url('admin_settings_vcs'),
255 255 params={
256 256 '_method': 'delete',
257 257 'delete_svn_pattern': setting.ui_id,
258 258 'csrf_token': csrf_token},
259 259 status=400)
260 260
261 261 def test_extensions_hgsubversion(self, app, form_defaults, csrf_token):
262 262 form_defaults.update({
263 263 'csrf_token': csrf_token,
264 264 'extensions_hgsubversion': 'True',
265 265 })
266 266 response = app.post(
267 267 url('admin_settings_vcs'),
268 268 params=form_defaults,
269 269 status=302)
270 270
271 271 response = response.follow()
272 272 extensions_input = (
273 273 '<input id="extensions_hgsubversion" '
274 274 'name="extensions_hgsubversion" type="checkbox" '
275 275 'value="True" checked="checked" />')
276 276 response.mustcontain(extensions_input)
277 277
278 278 def test_extensions_hgevolve(self, app, form_defaults, csrf_token):
279 279 form_defaults.update({
280 280 'csrf_token': csrf_token,
281 281 'extensions_evolve': 'True',
282 282 })
283 283 response = app.post(
284 284 url('admin_settings_vcs'),
285 285 params=form_defaults,
286 286 status=302)
287 287
288 288 response = response.follow()
289 289 extensions_input = (
290 290 '<input id="extensions_evolve" '
291 291 'name="extensions_evolve" type="checkbox" '
292 292 'value="True" checked="checked" />')
293 293 response.mustcontain(extensions_input)
294 294
295 295 def test_has_a_section_for_pull_request_settings(self, app):
296 296 response = app.get(url('admin_settings_vcs'))
297 297 response.mustcontain('Pull Request Settings')
298 298
299 299 def test_has_an_input_for_invalidation_of_inline_comments(
300 300 self, app):
301 301 response = app.get(url('admin_settings_vcs'))
302 302 assert_response = AssertResponse(response)
303 303 assert_response.one_element_exists(
304 304 '[name=rhodecode_use_outdated_comments]')
305 305
306 306 @pytest.mark.parametrize('new_value', [True, False])
307 307 def test_allows_to_change_invalidation_of_inline_comments(
308 308 self, app, form_defaults, csrf_token, new_value):
309 309 setting_key = 'use_outdated_comments'
310 310 setting = SettingsModel().create_or_update_setting(
311 311 setting_key, not new_value, 'bool')
312 312 Session().add(setting)
313 313 Session().commit()
314 314
315 315 form_defaults.update({
316 316 'csrf_token': csrf_token,
317 317 'rhodecode_use_outdated_comments': str(new_value),
318 318 })
319 319 response = app.post(
320 320 url('admin_settings_vcs'),
321 321 params=form_defaults,
322 322 status=302)
323 323 response = response.follow()
324 324 setting = SettingsModel().get_setting_by_name(setting_key)
325 325 assert setting.app_settings_value is new_value
326 326
327 def test_has_a_section_for_labs_settings_if_enabled(self, app):
328 with mock.patch.dict(
329 rhodecode.CONFIG, {'labs_settings_active': 'true'}):
330 response = self.app.get(url('admin_settings_vcs'))
331 response.mustcontain('Labs Settings')
332
333 def test_has_not_a_section_for_labs_settings_if_disables(self, app):
334 with mock.patch.dict(
335 rhodecode.CONFIG, {'labs_settings_active': 'false'}):
336 response = self.app.get(url('admin_settings_vcs'))
337 response.mustcontain(no='Labs Settings')
338
339 327 @pytest.mark.parametrize('new_value', [True, False])
340 328 def test_allows_to_change_hg_rebase_merge_strategy(
341 329 self, app, form_defaults, csrf_token, new_value):
342 330 setting_key = 'hg_use_rebase_for_merging'
343 331
344 332 form_defaults.update({
345 333 'csrf_token': csrf_token,
346 334 'rhodecode_' + setting_key: str(new_value),
347 335 })
348 336
349 337 with mock.patch.dict(
350 338 rhodecode.CONFIG, {'labs_settings_active': 'true'}):
351 339 app.post(
352 340 url('admin_settings_vcs'),
353 341 params=form_defaults,
354 342 status=302)
355 343
356 344 setting = SettingsModel().get_setting_by_name(setting_key)
357 345 assert setting.app_settings_value is new_value
358 346
359 347 @pytest.fixture
360 348 def disable_sql_cache(self, request):
361 349 patcher = mock.patch(
362 350 'rhodecode.lib.caching_query.FromCache.process_query')
363 351 request.addfinalizer(patcher.stop)
364 352 patcher.start()
365 353
366 354 @pytest.fixture
367 355 def form_defaults(self):
368 356 from rhodecode.controllers.admin.settings import SettingsController
369 357 controller = SettingsController()
370 358 return controller._form_defaults()
371 359
372 360 # TODO: johbo: What we really want is to checkpoint before a test run and
373 361 # reset the session afterwards.
374 362 @pytest.fixture(scope='class', autouse=True)
375 363 def cleanup_settings(self, request, pylonsapp):
376 364 ui_id = RhodeCodeUi.ui_id
377 365 original_ids = list(
378 366 r.ui_id for r in RhodeCodeUi.query().values(ui_id))
379 367
380 368 @request.addfinalizer
381 369 def cleanup():
382 370 RhodeCodeUi.query().filter(
383 371 ui_id.notin_(original_ids)).delete(False)
384 372
385 373
386 374 @pytest.mark.usefixtures('autologin_user', 'app')
387 375 class TestLabsSettings(object):
388 376 def test_get_settings_page_disabled(self):
389 377 with mock.patch.dict(rhodecode.CONFIG,
390 378 {'labs_settings_active': 'false'}):
391 379 response = self.app.get(url('admin_settings_labs'), status=302)
392 380
393 381 assert response.location.endswith(url('admin_settings'))
394 382
395 383 def test_get_settings_page_enabled(self):
396 384 from rhodecode.controllers.admin import settings
397 385 lab_settings = [
398 386 settings.LabSetting(
399 387 key='rhodecode_bool',
400 388 type='bool',
401 389 group='bool group',
402 390 label='bool label',
403 391 help='bool help'
404 392 ),
405 393 settings.LabSetting(
406 394 key='rhodecode_text',
407 395 type='unicode',
408 396 group='text group',
409 397 label='text label',
410 398 help='text help'
411 399 ),
412 400 ]
413 401 with mock.patch.dict(rhodecode.CONFIG,
414 402 {'labs_settings_active': 'true'}):
415 403 with mock.patch.object(settings, '_LAB_SETTINGS', lab_settings):
416 404 response = self.app.get(url('admin_settings_labs'))
417 405
418 406 assert '<label>bool group:</label>' in response
419 407 assert '<label for="rhodecode_bool">bool label</label>' in response
420 408 assert '<p class="help-block">bool help</p>' in response
421 409 assert 'name="rhodecode_bool" type="checkbox"' in response
422 410
423 411 assert '<label>text group:</label>' in response
424 412 assert '<label for="rhodecode_text">text label</label>' in response
425 413 assert '<p class="help-block">text help</p>' in response
426 414 assert 'name="rhodecode_text" size="60" type="text"' in response
427 415
428 416
429 417 @pytest.mark.usefixtures('app')
430 418 class TestOpenSourceLicenses(object):
431 419
432 420 def _get_url(self):
433 421 return ADMIN_PREFIX + '/settings/open_source'
434 422
435 423 def test_records_are_displayed(self, autologin_user):
436 424 sample_licenses = {
437 425 "python2.7-pytest-2.7.1": {
438 426 "UNKNOWN": None
439 427 },
440 428 "python2.7-Markdown-2.6.2": {
441 429 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
442 430 }
443 431 }
444 432 read_licenses_patch = mock.patch(
445 433 'rhodecode.apps.admin.views.open_source_licenses.read_opensource_licenses',
446 434 return_value=sample_licenses)
447 435 with read_licenses_patch:
448 436 response = self.app.get(self._get_url(), status=200)
449 437
450 438 assert_response = AssertResponse(response)
451 439 assert_response.element_contains(
452 440 '.panel-heading', 'Licenses of Third Party Packages')
453 441 for name in sample_licenses:
454 442 response.mustcontain(name)
455 443 for license in sample_licenses[name]:
456 444 assert_response.element_contains('.panel-body', license)
457 445
458 446 def test_records_can_be_read(self, autologin_user):
459 447 response = self.app.get(self._get_url(), status=200)
460 448 assert_response = AssertResponse(response)
461 449 assert_response.element_contains(
462 450 '.panel-heading', 'Licenses of Third Party Packages')
463 451
464 452 def test_forbidden_when_normal_user(self, autologin_regular_user):
465 453 self.app.get(self._get_url(), status=404)
466 454
467 455
468 456 @pytest.mark.usefixtures('app')
469 457 class TestUserSessions(object):
470 458
471 459 def _get_url(self, name='admin_settings_sessions'):
472 460 return {
473 461 'admin_settings_sessions': ADMIN_PREFIX + '/settings/sessions',
474 462 'admin_settings_sessions_cleanup': ADMIN_PREFIX + '/settings/sessions/cleanup'
475 463 }[name]
476 464
477 465 def test_forbidden_when_normal_user(self, autologin_regular_user):
478 466 self.app.get(self._get_url(), status=404)
479 467
480 468 def test_show_sessions_page(self, autologin_user):
481 469 response = self.app.get(self._get_url(), status=200)
482 470 response.mustcontain('file')
483 471
484 472 def test_cleanup_old_sessions(self, autologin_user, csrf_token):
485 473
486 474 post_data = {
487 475 'csrf_token': csrf_token,
488 476 'expire_days': '60'
489 477 }
490 478 response = self.app.post(
491 479 self._get_url('admin_settings_sessions_cleanup'), params=post_data,
492 480 status=302)
493 481 assert_session_flash(response, 'Cleaned up old sessions')
494 482
495 483
496 484 @pytest.mark.usefixtures('app')
497 485 class TestAdminSystemInfo(object):
498 486 def _get_url(self, name='admin_settings_system'):
499 487 return {
500 488 'admin_settings_system': ADMIN_PREFIX + '/settings/system',
501 489 'admin_settings_system_update': ADMIN_PREFIX + '/settings/system/updates',
502 490 }[name]
503 491
504 492 def test_forbidden_when_normal_user(self, autologin_regular_user):
505 493 self.app.get(self._get_url(), status=404)
506 494
507 495 def test_system_info_page(self, autologin_user):
508 496 response = self.app.get(self._get_url())
509 497 response.mustcontain('RhodeCode Community Edition, version {}'.format(
510 498 rhodecode.__version__))
511 499
512 500 def test_system_update_new_version(self, autologin_user):
513 501 update_data = {
514 502 'versions': [
515 503 {
516 504 'version': '100.3.1415926535',
517 505 'general': 'The latest version we are ever going to ship'
518 506 },
519 507 {
520 508 'version': '0.0.0',
521 509 'general': 'The first version we ever shipped'
522 510 }
523 511 ]
524 512 }
525 513 with mock.patch(UPDATE_DATA_QUALNAME, return_value=update_data):
526 514 response = self.app.get(self._get_url('admin_settings_system_update'))
527 515 response.mustcontain('A <b>new version</b> is available')
528 516
529 517 def test_system_update_nothing_new(self, autologin_user):
530 518 update_data = {
531 519 'versions': [
532 520 {
533 521 'version': '0.0.0',
534 522 'general': 'The first version we ever shipped'
535 523 }
536 524 ]
537 525 }
538 526 with mock.patch(UPDATE_DATA_QUALNAME, return_value=update_data):
539 527 response = self.app.get(self._get_url('admin_settings_system_update'))
540 528 response.mustcontain(
541 529 'You already have the <b>latest</b> stable version.')
542 530
543 531 def test_system_update_bad_response(self, autologin_user):
544 532 with mock.patch(UPDATE_DATA_QUALNAME, side_effect=ValueError('foo')):
545 533 response = self.app.get(self._get_url('admin_settings_system_update'))
546 534 response.mustcontain(
547 535 'Bad data sent from update server')
548 536
549 537
550 538 @pytest.mark.usefixtures("app")
551 539 class TestAdminSettingsIssueTracker(object):
552 540 RC_PREFIX = 'rhodecode_'
553 541 SHORT_PATTERN_KEY = 'issuetracker_pat_'
554 542 PATTERN_KEY = RC_PREFIX + SHORT_PATTERN_KEY
555 543
556 544 def test_issuetracker_index(self, autologin_user):
557 545 response = self.app.get(url('admin_settings_issuetracker'))
558 546 assert response.status_code == 200
559 547
560 548 def test_add_empty_issuetracker_pattern(
561 549 self, request, autologin_user, csrf_token):
562 550 post_url = url('admin_settings_issuetracker_save')
563 551 post_data = {
564 552 'csrf_token': csrf_token
565 553 }
566 554 self.app.post(post_url, post_data, status=302)
567 555
568 556 def test_add_issuetracker_pattern(
569 557 self, request, autologin_user, csrf_token):
570 558 pattern = 'issuetracker_pat'
571 559 another_pattern = pattern+'1'
572 560 post_url = url('admin_settings_issuetracker_save')
573 561 post_data = {
574 562 'new_pattern_pattern_0': pattern,
575 563 'new_pattern_url_0': 'url',
576 564 'new_pattern_prefix_0': 'prefix',
577 565 'new_pattern_description_0': 'description',
578 566 'new_pattern_pattern_1': another_pattern,
579 567 'new_pattern_url_1': 'url1',
580 568 'new_pattern_prefix_1': 'prefix1',
581 569 'new_pattern_description_1': 'description1',
582 570 'csrf_token': csrf_token
583 571 }
584 572 self.app.post(post_url, post_data, status=302)
585 573 settings = SettingsModel().get_all_settings()
586 574 self.uid = md5(pattern)
587 575 assert settings[self.PATTERN_KEY+self.uid] == pattern
588 576 self.another_uid = md5(another_pattern)
589 577 assert settings[self.PATTERN_KEY+self.another_uid] == another_pattern
590 578
591 579 @request.addfinalizer
592 580 def cleanup():
593 581 defaults = SettingsModel().get_all_settings()
594 582
595 583 entries = [name for name in defaults if (
596 584                 (self.uid in name) or (self.another_uid in name))]
597 585 start = len(self.RC_PREFIX)
598 586 for del_key in entries:
599 587 # TODO: anderson: get_by_name needs name without prefix
600 588 entry = SettingsModel().get_setting_by_name(del_key[start:])
601 589 Session().delete(entry)
602 590
603 591 Session().commit()
604 592
605 593 def test_edit_issuetracker_pattern(
606 594 self, autologin_user, backend, csrf_token, request):
607 595 old_pattern = 'issuetracker_pat'
608 596 old_uid = md5(old_pattern)
609 597 pattern = 'issuetracker_pat_new'
610 598 self.new_uid = md5(pattern)
611 599
612 600 SettingsModel().create_or_update_setting(
613 601 self.SHORT_PATTERN_KEY+old_uid, old_pattern, 'unicode')
614 602
615 603 post_url = url('admin_settings_issuetracker_save')
616 604 post_data = {
617 605 'new_pattern_pattern_0': pattern,
618 606 'new_pattern_url_0': 'url',
619 607 'new_pattern_prefix_0': 'prefix',
620 608 'new_pattern_description_0': 'description',
621 609 'uid': old_uid,
622 610 'csrf_token': csrf_token
623 611 }
624 612 self.app.post(post_url, post_data, status=302)
625 613 settings = SettingsModel().get_all_settings()
626 614 assert settings[self.PATTERN_KEY+self.new_uid] == pattern
627 615 assert self.PATTERN_KEY+old_uid not in settings
628 616
629 617 @request.addfinalizer
630 618 def cleanup():
631 619 IssueTrackerSettingsModel().delete_entries(self.new_uid)
632 620
633 621 def test_replace_issuetracker_pattern_description(
634 622 self, autologin_user, csrf_token, request, settings_util):
635 623 prefix = 'issuetracker'
636 624 pattern = 'issuetracker_pat'
637 625 self.uid = md5(pattern)
638 626 pattern_key = '_'.join([prefix, 'pat', self.uid])
639 627 rc_pattern_key = '_'.join(['rhodecode', pattern_key])
640 628 desc_key = '_'.join([prefix, 'desc', self.uid])
641 629 rc_desc_key = '_'.join(['rhodecode', desc_key])
642 630 new_description = 'new_description'
643 631
644 632 settings_util.create_rhodecode_setting(
645 633 pattern_key, pattern, 'unicode', cleanup=False)
646 634 settings_util.create_rhodecode_setting(
647 635 desc_key, 'old description', 'unicode', cleanup=False)
648 636
649 637 post_url = url('admin_settings_issuetracker_save')
650 638 post_data = {
651 639 'new_pattern_pattern_0': pattern,
652 640 'new_pattern_url_0': 'url',
653 641 'new_pattern_prefix_0': 'prefix',
654 642 'new_pattern_description_0': new_description,
655 643 'uid': self.uid,
656 644 'csrf_token': csrf_token
657 645 }
658 646 self.app.post(post_url, post_data, status=302)
659 647 settings = SettingsModel().get_all_settings()
660 648 assert settings[rc_pattern_key] == pattern
661 649 assert settings[rc_desc_key] == new_description
662 650
663 651 @request.addfinalizer
664 652 def cleanup():
665 653 IssueTrackerSettingsModel().delete_entries(self.uid)
666 654
667 655 def test_delete_issuetracker_pattern(
668 656 self, autologin_user, backend, csrf_token, settings_util):
669 657 pattern = 'issuetracker_pat'
670 658 uid = md5(pattern)
671 659 settings_util.create_rhodecode_setting(
672 660 self.SHORT_PATTERN_KEY+uid, pattern, 'unicode', cleanup=False)
673 661
674 662 post_url = url('admin_issuetracker_delete')
675 663 post_data = {
676 664 '_method': 'delete',
677 665 'uid': uid,
678 666 'csrf_token': csrf_token
679 667 }
680 668 self.app.post(post_url, post_data, status=302)
681 669 settings = SettingsModel().get_all_settings()
682 670 assert 'rhodecode_%s%s' % (self.SHORT_PATTERN_KEY, uid) not in settings
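The assertions in these issue-tracker tests all follow one key scheme: every value stored for a pattern lives under keys derived from the md5 hex digest of the pattern itself (PATTERN_KEY = RC_PREFIX + SHORT_PATTERN_KEY, uid = md5(pattern), and the rc_pattern_key/rc_desc_key joins above). A minimal standalone sketch of that scheme, using hashlib as a stand-in for RhodeCode's md5() helper (assumed to return the same hex digest) and with the prefix constants spelled out as assumptions:

.. code-block:: python

    import hashlib

    RC_PREFIX = 'rhodecode_'                 # assumed, matches the keys asserted above
    SHORT_PATTERN_KEY = 'issuetracker_pat_'  # assumed, matches '_'.join(['issuetracker', 'pat', uid])

    def issuetracker_keys(pattern):
        """Derive the settings keys asserted for a single issue-tracker pattern."""
        uid = hashlib.md5(pattern.encode('utf-8')).hexdigest()  # stand-in for md5(pattern)
        return {
            'pattern': RC_PREFIX + SHORT_PATTERN_KEY + uid,         # rhodecode_issuetracker_pat_<uid>
            'description': RC_PREFIX + 'issuetracker_desc_' + uid,  # rhodecode_issuetracker_desc_<uid>
        }

    print(issuetracker_keys('issuetracker_pat'))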
@@ -1,1069 +1,1072 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23
24 24 from rhodecode.lib.utils2 import str2bool
25 25 from rhodecode.model.meta import Session
26 26 from rhodecode.model.settings import VcsSettingsModel, UiSetting
27 27
28 28
29 29 HOOKS_FORM_DATA = {
30 30 'hooks_changegroup_repo_size': True,
31 31 'hooks_changegroup_push_logger': True,
32 32 'hooks_outgoing_pull_logger': True
33 33 }
34 34
35 35 SVN_FORM_DATA = {
36 36 'new_svn_branch': 'test-branch',
37 37 'new_svn_tag': 'test-tag'
38 38 }
39 39
40 40 GENERAL_FORM_DATA = {
41 41 'rhodecode_pr_merge_enabled': True,
42 42 'rhodecode_use_outdated_comments': True,
43 43 'rhodecode_hg_use_rebase_for_merging': True,
44 'rhodecode_hg_close_branch_before_merging': True,
45 'rhodecode_git_use_rebase_for_merging': True,
46 'rhodecode_git_close_branch_before_merging': True,
44 47 }
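The two keys added to GENERAL_FORM_DATA here, rhodecode_hg_close_branch_before_merging and rhodecode_git_close_branch_before_merging, carry the new close-branch-before-merging option through the same general pull-request settings path as the existing flags. A short, hypothetical usage sketch (assuming a configured RhodeCode environment; the repository name is a placeholder) of how such a form dict reaches the model methods exercised by the tests below:

.. code-block:: python

    from rhodecode.model.settings import VcsSettingsModel

    # Persist the general (pull-request) settings globally ...
    VcsSettingsModel().create_or_update_global_pr_settings(GENERAL_FORM_DATA)

    # ... or for a single repository; 'some-repo' is illustrative only.
    VcsSettingsModel(repo='some-repo').create_or_update_repo_pr_settings(
        GENERAL_FORM_DATA)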
45 48
46 49
47 50 class TestInheritGlobalSettingsProperty(object):
48 51 def test_get_raises_exception_when_repository_not_specified(self):
49 52 model = VcsSettingsModel()
50 53 with pytest.raises(Exception) as exc_info:
51 54 model.inherit_global_settings
52 55 assert exc_info.value.message == 'Repository is not specified'
53 56
54 57 def test_true_is_returned_when_value_is_not_found(self, repo_stub):
55 58 model = VcsSettingsModel(repo=repo_stub.repo_name)
56 59 assert model.inherit_global_settings is True
57 60
58 61 def test_value_is_returned(self, repo_stub, settings_util):
59 62 model = VcsSettingsModel(repo=repo_stub.repo_name)
60 63 settings_util.create_repo_rhodecode_setting(
61 64 repo_stub, VcsSettingsModel.INHERIT_SETTINGS, False, 'bool')
62 65 assert model.inherit_global_settings is False
63 66
64 67 def test_value_is_set(self, repo_stub):
65 68 model = VcsSettingsModel(repo=repo_stub.repo_name)
66 69 model.inherit_global_settings = False
67 70 setting = model.repo_settings.get_setting_by_name(
68 71 VcsSettingsModel.INHERIT_SETTINGS)
69 72 try:
70 73 assert setting.app_settings_type == 'bool'
71 74 assert setting.app_settings_value is False
72 75 finally:
73 76 Session().delete(setting)
74 77 Session().commit()
75 78
76 79 def test_set_raises_exception_when_repository_not_specified(self):
77 80 model = VcsSettingsModel()
78 81 with pytest.raises(Exception) as exc_info:
79 82 model.inherit_global_settings = False
80 83 assert exc_info.value.message == 'Repository is not specified'
81 84
82 85
83 86 class TestVcsSettingsModel(object):
84 87 def test_global_svn_branch_patterns(self):
85 88 model = VcsSettingsModel()
86 89 expected_result = {'test': 'test'}
87 90 with mock.patch.object(model, 'global_settings') as settings_mock:
88 91 get_settings = settings_mock.get_ui_by_section
89 92 get_settings.return_value = expected_result
90 93 settings_mock.return_value = expected_result
91 94 result = model.get_global_svn_branch_patterns()
92 95
93 96 get_settings.assert_called_once_with(model.SVN_BRANCH_SECTION)
94 97 assert expected_result == result
95 98
96 99 def test_repo_svn_branch_patterns(self):
97 100 model = VcsSettingsModel()
98 101 expected_result = {'test': 'test'}
99 102 with mock.patch.object(model, 'repo_settings') as settings_mock:
100 103 get_settings = settings_mock.get_ui_by_section
101 104 get_settings.return_value = expected_result
102 105 settings_mock.return_value = expected_result
103 106 result = model.get_repo_svn_branch_patterns()
104 107
105 108 get_settings.assert_called_once_with(model.SVN_BRANCH_SECTION)
106 109 assert expected_result == result
107 110
108 111 def test_repo_svn_branch_patterns_raises_exception_when_repo_is_not_set(
109 112 self):
110 113 model = VcsSettingsModel()
111 114 with pytest.raises(Exception) as exc_info:
112 115 model.get_repo_svn_branch_patterns()
113 116 assert exc_info.value.message == 'Repository is not specified'
114 117
115 118 def test_global_svn_tag_patterns(self):
116 119 model = VcsSettingsModel()
117 120 expected_result = {'test': 'test'}
118 121 with mock.patch.object(model, 'global_settings') as settings_mock:
119 122 get_settings = settings_mock.get_ui_by_section
120 123 get_settings.return_value = expected_result
121 124 settings_mock.return_value = expected_result
122 125 result = model.get_global_svn_tag_patterns()
123 126
124 127 get_settings.assert_called_once_with(model.SVN_TAG_SECTION)
125 128 assert expected_result == result
126 129
127 130 def test_repo_svn_tag_patterns(self):
128 131 model = VcsSettingsModel()
129 132 expected_result = {'test': 'test'}
130 133 with mock.patch.object(model, 'repo_settings') as settings_mock:
131 134 get_settings = settings_mock.get_ui_by_section
132 135 get_settings.return_value = expected_result
133 136 settings_mock.return_value = expected_result
134 137 result = model.get_repo_svn_tag_patterns()
135 138
136 139 get_settings.assert_called_once_with(model.SVN_TAG_SECTION)
137 140 assert expected_result == result
138 141
139 142 def test_repo_svn_tag_patterns_raises_exception_when_repo_is_not_set(self):
140 143 model = VcsSettingsModel()
141 144 with pytest.raises(Exception) as exc_info:
142 145 model.get_repo_svn_tag_patterns()
143 146 assert exc_info.value.message == 'Repository is not specified'
144 147
145 148 def test_get_global_settings(self):
146 149 expected_result = {'test': 'test'}
147 150 model = VcsSettingsModel()
148 151 with mock.patch.object(model, '_collect_all_settings') as collect_mock:
149 152 collect_mock.return_value = expected_result
150 153 result = model.get_global_settings()
151 154
152 155 collect_mock.assert_called_once_with(global_=True)
153 156 assert result == expected_result
154 157
155 158 def test_get_repo_settings(self, repo_stub):
156 159 model = VcsSettingsModel(repo=repo_stub.repo_name)
157 160 expected_result = {'test': 'test'}
158 161 with mock.patch.object(model, '_collect_all_settings') as collect_mock:
159 162 collect_mock.return_value = expected_result
160 163 result = model.get_repo_settings()
161 164
162 165 collect_mock.assert_called_once_with(global_=False)
163 166 assert result == expected_result
164 167
165 168 @pytest.mark.parametrize('settings, global_', [
166 169 ('global_settings', True),
167 170 ('repo_settings', False)
168 171 ])
169 172 def test_collect_all_settings(self, settings, global_):
170 173 model = VcsSettingsModel()
171 174 result_mock = self._mock_result()
172 175
173 176 settings_patch = mock.patch.object(model, settings)
174 177 with settings_patch as settings_mock:
175 178 settings_mock.get_ui_by_section_and_key.return_value = result_mock
176 179 settings_mock.get_setting_by_name.return_value = result_mock
177 180 result = model._collect_all_settings(global_=global_)
178 181
179 182 ui_settings = model.HG_SETTINGS + model.GIT_SETTINGS + model.HOOKS_SETTINGS
180 183 self._assert_get_settings_calls(
181 184 settings_mock, ui_settings, model.GENERAL_SETTINGS)
182 185 self._assert_collect_all_settings_result(
183 186 ui_settings, model.GENERAL_SETTINGS, result)
184 187
185 188 @pytest.mark.parametrize('settings, global_', [
186 189 ('global_settings', True),
187 190 ('repo_settings', False)
188 191 ])
189 192 def test_collect_all_settings_without_empty_value(self, settings, global_):
190 193 model = VcsSettingsModel()
191 194
192 195 settings_patch = mock.patch.object(model, settings)
193 196 with settings_patch as settings_mock:
194 197 settings_mock.get_ui_by_section_and_key.return_value = None
195 198 settings_mock.get_setting_by_name.return_value = None
196 199 result = model._collect_all_settings(global_=global_)
197 200
198 201 assert result == {}
199 202
200 203 def _mock_result(self):
201 204 result_mock = mock.Mock()
202 205 result_mock.ui_value = 'ui_value'
203 206 result_mock.ui_active = True
204 207 result_mock.app_settings_value = 'setting_value'
205 208 return result_mock
206 209
207 210 def _assert_get_settings_calls(
208 211 self, settings_mock, ui_settings, general_settings):
209 212 assert (
210 213 settings_mock.get_ui_by_section_and_key.call_count ==
211 214 len(ui_settings))
212 215 assert (
213 216 settings_mock.get_setting_by_name.call_count ==
214 217 len(general_settings))
215 218
216 219 for section, key in ui_settings:
217 220 expected_call = mock.call(section, key)
218 221 assert (
219 222 expected_call in
220 223 settings_mock.get_ui_by_section_and_key.call_args_list)
221 224
222 225 for name in general_settings:
223 226 expected_call = mock.call(name)
224 227 assert (
225 228 expected_call in
226 229 settings_mock.get_setting_by_name.call_args_list)
227 230
228 231 def _assert_collect_all_settings_result(
229 232 self, ui_settings, general_settings, result):
230 233 expected_result = {}
231 234 for section, key in ui_settings:
232 235 key = '{}_{}'.format(section, key.replace('.', '_'))
233 236
234 237 if section in ('extensions', 'hooks'):
235 238 value = True
236 239 elif key in ['vcs_git_lfs_enabled']:
237 240 value = True
238 241 else:
239 242 value = 'ui_value'
240 243 expected_result[key] = value
241 244
242 245 for name in general_settings:
243 246 key = 'rhodecode_' + name
244 247 expected_result[key] = 'setting_value'
245 248
246 249 assert expected_result == result
247 250
248 251
249 252 class TestCreateOrUpdateRepoHookSettings(object):
250 253 def test_create_when_no_repo_object_found(self, repo_stub):
251 254 model = VcsSettingsModel(repo=repo_stub.repo_name)
252 255
253 256 self._create_settings(model, HOOKS_FORM_DATA)
254 257
255 258 cleanup = []
256 259 try:
257 260 for section, key in model.HOOKS_SETTINGS:
258 261 ui = model.repo_settings.get_ui_by_section_and_key(
259 262 section, key)
260 263 assert ui.ui_active is True
261 264 cleanup.append(ui)
262 265 finally:
263 266 for ui in cleanup:
264 267 Session().delete(ui)
265 268 Session().commit()
266 269
267 270 def test_create_raises_exception_when_data_incomplete(self, repo_stub):
268 271 model = VcsSettingsModel(repo=repo_stub.repo_name)
269 272
270 273 deleted_key = 'hooks_changegroup_repo_size'
271 274 data = HOOKS_FORM_DATA.copy()
272 275 data.pop(deleted_key)
273 276
274 277 with pytest.raises(ValueError) as exc_info:
275 278 model.create_or_update_repo_hook_settings(data)
276 279 assert (
277 280 exc_info.value.message ==
278 281 'The given data does not contain {} key'.format(deleted_key))
279 282
280 283 def test_update_when_repo_object_found(self, repo_stub, settings_util):
281 284 model = VcsSettingsModel(repo=repo_stub.repo_name)
282 285 for section, key in model.HOOKS_SETTINGS:
283 286 settings_util.create_repo_rhodecode_ui(
284 287 repo_stub, section, None, key=key, active=False)
285 288 model.create_or_update_repo_hook_settings(HOOKS_FORM_DATA)
286 289 for section, key in model.HOOKS_SETTINGS:
287 290 ui = model.repo_settings.get_ui_by_section_and_key(section, key)
288 291 assert ui.ui_active is True
289 292
290 293 def _create_settings(self, model, data):
291 294 global_patch = mock.patch.object(model, 'global_settings')
292 295 global_setting = mock.Mock()
293 296 global_setting.ui_value = 'Test value'
294 297 with global_patch as global_mock:
295 298 global_mock.get_ui_by_section_and_key.return_value = global_setting
296 299 model.create_or_update_repo_hook_settings(HOOKS_FORM_DATA)
297 300
298 301
299 302 class TestUpdateGlobalHookSettings(object):
300 303 def test_update_raises_exception_when_data_incomplete(self):
301 304 model = VcsSettingsModel()
302 305
303 306 deleted_key = 'hooks_changegroup_repo_size'
304 307 data = HOOKS_FORM_DATA.copy()
305 308 data.pop(deleted_key)
306 309
307 310 with pytest.raises(ValueError) as exc_info:
308 311 model.update_global_hook_settings(data)
309 312 assert (
310 313 exc_info.value.message ==
311 314 'The given data does not contain {} key'.format(deleted_key))
312 315
313 316 def test_update_global_hook_settings(self, settings_util):
314 317 model = VcsSettingsModel()
315 318 setting_mock = mock.MagicMock()
316 319 setting_mock.ui_active = False
317 320 get_settings_patcher = mock.patch.object(
318 321 model.global_settings, 'get_ui_by_section_and_key',
319 322 return_value=setting_mock)
320 323 session_patcher = mock.patch('rhodecode.model.settings.Session')
321 324 with get_settings_patcher as get_settings_mock, session_patcher:
322 325 model.update_global_hook_settings(HOOKS_FORM_DATA)
323 326 assert setting_mock.ui_active is True
324 327 assert get_settings_mock.call_count == 3
325 328
326 329
327 330 class TestCreateOrUpdateRepoGeneralSettings(object):
328 331 def test_calls_create_or_update_general_settings(self, repo_stub):
329 332 model = VcsSettingsModel(repo=repo_stub.repo_name)
330 333 create_patch = mock.patch.object(
331 334 model, '_create_or_update_general_settings')
332 335 with create_patch as create_mock:
333 336 model.create_or_update_repo_pr_settings(GENERAL_FORM_DATA)
334 337 create_mock.assert_called_once_with(
335 338 model.repo_settings, GENERAL_FORM_DATA)
336 339
337 340 def test_raises_exception_when_repository_is_not_specified(self):
338 341 model = VcsSettingsModel()
339 342 with pytest.raises(Exception) as exc_info:
340 343 model.create_or_update_repo_pr_settings(GENERAL_FORM_DATA)
341 344 assert exc_info.value.message == 'Repository is not specified'
342 345
343 346
344 347 class TestCreateOrUpdateGlobalGeneralSettings(object):
345 348 def test_calls_create_or_update_general_settings(self):
346 349 model = VcsSettingsModel()
347 350 create_patch = mock.patch.object(
348 351 model, '_create_or_update_general_settings')
349 352 with create_patch as create_mock:
350 353 model.create_or_update_global_pr_settings(GENERAL_FORM_DATA)
351 354 create_mock.assert_called_once_with(
352 355 model.global_settings, GENERAL_FORM_DATA)
353 356
354 357
355 358 class TestCreateOrUpdateGeneralSettings(object):
356 359 def test_create_when_no_repo_settings_found(self, repo_stub):
357 360 model = VcsSettingsModel(repo=repo_stub.repo_name)
358 361 model._create_or_update_general_settings(
359 362 model.repo_settings, GENERAL_FORM_DATA)
360 363
361 364 cleanup = []
362 365 try:
363 366 for name in model.GENERAL_SETTINGS:
364 367 setting = model.repo_settings.get_setting_by_name(name)
365 368 assert setting.app_settings_value is True
366 369 cleanup.append(setting)
367 370 finally:
368 371 for setting in cleanup:
369 372 Session().delete(setting)
370 373 Session().commit()
371 374
372 375 def test_create_raises_exception_when_data_incomplete(self, repo_stub):
373 376 model = VcsSettingsModel(repo=repo_stub.repo_name)
374 377
375 378 deleted_key = 'rhodecode_pr_merge_enabled'
376 379 data = GENERAL_FORM_DATA.copy()
377 380 data.pop(deleted_key)
378 381
379 382 with pytest.raises(ValueError) as exc_info:
380 383 model._create_or_update_general_settings(model.repo_settings, data)
381 384 assert (
382 385 exc_info.value.message ==
383 386 'The given data does not contain {} key'.format(deleted_key))
384 387
385 388 def test_update_when_repo_setting_found(self, repo_stub, settings_util):
386 389 model = VcsSettingsModel(repo=repo_stub.repo_name)
387 390 for name in model.GENERAL_SETTINGS:
388 391 settings_util.create_repo_rhodecode_setting(
389 392 repo_stub, name, False, 'bool')
390 393
391 394 model._create_or_update_general_settings(
392 395 model.repo_settings, GENERAL_FORM_DATA)
393 396
394 397 for name in model.GENERAL_SETTINGS:
395 398 setting = model.repo_settings.get_setting_by_name(name)
396 399 assert setting.app_settings_value is True
397 400
398 401
399 402 class TestCreateRepoSvnSettings(object):
400 403 def test_calls_create_svn_settings(self, repo_stub):
401 404 model = VcsSettingsModel(repo=repo_stub.repo_name)
402 405 with mock.patch.object(model, '_create_svn_settings') as create_mock:
403 406 model.create_repo_svn_settings(SVN_FORM_DATA)
404 407 create_mock.assert_called_once_with(model.repo_settings, SVN_FORM_DATA)
405 408
406 409 def test_raises_exception_when_repository_is_not_specified(self):
407 410 model = VcsSettingsModel()
408 411 with pytest.raises(Exception) as exc_info:
409 412 model.create_repo_svn_settings(SVN_FORM_DATA)
410 413 assert exc_info.value.message == 'Repository is not specified'
411 414
412 415
413 416 class TestCreateSvnSettings(object):
414 417 def test_create(self, repo_stub):
415 418 model = VcsSettingsModel(repo=repo_stub.repo_name)
416 419 model._create_svn_settings(model.repo_settings, SVN_FORM_DATA)
417 420 Session().commit()
418 421
419 422 branch_ui = model.repo_settings.get_ui_by_section(
420 423 model.SVN_BRANCH_SECTION)
421 424 tag_ui = model.repo_settings.get_ui_by_section(
422 425 model.SVN_TAG_SECTION)
423 426
424 427 try:
425 428 assert len(branch_ui) == 1
426 429 assert len(tag_ui) == 1
427 430 finally:
428 431 Session().delete(branch_ui[0])
429 432 Session().delete(tag_ui[0])
430 433 Session().commit()
431 434
432 435 def test_create_tag(self, repo_stub):
433 436 model = VcsSettingsModel(repo=repo_stub.repo_name)
434 437 data = SVN_FORM_DATA.copy()
435 438 data.pop('new_svn_branch')
436 439 model._create_svn_settings(model.repo_settings, data)
437 440 Session().commit()
438 441
439 442 branch_ui = model.repo_settings.get_ui_by_section(
440 443 model.SVN_BRANCH_SECTION)
441 444 tag_ui = model.repo_settings.get_ui_by_section(
442 445 model.SVN_TAG_SECTION)
443 446
444 447 try:
445 448 assert len(branch_ui) == 0
446 449 assert len(tag_ui) == 1
447 450 finally:
448 451 Session().delete(tag_ui[0])
449 452 Session().commit()
450 453
451 454 def test_create_nothing_when_no_svn_settings_specified(self, repo_stub):
452 455 model = VcsSettingsModel(repo=repo_stub.repo_name)
453 456 model._create_svn_settings(model.repo_settings, {})
454 457 Session().commit()
455 458
456 459 branch_ui = model.repo_settings.get_ui_by_section(
457 460 model.SVN_BRANCH_SECTION)
458 461 tag_ui = model.repo_settings.get_ui_by_section(
459 462 model.SVN_TAG_SECTION)
460 463
461 464 assert len(branch_ui) == 0
462 465 assert len(tag_ui) == 0
463 466
464 467 def test_create_nothing_when_empty_settings_specified(self, repo_stub):
465 468 model = VcsSettingsModel(repo=repo_stub.repo_name)
466 469 data = {
467 470 'new_svn_branch': '',
468 471 'new_svn_tag': ''
469 472 }
470 473 model._create_svn_settings(model.repo_settings, data)
471 474 Session().commit()
472 475
473 476 branch_ui = model.repo_settings.get_ui_by_section(
474 477 model.SVN_BRANCH_SECTION)
475 478 tag_ui = model.repo_settings.get_ui_by_section(
476 479 model.SVN_TAG_SECTION)
477 480
478 481 assert len(branch_ui) == 0
479 482 assert len(tag_ui) == 0
480 483
481 484
482 485 class TestCreateOrUpdateUi(object):
483 486 def test_create(self, repo_stub):
484 487 model = VcsSettingsModel(repo=repo_stub.repo_name)
485 488 model._create_or_update_ui(
486 489 model.repo_settings, 'test-section', 'test-key', active=False,
487 490 value='False')
488 491 Session().commit()
489 492
490 493 created_ui = model.repo_settings.get_ui_by_section_and_key(
491 494 'test-section', 'test-key')
492 495
493 496 try:
494 497 assert created_ui.ui_active is False
495 498 assert str2bool(created_ui.ui_value) is False
496 499 finally:
497 500 Session().delete(created_ui)
498 501 Session().commit()
499 502
500 503 def test_update(self, repo_stub, settings_util):
501 504 model = VcsSettingsModel(repo=repo_stub.repo_name)
502 505
503 506 largefiles, phases, evolve = model.HG_SETTINGS
504 507
505 508 section = 'test-section'
506 509 key = 'test-key'
507 510 settings_util.create_repo_rhodecode_ui(
508 511 repo_stub, section, 'True', key=key, active=True)
509 512
510 513 model._create_or_update_ui(
511 514 model.repo_settings, section, key, active=False, value='False')
512 515 Session().commit()
513 516
514 517 created_ui = model.repo_settings.get_ui_by_section_and_key(
515 518 section, key)
516 519 assert created_ui.ui_active is False
517 520 assert str2bool(created_ui.ui_value) is False
518 521
519 522
520 523 class TestCreateOrUpdateRepoHgSettings(object):
521 524 FORM_DATA = {
522 525 'extensions_largefiles': False,
523 526 'extensions_evolve': False,
524 527 'phases_publish': False
525 528 }
526 529
527 530 def test_creates_repo_hg_settings_when_data_is_correct(self, repo_stub):
528 531 model = VcsSettingsModel(repo=repo_stub.repo_name)
529 532 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
530 533 model.create_or_update_repo_hg_settings(self.FORM_DATA)
531 534 expected_calls = [
532 535 mock.call(model.repo_settings, 'extensions', 'largefiles',
533 536 active=False, value=''),
534 537 mock.call(model.repo_settings, 'extensions', 'evolve',
535 538 active=False, value=''),
536 539 mock.call(model.repo_settings, 'phases', 'publish', value='False'),
537 540 ]
538 541 assert expected_calls == create_mock.call_args_list
539 542
540 543 @pytest.mark.parametrize('field_to_remove', FORM_DATA.keys())
541 544 def test_key_is_not_found(self, repo_stub, field_to_remove):
542 545 model = VcsSettingsModel(repo=repo_stub.repo_name)
543 546 data = self.FORM_DATA.copy()
544 547 data.pop(field_to_remove)
545 548 with pytest.raises(ValueError) as exc_info:
546 549 model.create_or_update_repo_hg_settings(data)
547 550 expected_message = 'The given data does not contain {} key'.format(
548 551 field_to_remove)
549 552 assert exc_info.value.message == expected_message
550 553
551 554 def test_create_raises_exception_when_repository_not_specified(self):
552 555 model = VcsSettingsModel()
553 556 with pytest.raises(Exception) as exc_info:
554 557 model.create_or_update_repo_hg_settings(self.FORM_DATA)
555 558 assert exc_info.value.message == 'Repository is not specified'
556 559
557 560
558 561 class TestUpdateGlobalSslSetting(object):
559 562 def test_updates_global_hg_settings(self):
560 563 model = VcsSettingsModel()
561 564 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
562 565 model.update_global_ssl_setting('False')
563 566 create_mock.assert_called_once_with(
564 567 model.global_settings, 'web', 'push_ssl', value='False')
565 568
566 569
567 570 class TestUpdateGlobalPathSetting(object):
568 571 def test_updates_global_path_settings(self):
569 572 model = VcsSettingsModel()
570 573 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
571 574 model.update_global_path_setting('False')
572 575 create_mock.assert_called_once_with(
573 576 model.global_settings, 'paths', '/', value='False')
574 577
575 578
576 579 class TestCreateOrUpdateGlobalHgSettings(object):
577 580 FORM_DATA = {
578 581 'extensions_largefiles': False,
579 582 'largefiles_usercache': '/example/largefiles-store',
580 583 'phases_publish': False,
581 584 'extensions_hgsubversion': False,
582 585 'extensions_evolve': False
583 586 }
584 587
585 588 def test_creates_repo_hg_settings_when_data_is_correct(self):
586 589 model = VcsSettingsModel()
587 590 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
588 591 model.create_or_update_global_hg_settings(self.FORM_DATA)
589 592 expected_calls = [
590 593 mock.call(model.global_settings, 'extensions', 'largefiles',
591 594 active=False, value=''),
592 595 mock.call(model.global_settings, 'largefiles', 'usercache',
593 596 value='/example/largefiles-store'),
594 597 mock.call(model.global_settings, 'phases', 'publish',
595 598 value='False'),
596 599 mock.call(model.global_settings, 'extensions', 'hgsubversion',
597 600 active=False),
598 601 mock.call(model.global_settings, 'extensions', 'evolve',
599 602 active=False, value='')
600 603 ]
601 604 assert expected_calls == create_mock.call_args_list
602 605
603 606 @pytest.mark.parametrize('field_to_remove', FORM_DATA.keys())
604 607 def test_key_is_not_found(self, repo_stub, field_to_remove):
605 608 model = VcsSettingsModel(repo=repo_stub.repo_name)
606 609 data = self.FORM_DATA.copy()
607 610 data.pop(field_to_remove)
608 611 with pytest.raises(Exception) as exc_info:
609 612 model.create_or_update_global_hg_settings(data)
610 613 expected_message = 'The given data does not contain {} key'.format(
611 614 field_to_remove)
612 615 assert exc_info.value.message == expected_message
613 616
614 617
615 618 class TestCreateOrUpdateGlobalGitSettings(object):
616 619 FORM_DATA = {
617 620 'vcs_git_lfs_enabled': False,
618 621 'vcs_git_lfs_store_location': '/example/lfs-store',
619 622 }
620 623
621 624 def test_creates_global_git_settings_when_data_is_correct(self):
622 625 model = VcsSettingsModel()
623 626 with mock.patch.object(model, '_create_or_update_ui') as create_mock:
624 627 model.create_or_update_global_git_settings(self.FORM_DATA)
625 628 expected_calls = [
626 629 mock.call(model.global_settings, 'vcs_git_lfs', 'enabled',
627 630 active=False, value=False),
628 631 mock.call(model.global_settings, 'vcs_git_lfs', 'store_location',
629 632 value='/example/lfs-store'),
630 633 ]
631 634 assert expected_calls == create_mock.call_args_list
632 635
633 636
634 637 class TestDeleteRepoSvnPattern(object):
635 638 def test_success_when_repo_is_set(self, backend_svn):
636 639 repo_name = backend_svn.repo_name
637 640 model = VcsSettingsModel(repo=repo_name)
638 641 delete_ui_patch = mock.patch.object(model.repo_settings, 'delete_ui')
639 642 with delete_ui_patch as delete_ui_mock:
640 643 model.delete_repo_svn_pattern(123)
641 644 delete_ui_mock.assert_called_once_with(123)
642 645
643 646 def test_raises_exception_when_repository_is_not_specified(self):
644 647 model = VcsSettingsModel()
645 648 with pytest.raises(Exception) as exc_info:
646 649 model.delete_repo_svn_pattern(123)
647 650 assert exc_info.value.message == 'Repository is not specified'
648 651
649 652
650 653 class TestDeleteGlobalSvnPattern(object):
651 654 def test_delete_global_svn_pattern_calls_delete_ui(self):
652 655 model = VcsSettingsModel()
653 656 delete_ui_patch = mock.patch.object(model.global_settings, 'delete_ui')
654 657 with delete_ui_patch as delete_ui_mock:
655 658 model.delete_global_svn_pattern(123)
656 659 delete_ui_mock.assert_called_once_with(123)
657 660
658 661
659 662 class TestFilterUiSettings(object):
660 663 def test_settings_are_filtered(self):
661 664 model = VcsSettingsModel()
662 665 repo_settings = [
663 666 UiSetting('extensions', 'largefiles', '', True),
664 667 UiSetting('phases', 'publish', 'True', True),
665 668 UiSetting('hooks', 'changegroup.repo_size', 'hook', True),
666 669 UiSetting('hooks', 'changegroup.push_logger', 'hook', True),
667 670 UiSetting('hooks', 'outgoing.pull_logger', 'hook', True),
668 671 UiSetting(
669 672 'vcs_svn_branch', '84223c972204fa545ca1b22dac7bef5b68d7442d',
670 673 'test_branch', True),
671 674 UiSetting(
672 675 'vcs_svn_tag', '84229c972204fa545ca1b22dac7bef5b68d7442d',
673 676 'test_tag', True),
674 677 ]
675 678 non_repo_settings = [
676 679 UiSetting('largefiles', 'usercache', '/example/largefiles-store', True),
677 680 UiSetting('test', 'outgoing.pull_logger', 'hook', True),
678 681 UiSetting('hooks', 'test2', 'hook', True),
679 682 UiSetting(
680 683 'vcs_svn_repo', '84229c972204fa545ca1b22dac7bef5b68d7442d',
681 684 'test_tag', True),
682 685 ]
683 686 settings = repo_settings + non_repo_settings
684 687 filtered_settings = model._filter_ui_settings(settings)
685 688 assert sorted(filtered_settings) == sorted(repo_settings)
686 689
687 690
688 691 class TestFilterGeneralSettings(object):
689 692 def test_settings_are_filtered(self):
690 693 model = VcsSettingsModel()
691 694 settings = {
692 695 'rhodecode_abcde': 'value1',
693 696 'rhodecode_vwxyz': 'value2',
694 697 }
695 698 general_settings = {
696 699 'rhodecode_{}'.format(key): 'value'
697 700 for key in VcsSettingsModel.GENERAL_SETTINGS
698 701 }
699 702 settings.update(general_settings)
700 703
701 704 filtered_settings = model._filter_general_settings(general_settings)
702 705 assert sorted(filtered_settings) == sorted(general_settings)
703 706
704 707
705 708 class TestGetRepoUiSettings(object):
706 709 def test_global_uis_are_returned_when_no_repo_uis_found(
707 710 self, repo_stub):
708 711 model = VcsSettingsModel(repo=repo_stub.repo_name)
709 712 result = model.get_repo_ui_settings()
710 713 svn_sections = (
711 714 VcsSettingsModel.SVN_TAG_SECTION,
712 715 VcsSettingsModel.SVN_BRANCH_SECTION)
713 716 expected_result = [
714 717 s for s in model.global_settings.get_ui()
715 718 if s.section not in svn_sections]
716 719 assert sorted(result) == sorted(expected_result)
717 720
718 721 def test_repo_uis_are_overriding_global_uis(
719 722 self, repo_stub, settings_util):
720 723 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
721 724 settings_util.create_repo_rhodecode_ui(
722 725 repo_stub, section, 'repo', key=key, active=False)
723 726 model = VcsSettingsModel(repo=repo_stub.repo_name)
724 727 result = model.get_repo_ui_settings()
725 728 for setting in result:
726 729 locator = (setting.section, setting.key)
727 730 if locator in VcsSettingsModel.HOOKS_SETTINGS:
728 731 assert setting.value == 'repo'
729 732
730 733 assert setting.active is False
731 734
732 735 def test_global_svn_patterns_are_not_in_list(
733 736 self, repo_stub, settings_util):
734 737 svn_sections = (
735 738 VcsSettingsModel.SVN_TAG_SECTION,
736 739 VcsSettingsModel.SVN_BRANCH_SECTION)
737 740 for section in svn_sections:
738 741 settings_util.create_rhodecode_ui(
739 742 section, 'repo', key='deadbeef' + section, active=False)
740 743 model = VcsSettingsModel(repo=repo_stub.repo_name)
741 744 result = model.get_repo_ui_settings()
742 745 for setting in result:
743 746 assert setting.section not in svn_sections
744 747
745 748 def test_repo_uis_filtered_by_section_are_returned(
746 749 self, repo_stub, settings_util):
747 750 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
748 751 settings_util.create_repo_rhodecode_ui(
749 752 repo_stub, section, 'repo', key=key, active=False)
750 753 model = VcsSettingsModel(repo=repo_stub.repo_name)
751 754 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
752 755 result = model.get_repo_ui_settings(section=section)
753 756 for setting in result:
754 757 assert setting.section == section
755 758
756 759 def test_repo_uis_filtered_by_key_are_returned(
757 760 self, repo_stub, settings_util):
758 761 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
759 762 settings_util.create_repo_rhodecode_ui(
760 763 repo_stub, section, 'repo', key=key, active=False)
761 764 model = VcsSettingsModel(repo=repo_stub.repo_name)
762 765 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
763 766 result = model.get_repo_ui_settings(key=key)
764 767 for setting in result:
765 768 assert setting.key == key
766 769
767 770 def test_raises_exception_when_repository_is_not_specified(self):
768 771 model = VcsSettingsModel()
769 772 with pytest.raises(Exception) as exc_info:
770 773 model.get_repo_ui_settings()
771 774 assert exc_info.value.message == 'Repository is not specified'
772 775
773 776
774 777 class TestGetRepoGeneralSettings(object):
775 778 def test_global_settings_are_returned_when_no_repo_settings_found(
776 779 self, repo_stub):
777 780 model = VcsSettingsModel(repo=repo_stub.repo_name)
778 781 result = model.get_repo_general_settings()
779 782 expected_result = model.global_settings.get_all_settings()
780 783 assert sorted(result) == sorted(expected_result)
781 784
782 785 def test_repo_uis_are_overriding_global_uis(
783 786 self, repo_stub, settings_util):
784 787 for key in VcsSettingsModel.GENERAL_SETTINGS:
785 788 settings_util.create_repo_rhodecode_setting(
786 789 repo_stub, key, 'abcde', type_='unicode')
787 790 model = VcsSettingsModel(repo=repo_stub.repo_name)
788 791 result = model.get_repo_ui_settings()
789 792 for key in result:
790 793 if key in VcsSettingsModel.GENERAL_SETTINGS:
791 794 assert result[key] == 'abcde'
792 795
793 796 def test_raises_exception_when_repository_is_not_specified(self):
794 797 model = VcsSettingsModel()
795 798 with pytest.raises(Exception) as exc_info:
796 799 model.get_repo_general_settings()
797 800 assert exc_info.value.message == 'Repository is not specified'
798 801
799 802
800 803 class TestGetGlobalGeneralSettings(object):
801 804 def test_global_settings_are_returned(self, repo_stub):
802 805 model = VcsSettingsModel()
803 806 result = model.get_global_general_settings()
804 807 expected_result = model.global_settings.get_all_settings()
805 808 assert sorted(result) == sorted(expected_result)
806 809
807 810 def test_repo_uis_are_not_overriding_global_uis(
808 811 self, repo_stub, settings_util):
809 812 for key in VcsSettingsModel.GENERAL_SETTINGS:
810 813 settings_util.create_repo_rhodecode_setting(
811 814 repo_stub, key, 'abcde', type_='unicode')
812 815 model = VcsSettingsModel(repo=repo_stub.repo_name)
813 816 result = model.get_global_general_settings()
814 817 expected_result = model.global_settings.get_all_settings()
815 818 assert sorted(result) == sorted(expected_result)
816 819
817 820
818 821 class TestGetGlobalUiSettings(object):
819 822 def test_global_uis_are_returned(self, repo_stub):
820 823 model = VcsSettingsModel()
821 824 result = model.get_global_ui_settings()
822 825 expected_result = model.global_settings.get_ui()
823 826 assert sorted(result) == sorted(expected_result)
824 827
825 828 def test_repo_uis_are_not_overriding_global_uis(
826 829 self, repo_stub, settings_util):
827 830 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
828 831 settings_util.create_repo_rhodecode_ui(
829 832 repo_stub, section, 'repo', key=key, active=False)
830 833 model = VcsSettingsModel(repo=repo_stub.repo_name)
831 834 result = model.get_global_ui_settings()
832 835 expected_result = model.global_settings.get_ui()
833 836 assert sorted(result) == sorted(expected_result)
834 837
835 838 def test_ui_settings_filtered_by_section(
836 839 self, repo_stub, settings_util):
837 840 model = VcsSettingsModel(repo=repo_stub.repo_name)
838 841 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
839 842 result = model.get_global_ui_settings(section=section)
840 843 expected_result = model.global_settings.get_ui(section=section)
841 844 assert sorted(result) == sorted(expected_result)
842 845
843 846 def test_ui_settings_filtered_by_key(
844 847 self, repo_stub, settings_util):
845 848 model = VcsSettingsModel(repo=repo_stub.repo_name)
846 849 section, key = VcsSettingsModel.HOOKS_SETTINGS[0]
847 850 result = model.get_global_ui_settings(key=key)
848 851 expected_result = model.global_settings.get_ui(key=key)
849 852 assert sorted(result) == sorted(expected_result)
850 853
851 854
852 855 class TestGetGeneralSettings(object):
853 856 def test_global_settings_are_returned_when_inherited_is_true(
854 857 self, repo_stub, settings_util):
855 858 model = VcsSettingsModel(repo=repo_stub.repo_name)
856 859 model.inherit_global_settings = True
857 860 for key in VcsSettingsModel.GENERAL_SETTINGS:
858 861 settings_util.create_repo_rhodecode_setting(
859 862 repo_stub, key, 'abcde', type_='unicode')
860 863 result = model.get_general_settings()
861 864 expected_result = model.get_global_general_settings()
862 865 assert sorted(result) == sorted(expected_result)
863 866
864 867 def test_repo_settings_are_returned_when_inherited_is_false(
865 868 self, repo_stub, settings_util):
866 869 model = VcsSettingsModel(repo=repo_stub.repo_name)
867 870 model.inherit_global_settings = False
868 871 for key in VcsSettingsModel.GENERAL_SETTINGS:
869 872 settings_util.create_repo_rhodecode_setting(
870 873 repo_stub, key, 'abcde', type_='unicode')
871 874 result = model.get_general_settings()
872 875 expected_result = model.get_repo_general_settings()
873 876 assert sorted(result) == sorted(expected_result)
874 877
875 878 def test_global_settings_are_returned_when_no_repository_specified(self):
876 879 model = VcsSettingsModel()
877 880 result = model.get_general_settings()
878 881 expected_result = model.get_global_general_settings()
879 882 assert sorted(result) == sorted(expected_result)
880 883
881 884
882 885 class TestGetUiSettings(object):
883 886 def test_global_settings_are_returned_when_inherited_is_true(
884 887 self, repo_stub, settings_util):
885 888 model = VcsSettingsModel(repo=repo_stub.repo_name)
886 889 model.inherit_global_settings = True
887 890 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
888 891 settings_util.create_repo_rhodecode_ui(
889 892 repo_stub, section, 'repo', key=key, active=True)
890 893 result = model.get_ui_settings()
891 894 expected_result = model.get_global_ui_settings()
892 895 assert sorted(result) == sorted(expected_result)
893 896
894 897 def test_repo_settings_are_returned_when_inherited_is_false(
895 898 self, repo_stub, settings_util):
896 899 model = VcsSettingsModel(repo=repo_stub.repo_name)
897 900 model.inherit_global_settings = False
898 901 for section, key in VcsSettingsModel.HOOKS_SETTINGS:
899 902 settings_util.create_repo_rhodecode_ui(
900 903 repo_stub, section, 'repo', key=key, active=True)
901 904 result = model.get_ui_settings()
902 905 expected_result = model.get_repo_ui_settings()
903 906 assert sorted(result) == sorted(expected_result)
904 907
905 908 def test_repo_settings_filtered_by_section_and_key(self, repo_stub):
906 909 model = VcsSettingsModel(repo=repo_stub.repo_name)
907 910 model.inherit_global_settings = False
908 911 args = ('section', 'key')
909 912 with mock.patch.object(model, 'get_repo_ui_settings') as settings_mock:
910 913 model.get_ui_settings(*args)
911 914 settings_mock.assert_called_once_with(*args)
912 915
913 916 def test_global_settings_filtered_by_section_and_key(self):
914 917 model = VcsSettingsModel()
915 918 args = ('section', 'key')
916 919 with mock.patch.object(model, 'get_global_ui_settings') as (
917 920 settings_mock):
918 921 model.get_ui_settings(*args)
919 922 settings_mock.assert_called_once_with(*args)
920 923
921 924 def test_global_settings_are_returned_when_no_repository_specified(self):
922 925 model = VcsSettingsModel()
923 926 result = model.get_ui_settings()
924 927 expected_result = model.get_global_ui_settings()
925 928 assert sorted(result) == sorted(expected_result)
926 929
927 930
928 931 class TestGetSvnPatterns(object):
929 932 def test_repo_settings_filtered_by_section_and_key(self, repo_stub):
930 933 model = VcsSettingsModel(repo=repo_stub.repo_name)
931 934 args = ('section', )
932 935 with mock.patch.object(model, 'get_repo_ui_settings') as settings_mock:
933 936 model.get_svn_patterns(*args)
934 937 settings_mock.assert_called_once_with(*args)
935 938
936 939 def test_global_settings_filtered_by_section_and_key(self):
937 940 model = VcsSettingsModel()
938 941 args = ('section', )
939 942 with mock.patch.object(model, 'get_global_ui_settings') as (
940 943 settings_mock):
941 944 model.get_svn_patterns(*args)
942 945 settings_mock.assert_called_once_with(*args)
943 946
944 947
945 948 class TestGetReposLocation(object):
946 949 def test_returns_repos_location(self, repo_stub):
947 950 model = VcsSettingsModel()
948 951
949 952 result_mock = mock.Mock()
950 953 result_mock.ui_value = '/tmp'
951 954
952 955 with mock.patch.object(model, 'global_settings') as settings_mock:
953 956 settings_mock.get_ui_by_key.return_value = result_mock
954 957 result = model.get_repos_location()
955 958
956 959 settings_mock.get_ui_by_key.assert_called_once_with('/')
957 960 assert result == '/tmp'
958 961
959 962
960 963 class TestCreateOrUpdateRepoSettings(object):
961 964 FORM_DATA = {
962 965 'inherit_global_settings': False,
963 966 'hooks_changegroup_repo_size': False,
964 967 'hooks_changegroup_push_logger': False,
965 968 'hooks_outgoing_pull_logger': False,
966 969 'extensions_largefiles': False,
967 970 'extensions_evolve': False,
968 971 'largefiles_usercache': '/example/largefiles-store',
969 972 'vcs_git_lfs_enabled': False,
970 973 'vcs_git_lfs_store_location': '/',
971 974 'phases_publish': 'False',
972 975 'rhodecode_pr_merge_enabled': False,
973 976 'rhodecode_use_outdated_comments': False,
974 977 'new_svn_branch': '',
975 978 'new_svn_tag': ''
976 979 }
977 980
978 981 def test_get_raises_exception_when_repository_not_specified(self):
979 982 model = VcsSettingsModel()
980 983 with pytest.raises(Exception) as exc_info:
981 984 model.create_or_update_repo_settings(data=self.FORM_DATA)
982 985 assert exc_info.value.message == 'Repository is not specified'
983 986
984 987 def test_only_svn_settings_are_updated_when_type_is_svn(self, backend_svn):
985 988 repo = backend_svn.create_repo()
986 989 model = VcsSettingsModel(repo=repo)
987 990 with self._patch_model(model) as mocks:
988 991 model.create_or_update_repo_settings(
989 992 data=self.FORM_DATA, inherit_global_settings=False)
990 993 mocks['create_repo_svn_settings'].assert_called_once_with(
991 994 self.FORM_DATA)
992 995 non_called_methods = (
993 996 'create_or_update_repo_hook_settings',
994 997 'create_or_update_repo_pr_settings',
995 998 'create_or_update_repo_hg_settings')
996 999 for method in non_called_methods:
997 1000 assert mocks[method].call_count == 0
998 1001
999 1002 def test_non_svn_settings_are_updated_when_type_is_hg(self, backend_hg):
1000 1003 repo = backend_hg.create_repo()
1001 1004 model = VcsSettingsModel(repo=repo)
1002 1005 with self._patch_model(model) as mocks:
1003 1006 model.create_or_update_repo_settings(
1004 1007 data=self.FORM_DATA, inherit_global_settings=False)
1005 1008
1006 1009 assert mocks['create_repo_svn_settings'].call_count == 0
1007 1010 called_methods = (
1008 1011 'create_or_update_repo_hook_settings',
1009 1012 'create_or_update_repo_pr_settings',
1010 1013 'create_or_update_repo_hg_settings')
1011 1014 for method in called_methods:
1012 1015 mocks[method].assert_called_once_with(self.FORM_DATA)
1013 1016
1014 1017 def test_non_svn_and_hg_settings_are_updated_when_type_is_git(
1015 1018 self, backend_git):
1016 1019 repo = backend_git.create_repo()
1017 1020 model = VcsSettingsModel(repo=repo)
1018 1021 with self._patch_model(model) as mocks:
1019 1022 model.create_or_update_repo_settings(
1020 1023 data=self.FORM_DATA, inherit_global_settings=False)
1021 1024
1022 1025 assert mocks['create_repo_svn_settings'].call_count == 0
1023 1026 called_methods = (
1024 1027 'create_or_update_repo_hook_settings',
1025 1028 'create_or_update_repo_pr_settings')
1026 1029 non_called_methods = (
1027 1030 'create_repo_svn_settings',
1028 1031 'create_or_update_repo_hg_settings'
1029 1032 )
1030 1033 for method in called_methods:
1031 1034 mocks[method].assert_called_once_with(self.FORM_DATA)
1032 1035 for method in non_called_methods:
1033 1036 assert mocks[method].call_count == 0
1034 1037
1035 1038 def test_no_methods_are_called_when_settings_are_inherited(
1036 1039 self, backend):
1037 1040 repo = backend.create_repo()
1038 1041 model = VcsSettingsModel(repo=repo)
1039 1042 with self._patch_model(model) as mocks:
1040 1043 model.create_or_update_repo_settings(
1041 1044 data=self.FORM_DATA, inherit_global_settings=True)
1042 1045 for method_name in mocks:
1043 1046 assert mocks[method_name].call_count == 0
1044 1047
1045 1048 def test_cache_is_marked_for_invalidation(self, repo_stub):
1046 1049 model = VcsSettingsModel(repo=repo_stub)
1047 1050 invalidation_patcher = mock.patch(
1048 1051 'rhodecode.model.scm.ScmModel.mark_for_invalidation')
1049 1052 with invalidation_patcher as invalidation_mock:
1050 1053 model.create_or_update_repo_settings(
1051 1054 data=self.FORM_DATA, inherit_global_settings=True)
1052 1055 invalidation_mock.assert_called_once_with(
1053 1056 repo_stub.repo_name, delete=True)
1054 1057
1055 1058 def test_inherit_flag_is_saved(self, repo_stub):
1056 1059 model = VcsSettingsModel(repo=repo_stub)
1057 1060 model.inherit_global_settings = True
1058 1061 with self._patch_model(model):
1059 1062 model.create_or_update_repo_settings(
1060 1063 data=self.FORM_DATA, inherit_global_settings=False)
1061 1064 assert model.inherit_global_settings is False
1062 1065
1063 1066 def _patch_model(self, model):
1064 1067 return mock.patch.multiple(
1065 1068 model,
1066 1069 create_repo_svn_settings=mock.DEFAULT,
1067 1070 create_or_update_repo_hook_settings=mock.DEFAULT,
1068 1071 create_or_update_repo_pr_settings=mock.DEFAULT,
1069 1072 create_or_update_repo_hg_settings=mock.DEFAULT)
@@ -1,859 +1,859 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 import textwrap
24 24
25 25 import rhodecode
26 26 from rhodecode.lib.utils2 import safe_unicode
27 27 from rhodecode.lib.vcs.backends import get_backend
28 28 from rhodecode.lib.vcs.backends.base import (
29 29 MergeResponse, MergeFailureReason, Reference)
30 30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 31 from rhodecode.lib.vcs.nodes import FileNode
32 32 from rhodecode.model.comment import CommentsModel
33 33 from rhodecode.model.db import PullRequest, Session
34 34 from rhodecode.model.pull_request import PullRequestModel
35 35 from rhodecode.model.user import UserModel
36 36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37 37
38 38
39 39 pytestmark = [
40 40 pytest.mark.backends("git", "hg"),
41 41 ]
42 42
43 43
44 44 @pytest.mark.usefixtures('config_stub')
45 45 class TestPullRequestModel(object):
46 46
47 47 @pytest.fixture
48 48 def pull_request(self, request, backend, pr_util):
49 49 """
50 50 A pull request combined with multiples patches.
51 51 """
52 52 BackendClass = get_backend(backend.alias)
53 53 self.merge_patcher = mock.patch.object(
54 54 BackendClass, 'merge', return_value=MergeResponse(
55 55 False, False, None, MergeFailureReason.UNKNOWN))
56 56 self.workspace_remove_patcher = mock.patch.object(
57 57 BackendClass, 'cleanup_merge_workspace')
58 58
59 59 self.workspace_remove_mock = self.workspace_remove_patcher.start()
60 60 self.merge_mock = self.merge_patcher.start()
61 61 self.comment_patcher = mock.patch(
62 62 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
63 63 self.comment_patcher.start()
64 64 self.notification_patcher = mock.patch(
65 65 'rhodecode.model.notification.NotificationModel.create')
66 66 self.notification_patcher.start()
67 67 self.helper_patcher = mock.patch(
68 68 'rhodecode.lib.helpers.url')
69 69 self.helper_patcher.start()
70 70
71 71 self.hook_patcher = mock.patch.object(PullRequestModel,
72 72 '_trigger_pull_request_hook')
73 73 self.hook_mock = self.hook_patcher.start()
74 74
75 75 self.invalidation_patcher = mock.patch(
76 76 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
77 77 self.invalidation_mock = self.invalidation_patcher.start()
78 78
79 79 self.pull_request = pr_util.create_pull_request(
80 80 mergeable=True, name_suffix=u'ąć')
81 81 self.source_commit = self.pull_request.source_ref_parts.commit_id
82 82 self.target_commit = self.pull_request.target_ref_parts.commit_id
83 83 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
84 84
85 85 @request.addfinalizer
86 86 def cleanup_pull_request():
87 87 calls = [mock.call(
88 88 self.pull_request, self.pull_request.author, 'create')]
89 89 self.hook_mock.assert_has_calls(calls)
90 90
91 91 self.workspace_remove_patcher.stop()
92 92 self.merge_patcher.stop()
93 93 self.comment_patcher.stop()
94 94 self.notification_patcher.stop()
95 95 self.helper_patcher.stop()
96 96 self.hook_patcher.stop()
97 97 self.invalidation_patcher.stop()
98 98
99 99 return self.pull_request
100 100
101 101 def test_get_all(self, pull_request):
102 102 prs = PullRequestModel().get_all(pull_request.target_repo)
103 103 assert isinstance(prs, list)
104 104 assert len(prs) == 1
105 105
106 106 def test_count_all(self, pull_request):
107 107 pr_count = PullRequestModel().count_all(pull_request.target_repo)
108 108 assert pr_count == 1
109 109
110 110 def test_get_awaiting_review(self, pull_request):
111 111 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
112 112 assert isinstance(prs, list)
113 113 assert len(prs) == 1
114 114
115 115 def test_count_awaiting_review(self, pull_request):
116 116 pr_count = PullRequestModel().count_awaiting_review(
117 117 pull_request.target_repo)
118 118 assert pr_count == 1
119 119
120 120 def test_get_awaiting_my_review(self, pull_request):
121 121 PullRequestModel().update_reviewers(
122 122 pull_request, [(pull_request.author, ['author'], False)],
123 123 pull_request.author)
124 124 prs = PullRequestModel().get_awaiting_my_review(
125 125 pull_request.target_repo, user_id=pull_request.author.user_id)
126 126 assert isinstance(prs, list)
127 127 assert len(prs) == 1
128 128
129 129 def test_count_awaiting_my_review(self, pull_request):
130 130 PullRequestModel().update_reviewers(
131 131 pull_request, [(pull_request.author, ['author'], False)],
132 132 pull_request.author)
133 133 pr_count = PullRequestModel().count_awaiting_my_review(
134 134 pull_request.target_repo, user_id=pull_request.author.user_id)
135 135 assert pr_count == 1
136 136
137 137 def test_delete_calls_cleanup_merge(self, pull_request):
138 138 PullRequestModel().delete(pull_request, pull_request.author)
139 139
140 140 self.workspace_remove_mock.assert_called_once_with(
141 141 self.workspace_id)
142 142
143 143 def test_close_calls_cleanup_and_hook(self, pull_request):
144 144 PullRequestModel().close_pull_request(
145 145 pull_request, pull_request.author)
146 146
147 147 self.workspace_remove_mock.assert_called_once_with(
148 148 self.workspace_id)
149 149 self.hook_mock.assert_called_with(
150 150 self.pull_request, self.pull_request.author, 'close')
151 151
152 152 def test_merge_status(self, pull_request):
153 153 self.merge_mock.return_value = MergeResponse(
154 154 True, False, None, MergeFailureReason.NONE)
155 155
156 156 assert pull_request._last_merge_source_rev is None
157 157 assert pull_request._last_merge_target_rev is None
158 158 assert pull_request.last_merge_status is None
159 159
160 160 status, msg = PullRequestModel().merge_status(pull_request)
161 161 assert status is True
162 162 assert msg.eval() == 'This pull request can be automatically merged.'
163 163 self.merge_mock.assert_called_once_with(
164 164 pull_request.target_ref_parts,
165 165 pull_request.source_repo.scm_instance(),
166 166 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
167 use_rebase=False)
167 use_rebase=False, close_branch=False)
168 168
169 169 assert pull_request._last_merge_source_rev == self.source_commit
170 170 assert pull_request._last_merge_target_rev == self.target_commit
171 171 assert pull_request.last_merge_status is MergeFailureReason.NONE
172 172
173 173 self.merge_mock.reset_mock()
174 174 status, msg = PullRequestModel().merge_status(pull_request)
175 175 assert status is True
176 176 assert msg.eval() == 'This pull request can be automatically merged.'
177 177 assert self.merge_mock.called is False
178 178
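As the updated expectation above shows, the dry-run merge-status check (and, further down, the real merge) now forwards a close_branch keyword to the backend merge call alongside use_rebase. A self-contained illustration of that call shape, using a plain mock in place of the patched backend merge and string placeholders for the fixture objects:

.. code-block:: python

    import mock

    # The mock stands in for BackendClass.merge as patched in the fixture;
    # the positional placeholders mirror target ref, source scm, source ref
    # and workspace id, and the keywords mirror the assertion, including the
    # close_branch flag introduced by this changeset.
    merge = mock.Mock()
    merge('target_ref_parts', 'source_scm_instance', 'source_ref_parts', 'pr-1',
          dry_run=True, use_rebase=False, close_branch=False)

    merge.assert_called_once_with(
        'target_ref_parts', 'source_scm_instance', 'source_ref_parts', 'pr-1',
        dry_run=True, use_rebase=False, close_branch=False)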
179 179 def test_merge_status_known_failure(self, pull_request):
180 180 self.merge_mock.return_value = MergeResponse(
181 181 False, False, None, MergeFailureReason.MERGE_FAILED)
182 182
183 183 assert pull_request._last_merge_source_rev is None
184 184 assert pull_request._last_merge_target_rev is None
185 185 assert pull_request.last_merge_status is None
186 186
187 187 status, msg = PullRequestModel().merge_status(pull_request)
188 188 assert status is False
189 189 assert (
190 190 msg.eval() ==
191 191 'This pull request cannot be merged because of merge conflicts.')
192 192 self.merge_mock.assert_called_once_with(
193 193 pull_request.target_ref_parts,
194 194 pull_request.source_repo.scm_instance(),
195 195 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
196 use_rebase=False)
196 use_rebase=False, close_branch=False)
197 197
198 198 assert pull_request._last_merge_source_rev == self.source_commit
199 199 assert pull_request._last_merge_target_rev == self.target_commit
200 200 assert (
201 201 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
202 202
203 203 self.merge_mock.reset_mock()
204 204 status, msg = PullRequestModel().merge_status(pull_request)
205 205 assert status is False
206 206 assert (
207 207 msg.eval() ==
208 208 'This pull request cannot be merged because of merge conflicts.')
209 209 assert self.merge_mock.called is False
210 210
211 211 def test_merge_status_unknown_failure(self, pull_request):
212 212 self.merge_mock.return_value = MergeResponse(
213 213 False, False, None, MergeFailureReason.UNKNOWN)
214 214
215 215 assert pull_request._last_merge_source_rev is None
216 216 assert pull_request._last_merge_target_rev is None
217 217 assert pull_request.last_merge_status is None
218 218
219 219 status, msg = PullRequestModel().merge_status(pull_request)
220 220 assert status is False
221 221 assert msg.eval() == (
222 222 'This pull request cannot be merged because of an unhandled'
223 223 ' exception.')
224 224 self.merge_mock.assert_called_once_with(
225 225 pull_request.target_ref_parts,
226 226 pull_request.source_repo.scm_instance(),
227 227 pull_request.source_ref_parts, self.workspace_id, dry_run=True,
228 use_rebase=False)
228 use_rebase=False, close_branch=False)
229 229
230 230 assert pull_request._last_merge_source_rev is None
231 231 assert pull_request._last_merge_target_rev is None
232 232 assert pull_request.last_merge_status is None
233 233
234 234 self.merge_mock.reset_mock()
235 235 status, msg = PullRequestModel().merge_status(pull_request)
236 236 assert status is False
237 237 assert msg.eval() == (
238 238 'This pull request cannot be merged because of an unhandled'
239 239 ' exception.')
240 240 assert self.merge_mock.called is True
241 241
242 242 def test_merge_status_when_target_is_locked(self, pull_request):
243 243 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
244 244 status, msg = PullRequestModel().merge_status(pull_request)
245 245 assert status is False
246 246 assert msg.eval() == (
247 247 'This pull request cannot be merged because the target repository'
248 248 ' is locked.')
249 249
250 250 def test_merge_status_requirements_check_target(self, pull_request):
251 251
252 252 def has_largefiles(self, repo):
253 253 return repo == pull_request.source_repo
254 254
255 255 patcher = mock.patch.object(
256 256 PullRequestModel, '_has_largefiles', has_largefiles)
257 257 with patcher:
258 258 status, msg = PullRequestModel().merge_status(pull_request)
259 259
260 260 assert status is False
261 261 assert msg == 'Target repository large files support is disabled.'
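# The patched _has_largefiles above returns True only for the source repo,
# simulating a target repository with largefiles support disabled.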
262 262
263 263 def test_merge_status_requirements_check_source(self, pull_request):
264 264
265 265 def has_largefiles(self, repo):
266 266 return repo == pull_request.target_repo
267 267
268 268 patcher = mock.patch.object(
269 269 PullRequestModel, '_has_largefiles', has_largefiles)
270 270 with patcher:
271 271 status, msg = PullRequestModel().merge_status(pull_request)
272 272
273 273 assert status is False
274 274 assert msg == 'Source repository large files support is disabled.'
275 275
276 276 def test_merge(self, pull_request, merge_extras):
277 277 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
278 278 merge_ref = Reference(
279 279 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
280 280 self.merge_mock.return_value = MergeResponse(
281 281 True, True, merge_ref, MergeFailureReason.NONE)
282 282
283 283 merge_extras['repository'] = pull_request.target_repo.repo_name
284 284 PullRequestModel().merge(
285 285 pull_request, pull_request.author, extras=merge_extras)
286 286
287 287 message = (
288 288 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
289 289 u'\n\n {pr_title}'.format(
290 290 pr_id=pull_request.pull_request_id,
291 291 source_repo=safe_unicode(
292 292 pull_request.source_repo.scm_instance().name),
293 293 source_ref_name=pull_request.source_ref_parts.name,
294 294 pr_title=safe_unicode(pull_request.title)
295 295 )
296 296 )
297 297 self.merge_mock.assert_called_once_with(
298 298 pull_request.target_ref_parts,
299 299 pull_request.source_repo.scm_instance(),
300 300 pull_request.source_ref_parts, self.workspace_id,
301 301 user_name=user.username, user_email=user.email, message=message,
302 use_rebase=False
302 use_rebase=False, close_branch=False
303 303 )
304 304 self.invalidation_mock.assert_called_once_with(
305 305 pull_request.target_repo.repo_name)
306 306
307 307 self.hook_mock.assert_called_with(
308 308 self.pull_request, self.pull_request.author, 'merge')
309 309
310 310 pull_request = PullRequest.get(pull_request.pull_request_id)
311 311 assert (
312 312 pull_request.merge_rev ==
313 313 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
314 314
315 315 def test_merge_failed(self, pull_request, merge_extras):
316 316 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
317 317 merge_ref = Reference(
318 318 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
319 319 self.merge_mock.return_value = MergeResponse(
320 320 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
321 321
322 322 merge_extras['repository'] = pull_request.target_repo.repo_name
323 323 PullRequestModel().merge(
324 324 pull_request, pull_request.author, extras=merge_extras)
325 325
326 326 message = (
327 327 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
328 328 u'\n\n {pr_title}'.format(
329 329 pr_id=pull_request.pull_request_id,
330 330 source_repo=safe_unicode(
331 331 pull_request.source_repo.scm_instance().name),
332 332 source_ref_name=pull_request.source_ref_parts.name,
333 333 pr_title=safe_unicode(pull_request.title)
334 334 )
335 335 )
336 336 self.merge_mock.assert_called_once_with(
337 337 pull_request.target_ref_parts,
338 338 pull_request.source_repo.scm_instance(),
339 339 pull_request.source_ref_parts, self.workspace_id,
340 340 user_name=user.username, user_email=user.email, message=message,
341 use_rebase=False
341 use_rebase=False, close_branch=False
342 342 )
343 343
344 344 pull_request = PullRequest.get(pull_request.pull_request_id)
345 345 assert self.invalidation_mock.called is False
346 346 assert pull_request.merge_rev is None
347 347
348 348 def test_get_commit_ids(self, pull_request):
349 349 # The PR has not been merged yet, so expect an exception
350 350 with pytest.raises(ValueError):
351 351 PullRequestModel()._get_commit_ids(pull_request)
352 352
353 353 # Merge revision is in the revisions list
354 354 pull_request.merge_rev = pull_request.revisions[0]
355 355 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
356 356 assert commit_ids == pull_request.revisions
357 357
358 358 # Merge revision is not in the revisions list
359 359 pull_request.merge_rev = 'f000' * 10
360 360 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
361 361 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
362 362
363 363 def test_get_diff_from_pr_version(self, pull_request):
364 364 source_repo = pull_request.source_repo
365 365 source_ref_id = pull_request.source_ref_parts.commit_id
366 366 target_ref_id = pull_request.target_ref_parts.commit_id
367 367 diff = PullRequestModel()._get_diff_from_pr_or_version(
368 368 source_repo, source_ref_id, target_ref_id, context=6)
369 369 assert 'file_1' in diff.raw
370 370
371 371 def test_generate_title_returns_unicode(self):
372 372 title = PullRequestModel().generate_pullrequest_title(
373 373 source='source-dummy',
374 374 source_ref='source-ref-dummy',
375 375 target='target-dummy',
376 376 )
377 377 assert type(title) == unicode
378 378
379 379
380 380 @pytest.mark.usefixtures('config_stub')
381 381 class TestIntegrationMerge(object):
382 382 @pytest.mark.parametrize('extra_config', (
383 383 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
384 384 ))
385 385 def test_merge_triggers_push_hooks(
386 386 self, pr_util, user_admin, capture_rcextensions, merge_extras,
387 387 extra_config):
388 388 pull_request = pr_util.create_pull_request(
389 389 approved=True, mergeable=True)
390 390 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
391 391 merge_extras['repository'] = pull_request.target_repo.repo_name
392 392 Session().commit()
393 393
394 394 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
395 395 merge_state = PullRequestModel().merge(
396 396 pull_request, user_admin, extras=merge_extras)
397 397
398 398 assert merge_state.executed
399 399 assert 'pre_push' in capture_rcextensions
400 400 assert 'post_push' in capture_rcextensions
401 401
402 402 def test_merge_can_be_rejected_by_pre_push_hook(
403 403 self, pr_util, user_admin, capture_rcextensions, merge_extras):
404 404 pull_request = pr_util.create_pull_request(
405 405 approved=True, mergeable=True)
406 406 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
407 407 merge_extras['repository'] = pull_request.target_repo.repo_name
408 408 Session().commit()
409 409
410 410 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
411 411 pre_pull.side_effect = RepositoryError("Disallow push!")
412 412 merge_status = PullRequestModel().merge(
413 413 pull_request, user_admin, extras=merge_extras)
414 414
415 415 assert not merge_status.executed
416 416 assert 'pre_push' not in capture_rcextensions
417 417 assert 'post_push' not in capture_rcextensions
418 418
419 419 def test_merge_fails_if_target_is_locked(
420 420 self, pr_util, user_regular, merge_extras):
421 421 pull_request = pr_util.create_pull_request(
422 422 approved=True, mergeable=True)
423 423 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
424 424 pull_request.target_repo.locked = locked_by
425 425 # TODO: johbo: Check if this can work based on the database; currently
426 426 # all data is pre-computed, which is why just updating the DB is not
427 427 # enough.
428 428 merge_extras['locked_by'] = locked_by
429 429 merge_extras['repository'] = pull_request.target_repo.repo_name
430 430 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
431 431 Session().commit()
432 432 merge_status = PullRequestModel().merge(
433 433 pull_request, user_regular, extras=merge_extras)
434 434 assert not merge_status.executed
435 435
436 436
437 437 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
438 438 (False, 1, 0),
439 439 (True, 0, 1),
440 440 ])
441 441 def test_outdated_comments(
442 442 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
443 443 pull_request = pr_util.create_pull_request()
444 444 pr_util.create_inline_comment(file_path='not_in_updated_diff')
445 445
446 446 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
447 447 pr_util.add_one_commit()
448 448 assert_inline_comments(
449 449 pull_request, visible=inlines_count, outdated=outdated_count)
450 450 outdated_comment_mock.assert_called_with(pull_request)
451 451
452 452
453 453 @pytest.fixture
454 454 def merge_extras(user_regular):
455 455 """
456 456 Context for the vcs operation when running a merge.
457 457 """
458 458 extras = {
459 459 'ip': '127.0.0.1',
460 460 'username': user_regular.username,
461 461 'action': 'push',
462 462 'repository': 'fake_target_repo_name',
463 463 'scm': 'git',
464 464 'config': 'fake_config_ini_path',
465 465 'make_lock': None,
466 466 'locked_by': [None, None, None],
467 467 'server_url': 'http://test.example.com:5000',
468 468 'hooks': ['push', 'pull'],
469 469 'is_shadow_repo': False,
470 470 }
471 471 return extras
472 472
473 473
474 474 @pytest.mark.usefixtures('config_stub')
475 475 class TestUpdateCommentHandling(object):
476 476
477 477 @pytest.fixture(autouse=True, scope='class')
478 478 def enable_outdated_comments(self, request, pylonsapp):
479 479 config_patch = mock.patch.dict(
480 480 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
481 481 config_patch.start()
482 482
483 483 @request.addfinalizer
484 484 def cleanup():
485 485 config_patch.stop()
486 486
487 487 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
488 488 commits = [
489 489 {'message': 'a'},
490 490 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
491 491 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
492 492 ]
493 493 pull_request = pr_util.create_pull_request(
494 494 commits=commits, target_head='a', source_head='b', revisions=['b'])
495 495 pr_util.create_inline_comment(file_path='file_b')
496 496 pr_util.add_one_commit(head='c')
497 497
498 498 assert_inline_comments(pull_request, visible=1, outdated=0)
499 499
500 500 def test_comment_stays_unflagged_on_change_above(self, pr_util):
501 501 original_content = ''.join(
502 502 ['line {}\n'.format(x) for x in range(1, 11)])
503 503 updated_content = 'new_line_at_top\n' + original_content
504 504 commits = [
505 505 {'message': 'a'},
506 506 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
507 507 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
508 508 ]
509 509 pull_request = pr_util.create_pull_request(
510 510 commits=commits, target_head='a', source_head='b', revisions=['b'])
511 511
512 512 with outdated_comments_patcher():
513 513 comment = pr_util.create_inline_comment(
514 514 line_no=u'n8', file_path='file_b')
515 515 pr_util.add_one_commit(head='c')
516 516
517 517 assert_inline_comments(pull_request, visible=1, outdated=0)
518 518 assert comment.line_no == u'n9'
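# The 'n' prefix presumably marks a new-file-side line number; adding a line at
# the top of file_b shifts the comment from n8 down to n9.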
519 519
520 520 def test_comment_stays_unflagged_on_change_below(self, pr_util):
521 521 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
522 522 updated_content = original_content + 'new_line_at_end\n'
523 523 commits = [
524 524 {'message': 'a'},
525 525 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
526 526 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
527 527 ]
528 528 pull_request = pr_util.create_pull_request(
529 529 commits=commits, target_head='a', source_head='b', revisions=['b'])
530 530 pr_util.create_inline_comment(file_path='file_b')
531 531 pr_util.add_one_commit(head='c')
532 532
533 533 assert_inline_comments(pull_request, visible=1, outdated=0)
534 534
535 535 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
536 536 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
537 537 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
538 538 change_lines = list(base_lines)
539 539 change_lines.insert(6, 'line 6a added\n')
540 540
541 541 # Changes on the first and last lines of the visible comment context
542 542 update_lines = list(change_lines)
543 543 update_lines[0] = 'line 1 changed\n'
544 544 update_lines[-1] = 'line 12 changed\n'
545 545
546 546 def file_b(lines):
547 547 return FileNode('file_b', ''.join(lines))
548 548
549 549 commits = [
550 550 {'message': 'a', 'added': [file_b(base_lines)]},
551 551 {'message': 'b', 'changed': [file_b(change_lines)]},
552 552 {'message': 'c', 'changed': [file_b(update_lines)]},
553 553 ]
554 554
555 555 pull_request = pr_util.create_pull_request(
556 556 commits=commits, target_head='a', source_head='b', revisions=['b'])
557 557 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
558 558
559 559 with outdated_comments_patcher():
560 560 pr_util.add_one_commit(head='c')
561 561 assert_inline_comments(pull_request, visible=0, outdated=1)
562 562
563 563 @pytest.mark.parametrize("change, content", [
564 564 ('changed', 'changed\n'),
565 565 ('removed', ''),
566 566 ], ids=['changed', 'removed'])
567 567 def test_comment_flagged_on_change(self, pr_util, change, content):
568 568 commits = [
569 569 {'message': 'a'},
570 570 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
571 571 {'message': 'c', change: [FileNode('file_b', content)]},
572 572 ]
573 573 pull_request = pr_util.create_pull_request(
574 574 commits=commits, target_head='a', source_head='b', revisions=['b'])
575 575 pr_util.create_inline_comment(file_path='file_b')
576 576
577 577 with outdated_comments_patcher():
578 578 pr_util.add_one_commit(head='c')
579 579 assert_inline_comments(pull_request, visible=0, outdated=1)
580 580
581 581
582 582 @pytest.mark.usefixtures('config_stub')
583 583 class TestUpdateChangedFiles(object):
584 584
585 585 def test_no_changes_on_unchanged_diff(self, pr_util):
586 586 commits = [
587 587 {'message': 'a'},
588 588 {'message': 'b',
589 589 'added': [FileNode('file_b', 'test_content b\n')]},
590 590 {'message': 'c',
591 591 'added': [FileNode('file_c', 'test_content c\n')]},
592 592 ]
593 593 # open a PR from a to b, adding file_b
594 594 pull_request = pr_util.create_pull_request(
595 595 commits=commits, target_head='a', source_head='b', revisions=['b'],
596 596 name_suffix='per-file-review')
597 597
598 598 # modify PR adding new file file_c
599 599 pr_util.add_one_commit(head='c')
600 600
601 601 assert_pr_file_changes(
602 602 pull_request,
603 603 added=['file_c'],
604 604 modified=[],
605 605 removed=[])
606 606
607 607 def test_modify_and_undo_modification_diff(self, pr_util):
608 608 commits = [
609 609 {'message': 'a'},
610 610 {'message': 'b',
611 611 'added': [FileNode('file_b', 'test_content b\n')]},
612 612 {'message': 'c',
613 613 'changed': [FileNode('file_b', 'test_content b modified\n')]},
614 614 {'message': 'd',
615 615 'changed': [FileNode('file_b', 'test_content b\n')]},
616 616 ]
617 617 # open a PR from a to b, adding file_b
618 618 pull_request = pr_util.create_pull_request(
619 619 commits=commits, target_head='a', source_head='b', revisions=['b'],
620 620 name_suffix='per-file-review')
621 621
622 622 # modify PR modifying file file_b
623 623 pr_util.add_one_commit(head='c')
624 624
625 625 assert_pr_file_changes(
626 626 pull_request,
627 627 added=[],
628 628 modified=['file_b'],
629 629 removed=[])
630 630
631 631 # move the head again to d, which rolls back the change,
632 632 # meaning we should indicate no changes
633 633 pr_util.add_one_commit(head='d')
634 634
635 635 assert_pr_file_changes(
636 636 pull_request,
637 637 added=[],
638 638 modified=[],
639 639 removed=[])
640 640
641 641 def test_updated_all_files_in_pr(self, pr_util):
642 642 commits = [
643 643 {'message': 'a'},
644 644 {'message': 'b', 'added': [
645 645 FileNode('file_a', 'test_content a\n'),
646 646 FileNode('file_b', 'test_content b\n'),
647 647 FileNode('file_c', 'test_content c\n')]},
648 648 {'message': 'c', 'changed': [
649 649 FileNode('file_a', 'test_content a changed\n'),
650 650 FileNode('file_b', 'test_content b changed\n'),
651 651 FileNode('file_c', 'test_content c changed\n')]},
652 652 ]
653 653 # open a PR from a to b, changing 3 files
654 654 pull_request = pr_util.create_pull_request(
655 655 commits=commits, target_head='a', source_head='b', revisions=['b'],
656 656 name_suffix='per-file-review')
657 657
658 658 pr_util.add_one_commit(head='c')
659 659
660 660 assert_pr_file_changes(
661 661 pull_request,
662 662 added=[],
663 663 modified=['file_a', 'file_b', 'file_c'],
664 664 removed=[])
665 665
666 666 def test_updated_and_removed_all_files_in_pr(self, pr_util):
667 667 commits = [
668 668 {'message': 'a'},
669 669 {'message': 'b', 'added': [
670 670 FileNode('file_a', 'test_content a\n'),
671 671 FileNode('file_b', 'test_content b\n'),
672 672 FileNode('file_c', 'test_content c\n')]},
673 673 {'message': 'c', 'removed': [
674 674 FileNode('file_a', 'test_content a changed\n'),
675 675 FileNode('file_b', 'test_content b changed\n'),
676 676 FileNode('file_c', 'test_content c changed\n')]},
677 677 ]
678 678 # open a PR from a to b, removing 3 files
679 679 pull_request = pr_util.create_pull_request(
680 680 commits=commits, target_head='a', source_head='b', revisions=['b'],
681 681 name_suffix='per-file-review')
682 682
683 683 pr_util.add_one_commit(head='c')
684 684
685 685 assert_pr_file_changes(
686 686 pull_request,
687 687 added=[],
688 688 modified=[],
689 689 removed=['file_a', 'file_b', 'file_c'])
690 690
691 691
692 692 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
693 693 model = PullRequestModel()
694 694 pull_request = pr_util.create_pull_request()
695 695 pr_util.update_source_repository()
696 696
697 697 model.update_commits(pull_request)
698 698
699 699 # Expect that it has a version entry now
700 700 assert len(model.get_versions(pull_request)) == 1
701 701
702 702
703 703 def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
704 704 pull_request = pr_util.create_pull_request()
705 705 model = PullRequestModel()
706 706 model.update_commits(pull_request)
707 707
708 708 # Expect that it still has no versions
709 709 assert len(model.get_versions(pull_request)) == 0
710 710
711 711
712 712 def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
713 713 model = PullRequestModel()
714 714 pull_request = pr_util.create_pull_request()
715 715 comment = pr_util.create_comment()
716 716 pr_util.update_source_repository()
717 717
718 718 model.update_commits(pull_request)
719 719
720 720 # Expect that the comment is linked to the pr version now
721 721 assert comment.pull_request_version == model.get_versions(pull_request)[0]
722 722
723 723
724 724 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
725 725 model = PullRequestModel()
726 726 pull_request = pr_util.create_pull_request()
727 727 pr_util.update_source_repository()
728 728 pr_util.update_source_repository()
729 729
730 730 model.update_commits(pull_request)
731 731
732 732 # Expect to find a new comment about the change
733 733 expected_message = textwrap.dedent(
734 734 """\
735 735 Pull request updated. Auto status change to |under_review|
736 736
737 737 .. role:: added
738 738 .. role:: removed
739 739 .. parsed-literal::
740 740
741 741 Changed commits:
742 742 * :added:`1 added`
743 743 * :removed:`0 removed`
744 744
745 745 Changed files:
746 746 * `A file_2 <#a_c--92ed3b5f07b4>`_
747 747
748 748 .. |under_review| replace:: *"Under Review"*"""
749 749 )
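# The expected text is reStructuredText; the :added:/:removed: roles and the
# |under_review| substitution are presumably rendered by the comment renderer.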
750 750 pull_request_comments = sorted(
751 751 pull_request.comments, key=lambda c: c.modified_at)
752 752 update_comment = pull_request_comments[-1]
753 753 assert update_comment.text == expected_message
754 754
755 755
756 756 def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
757 757 pull_request = pr_util.create_pull_request()
758 758
759 759 # Avoiding default values
760 760 pull_request.status = PullRequest.STATUS_CLOSED
761 761 pull_request._last_merge_source_rev = "0" * 40
762 762 pull_request._last_merge_target_rev = "1" * 40
763 763 pull_request.last_merge_status = 1
764 764 pull_request.merge_rev = "2" * 40
765 765
766 766 # Remember automatic values
767 767 created_on = pull_request.created_on
768 768 updated_on = pull_request.updated_on
769 769
770 770 # Create a new version of the pull request
771 771 version = PullRequestModel()._create_version_from_snapshot(pull_request)
772 772
773 773 # Check attributes
774 774 assert version.title == pr_util.create_parameters['title']
775 775 assert version.description == pr_util.create_parameters['description']
776 776 assert version.status == PullRequest.STATUS_CLOSED
777 777
778 778 # versions get an updated created_on timestamp
779 779 assert version.created_on != created_on
780 780
781 781 assert version.updated_on == updated_on
782 782 assert version.user_id == pull_request.user_id
783 783 assert version.revisions == pr_util.create_parameters['revisions']
784 784 assert version.source_repo == pr_util.source_repository
785 785 assert version.source_ref == pr_util.create_parameters['source_ref']
786 786 assert version.target_repo == pr_util.target_repository
787 787 assert version.target_ref == pr_util.create_parameters['target_ref']
788 788 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
789 789 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
790 790 assert version.last_merge_status == pull_request.last_merge_status
791 791 assert version.merge_rev == pull_request.merge_rev
792 792 assert version.pull_request == pull_request
793 793
794 794
795 795 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
796 796 version1 = pr_util.create_version_of_pull_request()
797 797 comment_linked = pr_util.create_comment(linked_to=version1)
798 798 comment_unlinked = pr_util.create_comment()
799 799 version2 = pr_util.create_version_of_pull_request()
800 800
801 801 PullRequestModel()._link_comments_to_version(version2)
802 802
803 803 # Expect that only the new comment is linked to version2
804 804 assert (
805 805 comment_unlinked.pull_request_version_id ==
806 806 version2.pull_request_version_id)
807 807 assert (
808 808 comment_linked.pull_request_version_id ==
809 809 version1.pull_request_version_id)
810 810 assert (
811 811 comment_unlinked.pull_request_version_id !=
812 812 comment_linked.pull_request_version_id)
813 813
814 814
815 815 def test_calculate_commits():
816 816 old_ids = [1, 2, 3]
817 817 new_ids = [1, 3, 4, 5]
818 818 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
819 819 assert change.added == [4, 5]
820 820 assert change.common == [1, 3]
821 821 assert change.removed == [2]
822 822 assert change.total == [1, 3, 4, 5]
823 823
824 824
825 825 def assert_inline_comments(pull_request, visible=None, outdated=None):
826 826 if visible is not None:
827 827 inline_comments = CommentsModel().get_inline_comments(
828 828 pull_request.target_repo.repo_id, pull_request=pull_request)
829 829 inline_cnt = CommentsModel().get_inline_comments_count(
830 830 inline_comments)
831 831 assert inline_cnt == visible
832 832 if outdated is not None:
833 833 outdated_comments = CommentsModel().get_outdated_comments(
834 834 pull_request.target_repo.repo_id, pull_request)
835 835 assert len(outdated_comments) == outdated
836 836
837 837
838 838 def assert_pr_file_changes(
839 839 pull_request, added=None, modified=None, removed=None):
840 840 pr_versions = PullRequestModel().get_versions(pull_request)
841 841 # always use the first version, i.e. the original PR, to calculate changes
842 842 pull_request_version = pr_versions[0]
843 843 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
844 844 pull_request, pull_request_version)
845 845 file_changes = PullRequestModel()._calculate_file_changes(
846 846 old_diff_data, new_diff_data)
847 847
848 848 assert added == file_changes.added, \
849 849 'expected added:%s vs value:%s' % (added, file_changes.added)
850 850 assert modified == file_changes.modified, \
851 851 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
852 852 assert removed == file_changes.removed, \
853 853 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
854 854
855 855
856 856 def outdated_comments_patcher(use_outdated=True):
857 857 return mock.patch.object(
858 858 CommentsModel, 'use_outdated_comments',
859 859 return_value=use_outdated)