@@ -1,148 +1,153 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | from rhodecode.apps._base import add_route_with_slash |
|
21 | 21 | |
|
22 | 22 | |
|
23 | 23 | def includeme(config): |
|
24 | 24 | |
|
25 | 25 | # Summary |
|
26 | 26 | # NOTE(marcink): one additional route is defined in very bottom, catch |
|
27 | 27 | # all pattern |
|
28 | 28 | config.add_route( |
|
29 | 29 | name='repo_summary_explicit', |
|
30 | 30 | pattern='/{repo_name:.*?[^/]}/summary', repo_route=True) |
|
31 | 31 | config.add_route( |
|
32 | 32 | name='repo_summary_commits', |
|
33 | 33 | pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True) |
|
34 | 34 | |
|
35 | # repo commits | |
|
36 | config.add_route( | |
|
37 | name='repo_commit', | |
|
38 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}', repo_route=True) | |
|
39 | ||
|
35 | 40 | # refs data |
|
36 | 41 | config.add_route( |
|
37 | 42 | name='repo_refs_data', |
|
38 | 43 | pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True) |
|
39 | 44 | |
|
40 | 45 | config.add_route( |
|
41 | 46 | name='repo_refs_changelog_data', |
|
42 | 47 | pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True) |
|
43 | 48 | |
|
44 | 49 | config.add_route( |
|
45 | 50 | name='repo_stats', |
|
46 | 51 | pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True) |
|
47 | 52 | |
|
48 | 53 | # Tags |
|
49 | 54 | config.add_route( |
|
50 | 55 | name='tags_home', |
|
51 | 56 | pattern='/{repo_name:.*?[^/]}/tags', repo_route=True) |
|
52 | 57 | |
|
53 | 58 | # Branches |
|
54 | 59 | config.add_route( |
|
55 | 60 | name='branches_home', |
|
56 | 61 | pattern='/{repo_name:.*?[^/]}/branches', repo_route=True) |
|
57 | 62 | |
|
58 | 63 | config.add_route( |
|
59 | 64 | name='bookmarks_home', |
|
60 | 65 | pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True) |
|
61 | 66 | |
|
62 | 67 | # Pull Requests |
|
63 | 68 | config.add_route( |
|
64 | 69 | name='pullrequest_show', |
|
65 | 70 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id}', |
|
66 | 71 | repo_route=True) |
|
67 | 72 | |
|
68 | 73 | config.add_route( |
|
69 | 74 | name='pullrequest_show_all', |
|
70 | 75 | pattern='/{repo_name:.*?[^/]}/pull-request', |
|
71 | 76 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
72 | 77 | |
|
73 | 78 | config.add_route( |
|
74 | 79 | name='pullrequest_show_all_data', |
|
75 | 80 | pattern='/{repo_name:.*?[^/]}/pull-request-data', |
|
76 | 81 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
77 | 82 | |
|
78 | 83 | # Settings |
|
79 | 84 | config.add_route( |
|
80 | 85 | name='edit_repo', |
|
81 | 86 | pattern='/{repo_name:.*?[^/]}/settings', repo_route=True) |
|
82 | 87 | |
|
83 | 88 | # Settings advanced |
|
84 | 89 | config.add_route( |
|
85 | 90 | name='edit_repo_advanced', |
|
86 | 91 | pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True) |
|
87 | 92 | config.add_route( |
|
88 | 93 | name='edit_repo_advanced_delete', |
|
89 | 94 | pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True) |
|
90 | 95 | config.add_route( |
|
91 | 96 | name='edit_repo_advanced_locking', |
|
92 | 97 | pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True) |
|
93 | 98 | config.add_route( |
|
94 | 99 | name='edit_repo_advanced_journal', |
|
95 | 100 | pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True) |
|
96 | 101 | config.add_route( |
|
97 | 102 | name='edit_repo_advanced_fork', |
|
98 | 103 | pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True) |
|
99 | 104 | |
|
100 | 105 | # Caches |
|
101 | 106 | config.add_route( |
|
102 | 107 | name='edit_repo_caches', |
|
103 | 108 | pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True) |
|
104 | 109 | |
|
105 | 110 | # Permissions |
|
106 | 111 | config.add_route( |
|
107 | 112 | name='edit_repo_perms', |
|
108 | 113 | pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True) |
|
109 | 114 | |
|
110 | 115 | # Repo Review Rules |
|
111 | 116 | config.add_route( |
|
112 | 117 | name='repo_reviewers', |
|
113 | 118 | pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True) |
|
114 | 119 | |
|
115 | 120 | config.add_route( |
|
116 | 121 | name='repo_default_reviewers_data', |
|
117 | 122 | pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True) |
|
118 | 123 | |
|
119 | 124 | # Maintenance |
|
120 | 125 | config.add_route( |
|
121 | 126 | name='repo_maintenance', |
|
122 | 127 | pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True) |
|
123 | 128 | |
|
124 | 129 | config.add_route( |
|
125 | 130 | name='repo_maintenance_execute', |
|
126 | 131 | pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True) |
|
127 | 132 | |
|
128 | 133 | # Strip |
|
129 | 134 | config.add_route( |
|
130 | 135 | name='strip', |
|
131 | 136 | pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True) |
|
132 | 137 | |
|
133 | 138 | config.add_route( |
|
134 | 139 | name='strip_check', |
|
135 | 140 | pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True) |
|
136 | 141 | |
|
137 | 142 | config.add_route( |
|
138 | 143 | name='strip_execute', |
|
139 | 144 | pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True) |
|
140 | 145 | |
|
141 | 146 | # NOTE(marcink): needs to be at the end for catch-all |
|
142 | 147 | add_route_with_slash( |
|
143 | 148 | config, |
|
144 | 149 | name='repo_summary', |
|
145 | 150 | pattern='/{repo_name:.*?[^/]}', repo_route=True) |
|
146 | 151 | |
|
147 | 152 | # Scan module for configuration decorators. |
|
148 | 153 | config.scan() |
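
The routing change above registers a dedicated `repo_commit` route for changeset pages alongside the existing summary routes. Below is a minimal, self-contained sketch of how such a Pyramid route resolves to a URL; it deliberately omits RhodeCode's custom `repo_route` predicate, and the host, repository name and commit id are made-up example values rather than data from this changeset.

```python
from pyramid.config import Configurator
from pyramid.request import Request

config = Configurator()
# same name and pattern as the route added above, minus the custom
# repo_route predicate that RhodeCode layers on top
config.add_route(
    'repo_commit', '/{repo_name:.*?[^/]}/changeset/{commit_id}')
config.commit()

# a throwaway request bound to the configured registry, only used to
# call route_url() outside of a running application
request = Request.blank('/', base_url='https://code.example.com')
request.registry = config.registry

print(request.route_url(
    'repo_commit', repo_name='myrepo', commit_id='deadbeef',
    _anchor='comment-1'))
# -> https://code.example.com/myrepo/changeset/deadbeef#comment-1
```

The `_anchor` argument shown here is the same mechanism the comment model below uses to deep-link to a specific comment on the commit page.
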
@@ -1,137 +1,141 b'' | |||
|
1 | 1 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | import logging |
|
20 | 20 | |
|
21 | 21 | from rhodecode.translation import lazy_ugettext |
|
22 | 22 | from rhodecode.events.repo import ( |
|
23 | 23 | RepoEvent, _commits_as_dict, _issues_as_dict) |
|
24 | 24 | |
|
25 | 25 | log = logging.getLogger(__name__) |
|
26 | 26 | |
|
27 | 27 | |
|
28 | 28 | class PullRequestEvent(RepoEvent): |
|
29 | 29 | """ |
|
30 | 30 | Base class for pull request events. |
|
31 | 31 | |
|
32 | 32 | :param pullrequest: a :class:`PullRequest` instance |
|
33 | 33 | """ |
|
34 | 34 | |
|
35 | 35 | def __init__(self, pullrequest): |
|
36 | 36 | super(PullRequestEvent, self).__init__(pullrequest.target_repo) |
|
37 | 37 | self.pullrequest = pullrequest |
|
38 | 38 | |
|
39 | 39 | def as_dict(self): |
|
40 | 40 | from rhodecode.model.pull_request import PullRequestModel |
|
41 | 41 | data = super(PullRequestEvent, self).as_dict() |
|
42 | 42 | |
|
43 | 43 | commits = _commits_as_dict( |
|
44 | 44 | commit_ids=self.pullrequest.revisions, |
|
45 | 45 | repos=[self.pullrequest.source_repo] |
|
46 | 46 | ) |
|
47 | 47 | issues = _issues_as_dict(commits) |
|
48 | 48 | |
|
49 | 49 | data.update({ |
|
50 | 50 | 'pullrequest': { |
|
51 | 51 | 'title': self.pullrequest.title, |
|
52 | 52 | 'issues': issues, |
|
53 | 53 | 'pull_request_id': self.pullrequest.pull_request_id, |
|
54 | 54 | 'url': PullRequestModel().get_url(self.pullrequest), |
|
55 | 'permalink_url': PullRequestModel().get_url( | |
|
56 | self.pullrequest, permalink=True), | |
|
55 | 57 | 'status': self.pullrequest.calculated_review_status(), |
|
56 | 58 | 'commits': commits, |
|
57 | 59 | } |
|
58 | 60 | }) |
|
59 | 61 | return data |
|
60 | 62 | |
|
61 | 63 | |
|
62 | 64 | class PullRequestCreateEvent(PullRequestEvent): |
|
63 | 65 | """ |
|
64 | 66 | An instance of this class is emitted as an :term:`event` after a pull |
|
65 | 67 | request is created. |
|
66 | 68 | """ |
|
67 | 69 | name = 'pullrequest-create' |
|
68 | 70 | display_name = lazy_ugettext('pullrequest created') |
|
69 | 71 | |
|
70 | 72 | |
|
71 | 73 | class PullRequestCloseEvent(PullRequestEvent): |
|
72 | 74 | """ |
|
73 | 75 | An instance of this class is emitted as an :term:`event` after a pull |
|
74 | 76 | request is closed. |
|
75 | 77 | """ |
|
76 | 78 | name = 'pullrequest-close' |
|
77 | 79 | display_name = lazy_ugettext('pullrequest closed') |
|
78 | 80 | |
|
79 | 81 | |
|
80 | 82 | class PullRequestUpdateEvent(PullRequestEvent): |
|
81 | 83 | """ |
|
82 | 84 | An instance of this class is emitted as an :term:`event` after a pull |
|
83 | 85 | request's commits have been updated. |
|
84 | 86 | """ |
|
85 | 87 | name = 'pullrequest-update' |
|
86 | 88 | display_name = lazy_ugettext('pullrequest commits updated') |
|
87 | 89 | |
|
88 | 90 | |
|
89 | 91 | class PullRequestReviewEvent(PullRequestEvent): |
|
90 | 92 | """ |
|
91 | 93 | An instance of this class is emitted as an :term:`event` after a pull |
|
92 | 94 | request review has changed. |
|
93 | 95 | """ |
|
94 | 96 | name = 'pullrequest-review' |
|
95 | 97 | display_name = lazy_ugettext('pullrequest review changed') |
|
96 | 98 | |
|
97 | 99 | |
|
98 | 100 | class PullRequestMergeEvent(PullRequestEvent): |
|
99 | 101 | """ |
|
100 | 102 | An instance of this class is emitted as an :term:`event` after a pull |
|
101 | 103 | request is merged. |
|
102 | 104 | """ |
|
103 | 105 | name = 'pullrequest-merge' |
|
104 | 106 | display_name = lazy_ugettext('pullrequest merged') |
|
105 | 107 | |
|
106 | 108 | |
|
107 | 109 | class PullRequestCommentEvent(PullRequestEvent): |
|
108 | 110 | """ |
|
109 | 111 | An instance of this class is emitted as an :term:`event` after a pull |
|
110 | 112 | request comment is created. |
|
111 | 113 | """ |
|
112 | 114 | name = 'pullrequest-comment' |
|
113 | 115 | display_name = lazy_ugettext('pullrequest commented') |
|
114 | 116 | |
|
115 | 117 | def __init__(self, pullrequest, comment): |
|
116 | 118 | super(PullRequestCommentEvent, self).__init__(pullrequest) |
|
117 | 119 | self.comment = comment |
|
118 | 120 | |
|
119 | 121 | def as_dict(self): |
|
120 | 122 | from rhodecode.model.comment import CommentsModel |
|
121 | 123 | data = super(PullRequestCommentEvent, self).as_dict() |
|
122 | 124 | |
|
123 | 125 | status = None |
|
124 | 126 | if self.comment.status_change: |
|
125 | 127 | status = self.comment.status_change[0].status |
|
126 | 128 | |
|
127 | 129 | data.update({ |
|
128 | 130 | 'comment': { |
|
129 | 131 | 'status': status, |
|
130 | 132 | 'text': self.comment.text, |
|
131 | 133 | 'type': self.comment.comment_type, |
|
132 | 134 | 'file': self.comment.f_path, |
|
133 | 135 | 'line': self.comment.line_no, |
|
134 | 'url': CommentsModel().get_url(self.comment) | |
|
136 | 'url': CommentsModel().get_url(self.comment), | |
|
137 | 'permalink_url': CommentsModel().get_url( | |
|
138 | self.comment, permalink=True), | |
|
135 | 139 | } |
|
136 | 140 | }) |
|
137 | 141 | return data |
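
With the `permalink_url` entries added above, integrations that consume the serialized pull request event can prefer a rename-proof link over the repo-name based one. The following is a hedged sketch of such a consumer; the payload literal mirrors the keys set in `PullRequestEvent.as_dict()`, but every value, including the permalink format, is an invented placeholder, not output captured from RhodeCode.

```python
def describe_pull_request_event(data):
    """Build a one-line summary from a serialized pull request event."""
    pr = data['pullrequest']
    # prefer the stable permalink when present, fall back to the
    # regular repo-name based URL
    link = pr.get('permalink_url') or pr['url']
    return '#{0}: {1} [{2}] {3}'.format(
        pr['pull_request_id'], pr['title'], pr['status'], link)


example_payload = {
    'pullrequest': {
        'title': 'Fix login redirect',
        'issues': {},
        'pull_request_id': 42,
        'url': 'https://code.example.com/myrepo/pull-request/42',
        # illustrative value only; the real format is whatever
        # PullRequestModel().get_url(..., permalink=True) produces
        'permalink_url': 'https://code.example.com/pull-request/42',
        'status': 'under_review',
        'commits': [],
    }
}

print(describe_pull_request_event(example_payload))
```
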
@@ -1,272 +1,274 b'' | |||
|
1 | 1 | # Copyright (C) 2016-2017 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | import collections |
|
20 | 20 | import logging |
|
21 | 21 | |
|
22 | 22 | from rhodecode.translation import lazy_ugettext |
|
23 | 23 | from rhodecode.model.db import User, Repository, Session |
|
24 | 24 | from rhodecode.events.base import RhodecodeEvent |
|
25 | 25 | from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError |
|
26 | 26 | |
|
27 | 27 | log = logging.getLogger(__name__) |
|
28 | 28 | |
|
29 | 29 | |
|
30 | 30 | def _commits_as_dict(commit_ids, repos): |
|
31 | 31 | """ |
|
32 | 32 | Helper function to serialize commit_ids |
|
33 | 33 | |
|
34 | 34 | :param commit_ids: commits to get |
|
35 | 35 | :param repos: list of repos to check |
|
36 | 36 | """ |
|
37 | 37 | from rhodecode.lib.utils2 import extract_mentioned_users |
|
38 | from rhodecode.lib import helpers as h | |
|
39 | 38 | from rhodecode.lib.helpers import ( |
|
40 | 39 | urlify_commit_message, process_patterns, chop_at_smart) |
|
40 | from rhodecode.model.repo import RepoModel | |
|
41 | 41 | |
|
42 | 42 | if not repos: |
|
43 | 43 | raise Exception('no repo defined') |
|
44 | 44 | |
|
45 | 45 | if not isinstance(repos, (tuple, list)): |
|
46 | 46 | repos = [repos] |
|
47 | 47 | |
|
48 | 48 | if not commit_ids: |
|
49 | 49 | return [] |
|
50 | 50 | |
|
51 | 51 | needed_commits = list(commit_ids) |
|
52 | 52 | |
|
53 | 53 | commits = [] |
|
54 | 54 | reviewers = [] |
|
55 | 55 | for repo in repos: |
|
56 | 56 | if not needed_commits: |
|
57 | return commits # return early if we have the commits we need | |
|
57 | return commits # return early if we have the commits we need | |
|
58 | 58 | |
|
59 | 59 | vcs_repo = repo.scm_instance(cache=False) |
|
60 | 60 | try: |
|
61 | 61 | # use copy of needed_commits since we modify it while iterating |
|
62 | 62 | for commit_id in list(needed_commits): |
|
63 | 63 | try: |
|
64 | 64 | cs = vcs_repo.get_changeset(commit_id) |
|
65 | 65 | except CommitDoesNotExistError: |
|
66 | continue # maybe its in next repo | |
|
66 | continue # maybe its in next repo | |
|
67 | 67 | |
|
68 | 68 | cs_data = cs.__json__() |
|
69 | 69 | cs_data['mentions'] = extract_mentioned_users(cs_data['message']) |
|
70 | 70 | cs_data['reviewers'] = reviewers |
|
71 | cs_data['url'] = | |
|
72 | | |
|
73 | revision=cs_data['raw_id'], | |
|
74 | qualified=True | |
|
75 | ) | |
|
71 | cs_data['url'] = RepoModel().get_commit_url( | |
|
72 | repo, cs_data['raw_id']) | |
|
73 | cs_data['permalink_url'] = RepoModel().get_commit_url( | |
|
74 | repo, cs_data['raw_id'], permalink=True) | |
|
76 | 75 | urlified_message, issues_data = process_patterns( |
|
77 | 76 | cs_data['message'], repo.repo_name) |
|
78 | 77 | cs_data['issues'] = issues_data |
|
79 | 78 | cs_data['message_html'] = urlify_commit_message( |
|
80 | 79 | cs_data['message'], repo.repo_name) |
|
81 | 80 | cs_data['message_html_title'] = chop_at_smart( |
|
82 | 81 | cs_data['message'], '\n', suffix_if_chopped='...') |
|
83 | 82 | commits.append(cs_data) |
|
84 | 83 | |
|
85 | 84 | needed_commits.remove(commit_id) |
|
86 | 85 | |
|
87 | 86 | except Exception as e: |
|
88 | 87 | log.exception(e) |
|
89 | 88 | # we don't send any commits when crash happens, only full list |
|
90 | 89 | # matters we short circuit then. |
|
91 | 90 | return [] |
|
92 | 91 | |
|
93 | 92 | missing_commits = set(commit_ids) - set(c['raw_id'] for c in commits) |
|
94 | 93 | if missing_commits: |
|
95 | 94 | log.error('missing commits: %s' % ', '.join(missing_commits)) |
|
96 | 95 | |
|
97 | 96 | return commits |
|
98 | 97 | |
|
99 | 98 | |
|
100 | 99 | def _issues_as_dict(commits): |
|
101 | 100 | """ Helper function to serialize issues from commits """ |
|
102 | 101 | issues = {} |
|
103 | 102 | for commit in commits: |
|
104 | 103 | for issue in commit['issues']: |
|
105 | 104 | issues[issue['id']] = issue |
|
106 | 105 | return issues |
|
107 | 106 | |
|
108 | 107 | |
|
109 | 108 | class RepoEvent(RhodecodeEvent): |
|
110 | 109 | """ |
|
111 | 110 | Base class for events acting on a repository. |
|
112 | 111 | |
|
113 | 112 | :param repo: a :class:`Repository` instance |
|
114 | 113 | """ |
|
115 | 114 | |
|
116 | 115 | def __init__(self, repo): |
|
117 | 116 | super(RepoEvent, self).__init__() |
|
118 | 117 | self.repo = repo |
|
119 | 118 | |
|
120 | 119 | def as_dict(self): |
|
121 | 120 | from rhodecode.model.repo import RepoModel |
|
122 | 121 | data = super(RepoEvent, self).as_dict() |
|
123 | 122 | extra_fields = collections.OrderedDict() |
|
124 | 123 | for field in self.repo.extra_fields: |
|
125 | 124 | extra_fields[field.field_key] = field.field_value |
|
126 | 125 | |
|
127 | 126 | data.update({ |
|
128 | 127 | 'repo': { |
|
129 | 128 | 'repo_id': self.repo.repo_id, |
|
130 | 129 | 'repo_name': self.repo.repo_name, |
|
131 | 130 | 'repo_type': self.repo.repo_type, |
|
132 | 131 | 'url': RepoModel().get_url(self.repo), |
|
132 | 'permalink_url': RepoModel().get_url(self.repo, permalink=True), | |
|
133 | 133 | 'extra_fields': extra_fields |
|
134 | 134 | } |
|
135 | 135 | }) |
|
136 | 136 | return data |
|
137 | 137 | |
|
138 | 138 | |
|
139 | 139 | class RepoPreCreateEvent(RepoEvent): |
|
140 | 140 | """ |
|
141 | 141 | An instance of this class is emitted as an :term:`event` before a repo is |
|
142 | 142 | created. |
|
143 | 143 | """ |
|
144 | 144 | name = 'repo-pre-create' |
|
145 | 145 | display_name = lazy_ugettext('repository pre create') |
|
146 | 146 | |
|
147 | 147 | |
|
148 | 148 | class RepoCreateEvent(RepoEvent): |
|
149 | 149 | """ |
|
150 | 150 | An instance of this class is emitted as an :term:`event` whenever a repo is |
|
151 | 151 | created. |
|
152 | 152 | """ |
|
153 | 153 | name = 'repo-create' |
|
154 | 154 | display_name = lazy_ugettext('repository created') |
|
155 | 155 | |
|
156 | 156 | |
|
157 | 157 | class RepoPreDeleteEvent(RepoEvent): |
|
158 | 158 | """ |
|
159 | 159 | An instance of this class is emitted as an :term:`event` whenever a repo is |
|
160 | 160 | created. |
|
161 | 161 | """ |
|
162 | 162 | name = 'repo-pre-delete' |
|
163 | 163 | display_name = lazy_ugettext('repository pre delete') |
|
164 | 164 | |
|
165 | 165 | |
|
166 | 166 | class RepoDeleteEvent(RepoEvent): |
|
167 | 167 | """ |
|
168 | 168 | An instance of this class is emitted as an :term:`event` whenever a repo is |
|
169 | 169 | created. |
|
170 | 170 | """ |
|
171 | 171 | name = 'repo-delete' |
|
172 | 172 | display_name = lazy_ugettext('repository deleted') |
|
173 | 173 | |
|
174 | 174 | |
|
175 | 175 | class RepoVCSEvent(RepoEvent): |
|
176 | 176 | """ |
|
177 | 177 | Base class for events triggered by the VCS |
|
178 | 178 | """ |
|
179 | 179 | def __init__(self, repo_name, extras): |
|
180 | 180 | self.repo = Repository.get_by_repo_name(repo_name) |
|
181 | 181 | if not self.repo: |
|
182 | 182 | raise Exception('repo by this name %s does not exist' % repo_name) |
|
183 | 183 | self.extras = extras |
|
184 | 184 | super(RepoVCSEvent, self).__init__(self.repo) |
|
185 | 185 | |
|
186 | 186 | @property |
|
187 | 187 | def actor(self): |
|
188 | 188 | if self.extras.get('username'): |
|
189 | 189 | return User.get_by_username(self.extras['username']) |
|
190 | 190 | |
|
191 | 191 | @property |
|
192 | 192 | def actor_ip(self): |
|
193 | 193 | if self.extras.get('ip'): |
|
194 | 194 | return self.extras['ip'] |
|
195 | 195 | |
|
196 | 196 | @property |
|
197 | 197 | def server_url(self): |
|
198 | 198 | if self.extras.get('server_url'): |
|
199 | 199 | return self.extras['server_url'] |
|
200 | 200 | |
|
201 | 201 | |
|
202 | 202 | class RepoPrePullEvent(RepoVCSEvent): |
|
203 | 203 | """ |
|
204 | 204 | An instance of this class is emitted as an :term:`event` before commits |
|
205 | 205 | are pulled from a repo. |
|
206 | 206 | """ |
|
207 | 207 | name = 'repo-pre-pull' |
|
208 | 208 | display_name = lazy_ugettext('repository pre pull') |
|
209 | 209 | |
|
210 | 210 | |
|
211 | 211 | class RepoPullEvent(RepoVCSEvent): |
|
212 | 212 | """ |
|
213 | 213 | An instance of this class is emitted as an :term:`event` after commits |
|
214 | 214 | are pulled from a repo. |
|
215 | 215 | """ |
|
216 | 216 | name = 'repo-pull' |
|
217 | 217 | display_name = lazy_ugettext('repository pull') |
|
218 | 218 | |
|
219 | 219 | |
|
220 | 220 | class RepoPrePushEvent(RepoVCSEvent): |
|
221 | 221 | """ |
|
222 | 222 | An instance of this class is emitted as an :term:`event` before commits |
|
223 | 223 | are pushed to a repo. |
|
224 | 224 | """ |
|
225 | 225 | name = 'repo-pre-push' |
|
226 | 226 | display_name = lazy_ugettext('repository pre push') |
|
227 | 227 | |
|
228 | 228 | |
|
229 | 229 | class RepoPushEvent(RepoVCSEvent): |
|
230 | 230 | """ |
|
231 | 231 | An instance of this class is emitted as an :term:`event` after commits |
|
232 | 232 | are pushed to a repo. |
|
233 | 233 | |
|
234 | 234 | :param extras: (optional) dict of data from proxied VCS actions |
|
235 | 235 | """ |
|
236 | 236 | name = 'repo-push' |
|
237 | 237 | display_name = lazy_ugettext('repository push') |
|
238 | 238 | |
|
239 | 239 | def __init__(self, repo_name, pushed_commit_ids, extras): |
|
240 | 240 | super(RepoPushEvent, self).__init__(repo_name, extras) |
|
241 | 241 | self.pushed_commit_ids = pushed_commit_ids |
|
242 | 242 | |
|
243 | 243 | def as_dict(self): |
|
244 | 244 | data = super(RepoPushEvent, self).as_dict() |
|
245 | branch_url = repo_url = data['repo']['url'] | |
|
245 | ||
|
246 | def branch_url(branch_name): | |
|
247 | return '{}/changelog?branch={}'.format( | |
|
248 | data['repo']['url'], branch_name) | |
|
246 | 249 | |
|
247 | 250 | commits = _commits_as_dict( |
|
248 | 251 | commit_ids=self.pushed_commit_ids, repos=[self.repo]) |
|
249 | 252 | |
|
250 | 253 | last_branch = None |
|
251 | 254 | for commit in reversed(commits): |
|
252 | 255 | commit['branch'] = commit['branch'] or last_branch |
|
253 | 256 | last_branch = commit['branch'] |
|
254 | 257 | issues = _issues_as_dict(commits) |
|
255 | 258 | |
|
256 | 259 | branches = set( |
|
257 | 260 | commit['branch'] for commit in commits if commit['branch']) |
|
258 | 261 | branches = [ |
|
259 | 262 | { |
|
260 | 263 | 'name': branch, |
|
261 | 'url': | |
|
262 | data['repo']['url'], branch) | |
|
264 | 'url': branch_url(branch) | |
|
263 | 265 | } |
|
264 | 266 | for branch in branches |
|
265 | 267 | ] |
|
266 | 268 | |
|
267 | 269 | data['push'] = { |
|
268 | 270 | 'commits': commits, |
|
269 | 271 | 'issues': issues, |
|
270 | 272 | 'branches': branches, |
|
271 | 273 | } |
|
272 | 274 | return data |
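
The refactor above replaces the single `branch_url` string with a small helper so that each pushed branch gets its own changelog link. A standalone sketch of that helper's behaviour, using a made-up repository URL rather than real event data:

```python
def branch_url(repo_url, branch_name):
    # mirrors the closure in RepoPushEvent.as_dict(): link a branch to
    # the repository changelog filtered by that branch
    return '{}/changelog?branch={}'.format(repo_url, branch_name)


repo_url = 'https://code.example.com/myrepo'
for branch in ('default', 'stable'):
    print(branch_url(repo_url, branch))
# -> https://code.example.com/myrepo/changelog?branch=default
# -> https://code.example.com/myrepo/changelog?branch=stable
```
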
@@ -1,632 +1,650 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | comments model for RhodeCode |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | import traceback |
|
27 | 27 | import collections |
|
28 | 28 | |
|
29 | 29 | from datetime import datetime |
|
30 | 30 | |
|
31 | 31 | from pylons.i18n.translation import _ |
|
32 | from pyramid.threadlocal import get_current_registry | |
|
32 | from pyramid.threadlocal import get_current_registry, get_current_request | |
|
33 | 33 | from sqlalchemy.sql.expression import null |
|
34 | 34 | from sqlalchemy.sql.functions import coalesce |
|
35 | 35 | |
|
36 | 36 | from rhodecode.lib import helpers as h, diffs |
|
37 | 37 | from rhodecode.lib.channelstream import channelstream_request |
|
38 | 38 | from rhodecode.lib.utils import action_logger |
|
39 | from rhodecode.lib.utils2 import extract_mentioned_users | |
|
39 | from rhodecode.lib.utils2 import extract_mentioned_users, safe_str | |
|
40 | 40 | from rhodecode.model import BaseModel |
|
41 | 41 | from rhodecode.model.db import ( |
|
42 | 42 | ChangesetComment, User, Notification, PullRequest, AttributeDict) |
|
43 | 43 | from rhodecode.model.notification import NotificationModel |
|
44 | 44 | from rhodecode.model.meta import Session |
|
45 | 45 | from rhodecode.model.settings import VcsSettingsModel |
|
46 | 46 | from rhodecode.model.notification import EmailNotificationModel |
|
47 | 47 | from rhodecode.model.validation_schema.schemas import comment_schema |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | log = logging.getLogger(__name__) |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | class CommentsModel(BaseModel): |
|
54 | 54 | |
|
55 | 55 | cls = ChangesetComment |
|
56 | 56 | |
|
57 | 57 | DIFF_CONTEXT_BEFORE = 3 |
|
58 | 58 | DIFF_CONTEXT_AFTER = 3 |
|
59 | 59 | |
|
60 | 60 | def __get_commit_comment(self, changeset_comment): |
|
61 | 61 | return self._get_instance(ChangesetComment, changeset_comment) |
|
62 | 62 | |
|
63 | 63 | def __get_pull_request(self, pull_request): |
|
64 | 64 | return self._get_instance(PullRequest, pull_request) |
|
65 | 65 | |
|
66 | 66 | def _extract_mentions(self, s): |
|
67 | 67 | user_objects = [] |
|
68 | 68 | for username in extract_mentioned_users(s): |
|
69 | 69 | user_obj = User.get_by_username(username, case_insensitive=True) |
|
70 | 70 | if user_obj: |
|
71 | 71 | user_objects.append(user_obj) |
|
72 | 72 | return user_objects |
|
73 | 73 | |
|
74 | 74 | def _get_renderer(self, global_renderer='rst'): |
|
75 | 75 | try: |
|
76 | 76 | # try reading from visual context |
|
77 | 77 | from pylons import tmpl_context |
|
78 | 78 | global_renderer = tmpl_context.visual.default_renderer |
|
79 | 79 | except AttributeError: |
|
80 | 80 | log.debug("Renderer not set, falling back " |
|
81 | 81 | "to default renderer '%s'", global_renderer) |
|
82 | 82 | except Exception: |
|
83 | 83 | log.error(traceback.format_exc()) |
|
84 | 84 | return global_renderer |
|
85 | 85 | |
|
86 | 86 | def aggregate_comments(self, comments, versions, show_version, inline=False): |
|
87 | 87 | # group by versions, and count until, and display objects |
|
88 | 88 | |
|
89 | 89 | comment_groups = collections.defaultdict(list) |
|
90 | 90 | [comment_groups[ |
|
91 | 91 | _co.pull_request_version_id].append(_co) for _co in comments] |
|
92 | 92 | |
|
93 | 93 | def yield_comments(pos): |
|
94 | 94 | for co in comment_groups[pos]: |
|
95 | 95 | yield co |
|
96 | 96 | |
|
97 | 97 | comment_versions = collections.defaultdict( |
|
98 | 98 | lambda: collections.defaultdict(list)) |
|
99 | 99 | prev_prvid = -1 |
|
100 | 100 | # fake last entry with None, to aggregate on "latest" version which |
|
101 | 101 | # doesn't have an pull_request_version_id |
|
102 | 102 | for ver in versions + [AttributeDict({'pull_request_version_id': None})]: |
|
103 | 103 | prvid = ver.pull_request_version_id |
|
104 | 104 | if prev_prvid == -1: |
|
105 | 105 | prev_prvid = prvid |
|
106 | 106 | |
|
107 | 107 | for co in yield_comments(prvid): |
|
108 | 108 | comment_versions[prvid]['at'].append(co) |
|
109 | 109 | |
|
110 | 110 | # save until |
|
111 | 111 | current = comment_versions[prvid]['at'] |
|
112 | 112 | prev_until = comment_versions[prev_prvid]['until'] |
|
113 | 113 | cur_until = prev_until + current |
|
114 | 114 | comment_versions[prvid]['until'].extend(cur_until) |
|
115 | 115 | |
|
116 | 116 | # save outdated |
|
117 | 117 | if inline: |
|
118 | 118 | outdated = [x for x in cur_until |
|
119 | 119 | if x.outdated_at_version(show_version)] |
|
120 | 120 | else: |
|
121 | 121 | outdated = [x for x in cur_until |
|
122 | 122 | if x.older_than_version(show_version)] |
|
123 | 123 | display = [x for x in cur_until if x not in outdated] |
|
124 | 124 | |
|
125 | 125 | comment_versions[prvid]['outdated'] = outdated |
|
126 | 126 | comment_versions[prvid]['display'] = display |
|
127 | 127 | |
|
128 | 128 | prev_prvid = prvid |
|
129 | 129 | |
|
130 | 130 | return comment_versions |
|
131 | 131 | |
|
132 | 132 | def get_unresolved_todos(self, pull_request, show_outdated=True): |
|
133 | 133 | |
|
134 | 134 | todos = Session().query(ChangesetComment) \ |
|
135 | 135 | .filter(ChangesetComment.pull_request == pull_request) \ |
|
136 | 136 | .filter(ChangesetComment.resolved_by == None) \ |
|
137 | 137 | .filter(ChangesetComment.comment_type |
|
138 | 138 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
139 | 139 | |
|
140 | 140 | if not show_outdated: |
|
141 | 141 | todos = todos.filter( |
|
142 | 142 | coalesce(ChangesetComment.display_state, '') != |
|
143 | 143 | ChangesetComment.COMMENT_OUTDATED) |
|
144 | 144 | |
|
145 | 145 | todos = todos.all() |
|
146 | 146 | |
|
147 | 147 | return todos |
|
148 | 148 | |
|
149 | 149 | def get_commit_unresolved_todos(self, commit_id, show_outdated=True): |
|
150 | 150 | |
|
151 | 151 | todos = Session().query(ChangesetComment) \ |
|
152 | 152 | .filter(ChangesetComment.revision == commit_id) \ |
|
153 | 153 | .filter(ChangesetComment.resolved_by == None) \ |
|
154 | 154 | .filter(ChangesetComment.comment_type |
|
155 | 155 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
156 | 156 | |
|
157 | 157 | if not show_outdated: |
|
158 | 158 | todos = todos.filter( |
|
159 | 159 | coalesce(ChangesetComment.display_state, '') != |
|
160 | 160 | ChangesetComment.COMMENT_OUTDATED) |
|
161 | 161 | |
|
162 | 162 | todos = todos.all() |
|
163 | 163 | |
|
164 | 164 | return todos |
|
165 | 165 | |
|
166 | 166 | def create(self, text, repo, user, commit_id=None, pull_request=None, |
|
167 | 167 | f_path=None, line_no=None, status_change=None, |
|
168 | 168 | status_change_type=None, comment_type=None, |
|
169 | 169 | resolves_comment_id=None, closing_pr=False, send_email=True, |
|
170 | 170 | renderer=None): |
|
171 | 171 | """ |
|
172 | 172 | Creates new comment for commit or pull request. |
|
173 | 173 | IF status_change is not none this comment is associated with a |
|
174 | 174 | status change of commit or commit associated with pull request |
|
175 | 175 | |
|
176 | 176 | :param text: |
|
177 | 177 | :param repo: |
|
178 | 178 | :param user: |
|
179 | 179 | :param commit_id: |
|
180 | 180 | :param pull_request: |
|
181 | 181 | :param f_path: |
|
182 | 182 | :param line_no: |
|
183 | 183 | :param status_change: Label for status change |
|
184 | 184 | :param comment_type: Type of comment |
|
185 | 185 | :param status_change_type: type of status change |
|
186 | 186 | :param closing_pr: |
|
187 | 187 | :param send_email: |
|
188 | 188 | :param renderer: pick renderer for this comment |
|
189 | 189 | """ |
|
190 | 190 | if not text: |
|
191 | 191 | log.warning('Missing text for comment, skipping...') |
|
192 | 192 | return |
|
193 | 193 | |
|
194 | 194 | if not renderer: |
|
195 | 195 | renderer = self._get_renderer() |
|
196 | 196 | |
|
197 | 197 | repo = self._get_repo(repo) |
|
198 | 198 | user = self._get_user(user) |
|
199 | 199 | |
|
200 | 200 | schema = comment_schema.CommentSchema() |
|
201 | 201 | validated_kwargs = schema.deserialize(dict( |
|
202 | 202 | comment_body=text, |
|
203 | 203 | comment_type=comment_type, |
|
204 | 204 | comment_file=f_path, |
|
205 | 205 | comment_line=line_no, |
|
206 | 206 | renderer_type=renderer, |
|
207 | 207 | status_change=status_change_type, |
|
208 | 208 | resolves_comment_id=resolves_comment_id, |
|
209 | 209 | repo=repo.repo_id, |
|
210 | 210 | user=user.user_id, |
|
211 | 211 | )) |
|
212 | 212 | |
|
213 | 213 | comment = ChangesetComment() |
|
214 | 214 | comment.renderer = validated_kwargs['renderer_type'] |
|
215 | 215 | comment.text = validated_kwargs['comment_body'] |
|
216 | 216 | comment.f_path = validated_kwargs['comment_file'] |
|
217 | 217 | comment.line_no = validated_kwargs['comment_line'] |
|
218 | 218 | comment.comment_type = validated_kwargs['comment_type'] |
|
219 | 219 | |
|
220 | 220 | comment.repo = repo |
|
221 | 221 | comment.author = user |
|
222 | 222 | comment.resolved_comment = self.__get_commit_comment( |
|
223 | 223 | validated_kwargs['resolves_comment_id']) |
|
224 | 224 | |
|
225 | 225 | pull_request_id = pull_request |
|
226 | 226 | |
|
227 | 227 | commit_obj = None |
|
228 | 228 | pull_request_obj = None |
|
229 | 229 | |
|
230 | 230 | if commit_id: |
|
231 | 231 | notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT |
|
232 | 232 | # do a lookup, so we don't pass something bad here |
|
233 | 233 | commit_obj = repo.scm_instance().get_commit(commit_id=commit_id) |
|
234 | 234 | comment.revision = commit_obj.raw_id |
|
235 | 235 | |
|
236 | 236 | elif pull_request_id: |
|
237 | 237 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT |
|
238 | 238 | pull_request_obj = self.__get_pull_request(pull_request_id) |
|
239 | 239 | comment.pull_request = pull_request_obj |
|
240 | 240 | else: |
|
241 | 241 | raise Exception('Please specify commit or pull_request_id') |
|
242 | 242 | |
|
243 | 243 | Session().add(comment) |
|
244 | 244 | Session().flush() |
|
245 | 245 | kwargs = { |
|
246 | 246 | 'user': user, |
|
247 | 247 | 'renderer_type': renderer, |
|
248 | 248 | 'repo_name': repo.repo_name, |
|
249 | 249 | 'status_change': status_change, |
|
250 | 250 | 'status_change_type': status_change_type, |
|
251 | 251 | 'comment_body': text, |
|
252 | 252 | 'comment_file': f_path, |
|
253 | 253 | 'comment_line': line_no, |
|
254 | 254 | 'comment_type': comment_type or 'note' |
|
255 | 255 | } |
|
256 | 256 | |
|
257 | 257 | if commit_obj: |
|
258 | 258 | recipients = ChangesetComment.get_users( |
|
259 | 259 | revision=commit_obj.raw_id) |
|
260 | 260 | # add commit author if it's in RhodeCode system |
|
261 | 261 | cs_author = User.get_from_cs_author(commit_obj.author) |
|
262 | 262 | if not cs_author: |
|
263 | 263 | # use repo owner if we cannot extract the author correctly |
|
264 | 264 | cs_author = repo.user |
|
265 | 265 | recipients += [cs_author] |
|
266 | 266 | |
|
267 | 267 | commit_comment_url = self.get_url(comment) |
|
268 | 268 | |
|
269 | 269 | target_repo_url = h.link_to( |
|
270 | 270 | repo.repo_name, |
|
271 | 271 | h.route_url('repo_summary', repo_name=repo.repo_name)) |
|
272 | 272 | |
|
273 | 273 | # commit specifics |
|
274 | 274 | kwargs.update({ |
|
275 | 275 | 'commit': commit_obj, |
|
276 | 276 | 'commit_message': commit_obj.message, |
|
277 | 277 | 'commit_target_repo': target_repo_url, |
|
278 | 278 | 'commit_comment_url': commit_comment_url, |
|
279 | 279 | }) |
|
280 | 280 | |
|
281 | 281 | elif pull_request_obj: |
|
282 | 282 | # get the current participants of this pull request |
|
283 | 283 | recipients = ChangesetComment.get_users( |
|
284 | 284 | pull_request_id=pull_request_obj.pull_request_id) |
|
285 | 285 | # add pull request author |
|
286 | 286 | recipients += [pull_request_obj.author] |
|
287 | 287 | |
|
288 | 288 | # add the reviewers to notification |
|
289 | 289 | recipients += [x.user for x in pull_request_obj.reviewers] |
|
290 | 290 | |
|
291 | 291 | pr_target_repo = pull_request_obj.target_repo |
|
292 | 292 | pr_source_repo = pull_request_obj.source_repo |
|
293 | 293 | |
|
294 | 294 | pr_comment_url = h.url( |
|
295 | 295 | 'pullrequest_show', |
|
296 | 296 | repo_name=pr_target_repo.repo_name, |
|
297 | 297 | pull_request_id=pull_request_obj.pull_request_id, |
|
298 | 298 | anchor='comment-%s' % comment.comment_id, |
|
299 | 299 | qualified=True,) |
|
300 | 300 | |
|
301 | 301 | # set some variables for email notification |
|
302 | 302 | pr_target_repo_url = h.route_url( |
|
303 | 303 | 'repo_summary', repo_name=pr_target_repo.repo_name) |
|
304 | 304 | |
|
305 | 305 | pr_source_repo_url = h.route_url( |
|
306 | 306 | 'repo_summary', repo_name=pr_source_repo.repo_name) |
|
307 | 307 | |
|
308 | 308 | # pull request specifics |
|
309 | 309 | kwargs.update({ |
|
310 | 310 | 'pull_request': pull_request_obj, |
|
311 | 311 | 'pr_id': pull_request_obj.pull_request_id, |
|
312 | 312 | 'pr_target_repo': pr_target_repo, |
|
313 | 313 | 'pr_target_repo_url': pr_target_repo_url, |
|
314 | 314 | 'pr_source_repo': pr_source_repo, |
|
315 | 315 | 'pr_source_repo_url': pr_source_repo_url, |
|
316 | 316 | 'pr_comment_url': pr_comment_url, |
|
317 | 317 | 'pr_closing': closing_pr, |
|
318 | 318 | }) |
|
319 | 319 | if send_email: |
|
320 | 320 | # pre-generate the subject for notification itself |
|
321 | 321 | (subject, |
|
322 | 322 | _h, _e, # we don't care about those |
|
323 | 323 | body_plaintext) = EmailNotificationModel().render_email( |
|
324 | 324 | notification_type, **kwargs) |
|
325 | 325 | |
|
326 | 326 | mention_recipients = set( |
|
327 | 327 | self._extract_mentions(text)).difference(recipients) |
|
328 | 328 | |
|
329 | 329 | # create notification objects, and emails |
|
330 | 330 | NotificationModel().create( |
|
331 | 331 | created_by=user, |
|
332 | 332 | notification_subject=subject, |
|
333 | 333 | notification_body=body_plaintext, |
|
334 | 334 | notification_type=notification_type, |
|
335 | 335 | recipients=recipients, |
|
336 | 336 | mention_recipients=mention_recipients, |
|
337 | 337 | email_kwargs=kwargs, |
|
338 | 338 | ) |
|
339 | 339 | |
|
340 | 340 | action = ( |
|
341 | 341 | 'user_commented_pull_request:{}'.format( |
|
342 | 342 | comment.pull_request.pull_request_id) |
|
343 | 343 | if comment.pull_request |
|
344 | 344 | else 'user_commented_revision:{}'.format(comment.revision) |
|
345 | 345 | ) |
|
346 | 346 | action_logger(user, action, comment.repo) |
|
347 | 347 | |
|
348 | 348 | registry = get_current_registry() |
|
349 | 349 | rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {}) |
|
350 | 350 | channelstream_config = rhodecode_plugins.get('channelstream', {}) |
|
351 | 351 | msg_url = '' |
|
352 | 352 | if commit_obj: |
|
353 | 353 | msg_url = commit_comment_url |
|
354 | 354 | repo_name = repo.repo_name |
|
355 | 355 | elif pull_request_obj: |
|
356 | 356 | msg_url = pr_comment_url |
|
357 | 357 | repo_name = pr_target_repo.repo_name |
|
358 | 358 | |
|
359 | 359 | if channelstream_config.get('enabled'): |
|
360 | 360 | message = '<strong>{}</strong> {} - ' \ |
|
361 | 361 | '<a onclick="window.location=\'{}\';' \ |
|
362 | 362 | 'window.location.reload()">' \ |
|
363 | 363 | '<strong>{}</strong></a>' |
|
364 | 364 | message = message.format( |
|
365 | 365 | user.username, _('made a comment'), msg_url, |
|
366 | 366 | _('Show it now')) |
|
367 | 367 | channel = '/repo${}$/pr/{}'.format( |
|
368 | 368 | repo_name, |
|
369 | 369 | pull_request_id |
|
370 | 370 | ) |
|
371 | 371 | payload = { |
|
372 | 372 | 'type': 'message', |
|
373 | 373 | 'timestamp': datetime.utcnow(), |
|
374 | 374 | 'user': 'system', |
|
375 | 375 | 'exclude_users': [user.username], |
|
376 | 376 | 'channel': channel, |
|
377 | 377 | 'message': { |
|
378 | 378 | 'message': message, |
|
379 | 379 | 'level': 'info', |
|
380 | 380 | 'topic': '/notifications' |
|
381 | 381 | } |
|
382 | 382 | } |
|
383 | 383 | channelstream_request(channelstream_config, [payload], |
|
384 | 384 | '/message', raise_exc=False) |
|
385 | 385 | |
|
386 | 386 | return comment |
|
387 | 387 | |
|
388 | 388 | def delete(self, comment): |
|
389 | 389 | """ |
|
390 | 390 | Deletes given comment |
|
391 | 391 | |
|
392 | 392 | :param comment_id: |
|
393 | 393 | """ |
|
394 | 394 | comment = self.__get_commit_comment(comment) |
|
395 | 395 | Session().delete(comment) |
|
396 | 396 | |
|
397 | 397 | return comment |
|
398 | 398 | |
|
399 | 399 | def get_all_comments(self, repo_id, revision=None, pull_request=None): |
|
400 | 400 | q = ChangesetComment.query()\ |
|
401 | 401 | .filter(ChangesetComment.repo_id == repo_id) |
|
402 | 402 | if revision: |
|
403 | 403 | q = q.filter(ChangesetComment.revision == revision) |
|
404 | 404 | elif pull_request: |
|
405 | 405 | pull_request = self.__get_pull_request(pull_request) |
|
406 | 406 | q = q.filter(ChangesetComment.pull_request == pull_request) |
|
407 | 407 | else: |
|
408 | 408 | raise Exception('Please specify commit or pull_request') |
|
409 | 409 | q = q.order_by(ChangesetComment.created_on) |
|
410 | 410 | return q.all() |
|
411 | 411 | |
|
412 | def get_url(self, comment): | |
|
412 | def get_url(self, comment, request=None, permalink=False): | |
|
413 | if not request: | |
|
414 | request = get_current_request() | |
|
415 | ||
|
413 | 416 | comment = self.__get_commit_comment(comment) |
|
414 | 417 | if comment.pull_request: |
|
415 | return h.url( | |
|
416 | 'pullrequest_show', | |
|
417 | repo_name=comment.pull_request.target_repo.repo_name, | |
|
418 | pull_request_id=comment.pull_request.pull_request_id, | |
|
419 | anchor='comment-%s' % comment.comment_id, | |
|
420 | qualified=True,) | |
|
418 | pull_request = comment.pull_request | |
|
419 | if permalink: | |
|
420 | return request.route_url( | |
|
421 | 'pull_requests_global', | |
|
422 | pull_request_id=pull_request.pull_request_id, | |
|
423 | _anchor='comment-%s' % comment.comment_id) | |
|
424 | else: | |
|
425 | return request.route_url( | |
|
426 | 'pullrequest_show', | |
|
427 | repo_name=safe_str(pull_request.target_repo.repo_name), | |
|
428 | pull_request_id=pull_request.pull_request_id, | |
|
429 | _anchor='comment-%s' % comment.comment_id) | |
|
430 | ||
|
421 | 431 | else: |
|
422 | return h.url( | |
|
423 | 'changeset_home', | |
|
424 | repo_name=comment.repo.repo_name, | |
|
425 | revision=comment.revision, | |
|
426 | anchor='comment-%s' % comment.comment_id, | |
|
427 | qualified=True,) | |
|
432 | repo = comment.repo | |
|
433 | commit_id = comment.revision | |
|
434 | ||
|
435 | if permalink: | |
|
436 | return request.route_url( | |
|
437 | 'repo_commit', repo_name=safe_str(repo.repo_id), | |
|
438 | commit_id=commit_id, | |
|
439 | _anchor='comment-%s' % comment.comment_id) | |
|
440 | ||
|
441 | else: | |
|
442 | return request.route_url( | |
|
443 | 'repo_commit', repo_name=safe_str(repo.repo_name), | |
|
444 | commit_id=commit_id, | |
|
445 | _anchor='comment-%s' % comment.comment_id) | |
|
428 | 446 | |
|
429 | 447 | def get_comments(self, repo_id, revision=None, pull_request=None): |
|
430 | 448 | """ |
|
431 | 449 | Gets main comments based on revision or pull_request_id |
|
432 | 450 | |
|
433 | 451 | :param repo_id: |
|
434 | 452 | :param revision: |
|
435 | 453 | :param pull_request: |
|
436 | 454 | """ |
|
437 | 455 | |
|
438 | 456 | q = ChangesetComment.query()\ |
|
439 | 457 | .filter(ChangesetComment.repo_id == repo_id)\ |
|
440 | 458 | .filter(ChangesetComment.line_no == None)\ |
|
441 | 459 | .filter(ChangesetComment.f_path == None) |
|
442 | 460 | if revision: |
|
443 | 461 | q = q.filter(ChangesetComment.revision == revision) |
|
444 | 462 | elif pull_request: |
|
445 | 463 | pull_request = self.__get_pull_request(pull_request) |
|
446 | 464 | q = q.filter(ChangesetComment.pull_request == pull_request) |
|
447 | 465 | else: |
|
448 | 466 | raise Exception('Please specify commit or pull_request') |
|
449 | 467 | q = q.order_by(ChangesetComment.created_on) |
|
450 | 468 | return q.all() |
|
451 | 469 | |
|
452 | 470 | def get_inline_comments(self, repo_id, revision=None, pull_request=None): |
|
453 | 471 | q = self._get_inline_comments_query(repo_id, revision, pull_request) |
|
454 | 472 | return self._group_comments_by_path_and_line_number(q) |
|
455 | 473 | |
|
456 | 474 | def get_inline_comments_count(self, inline_comments, skip_outdated=True, |
|
457 | 475 | version=None): |
|
458 | 476 | inline_cnt = 0 |
|
459 | 477 | for fname, per_line_comments in inline_comments.iteritems(): |
|
460 | 478 | for lno, comments in per_line_comments.iteritems(): |
|
461 | 479 | for comm in comments: |
|
462 | 480 | if not comm.outdated_at_version(version) and skip_outdated: |
|
463 | 481 | inline_cnt += 1 |
|
464 | 482 | |
|
465 | 483 | return inline_cnt |
|
466 | 484 | |
|
467 | 485 | def get_outdated_comments(self, repo_id, pull_request): |
|
468 | 486 | # TODO: johbo: Remove `repo_id`, it is not needed to find the comments |
|
469 | 487 | # of a pull request. |
|
470 | 488 | q = self._all_inline_comments_of_pull_request(pull_request) |
|
471 | 489 | q = q.filter( |
|
472 | 490 | ChangesetComment.display_state == |
|
473 | 491 | ChangesetComment.COMMENT_OUTDATED |
|
474 | 492 | ).order_by(ChangesetComment.comment_id.asc()) |
|
475 | 493 | |
|
476 | 494 | return self._group_comments_by_path_and_line_number(q) |
|
477 | 495 | |
|
478 | 496 | def _get_inline_comments_query(self, repo_id, revision, pull_request): |
|
479 | 497 | # TODO: johbo: Split this into two methods: One for PR and one for |
|
480 | 498 | # commit. |
|
481 | 499 | if revision: |
|
482 | 500 | q = Session().query(ChangesetComment).filter( |
|
483 | 501 | ChangesetComment.repo_id == repo_id, |
|
484 | 502 | ChangesetComment.line_no != null(), |
|
485 | 503 | ChangesetComment.f_path != null(), |
|
486 | 504 | ChangesetComment.revision == revision) |
|
487 | 505 | |
|
488 | 506 | elif pull_request: |
|
489 | 507 | pull_request = self.__get_pull_request(pull_request) |
|
490 | 508 | if not CommentsModel.use_outdated_comments(pull_request): |
|
491 | 509 | q = self._visible_inline_comments_of_pull_request(pull_request) |
|
492 | 510 | else: |
|
493 | 511 | q = self._all_inline_comments_of_pull_request(pull_request) |
|
494 | 512 | |
|
495 | 513 | else: |
|
496 | 514 | raise Exception('Please specify commit or pull_request_id') |
|
497 | 515 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
498 | 516 | return q |
|
499 | 517 | |
|
500 | 518 | def _group_comments_by_path_and_line_number(self, q): |
|
501 | 519 | comments = q.all() |
|
502 | 520 | paths = collections.defaultdict(lambda: collections.defaultdict(list)) |
|
503 | 521 | for co in comments: |
|
504 | 522 | paths[co.f_path][co.line_no].append(co) |
|
505 | 523 | return paths |
|
506 | 524 | |
|
507 | 525 | @classmethod |
|
508 | 526 | def needed_extra_diff_context(cls): |
|
509 | 527 | return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER) |
|
510 | 528 | |
|
511 | 529 | def outdate_comments(self, pull_request, old_diff_data, new_diff_data): |
|
512 | 530 | if not CommentsModel.use_outdated_comments(pull_request): |
|
513 | 531 | return |
|
514 | 532 | |
|
515 | 533 | comments = self._visible_inline_comments_of_pull_request(pull_request) |
|
516 | 534 | comments_to_outdate = comments.all() |
|
517 | 535 | |
|
518 | 536 | for comment in comments_to_outdate: |
|
519 | 537 | self._outdate_one_comment(comment, old_diff_data, new_diff_data) |
|
520 | 538 | |
|
521 | 539 | def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc): |
|
522 | 540 | diff_line = _parse_comment_line_number(comment.line_no) |
|
523 | 541 | |
|
524 | 542 | try: |
|
525 | 543 | old_context = old_diff_proc.get_context_of_line( |
|
526 | 544 | path=comment.f_path, diff_line=diff_line) |
|
527 | 545 | new_context = new_diff_proc.get_context_of_line( |
|
528 | 546 | path=comment.f_path, diff_line=diff_line) |
|
529 | 547 | except (diffs.LineNotInDiffException, |
|
530 | 548 | diffs.FileNotInDiffException): |
|
531 | 549 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
532 | 550 | return |
|
533 | 551 | |
|
534 | 552 | if old_context == new_context: |
|
535 | 553 | return |
|
536 | 554 | |
|
537 | 555 | if self._should_relocate_diff_line(diff_line): |
|
538 | 556 | new_diff_lines = new_diff_proc.find_context( |
|
539 | 557 | path=comment.f_path, context=old_context, |
|
540 | 558 | offset=self.DIFF_CONTEXT_BEFORE) |
|
541 | 559 | if not new_diff_lines: |
|
542 | 560 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
543 | 561 | else: |
|
544 | 562 | new_diff_line = self._choose_closest_diff_line( |
|
545 | 563 | diff_line, new_diff_lines) |
|
546 | 564 | comment.line_no = _diff_to_comment_line_number(new_diff_line) |
|
547 | 565 | else: |
|
548 | 566 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
549 | 567 | |
|
550 | 568 | def _should_relocate_diff_line(self, diff_line): |
|
551 | 569 | """ |
|
552 | 570 | Checks if relocation shall be tried for the given `diff_line`. |
|
553 | 571 | |
|
554 | 572 | If a comment points into the first lines, then we can have a situation |
|
555 | 573 | that after an update another line has been added on top. In this case |
|
556 | 574 | we would find the context still and move the comment around. This |
|
557 | 575 | would be wrong. |
|
558 | 576 | """ |
|
559 | 577 | should_relocate = ( |
|
560 | 578 | (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or |
|
561 | 579 | (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE)) |
|
562 | 580 | return should_relocate |
|
563 | 581 | |
|
564 | 582 | def _choose_closest_diff_line(self, diff_line, new_diff_lines): |
|
565 | 583 | candidate = new_diff_lines[0] |
|
566 | 584 | best_delta = _diff_line_delta(diff_line, candidate) |
|
567 | 585 | for new_diff_line in new_diff_lines[1:]: |
|
568 | 586 | delta = _diff_line_delta(diff_line, new_diff_line) |
|
569 | 587 | if delta < best_delta: |
|
570 | 588 | candidate = new_diff_line |
|
571 | 589 | best_delta = delta |
|
572 | 590 | return candidate |
|
573 | 591 | |
|
574 | 592 | def _visible_inline_comments_of_pull_request(self, pull_request): |
|
575 | 593 | comments = self._all_inline_comments_of_pull_request(pull_request) |
|
576 | 594 | comments = comments.filter( |
|
577 | 595 | coalesce(ChangesetComment.display_state, '') != |
|
578 | 596 | ChangesetComment.COMMENT_OUTDATED) |
|
579 | 597 | return comments |
|
580 | 598 | |
|
581 | 599 | def _all_inline_comments_of_pull_request(self, pull_request): |
|
582 | 600 | comments = Session().query(ChangesetComment)\ |
|
583 | 601 | .filter(ChangesetComment.line_no != None)\ |
|
584 | 602 | .filter(ChangesetComment.f_path != None)\ |
|
585 | 603 | .filter(ChangesetComment.pull_request == pull_request) |
|
586 | 604 | return comments |
|
587 | 605 | |
|
588 | 606 | def _all_general_comments_of_pull_request(self, pull_request): |
|
589 | 607 | comments = Session().query(ChangesetComment)\ |
|
590 | 608 | .filter(ChangesetComment.line_no == None)\ |
|
591 | 609 | .filter(ChangesetComment.f_path == None)\ |
|
592 | 610 | .filter(ChangesetComment.pull_request == pull_request) |
|
593 | 611 | return comments |
|
594 | 612 | |
|
595 | 613 | @staticmethod |
|
596 | 614 | def use_outdated_comments(pull_request): |
|
597 | 615 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
598 | 616 | settings = settings_model.get_general_settings() |
|
599 | 617 | return settings.get('rhodecode_use_outdated_comments', False) |
|
600 | 618 | |
|
601 | 619 | |
|
602 | 620 | def _parse_comment_line_number(line_no): |
|
603 | 621 | """ |
|
604 | 622 | Parses line numbers of the form "(o|n)\d+" and returns them in a tuple. |
|
605 | 623 | """ |
|
606 | 624 | old_line = None |
|
607 | 625 | new_line = None |
|
608 | 626 | if line_no.startswith('o'): |
|
609 | 627 | old_line = int(line_no[1:]) |
|
610 | 628 | elif line_no.startswith('n'): |
|
611 | 629 | new_line = int(line_no[1:]) |
|
612 | 630 | else: |
|
613 | 631 | raise ValueError("Comment lines have to start with either 'o' or 'n'.") |
|
614 | 632 | return diffs.DiffLineNumber(old_line, new_line) |
|
615 | 633 | |
|
616 | 634 | |
|
617 | 635 | def _diff_to_comment_line_number(diff_line): |
|
618 | 636 | if diff_line.new is not None: |
|
619 | 637 | return u'n{}'.format(diff_line.new) |
|
620 | 638 | elif diff_line.old is not None: |
|
621 | 639 | return u'o{}'.format(diff_line.old) |
|
622 | 640 | return u'' |
|
623 | 641 | |
|
624 | 642 | |
|
625 | 643 | def _diff_line_delta(a, b): |
|
626 | 644 | if None not in (a.new, b.new): |
|
627 | 645 | return abs(a.new - b.new) |
|
628 | 646 | elif None not in (a.old, b.old): |
|
629 | 647 | return abs(a.old - b.old) |
|
630 | 648 | else: |
|
631 | 649 | raise ValueError( |
|
632 | 650 | "Cannot compute delta between {} and {}".format(a, b)) |
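
The reworked `get_url()` above accepts an explicit `request` and a `permalink` flag instead of relying on `h.url()`. A hedged usage sketch follows; it assumes a Pyramid request and a persisted comment are already at hand, so it is illustrative rather than something lifted from RhodeCode's call sites.

```python
from rhodecode.model.comment import CommentsModel


def comment_links(comment, request):
    model = CommentsModel()
    return {
        # human-friendly URL built from the repository / pull request name
        'url': model.get_url(comment, request=request),
        # id-based permalink, intended to keep working after renames
        'permalink_url': model.get_url(
            comment, request=request, permalink=True),
    }
```

When `request` is omitted, the method falls back to `get_current_request()`, which matches how the event serializers earlier in this changeset call it without passing one.
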
@@ -1,1495 +1,1504 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | """ |
|
23 | 23 | pull request model for RhodeCode |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | from collections import namedtuple |
|
27 | 27 | import json |
|
28 | 28 | import logging |
|
29 | 29 | import datetime |
|
30 | 30 | import urllib |
|
31 | 31 | |
|
32 | 32 | from pylons.i18n.translation import _ |
|
33 | 33 | from pylons.i18n.translation import lazy_ugettext |
|
34 | from pyramid.threadlocal import get_current_request | |
|
34 | 35 | from sqlalchemy import or_ |
|
35 | 36 | |
|
36 | 37 | from rhodecode.lib import helpers as h, hooks_utils, diffs |
|
37 | 38 | from rhodecode.lib.compat import OrderedDict |
|
38 | 39 | from rhodecode.lib.hooks_daemon import prepare_callback_daemon |
|
39 | 40 | from rhodecode.lib.markup_renderer import ( |
|
40 | 41 | DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer) |
|
41 | 42 | from rhodecode.lib.utils import action_logger |
|
42 | 43 | from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe |
|
43 | 44 | from rhodecode.lib.vcs.backends.base import ( |
|
44 | 45 | Reference, MergeResponse, MergeFailureReason, UpdateFailureReason) |
|
45 | 46 | from rhodecode.lib.vcs.conf import settings as vcs_settings |
|
46 | 47 | from rhodecode.lib.vcs.exceptions import ( |
|
47 | 48 | CommitDoesNotExistError, EmptyRepositoryError) |
|
48 | 49 | from rhodecode.model import BaseModel |
|
49 | 50 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
50 | 51 | from rhodecode.model.comment import CommentsModel |
|
51 | 52 | from rhodecode.model.db import ( |
|
52 | 53 | PullRequest, PullRequestReviewers, ChangesetStatus, |
|
53 | 54 | PullRequestVersion, ChangesetComment, Repository) |
|
54 | 55 | from rhodecode.model.meta import Session |
|
55 | 56 | from rhodecode.model.notification import NotificationModel, \ |
|
56 | 57 | EmailNotificationModel |
|
57 | 58 | from rhodecode.model.scm import ScmModel |
|
58 | 59 | from rhodecode.model.settings import VcsSettingsModel |
|
59 | 60 | |
|
60 | 61 | |
|
61 | 62 | log = logging.getLogger(__name__) |
|
62 | 63 | |
|
63 | 64 | |
|
64 | 65 | # Data structure to hold the response data when updating commits during a pull |
|
65 | 66 | # request update. |
|
66 | 67 | UpdateResponse = namedtuple('UpdateResponse', [ |
|
67 | 68 | 'executed', 'reason', 'new', 'old', 'changes', |
|
68 | 69 | 'source_changed', 'target_changed']) |
|
69 | 70 | |
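Reviewer note: a quick illustration of the shapes this tuple takes in update_commits() further down. The values below are invented and UpdateFailureReason members are written as plain strings only to keep the snippet standalone:

    from collections import namedtuple

    UpdateResponse = namedtuple('UpdateResponse', [
        'executed', 'reason', 'new', 'old', 'changes',
        'source_changed', 'target_changed'])

    # early bail-out, e.g. when neither the source nor the target ref moved
    no_change = UpdateResponse(
        executed=False, reason='NO_CHANGE', new=None, old=None, changes=None,
        source_changed=False, target_changed=False)

    # a successful update carries the pull request in `old`, the stored
    # version snapshot in `new` and the added/common/removed commit ids
    # in `changes`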
|
70 | 71 | |
|
71 | 72 | class PullRequestModel(BaseModel): |
|
72 | 73 | |
|
73 | 74 | cls = PullRequest |
|
74 | 75 | |
|
75 | 76 | DIFF_CONTEXT = 3 |
|
76 | 77 | |
|
77 | 78 | MERGE_STATUS_MESSAGES = { |
|
78 | 79 | MergeFailureReason.NONE: lazy_ugettext( |
|
79 | 80 | 'This pull request can be automatically merged.'), |
|
80 | 81 | MergeFailureReason.UNKNOWN: lazy_ugettext( |
|
81 | 82 | 'This pull request cannot be merged because of an unhandled' |
|
82 | 83 | ' exception.'), |
|
83 | 84 | MergeFailureReason.MERGE_FAILED: lazy_ugettext( |
|
84 | 85 | 'This pull request cannot be merged because of merge conflicts.'), |
|
85 | 86 | MergeFailureReason.PUSH_FAILED: lazy_ugettext( |
|
86 | 87 | 'This pull request could not be merged because push to target' |
|
87 | 88 | ' failed.'), |
|
88 | 89 | MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext( |
|
89 | 90 | 'This pull request cannot be merged because the target is not a' |
|
90 | 91 | ' head.'), |
|
91 | 92 | MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext( |
|
92 | 93 | 'This pull request cannot be merged because the source contains' |
|
93 | 94 | ' more branches than the target.'), |
|
94 | 95 | MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext( |
|
95 | 96 | 'This pull request cannot be merged because the target has' |
|
96 | 97 | ' multiple heads.'), |
|
97 | 98 | MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext( |
|
98 | 99 | 'This pull request cannot be merged because the target repository' |
|
99 | 100 | ' is locked.'), |
|
100 | 101 | MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext( |
|
101 | 102 | 'This pull request cannot be merged because the target or the ' |
|
102 | 103 | 'source reference is missing.'), |
|
103 | 104 | MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext( |
|
104 | 105 | 'This pull request cannot be merged because the target ' |
|
105 | 106 | 'reference is missing.'), |
|
106 | 107 | MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext( |
|
107 | 108 | 'This pull request cannot be merged because the source ' |
|
108 | 109 | 'reference is missing.'), |
|
109 | 110 | MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext( |
|
110 | 111 | 'This pull request cannot be merged because of conflicts related ' |
|
111 | 112 | 'to sub repositories.'), |
|
112 | 113 | } |
|
113 | 114 | |
|
114 | 115 | UPDATE_STATUS_MESSAGES = { |
|
115 | 116 | UpdateFailureReason.NONE: lazy_ugettext( |
|
116 | 117 | 'Pull request update successful.'), |
|
117 | 118 | UpdateFailureReason.UNKNOWN: lazy_ugettext( |
|
118 | 119 | 'Pull request update failed because of an unknown error.'), |
|
119 | 120 | UpdateFailureReason.NO_CHANGE: lazy_ugettext( |
|
120 | 121 | 'No update needed because the source and target have not changed.'), |
|
121 | 122 | UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext( |
|
122 | 123 | 'Pull request cannot be updated because the reference type is ' |
|
123 | 124 | 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'), |
|
124 | 125 | UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext( |
|
125 | 126 | 'This pull request cannot be updated because the target ' |
|
126 | 127 | 'reference is missing.'), |
|
127 | 128 | UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext( |
|
128 | 129 | 'This pull request cannot be updated because the source ' |
|
129 | 130 | 'reference is missing.'), |
|
130 | 131 | } |
|
131 | 132 | |
|
132 | 133 | def __get_pull_request(self, pull_request): |
|
133 | 134 | return self._get_instance(( |
|
134 | 135 | PullRequest, PullRequestVersion), pull_request) |
|
135 | 136 | |
|
136 | 137 | def _check_perms(self, perms, pull_request, user, api=False): |
|
137 | 138 | if not api: |
|
138 | 139 | return h.HasRepoPermissionAny(*perms)( |
|
139 | 140 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
140 | 141 | else: |
|
141 | 142 | return h.HasRepoPermissionAnyApi(*perms)( |
|
142 | 143 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
143 | 144 | |
|
144 | 145 | def check_user_read(self, pull_request, user, api=False): |
|
145 | 146 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
146 | 147 | return self._check_perms(_perms, pull_request, user, api) |
|
147 | 148 | |
|
148 | 149 | def check_user_merge(self, pull_request, user, api=False): |
|
149 | 150 | _perms = ('repository.admin', 'repository.write', 'hg.admin',) |
|
150 | 151 | return self._check_perms(_perms, pull_request, user, api) |
|
151 | 152 | |
|
152 | 153 | def check_user_update(self, pull_request, user, api=False): |
|
153 | 154 | owner = user.user_id == pull_request.user_id |
|
154 | 155 | return self.check_user_merge(pull_request, user, api) or owner |
|
155 | 156 | |
|
156 | 157 | def check_user_delete(self, pull_request, user): |
|
157 | 158 | owner = user.user_id == pull_request.user_id |
|
158 | 159 | _perms = ('repository.admin',) |
|
159 | 160 | return self._check_perms(_perms, pull_request, user) or owner |
|
160 | 161 | |
|
161 | 162 | def check_user_change_status(self, pull_request, user, api=False): |
|
162 | 163 | reviewer = user.user_id in [x.user_id for x in |
|
163 | 164 | pull_request.reviewers] |
|
164 | 165 | return self.check_user_update(pull_request, user, api) or reviewer |
|
165 | 166 | |
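Reviewer note: the checks above form a small cascade, each level adding one extra actor on top of plain repository permissions. Summarised from the code above (not from separate documentation):

    # read          -> repository.admin / repository.write / repository.read
    # merge         -> repository.admin / repository.write / hg.admin
    # update        -> merge permissions  OR the pull request owner
    # delete        -> repository.admin   OR the pull request owner
    # change_status -> update permissions OR a listed reviewer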
|
166 | 167 | def get(self, pull_request): |
|
167 | 168 | return self.__get_pull_request(pull_request) |
|
168 | 169 | |
|
169 | 170 | def _prepare_get_all_query(self, repo_name, source=False, statuses=None, |
|
170 | 171 | opened_by=None, order_by=None, |
|
171 | 172 | order_dir='desc'): |
|
172 | 173 | repo = None |
|
173 | 174 | if repo_name: |
|
174 | 175 | repo = self._get_repo(repo_name) |
|
175 | 176 | |
|
176 | 177 | q = PullRequest.query() |
|
177 | 178 | |
|
178 | 179 | # source or target |
|
179 | 180 | if repo and source: |
|
180 | 181 | q = q.filter(PullRequest.source_repo == repo) |
|
181 | 182 | elif repo: |
|
182 | 183 | q = q.filter(PullRequest.target_repo == repo) |
|
183 | 184 | |
|
184 | 185 | # closed,opened |
|
185 | 186 | if statuses: |
|
186 | 187 | q = q.filter(PullRequest.status.in_(statuses)) |
|
187 | 188 | |
|
188 | 189 | # opened by filter |
|
189 | 190 | if opened_by: |
|
190 | 191 | q = q.filter(PullRequest.user_id.in_(opened_by)) |
|
191 | 192 | |
|
192 | 193 | if order_by: |
|
193 | 194 | order_map = { |
|
194 | 195 | 'name_raw': PullRequest.pull_request_id, |
|
195 | 196 | 'title': PullRequest.title, |
|
196 | 197 | 'updated_on_raw': PullRequest.updated_on, |
|
197 | 198 | 'target_repo': PullRequest.target_repo_id |
|
198 | 199 | } |
|
199 | 200 | if order_dir == 'asc': |
|
200 | 201 | q = q.order_by(order_map[order_by].asc()) |
|
201 | 202 | else: |
|
202 | 203 | q = q.order_by(order_map[order_by].desc()) |
|
203 | 204 | |
|
204 | 205 | return q |
|
205 | 206 | |
|
206 | 207 | def count_all(self, repo_name, source=False, statuses=None, |
|
207 | 208 | opened_by=None): |
|
208 | 209 | """ |
|
209 | 210 | Count the number of pull requests for a specific repository. |
|
210 | 211 | |
|
211 | 212 | :param repo_name: target or source repo |
|
212 | 213 | :param source: boolean flag to specify if repo_name refers to source |
|
213 | 214 | :param statuses: list of pull request statuses |
|
214 | 215 | :param opened_by: author user of the pull request |
|
215 | 216 | :returns: int number of pull requests |
|
216 | 217 | """ |
|
217 | 218 | q = self._prepare_get_all_query( |
|
218 | 219 | repo_name, source=source, statuses=statuses, opened_by=opened_by) |
|
219 | 220 | |
|
220 | 221 | return q.count() |
|
221 | 222 | |
|
222 | 223 | def get_all(self, repo_name, source=False, statuses=None, opened_by=None, |
|
223 | 224 | offset=0, length=None, order_by=None, order_dir='desc'): |
|
224 | 225 | """ |
|
225 | 226 | Get all pull requests for a specific repository. |
|
226 | 227 | |
|
227 | 228 | :param repo_name: target or source repo |
|
228 | 229 | :param source: boolean flag to specify if repo_name refers to source |
|
229 | 230 | :param statuses: list of pull request statuses |
|
230 | 231 | :param opened_by: author user of the pull request |
|
231 | 232 | :param offset: pagination offset |
|
232 | 233 | :param length: length of returned list |
|
233 | 234 | :param order_by: order of the returned list |
|
234 | 235 | :param order_dir: 'asc' or 'desc' ordering direction |
|
235 | 236 | :returns: list of pull requests |
|
236 | 237 | """ |
|
237 | 238 | q = self._prepare_get_all_query( |
|
238 | 239 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
239 | 240 | order_by=order_by, order_dir=order_dir) |
|
240 | 241 | |
|
241 | 242 | if length: |
|
242 | 243 | pull_requests = q.limit(length).offset(offset).all() |
|
243 | 244 | else: |
|
244 | 245 | pull_requests = q.all() |
|
245 | 246 | |
|
246 | 247 | return pull_requests |
|
247 | 248 | |
|
248 | 249 | def count_awaiting_review(self, repo_name, source=False, statuses=None, |
|
249 | 250 | opened_by=None): |
|
250 | 251 | """ |
|
251 | 252 | Count the number of pull requests for a specific repository that are |
|
252 | 253 | awaiting review. |
|
253 | 254 | |
|
254 | 255 | :param repo_name: target or source repo |
|
255 | 256 | :param source: boolean flag to specify if repo_name refers to source |
|
256 | 257 | :param statuses: list of pull request statuses |
|
257 | 258 | :param opened_by: author user of the pull request |
|
258 | 259 | :returns: int number of pull requests |
|
259 | 260 | """ |
|
260 | 261 | pull_requests = self.get_awaiting_review( |
|
261 | 262 | repo_name, source=source, statuses=statuses, opened_by=opened_by) |
|
262 | 263 | |
|
263 | 264 | return len(pull_requests) |
|
264 | 265 | |
|
265 | 266 | def get_awaiting_review(self, repo_name, source=False, statuses=None, |
|
266 | 267 | opened_by=None, offset=0, length=None, |
|
267 | 268 | order_by=None, order_dir='desc'): |
|
268 | 269 | """ |
|
269 | 270 | Get all pull requests for a specific repository that are awaiting |
|
270 | 271 | review. |
|
271 | 272 | |
|
272 | 273 | :param repo_name: target or source repo |
|
273 | 274 | :param source: boolean flag to specify if repo_name refers to source |
|
274 | 275 | :param statuses: list of pull request statuses |
|
275 | 276 | :param opened_by: author user of the pull request |
|
276 | 277 | :param offset: pagination offset |
|
277 | 278 | :param length: length of returned list |
|
278 | 279 | :param order_by: order of the returned list |
|
279 | 280 | :param order_dir: 'asc' or 'desc' ordering direction |
|
280 | 281 | :returns: list of pull requests |
|
281 | 282 | """ |
|
282 | 283 | pull_requests = self.get_all( |
|
283 | 284 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
284 | 285 | order_by=order_by, order_dir=order_dir) |
|
285 | 286 | |
|
286 | 287 | _filtered_pull_requests = [] |
|
287 | 288 | for pr in pull_requests: |
|
288 | 289 | status = pr.calculated_review_status() |
|
289 | 290 | if status in [ChangesetStatus.STATUS_NOT_REVIEWED, |
|
290 | 291 | ChangesetStatus.STATUS_UNDER_REVIEW]: |
|
291 | 292 | _filtered_pull_requests.append(pr) |
|
292 | 293 | if length: |
|
293 | 294 | return _filtered_pull_requests[offset:offset+length] |
|
294 | 295 | else: |
|
295 | 296 | return _filtered_pull_requests |
|
296 | 297 | |
|
297 | 298 | def count_awaiting_my_review(self, repo_name, source=False, statuses=None, |
|
298 | 299 | opened_by=None, user_id=None): |
|
299 | 300 | """ |
|
300 | 301 | Count the number of pull requests for a specific repository that are |
|
301 | 302 | awaiting review from a specific user. |
|
302 | 303 | |
|
303 | 304 | :param repo_name: target or source repo |
|
304 | 305 | :param source: boolean flag to specify if repo_name refers to source |
|
305 | 306 | :param statuses: list of pull request statuses |
|
306 | 307 | :param opened_by: author user of the pull request |
|
307 | 308 | :param user_id: reviewer user of the pull request |
|
308 | 309 | :returns: int number of pull requests |
|
309 | 310 | """ |
|
310 | 311 | pull_requests = self.get_awaiting_my_review( |
|
311 | 312 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
312 | 313 | user_id=user_id) |
|
313 | 314 | |
|
314 | 315 | return len(pull_requests) |
|
315 | 316 | |
|
316 | 317 | def get_awaiting_my_review(self, repo_name, source=False, statuses=None, |
|
317 | 318 | opened_by=None, user_id=None, offset=0, |
|
318 | 319 | length=None, order_by=None, order_dir='desc'): |
|
319 | 320 | """ |
|
320 | 321 | Get all pull requests for a specific repository that are awaiting |
|
321 | 322 | review from a specific user. |
|
322 | 323 | |
|
323 | 324 | :param repo_name: target or source repo |
|
324 | 325 | :param source: boolean flag to specify if repo_name refers to source |
|
325 | 326 | :param statuses: list of pull request statuses |
|
326 | 327 | :param opened_by: author user of the pull request |
|
327 | 328 | :param user_id: reviewer user of the pull request |
|
328 | 329 | :param offset: pagination offset |
|
329 | 330 | :param length: length of returned list |
|
330 | 331 | :param order_by: order of the returned list |
|
331 | 332 | :param order_dir: 'asc' or 'desc' ordering direction |
|
332 | 333 | :returns: list of pull requests |
|
333 | 334 | """ |
|
334 | 335 | pull_requests = self.get_all( |
|
335 | 336 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
336 | 337 | order_by=order_by, order_dir=order_dir) |
|
337 | 338 | |
|
338 | 339 | _my = PullRequestModel().get_not_reviewed(user_id) |
|
339 | 340 | my_participation = [] |
|
340 | 341 | for pr in pull_requests: |
|
341 | 342 | if pr in _my: |
|
342 | 343 | my_participation.append(pr) |
|
343 | 344 | _filtered_pull_requests = my_participation |
|
344 | 345 | if length: |
|
345 | 346 | return _filtered_pull_requests[offset:offset+length] |
|
346 | 347 | else: |
|
347 | 348 | return _filtered_pull_requests |
|
348 | 349 | |
|
349 | 350 | def get_not_reviewed(self, user_id): |
|
350 | 351 | return [ |
|
351 | 352 | x.pull_request for x in PullRequestReviewers.query().filter( |
|
352 | 353 | PullRequestReviewers.user_id == user_id).all() |
|
353 | 354 | ] |
|
354 | 355 | |
|
355 | 356 | def _prepare_participating_query(self, user_id=None, statuses=None, |
|
356 | 357 | order_by=None, order_dir='desc'): |
|
357 | 358 | q = PullRequest.query() |
|
358 | 359 | if user_id: |
|
359 | 360 | reviewers_subquery = Session().query( |
|
360 | 361 | PullRequestReviewers.pull_request_id).filter( |
|
361 | 362 | PullRequestReviewers.user_id == user_id).subquery() |
|
362 | 363 | user_filter = or_( |
|
363 | 364 | PullRequest.user_id == user_id, |
|
364 | 365 | PullRequest.pull_request_id.in_(reviewers_subquery) |
|
365 | 366 | ) |
|
366 | 367 | q = PullRequest.query().filter(user_filter) |
|
367 | 368 | |
|
368 | 369 | # closed,opened |
|
369 | 370 | if statuses: |
|
370 | 371 | q = q.filter(PullRequest.status.in_(statuses)) |
|
371 | 372 | |
|
372 | 373 | if order_by: |
|
373 | 374 | order_map = { |
|
374 | 375 | 'name_raw': PullRequest.pull_request_id, |
|
375 | 376 | 'title': PullRequest.title, |
|
376 | 377 | 'updated_on_raw': PullRequest.updated_on, |
|
377 | 378 | 'target_repo': PullRequest.target_repo_id |
|
378 | 379 | } |
|
379 | 380 | if order_dir == 'asc': |
|
380 | 381 | q = q.order_by(order_map[order_by].asc()) |
|
381 | 382 | else: |
|
382 | 383 | q = q.order_by(order_map[order_by].desc()) |
|
383 | 384 | |
|
384 | 385 | return q |
|
385 | 386 | |
|
386 | 387 | def count_im_participating_in(self, user_id=None, statuses=None): |
|
387 | 388 | q = self._prepare_participating_query(user_id, statuses=statuses) |
|
388 | 389 | return q.count() |
|
389 | 390 | |
|
390 | 391 | def get_im_participating_in( |
|
391 | 392 | self, user_id=None, statuses=None, offset=0, |
|
392 | 393 | length=None, order_by=None, order_dir='desc'): |
|
393 | 394 | """ |
|
394 | 395 | Get all pull requests that I'm participating in, or I have opened |
|
395 | 396 | """ |
|
396 | 397 | |
|
397 | 398 | q = self._prepare_participating_query( |
|
398 | 399 | user_id, statuses=statuses, order_by=order_by, |
|
399 | 400 | order_dir=order_dir) |
|
400 | 401 | |
|
401 | 402 | if length: |
|
402 | 403 | pull_requests = q.limit(length).offset(offset).all() |
|
403 | 404 | else: |
|
404 | 405 | pull_requests = q.all() |
|
405 | 406 | |
|
406 | 407 | return pull_requests |
|
407 | 408 | |
|
408 | 409 | def get_versions(self, pull_request): |
|
409 | 410 | """ |
|
410 | 411 | returns versions of the pull request sorted by version id ascending |
|
411 | 412 | """ |
|
412 | 413 | return PullRequestVersion.query()\ |
|
413 | 414 | .filter(PullRequestVersion.pull_request == pull_request)\ |
|
414 | 415 | .order_by(PullRequestVersion.pull_request_version_id.asc())\ |
|
415 | 416 | .all() |
|
416 | 417 | |
|
417 | 418 | def create(self, created_by, source_repo, source_ref, target_repo, |
|
418 | 419 | target_ref, revisions, reviewers, title, description=None, |
|
419 | 420 | reviewer_data=None): |
|
420 | 421 | |
|
421 | 422 | created_by_user = self._get_user(created_by) |
|
422 | 423 | source_repo = self._get_repo(source_repo) |
|
423 | 424 | target_repo = self._get_repo(target_repo) |
|
424 | 425 | |
|
425 | 426 | pull_request = PullRequest() |
|
426 | 427 | pull_request.source_repo = source_repo |
|
427 | 428 | pull_request.source_ref = source_ref |
|
428 | 429 | pull_request.target_repo = target_repo |
|
429 | 430 | pull_request.target_ref = target_ref |
|
430 | 431 | pull_request.revisions = revisions |
|
431 | 432 | pull_request.title = title |
|
432 | 433 | pull_request.description = description |
|
433 | 434 | pull_request.author = created_by_user |
|
434 | 435 | pull_request.reviewer_data = reviewer_data |
|
435 | 436 | |
|
436 | 437 | Session().add(pull_request) |
|
437 | 438 | Session().flush() |
|
438 | 439 | |
|
439 | 440 | reviewer_ids = set() |
|
440 | 441 | # members / reviewers |
|
441 | 442 | for reviewer_object in reviewers: |
|
442 | 443 | user_id, reasons, mandatory = reviewer_object |
|
443 | 444 | |
|
444 | 445 | user = self._get_user(user_id) |
|
445 | 446 | reviewer_ids.add(user.user_id) |
|
446 | 447 | |
|
447 | 448 | reviewer = PullRequestReviewers() |
|
448 | 449 | reviewer.user = user |
|
449 | 450 | reviewer.pull_request = pull_request |
|
450 | 451 | reviewer.reasons = reasons |
|
451 | 452 | reviewer.mandatory = mandatory |
|
452 | 453 | Session().add(reviewer) |
|
453 | 454 | |
|
454 | 455 | # Set approval status to "Under Review" for all commits which are |
|
455 | 456 | # part of this pull request. |
|
456 | 457 | ChangesetStatusModel().set_status( |
|
457 | 458 | repo=target_repo, |
|
458 | 459 | status=ChangesetStatus.STATUS_UNDER_REVIEW, |
|
459 | 460 | user=created_by_user, |
|
460 | 461 | pull_request=pull_request |
|
461 | 462 | ) |
|
462 | 463 | |
|
463 | 464 | self.notify_reviewers(pull_request, reviewer_ids) |
|
464 | 465 | self._trigger_pull_request_hook( |
|
465 | 466 | pull_request, created_by_user, 'create') |
|
466 | 467 | |
|
467 | 468 | return pull_request |
|
468 | 469 | |
|
469 | 470 | def _trigger_pull_request_hook(self, pull_request, user, action): |
|
470 | 471 | pull_request = self.__get_pull_request(pull_request) |
|
471 | 472 | target_scm = pull_request.target_repo.scm_instance() |
|
472 | 473 | if action == 'create': |
|
473 | 474 | trigger_hook = hooks_utils.trigger_log_create_pull_request_hook |
|
474 | 475 | elif action == 'merge': |
|
475 | 476 | trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook |
|
476 | 477 | elif action == 'close': |
|
477 | 478 | trigger_hook = hooks_utils.trigger_log_close_pull_request_hook |
|
478 | 479 | elif action == 'review_status_change': |
|
479 | 480 | trigger_hook = hooks_utils.trigger_log_review_pull_request_hook |
|
480 | 481 | elif action == 'update': |
|
481 | 482 | trigger_hook = hooks_utils.trigger_log_update_pull_request_hook |
|
482 | 483 | else: |
|
483 | 484 | return |
|
484 | 485 | |
|
485 | 486 | trigger_hook( |
|
486 | 487 | username=user.username, |
|
487 | 488 | repo_name=pull_request.target_repo.repo_name, |
|
488 | 489 | repo_alias=target_scm.alias, |
|
489 | 490 | pull_request=pull_request) |
|
490 | 491 | |
|
491 | 492 | def _get_commit_ids(self, pull_request): |
|
492 | 493 | """ |
|
493 | 494 | Return the commit ids of the merged pull request. |
|
494 | 495 | |
|
495 | 496 | This method does not yet deal correctly with the lack of autoupdates |
|
496 | 497 | nor with implicit target updates. |
|
497 | 498 | For example: if a commit in the source repo is already in the target it |
|
498 | 499 | will be reported anyway. |
|
499 | 500 | """ |
|
500 | 501 | merge_rev = pull_request.merge_rev |
|
501 | 502 | if merge_rev is None: |
|
502 | 503 | raise ValueError('This pull request was not merged yet') |
|
503 | 504 | |
|
504 | 505 | commit_ids = list(pull_request.revisions) |
|
505 | 506 | if merge_rev not in commit_ids: |
|
506 | 507 | commit_ids.append(merge_rev) |
|
507 | 508 | |
|
508 | 509 | return commit_ids |
|
509 | 510 | |
|
510 | 511 | def merge(self, pull_request, user, extras): |
|
511 | 512 | log.debug("Merging pull request %s", pull_request.pull_request_id) |
|
512 | 513 | merge_state = self._merge_pull_request(pull_request, user, extras) |
|
513 | 514 | if merge_state.executed: |
|
514 | 515 | log.debug( |
|
515 | 516 | "Merge was successful, updating the pull request comments.") |
|
516 | 517 | self._comment_and_close_pr(pull_request, user, merge_state) |
|
517 | 518 | self._log_action('user_merged_pull_request', user, pull_request) |
|
518 | 519 | else: |
|
519 | 520 | log.warn("Merge failed, not updating the pull request.") |
|
520 | 521 | return merge_state |
|
521 | 522 | |
|
522 | 523 | def _merge_pull_request(self, pull_request, user, extras): |
|
523 | 524 | target_vcs = pull_request.target_repo.scm_instance() |
|
524 | 525 | source_vcs = pull_request.source_repo.scm_instance() |
|
525 | 526 | target_ref = self._refresh_reference( |
|
526 | 527 | pull_request.target_ref_parts, target_vcs) |
|
527 | 528 | |
|
528 | 529 | message = _( |
|
529 | 530 | 'Merge pull request #%(pr_id)s from ' |
|
530 | 531 | '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % { |
|
531 | 532 | 'pr_id': pull_request.pull_request_id, |
|
532 | 533 | 'source_repo': source_vcs.name, |
|
533 | 534 | 'source_ref_name': pull_request.source_ref_parts.name, |
|
534 | 535 | 'pr_title': pull_request.title |
|
535 | 536 | } |
|
536 | 537 | |
|
537 | 538 | workspace_id = self._workspace_id(pull_request) |
|
538 | 539 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
539 | 540 | |
|
540 | 541 | callback_daemon, extras = prepare_callback_daemon( |
|
541 | 542 | extras, protocol=vcs_settings.HOOKS_PROTOCOL, |
|
542 | 543 | use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS) |
|
543 | 544 | |
|
544 | 545 | with callback_daemon: |
|
545 | 546 | # TODO: johbo: Implement a clean way to run a config_override |
|
546 | 547 | # for a single call. |
|
547 | 548 | target_vcs.config.set( |
|
548 | 549 | 'rhodecode', 'RC_SCM_DATA', json.dumps(extras)) |
|
549 | 550 | merge_state = target_vcs.merge( |
|
550 | 551 | target_ref, source_vcs, pull_request.source_ref_parts, |
|
551 | 552 | workspace_id, user_name=user.username, |
|
552 | 553 | user_email=user.email, message=message, use_rebase=use_rebase) |
|
553 | 554 | return merge_state |
|
554 | 555 | |
|
555 | 556 | def _comment_and_close_pr(self, pull_request, user, merge_state): |
|
556 | 557 | pull_request.merge_rev = merge_state.merge_ref.commit_id |
|
557 | 558 | pull_request.updated_on = datetime.datetime.now() |
|
558 | 559 | |
|
559 | 560 | CommentsModel().create( |
|
560 | 561 | text=unicode(_('Pull request merged and closed')), |
|
561 | 562 | repo=pull_request.target_repo.repo_id, |
|
562 | 563 | user=user.user_id, |
|
563 | 564 | pull_request=pull_request.pull_request_id, |
|
564 | 565 | f_path=None, |
|
565 | 566 | line_no=None, |
|
566 | 567 | closing_pr=True |
|
567 | 568 | ) |
|
568 | 569 | |
|
569 | 570 | Session().add(pull_request) |
|
570 | 571 | Session().flush() |
|
571 | 572 | # TODO: paris: replace invalidation with less radical solution |
|
572 | 573 | ScmModel().mark_for_invalidation( |
|
573 | 574 | pull_request.target_repo.repo_name) |
|
574 | 575 | self._trigger_pull_request_hook(pull_request, user, 'merge') |
|
575 | 576 | |
|
576 | 577 | def has_valid_update_type(self, pull_request): |
|
577 | 578 | source_ref_type = pull_request.source_ref_parts.type |
|
578 | 579 | return source_ref_type in ['book', 'branch', 'tag'] |
|
579 | 580 | |
|
580 | 581 | def update_commits(self, pull_request): |
|
581 | 582 | """ |
|
582 | 583 | Get the updated list of commits for the pull request |
|
583 | 584 | and return the new pull request version and the list |
|
584 | 585 | of commits processed by this update action |
|
585 | 586 | """ |
|
586 | 587 | pull_request = self.__get_pull_request(pull_request) |
|
587 | 588 | source_ref_type = pull_request.source_ref_parts.type |
|
588 | 589 | source_ref_name = pull_request.source_ref_parts.name |
|
589 | 590 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
590 | 591 | |
|
591 | 592 | target_ref_type = pull_request.target_ref_parts.type |
|
592 | 593 | target_ref_name = pull_request.target_ref_parts.name |
|
593 | 594 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
594 | 595 | |
|
595 | 596 | if not self.has_valid_update_type(pull_request): |
|
596 | 597 | log.debug( |
|
597 | 598 | "Skipping update of pull request %s due to ref type: %s", |
|
598 | 599 | pull_request, source_ref_type) |
|
599 | 600 | return UpdateResponse( |
|
600 | 601 | executed=False, |
|
601 | 602 | reason=UpdateFailureReason.WRONG_REF_TYPE, |
|
602 | 603 | old=pull_request, new=None, changes=None, |
|
603 | 604 | source_changed=False, target_changed=False) |
|
604 | 605 | |
|
605 | 606 | # source repo |
|
606 | 607 | source_repo = pull_request.source_repo.scm_instance() |
|
607 | 608 | try: |
|
608 | 609 | source_commit = source_repo.get_commit(commit_id=source_ref_name) |
|
609 | 610 | except CommitDoesNotExistError: |
|
610 | 611 | return UpdateResponse( |
|
611 | 612 | executed=False, |
|
612 | 613 | reason=UpdateFailureReason.MISSING_SOURCE_REF, |
|
613 | 614 | old=pull_request, new=None, changes=None, |
|
614 | 615 | source_changed=False, target_changed=False) |
|
615 | 616 | |
|
616 | 617 | source_changed = source_ref_id != source_commit.raw_id |
|
617 | 618 | |
|
618 | 619 | # target repo |
|
619 | 620 | target_repo = pull_request.target_repo.scm_instance() |
|
620 | 621 | try: |
|
621 | 622 | target_commit = target_repo.get_commit(commit_id=target_ref_name) |
|
622 | 623 | except CommitDoesNotExistError: |
|
623 | 624 | return UpdateResponse( |
|
624 | 625 | executed=False, |
|
625 | 626 | reason=UpdateFailureReason.MISSING_TARGET_REF, |
|
626 | 627 | old=pull_request, new=None, changes=None, |
|
627 | 628 | source_changed=False, target_changed=False) |
|
628 | 629 | target_changed = target_ref_id != target_commit.raw_id |
|
629 | 630 | |
|
630 | 631 | if not (source_changed or target_changed): |
|
631 | 632 | log.debug("Nothing changed in pull request %s", pull_request) |
|
632 | 633 | return UpdateResponse( |
|
633 | 634 | executed=False, |
|
634 | 635 | reason=UpdateFailureReason.NO_CHANGE, |
|
635 | 636 | old=pull_request, new=None, changes=None, |
|
636 | 637 | source_changed=source_changed, target_changed=target_changed) |
|
637 | 638 | |
|
638 | 639 | change_in_found = 'target repo' if target_changed else 'source repo' |
|
639 | 640 | log.debug('Updating pull request because of change in %s detected', |
|
640 | 641 | change_in_found) |
|
641 | 642 | |
|
642 | 643 | # Finally an update is needed; in case of a source change we create |
|
643 | 644 | # a new version, otherwise we just update in place |
|
644 | 645 | if source_changed: |
|
645 | 646 | pull_request_version = self._create_version_from_snapshot(pull_request) |
|
646 | 647 | self._link_comments_to_version(pull_request_version) |
|
647 | 648 | else: |
|
648 | 649 | try: |
|
649 | 650 | ver = pull_request.versions[-1] |
|
650 | 651 | except IndexError: |
|
651 | 652 | ver = None |
|
652 | 653 | |
|
653 | 654 | pull_request.pull_request_version_id = \ |
|
654 | 655 | ver.pull_request_version_id if ver else None |
|
655 | 656 | pull_request_version = pull_request |
|
656 | 657 | |
|
657 | 658 | try: |
|
658 | 659 | if target_ref_type in ('tag', 'branch', 'book'): |
|
659 | 660 | target_commit = target_repo.get_commit(target_ref_name) |
|
660 | 661 | else: |
|
661 | 662 | target_commit = target_repo.get_commit(target_ref_id) |
|
662 | 663 | except CommitDoesNotExistError: |
|
663 | 664 | return UpdateResponse( |
|
664 | 665 | executed=False, |
|
665 | 666 | reason=UpdateFailureReason.MISSING_TARGET_REF, |
|
666 | 667 | old=pull_request, new=None, changes=None, |
|
667 | 668 | source_changed=source_changed, target_changed=target_changed) |
|
668 | 669 | |
|
669 | 670 | # re-compute commit ids |
|
670 | 671 | old_commit_ids = pull_request.revisions |
|
671 | 672 | pre_load = ["author", "branch", "date", "message"] |
|
672 | 673 | commit_ranges = target_repo.compare( |
|
673 | 674 | target_commit.raw_id, source_commit.raw_id, source_repo, merge=True, |
|
674 | 675 | pre_load=pre_load) |
|
675 | 676 | |
|
676 | 677 | ancestor = target_repo.get_common_ancestor( |
|
677 | 678 | target_commit.raw_id, source_commit.raw_id, source_repo) |
|
678 | 679 | |
|
679 | 680 | pull_request.source_ref = '%s:%s:%s' % ( |
|
680 | 681 | source_ref_type, source_ref_name, source_commit.raw_id) |
|
681 | 682 | pull_request.target_ref = '%s:%s:%s' % ( |
|
682 | 683 | target_ref_type, target_ref_name, ancestor) |
|
683 | 684 | |
|
684 | 685 | pull_request.revisions = [ |
|
685 | 686 | commit.raw_id for commit in reversed(commit_ranges)] |
|
686 | 687 | pull_request.updated_on = datetime.datetime.now() |
|
687 | 688 | Session().add(pull_request) |
|
688 | 689 | new_commit_ids = pull_request.revisions |
|
689 | 690 | |
|
690 | 691 | old_diff_data, new_diff_data = self._generate_update_diffs( |
|
691 | 692 | pull_request, pull_request_version) |
|
692 | 693 | |
|
693 | 694 | # calculate commit and file changes |
|
694 | 695 | changes = self._calculate_commit_id_changes( |
|
695 | 696 | old_commit_ids, new_commit_ids) |
|
696 | 697 | file_changes = self._calculate_file_changes( |
|
697 | 698 | old_diff_data, new_diff_data) |
|
698 | 699 | |
|
699 | 700 | # set comments as outdated if DIFFS changed |
|
700 | 701 | CommentsModel().outdate_comments( |
|
701 | 702 | pull_request, old_diff_data=old_diff_data, |
|
702 | 703 | new_diff_data=new_diff_data) |
|
703 | 704 | |
|
704 | 705 | commit_changes = (changes.added or changes.removed) |
|
705 | 706 | file_node_changes = ( |
|
706 | 707 | file_changes.added or file_changes.modified or file_changes.removed) |
|
707 | 708 | pr_has_changes = commit_changes or file_node_changes |
|
708 | 709 | |
|
709 | 710 | # Add an automatic comment to the pull request, in case |
|
710 | 711 | # anything has changed |
|
711 | 712 | if pr_has_changes: |
|
712 | 713 | update_comment = CommentsModel().create( |
|
713 | 714 | text=self._render_update_message(changes, file_changes), |
|
714 | 715 | repo=pull_request.target_repo, |
|
715 | 716 | user=pull_request.author, |
|
716 | 717 | pull_request=pull_request, |
|
717 | 718 | send_email=False, renderer=DEFAULT_COMMENTS_RENDERER) |
|
718 | 719 | |
|
719 | 720 | # Update status to "Under Review" for added commits |
|
720 | 721 | for commit_id in changes.added: |
|
721 | 722 | ChangesetStatusModel().set_status( |
|
722 | 723 | repo=pull_request.source_repo, |
|
723 | 724 | status=ChangesetStatus.STATUS_UNDER_REVIEW, |
|
724 | 725 | comment=update_comment, |
|
725 | 726 | user=pull_request.author, |
|
726 | 727 | pull_request=pull_request, |
|
727 | 728 | revision=commit_id) |
|
728 | 729 | |
|
729 | 730 | log.debug( |
|
730 | 731 | 'Updated pull request %s, added_ids: %s, common_ids: %s, ' |
|
731 | 732 | 'removed_ids: %s', pull_request.pull_request_id, |
|
732 | 733 | changes.added, changes.common, changes.removed) |
|
733 | 734 | log.debug( |
|
734 | 735 | 'Updated pull request with the following file changes: %s', |
|
735 | 736 | file_changes) |
|
736 | 737 | |
|
737 | 738 | log.info( |
|
738 | 739 | "Updated pull request %s from commit %s to commit %s, " |
|
739 | 740 | "stored new version %s of this pull request.", |
|
740 | 741 | pull_request.pull_request_id, source_ref_id, |
|
741 | 742 | pull_request.source_ref_parts.commit_id, |
|
742 | 743 | pull_request_version.pull_request_version_id) |
|
743 | 744 | Session().commit() |
|
744 | 745 | self._trigger_pull_request_hook( |
|
745 | 746 | pull_request, pull_request.author, 'update') |
|
746 | 747 | |
|
747 | 748 | return UpdateResponse( |
|
748 | 749 | executed=True, reason=UpdateFailureReason.NONE, |
|
749 | 750 | old=pull_request, new=pull_request_version, changes=changes, |
|
750 | 751 | source_changed=source_changed, target_changed=target_changed) |
|
751 | 752 | |
|
752 | 753 | def _create_version_from_snapshot(self, pull_request): |
|
753 | 754 | version = PullRequestVersion() |
|
754 | 755 | version.title = pull_request.title |
|
755 | 756 | version.description = pull_request.description |
|
756 | 757 | version.status = pull_request.status |
|
757 | 758 | version.created_on = datetime.datetime.now() |
|
758 | 759 | version.updated_on = pull_request.updated_on |
|
759 | 760 | version.user_id = pull_request.user_id |
|
760 | 761 | version.source_repo = pull_request.source_repo |
|
761 | 762 | version.source_ref = pull_request.source_ref |
|
762 | 763 | version.target_repo = pull_request.target_repo |
|
763 | 764 | version.target_ref = pull_request.target_ref |
|
764 | 765 | |
|
765 | 766 | version._last_merge_source_rev = pull_request._last_merge_source_rev |
|
766 | 767 | version._last_merge_target_rev = pull_request._last_merge_target_rev |
|
767 | 768 | version._last_merge_status = pull_request._last_merge_status |
|
768 | 769 | version.shadow_merge_ref = pull_request.shadow_merge_ref |
|
769 | 770 | version.merge_rev = pull_request.merge_rev |
|
770 | 771 | version.reviewer_data = pull_request.reviewer_data |
|
771 | 772 | |
|
772 | 773 | version.revisions = pull_request.revisions |
|
773 | 774 | version.pull_request = pull_request |
|
774 | 775 | Session().add(version) |
|
775 | 776 | Session().flush() |
|
776 | 777 | |
|
777 | 778 | return version |
|
778 | 779 | |
|
779 | 780 | def _generate_update_diffs(self, pull_request, pull_request_version): |
|
780 | 781 | |
|
781 | 782 | diff_context = ( |
|
782 | 783 | self.DIFF_CONTEXT + |
|
783 | 784 | CommentsModel.needed_extra_diff_context()) |
|
784 | 785 | |
|
785 | 786 | source_repo = pull_request_version.source_repo |
|
786 | 787 | source_ref_id = pull_request_version.source_ref_parts.commit_id |
|
787 | 788 | target_ref_id = pull_request_version.target_ref_parts.commit_id |
|
788 | 789 | old_diff = self._get_diff_from_pr_or_version( |
|
789 | 790 | source_repo, source_ref_id, target_ref_id, context=diff_context) |
|
790 | 791 | |
|
791 | 792 | source_repo = pull_request.source_repo |
|
792 | 793 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
793 | 794 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
794 | 795 | |
|
795 | 796 | new_diff = self._get_diff_from_pr_or_version( |
|
796 | 797 | source_repo, source_ref_id, target_ref_id, context=diff_context) |
|
797 | 798 | |
|
798 | 799 | old_diff_data = diffs.DiffProcessor(old_diff) |
|
799 | 800 | old_diff_data.prepare() |
|
800 | 801 | new_diff_data = diffs.DiffProcessor(new_diff) |
|
801 | 802 | new_diff_data.prepare() |
|
802 | 803 | |
|
803 | 804 | return old_diff_data, new_diff_data |
|
804 | 805 | |
|
805 | 806 | def _link_comments_to_version(self, pull_request_version): |
|
806 | 807 | """ |
|
807 | 808 | Link all unlinked comments of this pull request to the given version. |
|
808 | 809 | |
|
809 | 810 | :param pull_request_version: The `PullRequestVersion` to which |
|
810 | 811 | the comments shall be linked. |
|
811 | 812 | |
|
812 | 813 | """ |
|
813 | 814 | pull_request = pull_request_version.pull_request |
|
814 | 815 | comments = ChangesetComment.query()\ |
|
815 | 816 | .filter( |
|
816 | 817 | # TODO: johbo: Should we query for the repo at all here? |
|
817 | 818 | # Pending decision on how comments of PRs are to be related |
|
818 | 819 | # to either the source repo, the target repo or no repo at all. |
|
819 | 820 | ChangesetComment.repo_id == pull_request.target_repo.repo_id, |
|
820 | 821 | ChangesetComment.pull_request == pull_request, |
|
821 | 822 | ChangesetComment.pull_request_version == None)\ |
|
822 | 823 | .order_by(ChangesetComment.comment_id.asc()) |
|
823 | 824 | |
|
824 | 825 | # TODO: johbo: Find out why this breaks if it is done in a bulk |
|
825 | 826 | # operation. |
|
826 | 827 | for comment in comments: |
|
827 | 828 | comment.pull_request_version_id = ( |
|
828 | 829 | pull_request_version.pull_request_version_id) |
|
829 | 830 | Session().add(comment) |
|
830 | 831 | |
|
831 | 832 | def _calculate_commit_id_changes(self, old_ids, new_ids): |
|
832 | 833 | added = [x for x in new_ids if x not in old_ids] |
|
833 | 834 | common = [x for x in new_ids if x in old_ids] |
|
834 | 835 | removed = [x for x in old_ids if x not in new_ids] |
|
835 | 836 | total = new_ids |
|
836 | 837 | return ChangeTuple(added, common, removed, total) |
|
837 | 838 | |
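Reviewer note: the added/common/removed split is plain membership arithmetic over the two revision lists; a standalone check (ChangeTuple is modelled locally here, the real namedtuple is defined elsewhere in this module):

    from collections import namedtuple

    ChangeTuple = namedtuple('ChangeTuple', ['added', 'common', 'removed', 'total'])

    def calculate_commit_id_changes(old_ids, new_ids):
        return ChangeTuple(
            added=[x for x in new_ids if x not in old_ids],
            common=[x for x in new_ids if x in old_ids],
            removed=[x for x in old_ids if x not in new_ids],
            total=new_ids)

    assert calculate_commit_id_changes(['a', 'b'], ['b', 'c']) == \
        ChangeTuple(added=['c'], common=['b'], removed=['a'], total=['b', 'c'])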
|
838 | 839 | def _calculate_file_changes(self, old_diff_data, new_diff_data): |
|
839 | 840 | |
|
840 | 841 | old_files = OrderedDict() |
|
841 | 842 | for diff_data in old_diff_data.parsed_diff: |
|
842 | 843 | old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff']) |
|
843 | 844 | |
|
844 | 845 | added_files = [] |
|
845 | 846 | modified_files = [] |
|
846 | 847 | removed_files = [] |
|
847 | 848 | for diff_data in new_diff_data.parsed_diff: |
|
848 | 849 | new_filename = diff_data['filename'] |
|
849 | 850 | new_hash = md5_safe(diff_data['raw_diff']) |
|
850 | 851 | |
|
851 | 852 | old_hash = old_files.get(new_filename) |
|
852 | 853 | if not old_hash: |
|
853 | 854 | # file is not present in the old diff, which means it was added |
|
854 | 855 | added_files.append(new_filename) |
|
855 | 856 | else: |
|
856 | 857 | if new_hash != old_hash: |
|
857 | 858 | modified_files.append(new_filename) |
|
858 | 859 | # now remove a file from old, since we have seen it already |
|
859 | 860 | del old_files[new_filename] |
|
860 | 861 | |
|
861 | 862 | # removed files are those present in old but not in NEW; since we |
|
862 | 863 | # drop old files that also appear in the new diff, any left-overs |
|
863 | 864 | # are the removed files |
|
864 | 865 | removed_files.extend(old_files.keys()) |
|
865 | 866 | |
|
866 | 867 | return FileChangeTuple(added_files, modified_files, removed_files) |
|
867 | 868 | |
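Reviewer note: the file-level variant keys each file by name and compares an md5 of its raw diff text; whatever is left of the old mapping after walking the new diff counts as removed. A condensed standalone version (FileChangeTuple modelled locally, raw diff text assumed to be byte strings):

    import hashlib
    from collections import OrderedDict, namedtuple

    FileChangeTuple = namedtuple('FileChangeTuple', ['added', 'modified', 'removed'])

    def calculate_file_changes(old_diffs, new_diffs):
        # old_diffs / new_diffs: iterables of (filename, raw_diff_bytes) pairs
        old_files = OrderedDict(
            (name, hashlib.md5(raw).hexdigest()) for name, raw in old_diffs)
        added, modified = [], []
        for name, raw in new_diffs:
            old_hash = old_files.pop(name, None)
            if old_hash is None:
                added.append(name)          # not in the old diff -> added
            elif hashlib.md5(raw).hexdigest() != old_hash:
                modified.append(name)       # same file, different diff -> modified
        return FileChangeTuple(added, modified, list(old_files.keys()))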
|
868 | 869 | def _render_update_message(self, changes, file_changes): |
|
869 | 870 | """ |
|
870 | 871 | render the message using DEFAULT_COMMENTS_RENDERER (RST renderer), |
|
871 | 872 | so it always looks the same regardless of which default |
|
872 | 873 | renderer the system is using. |
|
873 | 874 | |
|
874 | 875 | :param changes: changes named tuple |
|
875 | 876 | :param file_changes: file changes named tuple |
|
876 | 877 | |
|
877 | 878 | """ |
|
878 | 879 | new_status = ChangesetStatus.get_status_lbl( |
|
879 | 880 | ChangesetStatus.STATUS_UNDER_REVIEW) |
|
880 | 881 | |
|
881 | 882 | changed_files = ( |
|
882 | 883 | file_changes.added + file_changes.modified + file_changes.removed) |
|
883 | 884 | |
|
884 | 885 | params = { |
|
885 | 886 | 'under_review_label': new_status, |
|
886 | 887 | 'added_commits': changes.added, |
|
887 | 888 | 'removed_commits': changes.removed, |
|
888 | 889 | 'changed_files': changed_files, |
|
889 | 890 | 'added_files': file_changes.added, |
|
890 | 891 | 'modified_files': file_changes.modified, |
|
891 | 892 | 'removed_files': file_changes.removed, |
|
892 | 893 | } |
|
893 | 894 | renderer = RstTemplateRenderer() |
|
894 | 895 | return renderer.render('pull_request_update.mako', **params) |
|
895 | 896 | |
|
896 | 897 | def edit(self, pull_request, title, description): |
|
897 | 898 | pull_request = self.__get_pull_request(pull_request) |
|
898 | 899 | if pull_request.is_closed(): |
|
899 | 900 | raise ValueError('This pull request is closed') |
|
900 | 901 | if title: |
|
901 | 902 | pull_request.title = title |
|
902 | 903 | pull_request.description = description |
|
903 | 904 | pull_request.updated_on = datetime.datetime.now() |
|
904 | 905 | Session().add(pull_request) |
|
905 | 906 | |
|
906 | 907 | def update_reviewers(self, pull_request, reviewer_data): |
|
907 | 908 | """ |
|
908 | 909 | Update the reviewers in the pull request |
|
909 | 910 | |
|
910 | 911 | :param pull_request: the pr to update |
|
911 | 912 | :param reviewer_data: list of tuples |
|
912 | 913 | [(user, ['reason1', 'reason2'], mandatory_flag)] |
|
913 | 914 | """ |
|
914 | 915 | |
|
915 | 916 | reviewers = {} |
|
916 | 917 | for user_id, reasons, mandatory in reviewer_data: |
|
917 | 918 | if isinstance(user_id, (int, basestring)): |
|
918 | 919 | user_id = self._get_user(user_id).user_id |
|
919 | 920 | reviewers[user_id] = { |
|
920 | 921 | 'reasons': reasons, 'mandatory': mandatory} |
|
921 | 922 | |
|
922 | 923 | reviewers_ids = set(reviewers.keys()) |
|
923 | 924 | pull_request = self.__get_pull_request(pull_request) |
|
924 | 925 | current_reviewers = PullRequestReviewers.query()\ |
|
925 | 926 | .filter(PullRequestReviewers.pull_request == |
|
926 | 927 | pull_request).all() |
|
927 | 928 | current_reviewers_ids = set([x.user.user_id for x in current_reviewers]) |
|
928 | 929 | |
|
929 | 930 | ids_to_add = reviewers_ids.difference(current_reviewers_ids) |
|
930 | 931 | ids_to_remove = current_reviewers_ids.difference(reviewers_ids) |
|
931 | 932 | |
|
932 | 933 | log.debug("Adding %s reviewers", ids_to_add) |
|
933 | 934 | log.debug("Removing %s reviewers", ids_to_remove) |
|
934 | 935 | changed = False |
|
935 | 936 | for uid in ids_to_add: |
|
936 | 937 | changed = True |
|
937 | 938 | _usr = self._get_user(uid) |
|
938 | 939 | reviewer = PullRequestReviewers() |
|
939 | 940 | reviewer.user = _usr |
|
940 | 941 | reviewer.pull_request = pull_request |
|
941 | 942 | reviewer.reasons = reviewers[uid]['reasons'] |
|
942 | 943 | # NOTE(marcink): mandatory shouldn't be changed now |
|
943 | 944 | # reviewer.mandatory = reviewers[uid]['mandatory'] |
|
944 | 945 | Session().add(reviewer) |
|
945 | 946 | |
|
946 | 947 | for uid in ids_to_remove: |
|
947 | 948 | changed = True |
|
948 | 949 | reviewers = PullRequestReviewers.query()\ |
|
949 | 950 | .filter(PullRequestReviewers.user_id == uid, |
|
950 | 951 | PullRequestReviewers.pull_request == pull_request)\ |
|
951 | 952 | .all() |
|
952 | 953 | # use .all() in case we accidentally added the same person twice |
|
953 | 954 | # this CAN happen due to the lack of DB checks |
|
954 | 955 | for obj in reviewers: |
|
955 | 956 | Session().delete(obj) |
|
956 | 957 | |
|
957 | 958 | if changed: |
|
958 | 959 | pull_request.updated_on = datetime.datetime.now() |
|
959 | 960 | Session().add(pull_request) |
|
960 | 961 | |
|
961 | 962 | self.notify_reviewers(pull_request, ids_to_add) |
|
962 | 963 | return ids_to_add, ids_to_remove |
|
963 | 964 | |
|
964 | def get_url(self, pull_request): | |
|
965 | return h.url('pullrequest_show', | |
|
966 | repo_name=safe_str(pull_request.target_repo.repo_name), | |
|
967 | pull_request_id=pull_request.pull_request_id, | |
|
968 | qualified=True) | |
|
965 | def get_url(self, pull_request, request=None, permalink=False): | |
|
966 | if not request: | |
|
967 | request = get_current_request() | |
|
968 | ||
|
969 | if permalink: | |
|
970 | return request.route_url( | |
|
971 | 'pull_requests_global', | |
|
972 | pull_request_id=pull_request.pull_request_id,) | |
|
973 | else: | |
|
974 | return request.route_url( | |
|
975 | 'pullrequest_show', | |
|
976 | repo_name=safe_str(pull_request.target_repo.repo_name), | |
|
977 | pull_request_id=pull_request.pull_request_id,) | |
|
969 | 978 | |
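Reviewer note: the replacement above moves URL generation from the pylons h.url helper to Pyramid's request.route_url and adds a permalink flavour that does not embed the repository name. Assuming the 'pullrequest_show' and 'pull_requests_global' routes are registered as their names suggest, usage would look roughly like:

    model = PullRequestModel()

    # repo-scoped URL, e.g. /<repo_name>/pull-request/<id>
    url = model.get_url(pull_request, request=request)

    # repository-independent permalink (survives repository renames)
    permalink = model.get_url(pull_request, request=request, permalink=True)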
|
970 | 979 | def get_shadow_clone_url(self, pull_request): |
|
971 | 980 | """ |
|
972 | 981 | Returns qualified url pointing to the shadow repository. If this pull |
|
973 | 982 | request is closed there is no shadow repository and ``None`` will be |
|
974 | 983 | returned. |
|
975 | 984 | """ |
|
976 | 985 | if pull_request.is_closed(): |
|
977 | 986 | return None |
|
978 | 987 | else: |
|
979 | 988 | pr_url = urllib.unquote(self.get_url(pull_request)) |
|
980 | 989 | return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url)) |
|
981 | 990 | |
|
982 | 991 | def notify_reviewers(self, pull_request, reviewers_ids): |
|
983 | 992 | # notification to reviewers |
|
984 | 993 | if not reviewers_ids: |
|
985 | 994 | return |
|
986 | 995 | |
|
987 | 996 | pull_request_obj = pull_request |
|
988 | 997 | # get the current participants of this pull request |
|
989 | 998 | recipients = reviewers_ids |
|
990 | 999 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST |
|
991 | 1000 | |
|
992 | 1001 | pr_source_repo = pull_request_obj.source_repo |
|
993 | 1002 | pr_target_repo = pull_request_obj.target_repo |
|
994 | 1003 | |
|
995 | 1004 | pr_url = h.url( |
|
996 | 1005 | 'pullrequest_show', |
|
997 | 1006 | repo_name=pr_target_repo.repo_name, |
|
998 | 1007 | pull_request_id=pull_request_obj.pull_request_id, |
|
999 | 1008 | qualified=True,) |
|
1000 | 1009 | |
|
1001 | 1010 | # set some variables for email notification |
|
1002 | 1011 | pr_target_repo_url = h.route_url( |
|
1003 | 1012 | 'repo_summary', repo_name=pr_target_repo.repo_name) |
|
1004 | 1013 | |
|
1005 | 1014 | pr_source_repo_url = h.route_url( |
|
1006 | 1015 | 'repo_summary', repo_name=pr_source_repo.repo_name) |
|
1007 | 1016 | |
|
1008 | 1017 | # pull request specifics |
|
1009 | 1018 | pull_request_commits = [ |
|
1010 | 1019 | (x.raw_id, x.message) |
|
1011 | 1020 | for x in map(pr_source_repo.get_commit, pull_request.revisions)] |
|
1012 | 1021 | |
|
1013 | 1022 | kwargs = { |
|
1014 | 1023 | 'user': pull_request.author, |
|
1015 | 1024 | 'pull_request': pull_request_obj, |
|
1016 | 1025 | 'pull_request_commits': pull_request_commits, |
|
1017 | 1026 | |
|
1018 | 1027 | 'pull_request_target_repo': pr_target_repo, |
|
1019 | 1028 | 'pull_request_target_repo_url': pr_target_repo_url, |
|
1020 | 1029 | |
|
1021 | 1030 | 'pull_request_source_repo': pr_source_repo, |
|
1022 | 1031 | 'pull_request_source_repo_url': pr_source_repo_url, |
|
1023 | 1032 | |
|
1024 | 1033 | 'pull_request_url': pr_url, |
|
1025 | 1034 | } |
|
1026 | 1035 | |
|
1027 | 1036 | # pre-generate the subject for notification itself |
|
1028 | 1037 | (subject, |
|
1029 | 1038 | _h, _e, # we don't care about those |
|
1030 | 1039 | body_plaintext) = EmailNotificationModel().render_email( |
|
1031 | 1040 | notification_type, **kwargs) |
|
1032 | 1041 | |
|
1033 | 1042 | # create notification objects, and emails |
|
1034 | 1043 | NotificationModel().create( |
|
1035 | 1044 | created_by=pull_request.author, |
|
1036 | 1045 | notification_subject=subject, |
|
1037 | 1046 | notification_body=body_plaintext, |
|
1038 | 1047 | notification_type=notification_type, |
|
1039 | 1048 | recipients=recipients, |
|
1040 | 1049 | email_kwargs=kwargs, |
|
1041 | 1050 | ) |
|
1042 | 1051 | |
|
1043 | 1052 | def delete(self, pull_request): |
|
1044 | 1053 | pull_request = self.__get_pull_request(pull_request) |
|
1045 | 1054 | self._cleanup_merge_workspace(pull_request) |
|
1046 | 1055 | Session().delete(pull_request) |
|
1047 | 1056 | |
|
1048 | 1057 | def close_pull_request(self, pull_request, user): |
|
1049 | 1058 | pull_request = self.__get_pull_request(pull_request) |
|
1050 | 1059 | self._cleanup_merge_workspace(pull_request) |
|
1051 | 1060 | pull_request.status = PullRequest.STATUS_CLOSED |
|
1052 | 1061 | pull_request.updated_on = datetime.datetime.now() |
|
1053 | 1062 | Session().add(pull_request) |
|
1054 | 1063 | self._trigger_pull_request_hook( |
|
1055 | 1064 | pull_request, pull_request.author, 'close') |
|
1056 | 1065 | self._log_action('user_closed_pull_request', user, pull_request) |
|
1057 | 1066 | |
|
1058 | 1067 | def close_pull_request_with_comment(self, pull_request, user, repo, |
|
1059 | 1068 | message=None): |
|
1060 | 1069 | status = ChangesetStatus.STATUS_REJECTED |
|
1061 | 1070 | |
|
1062 | 1071 | if not message: |
|
1063 | 1072 | message = ( |
|
1064 | 1073 | _('Status change %(transition_icon)s %(status)s') % { |
|
1065 | 1074 | 'transition_icon': '>', |
|
1066 | 1075 | 'status': ChangesetStatus.get_status_lbl(status)}) |
|
1067 | 1076 | |
|
1068 | 1077 | internal_message = _('Closing with') + ' ' + message |
|
1069 | 1078 | |
|
1070 | 1079 | comm = CommentsModel().create( |
|
1071 | 1080 | text=internal_message, |
|
1072 | 1081 | repo=repo.repo_id, |
|
1073 | 1082 | user=user.user_id, |
|
1074 | 1083 | pull_request=pull_request.pull_request_id, |
|
1075 | 1084 | f_path=None, |
|
1076 | 1085 | line_no=None, |
|
1077 | 1086 | status_change=ChangesetStatus.get_status_lbl(status), |
|
1078 | 1087 | status_change_type=status, |
|
1079 | 1088 | closing_pr=True |
|
1080 | 1089 | ) |
|
1081 | 1090 | |
|
1082 | 1091 | ChangesetStatusModel().set_status( |
|
1083 | 1092 | repo.repo_id, |
|
1084 | 1093 | status, |
|
1085 | 1094 | user.user_id, |
|
1086 | 1095 | comm, |
|
1087 | 1096 | pull_request=pull_request.pull_request_id |
|
1088 | 1097 | ) |
|
1089 | 1098 | Session().flush() |
|
1090 | 1099 | |
|
1091 | 1100 | PullRequestModel().close_pull_request( |
|
1092 | 1101 | pull_request.pull_request_id, user) |
|
1093 | 1102 | |
|
1094 | 1103 | def merge_status(self, pull_request): |
|
1095 | 1104 | if not self._is_merge_enabled(pull_request): |
|
1096 | 1105 | return False, _('Server-side pull request merging is disabled.') |
|
1097 | 1106 | if pull_request.is_closed(): |
|
1098 | 1107 | return False, _('This pull request is closed.') |
|
1099 | 1108 | merge_possible, msg = self._check_repo_requirements( |
|
1100 | 1109 | target=pull_request.target_repo, source=pull_request.source_repo) |
|
1101 | 1110 | if not merge_possible: |
|
1102 | 1111 | return merge_possible, msg |
|
1103 | 1112 | |
|
1104 | 1113 | try: |
|
1105 | 1114 | resp = self._try_merge(pull_request) |
|
1106 | 1115 | log.debug("Merge response: %s", resp) |
|
1107 | 1116 | status = resp.possible, self.merge_status_message( |
|
1108 | 1117 | resp.failure_reason) |
|
1109 | 1118 | except NotImplementedError: |
|
1110 | 1119 | status = False, _('Pull request merging is not supported.') |
|
1111 | 1120 | |
|
1112 | 1121 | return status |
|
1113 | 1122 | |
|
1114 | 1123 | def _check_repo_requirements(self, target, source): |
|
1115 | 1124 | """ |
|
1116 | 1125 | Check if `target` and `source` have compatible requirements. |
|
1117 | 1126 | |
|
1118 | 1127 | Currently this is just checking for largefiles. |
|
1119 | 1128 | """ |
|
1120 | 1129 | target_has_largefiles = self._has_largefiles(target) |
|
1121 | 1130 | source_has_largefiles = self._has_largefiles(source) |
|
1122 | 1131 | merge_possible = True |
|
1123 | 1132 | message = u'' |
|
1124 | 1133 | |
|
1125 | 1134 | if target_has_largefiles != source_has_largefiles: |
|
1126 | 1135 | merge_possible = False |
|
1127 | 1136 | if source_has_largefiles: |
|
1128 | 1137 | message = _( |
|
1129 | 1138 | 'Target repository large files support is disabled.') |
|
1130 | 1139 | else: |
|
1131 | 1140 | message = _( |
|
1132 | 1141 | 'Source repository large files support is disabled.') |
|
1133 | 1142 | |
|
1134 | 1143 | return merge_possible, message |
|
1135 | 1144 | |
|
1136 | 1145 | def _has_largefiles(self, repo): |
|
1137 | 1146 | largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings( |
|
1138 | 1147 | 'extensions', 'largefiles') |
|
1139 | 1148 | return largefiles_ui and largefiles_ui[0].active |
|
1140 | 1149 | |
|
1141 | 1150 | def _try_merge(self, pull_request): |
|
1142 | 1151 | """ |
|
1143 | 1152 | Try to merge the pull request and return the merge status. |
|
1144 | 1153 | """ |
|
1145 | 1154 | log.debug( |
|
1146 | 1155 | "Trying out if the pull request %s can be merged.", |
|
1147 | 1156 | pull_request.pull_request_id) |
|
1148 | 1157 | target_vcs = pull_request.target_repo.scm_instance() |
|
1149 | 1158 | |
|
1150 | 1159 | # Refresh the target reference. |
|
1151 | 1160 | try: |
|
1152 | 1161 | target_ref = self._refresh_reference( |
|
1153 | 1162 | pull_request.target_ref_parts, target_vcs) |
|
1154 | 1163 | except CommitDoesNotExistError: |
|
1155 | 1164 | merge_state = MergeResponse( |
|
1156 | 1165 | False, False, None, MergeFailureReason.MISSING_TARGET_REF) |
|
1157 | 1166 | return merge_state |
|
1158 | 1167 | |
|
1159 | 1168 | target_locked = pull_request.target_repo.locked |
|
1160 | 1169 | if target_locked and target_locked[0]: |
|
1161 | 1170 | log.debug("The target repository is locked.") |
|
1162 | 1171 | merge_state = MergeResponse( |
|
1163 | 1172 | False, False, None, MergeFailureReason.TARGET_IS_LOCKED) |
|
1164 | 1173 | elif self._needs_merge_state_refresh(pull_request, target_ref): |
|
1165 | 1174 | log.debug("Refreshing the merge status of the repository.") |
|
1166 | 1175 | merge_state = self._refresh_merge_state( |
|
1167 | 1176 | pull_request, target_vcs, target_ref) |
|
1168 | 1177 | else: |
|
1169 | 1178 | possible = pull_request.\ |
|
1170 | 1179 | _last_merge_status == MergeFailureReason.NONE |
|
1171 | 1180 | merge_state = MergeResponse( |
|
1172 | 1181 | possible, False, None, pull_request._last_merge_status) |
|
1173 | 1182 | |
|
1174 | 1183 | return merge_state |
|
1175 | 1184 | |
|
1176 | 1185 | def _refresh_reference(self, reference, vcs_repository): |
|
1177 | 1186 | if reference.type in ('branch', 'book'): |
|
1178 | 1187 | name_or_id = reference.name |
|
1179 | 1188 | else: |
|
1180 | 1189 | name_or_id = reference.commit_id |
|
1181 | 1190 | refreshed_commit = vcs_repository.get_commit(name_or_id) |
|
1182 | 1191 | refreshed_reference = Reference( |
|
1183 | 1192 | reference.type, reference.name, refreshed_commit.raw_id) |
|
1184 | 1193 | return refreshed_reference |
|
1185 | 1194 | |
|
1186 | 1195 | def _needs_merge_state_refresh(self, pull_request, target_reference): |
|
1187 | 1196 | return not ( |
|
1188 | 1197 | pull_request.revisions and |
|
1189 | 1198 | pull_request.revisions[0] == pull_request._last_merge_source_rev and |
|
1190 | 1199 | target_reference.commit_id == pull_request._last_merge_target_rev) |
|
1191 | 1200 | |
|
1192 | 1201 | def _refresh_merge_state(self, pull_request, target_vcs, target_reference): |
|
1193 | 1202 | workspace_id = self._workspace_id(pull_request) |
|
1194 | 1203 | source_vcs = pull_request.source_repo.scm_instance() |
|
1195 | 1204 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
1196 | 1205 | merge_state = target_vcs.merge( |
|
1197 | 1206 | target_reference, source_vcs, pull_request.source_ref_parts, |
|
1198 | 1207 | workspace_id, dry_run=True, use_rebase=use_rebase) |
|
1199 | 1208 | |
|
1200 | 1209 | # Do not store the response if there was an unknown error. |
|
1201 | 1210 | if merge_state.failure_reason != MergeFailureReason.UNKNOWN: |
|
1202 | 1211 | pull_request._last_merge_source_rev = \ |
|
1203 | 1212 | pull_request.source_ref_parts.commit_id |
|
1204 | 1213 | pull_request._last_merge_target_rev = target_reference.commit_id |
|
1205 | 1214 | pull_request._last_merge_status = merge_state.failure_reason |
|
1206 | 1215 | pull_request.shadow_merge_ref = merge_state.merge_ref |
|
1207 | 1216 | Session().add(pull_request) |
|
1208 | 1217 | Session().commit() |
|
1209 | 1218 | |
|
1210 | 1219 | return merge_state |
|
1211 | 1220 | |
|
1212 | 1221 | def _workspace_id(self, pull_request): |
|
1213 | 1222 | workspace_id = 'pr-%s' % pull_request.pull_request_id |
|
1214 | 1223 | return workspace_id |
|
1215 | 1224 | |
|
1216 | 1225 | def merge_status_message(self, status_code): |
|
1217 | 1226 | """ |
|
1218 | 1227 | Return a human friendly error message for the given merge status code. |
|
1219 | 1228 | """ |
|
1220 | 1229 | return self.MERGE_STATUS_MESSAGES[status_code] |
|
1221 | 1230 | |
|
1222 | 1231 | def generate_repo_data(self, repo, commit_id=None, branch=None, |
|
1223 | 1232 | bookmark=None): |
|
1224 | 1233 | all_refs, selected_ref = \ |
|
1225 | 1234 | self._get_repo_pullrequest_sources( |
|
1226 | 1235 | repo.scm_instance(), commit_id=commit_id, |
|
1227 | 1236 | branch=branch, bookmark=bookmark) |
|
1228 | 1237 | |
|
1229 | 1238 | refs_select2 = [] |
|
1230 | 1239 | for element in all_refs: |
|
1231 | 1240 | children = [{'id': x[0], 'text': x[1]} for x in element[0]] |
|
1232 | 1241 | refs_select2.append({'text': element[1], 'children': children}) |
|
1233 | 1242 | |
|
1234 | 1243 | return { |
|
1235 | 1244 | 'user': { |
|
1236 | 1245 | 'user_id': repo.user.user_id, |
|
1237 | 1246 | 'username': repo.user.username, |
|
1238 | 1247 | 'firstname': repo.user.firstname, |
|
1239 | 1248 | 'lastname': repo.user.lastname, |
|
1240 | 1249 | 'gravatar_link': h.gravatar_url(repo.user.email, 14), |
|
1241 | 1250 | }, |
|
1242 | 1251 | 'description': h.chop_at_smart(repo.description, '\n'), |
|
1243 | 1252 | 'refs': { |
|
1244 | 1253 | 'all_refs': all_refs, |
|
1245 | 1254 | 'selected_ref': selected_ref, |
|
1246 | 1255 | 'select2_refs': refs_select2 |
|
1247 | 1256 | } |
|
1248 | 1257 | } |
|
1249 | 1258 | |
|
1250 | 1259 | def generate_pullrequest_title(self, source, source_ref, target): |
|
1251 | 1260 | return u'{source}#{at_ref} to {target}'.format( |
|
1252 | 1261 | source=source, |
|
1253 | 1262 | at_ref=source_ref, |
|
1254 | 1263 | target=target, |
|
1255 | 1264 | ) |
|
1256 | 1265 | |
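For illustration, the title helper above produces strings of the form source#ref to target; a minimal sketch with hypothetical names:

    title = PullRequestModel().generate_pullrequest_title(
        source='my-fork', source_ref='feature-x', target='upstream-repo')
    # title == u'my-fork#feature-x to upstream-repo'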
|
1257 | 1266 | def _cleanup_merge_workspace(self, pull_request): |
|
1258 | 1267 | # Merging related cleanup |
|
1259 | 1268 | target_scm = pull_request.target_repo.scm_instance() |
|
1260 | 1269 | workspace_id = 'pr-%s' % pull_request.pull_request_id |
|
1261 | 1270 | |
|
1262 | 1271 | try: |
|
1263 | 1272 | target_scm.cleanup_merge_workspace(workspace_id) |
|
1264 | 1273 | except NotImplementedError: |
|
1265 | 1274 | pass |
|
1266 | 1275 | |
|
1267 | 1276 | def _get_repo_pullrequest_sources( |
|
1268 | 1277 | self, repo, commit_id=None, branch=None, bookmark=None): |
|
1269 | 1278 | """ |
|
1270 | 1279 | Return a structure with repo's interesting commits, suitable for |
|
1271 | 1280 | the selectors in the pull request controller
|
1272 | 1281 | |
|
1273 | 1282 | :param commit_id: a commit that must be in the list somehow |
|
1274 | 1283 | and selected by default |
|
1275 | 1284 | :param branch: a branch that must be in the list and selected |
|
1276 | 1285 | by default - even if closed |
|
1277 | 1286 | :param bookmark: a bookmark that must be in the list and selected |
|
1278 | 1287 | """ |
|
1279 | 1288 | |
|
1280 | 1289 | commit_id = safe_str(commit_id) if commit_id else None |
|
1281 | 1290 | branch = safe_str(branch) if branch else None |
|
1282 | 1291 | bookmark = safe_str(bookmark) if bookmark else None |
|
1283 | 1292 | |
|
1284 | 1293 | selected = None |
|
1285 | 1294 | |
|
1286 | 1295 | # order matters: first source that has commit_id in it will be selected |
|
1287 | 1296 | sources = [] |
|
1288 | 1297 | sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark)) |
|
1289 | 1298 | sources.append(('branch', repo.branches.items(), _('Branches'), branch)) |
|
1290 | 1299 | |
|
1291 | 1300 | if commit_id: |
|
1292 | 1301 | ref_commit = (h.short_id(commit_id), commit_id) |
|
1293 | 1302 | sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id)) |
|
1294 | 1303 | |
|
1295 | 1304 | sources.append( |
|
1296 | 1305 | ('branch', repo.branches_closed.items(), _('Closed Branches'), branch), |
|
1297 | 1306 | ) |
|
1298 | 1307 | |
|
1299 | 1308 | groups = [] |
|
1300 | 1309 | for group_key, ref_list, group_name, match in sources: |
|
1301 | 1310 | group_refs = [] |
|
1302 | 1311 | for ref_name, ref_id in ref_list: |
|
1303 | 1312 | ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id) |
|
1304 | 1313 | group_refs.append((ref_key, ref_name)) |
|
1305 | 1314 | |
|
1306 | 1315 | if not selected: |
|
1307 | 1316 | if set([commit_id, match]) & set([ref_id, ref_name]): |
|
1308 | 1317 | selected = ref_key |
|
1309 | 1318 | |
|
1310 | 1319 | if group_refs: |
|
1311 | 1320 | groups.append((group_refs, group_name)) |
|
1312 | 1321 | |
|
1313 | 1322 | if not selected: |
|
1314 | 1323 | ref = commit_id or branch or bookmark |
|
1315 | 1324 | if ref: |
|
1316 | 1325 | raise CommitDoesNotExistError( |
|
1317 | 1326 | 'No commit refs could be found matching: %s' % ref) |
|
1318 | 1327 | elif repo.DEFAULT_BRANCH_NAME in repo.branches: |
|
1319 | 1328 | selected = 'branch:%s:%s' % ( |
|
1320 | 1329 | repo.DEFAULT_BRANCH_NAME, |
|
1321 | 1330 | repo.branches[repo.DEFAULT_BRANCH_NAME] |
|
1322 | 1331 | ) |
|
1323 | 1332 | elif repo.commit_ids: |
|
1324 | 1333 | rev = repo.commit_ids[0] |
|
1325 | 1334 | selected = 'rev:%s:%s' % (rev, rev) |
|
1326 | 1335 | else: |
|
1327 | 1336 | raise EmptyRepositoryError() |
|
1328 | 1337 | return groups, selected |
|
1329 | 1338 | |
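To illustrate the structure returned above: groups is a list of (group_refs, group_name) tuples whose refs are ('type:name:commit_id', name) pairs, and selected is a single such key. A sketch with hypothetical values:

    # hypothetical return value for a repo with one bookmark and two branches
    groups = [
        ([('book:feature-x:aaa111', 'feature-x')], 'Bookmarks'),
        ([('branch:default:bbb222', 'default'),
          ('branch:stable:ccc333', 'stable')], 'Branches'),
    ]
    selected = 'branch:default:bbb222'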
|
1330 | 1339 | def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT): |
|
1331 | 1340 | return self._get_diff_from_pr_or_version( |
|
1332 | 1341 | source_repo, source_ref_id, target_ref_id, context=context) |
|
1333 | 1342 | |
|
1334 | 1343 | def _get_diff_from_pr_or_version( |
|
1335 | 1344 | self, source_repo, source_ref_id, target_ref_id, context): |
|
1336 | 1345 | target_commit = source_repo.get_commit( |
|
1337 | 1346 | commit_id=safe_str(target_ref_id)) |
|
1338 | 1347 | source_commit = source_repo.get_commit( |
|
1339 | 1348 | commit_id=safe_str(source_ref_id)) |
|
1340 | 1349 | if isinstance(source_repo, Repository): |
|
1341 | 1350 | vcs_repo = source_repo.scm_instance() |
|
1342 | 1351 | else: |
|
1343 | 1352 | vcs_repo = source_repo |
|
1344 | 1353 | |
|
1345 | 1354 | # TODO: johbo: In the context of an update, we cannot reach |
|
1346 | 1355 | # the old commit anymore with our normal mechanisms. It needs |
|
1347 | 1356 | # some sort of special support in the vcs layer to avoid this |
|
1348 | 1357 | # workaround. |
|
1349 | 1358 | if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and |
|
1350 | 1359 | vcs_repo.alias == 'git'): |
|
1351 | 1360 | source_commit.raw_id = safe_str(source_ref_id) |
|
1352 | 1361 | |
|
1353 | 1362 | log.debug('calculating diff between ' |
|
1354 | 1363 | 'source_ref:%s and target_ref:%s for repo `%s`', |
|
1355 | 1364 | target_ref_id, source_ref_id, |
|
1356 | 1365 | safe_unicode(vcs_repo.path)) |
|
1357 | 1366 | |
|
1358 | 1367 | vcs_diff = vcs_repo.get_diff( |
|
1359 | 1368 | commit1=target_commit, commit2=source_commit, context=context) |
|
1360 | 1369 | return vcs_diff |
|
1361 | 1370 | |
|
1362 | 1371 | def _is_merge_enabled(self, pull_request): |
|
1363 | 1372 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
1364 | 1373 | settings = settings_model.get_general_settings() |
|
1365 | 1374 | return settings.get('rhodecode_pr_merge_enabled', False) |
|
1366 | 1375 | |
|
1367 | 1376 | def _use_rebase_for_merging(self, pull_request): |
|
1368 | 1377 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
1369 | 1378 | settings = settings_model.get_general_settings() |
|
1370 | 1379 | return settings.get('rhodecode_hg_use_rebase_for_merging', False) |
|
1371 | 1380 | |
|
1372 | 1381 | def _log_action(self, action, user, pull_request): |
|
1373 | 1382 | action_logger( |
|
1374 | 1383 | user, |
|
1375 | 1384 | '{action}:{pr_id}'.format( |
|
1376 | 1385 | action=action, pr_id=pull_request.pull_request_id), |
|
1377 | 1386 | pull_request.target_repo) |
|
1378 | 1387 | |
|
1379 | 1388 | def get_reviewer_functions(self): |
|
1380 | 1389 | """ |
|
1381 | 1390 | Fetches functions for validation and fetching default reviewers. |
|
1382 | 1391 | If available we use the EE package, else we fall back to CE
|
1383 | 1392 | package functions |
|
1384 | 1393 | """ |
|
1385 | 1394 | try: |
|
1386 | 1395 | from rc_reviewers.utils import get_default_reviewers_data |
|
1387 | 1396 | from rc_reviewers.utils import validate_default_reviewers |
|
1388 | 1397 | except ImportError: |
|
1389 | 1398 | from rhodecode.apps.repository.utils import \ |
|
1390 | 1399 | get_default_reviewers_data |
|
1391 | 1400 | from rhodecode.apps.repository.utils import \ |
|
1392 | 1401 | validate_default_reviewers |
|
1393 | 1402 | |
|
1394 | 1403 | return get_default_reviewers_data, validate_default_reviewers |
|
1395 | 1404 | |
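Callers unpack the pair returned above; a hedged usage sketch (the exact signatures of the two callables depend on whether the EE or CE implementation was imported):

    model = PullRequestModel()
    get_reviewers_data, validate_reviewers = model.get_reviewer_functions()
    # both names now point at either the EE (rc_reviewers) or the CE implementations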
|
1396 | 1405 | |
|
1397 | 1406 | class MergeCheck(object): |
|
1398 | 1407 | """ |
|
1399 | 1408 | Performs merge checks and returns a check object which stores information

1400 | 1409 | about merge errors and merge conditions
|
1401 | 1410 | """ |
|
1402 | 1411 | TODO_CHECK = 'todo' |
|
1403 | 1412 | PERM_CHECK = 'perm' |
|
1404 | 1413 | REVIEW_CHECK = 'review' |
|
1405 | 1414 | MERGE_CHECK = 'merge' |
|
1406 | 1415 | |
|
1407 | 1416 | def __init__(self): |
|
1408 | 1417 | self.review_status = None |
|
1409 | 1418 | self.merge_possible = None |
|
1410 | 1419 | self.merge_msg = '' |
|
1411 | 1420 | self.failed = None |
|
1412 | 1421 | self.errors = [] |
|
1413 | 1422 | self.error_details = OrderedDict() |
|
1414 | 1423 | |
|
1415 | 1424 | def push_error(self, error_type, message, error_key, details): |
|
1416 | 1425 | self.failed = True |
|
1417 | 1426 | self.errors.append([error_type, message]) |
|
1418 | 1427 | self.error_details[error_key] = dict( |
|
1419 | 1428 | details=details, |
|
1420 | 1429 | error_type=error_type, |
|
1421 | 1430 | message=message |
|
1422 | 1431 | ) |
|
1423 | 1432 | |
|
1424 | 1433 | @classmethod |
|
1425 | 1434 | def validate(cls, pull_request, user, fail_early=False, translator=None): |
|
1426 | 1435 | # if migrated to pyramid... |
|
1427 | 1436 | # _ = lambda: translator or _ # use passed in translator if any |
|
1428 | 1437 | |
|
1429 | 1438 | merge_check = cls() |
|
1430 | 1439 | |
|
1431 | 1440 | # permissions to merge |
|
1432 | 1441 | user_allowed_to_merge = PullRequestModel().check_user_merge( |
|
1433 | 1442 | pull_request, user) |
|
1434 | 1443 | if not user_allowed_to_merge: |
|
1435 | 1444 | log.debug("MergeCheck: cannot merge, user is not allowed to merge.")
|
1436 | 1445 | |
|
1437 | 1446 | msg = _('User `{}` not allowed to perform merge.').format(user.username) |
|
1438 | 1447 | merge_check.push_error('error', msg, cls.PERM_CHECK, user.username) |
|
1439 | 1448 | if fail_early: |
|
1440 | 1449 | return merge_check |
|
1441 | 1450 | |
|
1442 | 1451 | # review status, must be always present |
|
1443 | 1452 | review_status = pull_request.calculated_review_status() |
|
1444 | 1453 | merge_check.review_status = review_status |
|
1445 | 1454 | |
|
1446 | 1455 | status_approved = review_status == ChangesetStatus.STATUS_APPROVED |
|
1447 | 1456 | if not status_approved: |
|
1448 | 1457 | log.debug("MergeCheck: cannot merge, approval is pending.") |
|
1449 | 1458 | |
|
1450 | 1459 | msg = _('Pull request reviewer approval is pending.') |
|
1451 | 1460 | |
|
1452 | 1461 | merge_check.push_error( |
|
1453 | 1462 | 'warning', msg, cls.REVIEW_CHECK, review_status) |
|
1454 | 1463 | |
|
1455 | 1464 | if fail_early: |
|
1456 | 1465 | return merge_check |
|
1457 | 1466 | |
|
1458 | 1467 | # left over TODOs |
|
1459 | 1468 | todos = CommentsModel().get_unresolved_todos(pull_request) |
|
1460 | 1469 | if todos: |
|
1461 | 1470 | log.debug("MergeCheck: cannot merge, {} " |
|
1462 | 1471 | "unresolved todos left.".format(len(todos))) |
|
1463 | 1472 | |
|
1464 | 1473 | if len(todos) == 1: |
|
1465 | 1474 | msg = _('Cannot merge, {} TODO still not resolved.').format( |
|
1466 | 1475 | len(todos)) |
|
1467 | 1476 | else: |
|
1468 | 1477 | msg = _('Cannot merge, {} TODOs still not resolved.').format( |
|
1469 | 1478 | len(todos)) |
|
1470 | 1479 | |
|
1471 | 1480 | merge_check.push_error('warning', msg, cls.TODO_CHECK, todos) |
|
1472 | 1481 | |
|
1473 | 1482 | if fail_early: |
|
1474 | 1483 | return merge_check |
|
1475 | 1484 | |
|
1476 | 1485 | # merge possible |
|
1477 | 1486 | merge_status, msg = PullRequestModel().merge_status(pull_request) |
|
1478 | 1487 | merge_check.merge_possible = merge_status |
|
1479 | 1488 | merge_check.merge_msg = msg |
|
1480 | 1489 | if not merge_status: |
|
1481 | 1490 | log.debug( |
|
1482 | 1491 | "MergeCheck: cannot merge, pull request merge not possible.") |
|
1483 | 1492 | merge_check.push_error('warning', msg, cls.MERGE_CHECK, None) |
|
1484 | 1493 | |
|
1485 | 1494 | if fail_early: |
|
1486 | 1495 | return merge_check |
|
1487 | 1496 | |
|
1488 | 1497 | return merge_check |
|
1489 | 1498 | |
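A hedged usage sketch of MergeCheck, assuming pull_request and user objects are already loaded; with fail_early=True the validation stops at the first failed check, as in the code above:

    check = MergeCheck.validate(pull_request, user, fail_early=True)
    if check.failed:
        for error_type, message in check.errors:
            log.info('merge blocked (%s): %s', error_type, message)
    else:
        log.info('merge possible: %s, review status: %s',
                 check.merge_possible, check.review_status)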
|
1490 | 1499 | |
|
1491 | 1500 | ChangeTuple = namedtuple('ChangeTuple', |
|
1492 | 1501 | ['added', 'common', 'removed', 'total']) |
|
1493 | 1502 | |
|
1494 | 1503 | FileChangeTuple = namedtuple('FileChangeTuple', |
|
1495 | 1504 | ['added', 'modified', 'removed']) |
@@ -1,1003 +1,1023 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | Repository model for rhodecode |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import logging |
|
26 | 26 | import os |
|
27 | 27 | import re |
|
28 | 28 | import shutil |
|
29 | 29 | import time |
|
30 | 30 | import traceback |
|
31 | 31 | from datetime import datetime, timedelta |
|
32 | 32 | |
|
33 | 33 | from pyramid.threadlocal import get_current_request |
|
34 | 34 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
35 | 35 | |
|
36 | 36 | from rhodecode import events |
|
37 | 37 | from rhodecode.lib import helpers as h |
|
38 | 38 | from rhodecode.lib.auth import HasUserGroupPermissionAny |
|
39 | 39 | from rhodecode.lib.caching_query import FromCache |
|
40 | 40 | from rhodecode.lib.exceptions import AttachedForksError |
|
41 | 41 | from rhodecode.lib.hooks_base import log_delete_repository |
|
42 | 42 | from rhodecode.lib.utils import make_db_config |
|
43 | 43 | from rhodecode.lib.utils2 import ( |
|
44 | 44 | safe_str, safe_unicode, remove_prefix, obfuscate_url_pw, |
|
45 | 45 | get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic) |
|
46 | 46 | from rhodecode.lib.vcs.backends import get_backend |
|
47 | 47 | from rhodecode.model import BaseModel |
|
48 | 48 | from rhodecode.model.db import (_hash_key, |
|
49 | 49 | Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm, |
|
50 | 50 | UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup, |
|
51 | 51 | RepoGroup, RepositoryField) |
|
52 | 52 | |
|
53 | 53 | from rhodecode.model.settings import VcsSettingsModel |
|
54 | 54 | |
|
55 | 55 | |
|
56 | 56 | log = logging.getLogger(__name__) |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | class RepoModel(BaseModel): |
|
60 | 60 | |
|
61 | 61 | cls = Repository |
|
62 | 62 | |
|
63 | 63 | def _get_user_group(self, users_group): |
|
64 | 64 | return self._get_instance(UserGroup, users_group, |
|
65 | 65 | callback=UserGroup.get_by_group_name) |
|
66 | 66 | |
|
67 | 67 | def _get_repo_group(self, repo_group): |
|
68 | 68 | return self._get_instance(RepoGroup, repo_group, |
|
69 | 69 | callback=RepoGroup.get_by_group_name) |
|
70 | 70 | |
|
71 | 71 | def _create_default_perms(self, repository, private): |
|
72 | 72 | # create default permission |
|
73 | 73 | default = 'repository.read' |
|
74 | 74 | def_user = User.get_default_user() |
|
75 | 75 | for p in def_user.user_perms: |
|
76 | 76 | if p.permission.permission_name.startswith('repository.'): |
|
77 | 77 | default = p.permission.permission_name |
|
78 | 78 | break |
|
79 | 79 | |
|
80 | 80 | default_perm = 'repository.none' if private else default |
|
81 | 81 | |
|
82 | 82 | repo_to_perm = UserRepoToPerm() |
|
83 | 83 | repo_to_perm.permission = Permission.get_by_key(default_perm) |
|
84 | 84 | |
|
85 | 85 | repo_to_perm.repository = repository |
|
86 | 86 | repo_to_perm.user_id = def_user.user_id |
|
87 | 87 | |
|
88 | 88 | return repo_to_perm |
|
89 | 89 | |
|
90 | 90 | @LazyProperty |
|
91 | 91 | def repos_path(self): |
|
92 | 92 | """ |
|
93 | 93 | Gets the repositories root path from database |
|
94 | 94 | """ |
|
95 | 95 | settings_model = VcsSettingsModel(sa=self.sa) |
|
96 | 96 | return settings_model.get_repos_location() |
|
97 | 97 | |
|
98 | 98 | def get(self, repo_id, cache=False): |
|
99 | 99 | repo = self.sa.query(Repository) \ |
|
100 | 100 | .filter(Repository.repo_id == repo_id) |
|
101 | 101 | |
|
102 | 102 | if cache: |
|
103 | 103 | repo = repo.options( |
|
104 | 104 | FromCache("sql_cache_short", "get_repo_%s" % repo_id)) |
|
105 | 105 | return repo.scalar() |
|
106 | 106 | |
|
107 | 107 | def get_repo(self, repository): |
|
108 | 108 | return self._get_repo(repository) |
|
109 | 109 | |
|
110 | 110 | def get_by_repo_name(self, repo_name, cache=False): |
|
111 | 111 | repo = self.sa.query(Repository) \ |
|
112 | 112 | .filter(Repository.repo_name == repo_name) |
|
113 | 113 | |
|
114 | 114 | if cache: |
|
115 | 115 | name_key = _hash_key(repo_name) |
|
116 | 116 | repo = repo.options( |
|
117 | 117 | FromCache("sql_cache_short", "get_repo_%s" % name_key)) |
|
118 | 118 | return repo.scalar() |
|
119 | 119 | |
|
120 | 120 | def _extract_id_from_repo_name(self, repo_name): |
|
121 | 121 | if repo_name.startswith('/'): |
|
122 | 122 | repo_name = repo_name.lstrip('/') |
|
123 | 123 | by_id_match = re.match(r'^_(\d{1,})', repo_name) |
|
124 | 124 | if by_id_match: |
|
125 | 125 | return by_id_match.groups()[0] |
|
126 | 126 | |
|
127 | 127 | def get_repo_by_id(self, repo_name): |
|
128 | 128 | """ |
|
129 | 129 | Extracts repo_name by id from special urls. |
|
130 | 130 | Example url is _11/repo_name |
|
131 | 131 | |
|
132 | 132 | :param repo_name: |
|
133 | 133 | :return: repo object if matched else None |
|
134 | 134 | """ |
|
135 | 135 | |
|
136 | 136 | try: |
|
137 | 137 | _repo_id = self._extract_id_from_repo_name(repo_name) |
|
138 | 138 | if _repo_id: |
|
139 | 139 | return self.get(_repo_id) |
|
140 | 140 | except Exception: |
|
141 | 141 | log.exception('Failed to extract repo_name from URL') |
|
142 | 142 | |
|
143 | 143 | return None |
|
144 | 144 | |
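A short sketch of the id-based lookup above: names starting with _<number> are resolved by numeric repo_id regardless of the rest of the URL (example values are hypothetical):

    model = RepoModel()
    model.get_repo_by_id('_11/any/old/name')   # matches '_11', looks up the repository with repo_id 11
    model.get_repo_by_id('group/normal-name')  # no leading _<id>, returns None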
|
145 | 145 | def get_repos_for_root(self, root, traverse=False): |
|
146 | 146 | if traverse: |
|
147 | 147 | like_expression = u'{}%'.format(safe_unicode(root)) |
|
148 | 148 | repos = Repository.query().filter( |
|
149 | 149 | Repository.repo_name.like(like_expression)).all() |
|
150 | 150 | else: |
|
151 | 151 | if root and not isinstance(root, RepoGroup): |
|
152 | 152 | raise ValueError( |
|
153 | 153 | 'Root must be an instance ' |
|
154 | 154 | 'of RepoGroup, got:{} instead'.format(type(root))) |
|
155 | 155 | repos = Repository.query().filter(Repository.group == root).all() |
|
156 | 156 | return repos |
|
157 | 157 | |
|
158 | def get_url(self, repo, request=None): | |
|
158 | def get_url(self, repo, request=None, permalink=False): | |
|
159 | 159 | if not request: |
|
160 | 160 | request = get_current_request() |
|
161 | return request.route_url('repo_summary', repo_name=safe_str(repo.repo_name)) | |
|
161 | ||
|
162 | if permalink: | |
|
163 | return request.route_url( | |
|
164 | 'repo_summary', repo_name=safe_str(repo.repo_id)) | |
|
165 | else: | |
|
166 | return request.route_url( | |
|
167 | 'repo_summary', repo_name=safe_str(repo.repo_name)) | |
|
168 | ||
|
169 | def get_commit_url(self, repo, commit_id, request=None, permalink=False): | |
|
170 | if not request: | |
|
171 | request = get_current_request() | |
|
172 | ||
|
173 | if permalink: | |
|
174 | return request.route_url( | |
|
175 | 'repo_commit', repo_name=safe_str(repo.repo_id), | |
|
176 | commit_id=commit_id) | |
|
177 | ||
|
178 | else: | |
|
179 | return request.route_url( | |
|
180 | 'repo_commit', repo_name=safe_str(repo.repo_name), | |
|
181 | commit_id=commit_id) | |
|
162 | 182 | |
|
163 | 183 | @classmethod |
|
164 | 184 | def update_repoinfo(cls, repositories=None): |
|
165 | 185 | if not repositories: |
|
166 | 186 | repositories = Repository.getAll() |
|
167 | 187 | for repo in repositories: |
|
168 | 188 | repo.update_commit_cache() |
|
169 | 189 | |
|
170 | 190 | def get_repos_as_dict(self, repo_list=None, admin=False, |
|
171 | 191 | super_user_actions=False): |
|
172 | 192 | |
|
173 | 193 | from rhodecode.lib.utils import PartialRenderer |
|
174 | 194 | _render = PartialRenderer('data_table/_dt_elements.mako') |
|
175 | 195 | c = _render.c |
|
176 | 196 | |
|
177 | 197 | def quick_menu(repo_name): |
|
178 | 198 | return _render('quick_menu', repo_name) |
|
179 | 199 | |
|
180 | 200 | def repo_lnk(name, rtype, rstate, private, fork_of): |
|
181 | 201 | return _render('repo_name', name, rtype, rstate, private, fork_of, |
|
182 | 202 | short_name=not admin, admin=False) |
|
183 | 203 | |
|
184 | 204 | def last_change(last_change): |
|
185 | 205 | if admin and isinstance(last_change, datetime) and not last_change.tzinfo: |
|
186 | 206 | last_change = last_change + timedelta(seconds= |
|
187 | 207 | (datetime.now() - datetime.utcnow()).seconds) |
|
188 | 208 | return _render("last_change", last_change) |
|
189 | 209 | |
|
190 | 210 | def rss_lnk(repo_name): |
|
191 | 211 | return _render("rss", repo_name) |
|
192 | 212 | |
|
193 | 213 | def atom_lnk(repo_name): |
|
194 | 214 | return _render("atom", repo_name) |
|
195 | 215 | |
|
196 | 216 | def last_rev(repo_name, cs_cache): |
|
197 | 217 | return _render('revision', repo_name, cs_cache.get('revision'), |
|
198 | 218 | cs_cache.get('raw_id'), cs_cache.get('author'), |
|
199 | 219 | cs_cache.get('message')) |
|
200 | 220 | |
|
201 | 221 | def desc(desc): |
|
202 | 222 | if c.visual.stylify_metatags: |
|
203 | 223 | desc = h.urlify_text(h.escaped_stylize(desc)) |
|
204 | 224 | else: |
|
205 | 225 | desc = h.urlify_text(h.html_escape(desc)) |
|
206 | 226 | |
|
207 | 227 | return _render('repo_desc', desc) |
|
208 | 228 | |
|
209 | 229 | def state(repo_state): |
|
210 | 230 | return _render("repo_state", repo_state) |
|
211 | 231 | |
|
212 | 232 | def repo_actions(repo_name): |
|
213 | 233 | return _render('repo_actions', repo_name, super_user_actions) |
|
214 | 234 | |
|
215 | 235 | def user_profile(username): |
|
216 | 236 | return _render('user_profile', username) |
|
217 | 237 | |
|
218 | 238 | repos_data = [] |
|
219 | 239 | for repo in repo_list: |
|
220 | 240 | cs_cache = repo.changeset_cache |
|
221 | 241 | row = { |
|
222 | 242 | "menu": quick_menu(repo.repo_name), |
|
223 | 243 | |
|
224 | 244 | "name": repo_lnk(repo.repo_name, repo.repo_type, |
|
225 | 245 | repo.repo_state, repo.private, repo.fork), |
|
226 | 246 | "name_raw": repo.repo_name.lower(), |
|
227 | 247 | |
|
228 | 248 | "last_change": last_change(repo.last_db_change), |
|
229 | 249 | "last_change_raw": datetime_to_time(repo.last_db_change), |
|
230 | 250 | |
|
231 | 251 | "last_changeset": last_rev(repo.repo_name, cs_cache), |
|
232 | 252 | "last_changeset_raw": cs_cache.get('revision'), |
|
233 | 253 | |
|
234 | 254 | "desc": desc(repo.description), |
|
235 | 255 | "owner": user_profile(repo.user.username), |
|
236 | 256 | |
|
237 | 257 | "state": state(repo.repo_state), |
|
238 | 258 | "rss": rss_lnk(repo.repo_name), |
|
239 | 259 | |
|
240 | 260 | "atom": atom_lnk(repo.repo_name), |
|
241 | 261 | } |
|
242 | 262 | if admin: |
|
243 | 263 | row.update({ |
|
244 | 264 | "action": repo_actions(repo.repo_name), |
|
245 | 265 | }) |
|
246 | 266 | repos_data.append(row) |
|
247 | 267 | |
|
248 | 268 | return repos_data |
|
249 | 269 | |
|
250 | 270 | def _get_defaults(self, repo_name): |
|
251 | 271 | """ |
|
252 | 272 | Gets information about the repository and returns a dict for
|
253 | 273 | usage in forms |
|
254 | 274 | |
|
255 | 275 | :param repo_name: |
|
256 | 276 | """ |
|
257 | 277 | |
|
258 | 278 | repo_info = Repository.get_by_repo_name(repo_name) |
|
259 | 279 | |
|
260 | 280 | if repo_info is None: |
|
261 | 281 | return None |
|
262 | 282 | |
|
263 | 283 | defaults = repo_info.get_dict() |
|
264 | 284 | defaults['repo_name'] = repo_info.just_name |
|
265 | 285 | |
|
266 | 286 | groups = repo_info.groups_with_parents |
|
267 | 287 | parent_group = groups[-1] if groups else None |
|
268 | 288 | |
|
269 | 289 | # we use -1 as this is how we mark an empty group in HTML
|
270 | 290 | defaults['repo_group'] = getattr(parent_group, 'group_id', -1) |
|
271 | 291 | |
|
272 | 292 | keys_to_process = ( |
|
273 | 293 | {'k': 'repo_type', 'strip': False}, |
|
274 | 294 | {'k': 'repo_enable_downloads', 'strip': True}, |
|
275 | 295 | {'k': 'repo_description', 'strip': True}, |
|
276 | 296 | {'k': 'repo_enable_locking', 'strip': True}, |
|
277 | 297 | {'k': 'repo_landing_rev', 'strip': True}, |
|
278 | 298 | {'k': 'clone_uri', 'strip': False}, |
|
279 | 299 | {'k': 'repo_private', 'strip': True}, |
|
280 | 300 | {'k': 'repo_enable_statistics', 'strip': True} |
|
281 | 301 | ) |
|
282 | 302 | |
|
283 | 303 | for item in keys_to_process: |
|
284 | 304 | attr = item['k'] |
|
285 | 305 | if item['strip']: |
|
286 | 306 | attr = remove_prefix(item['k'], 'repo_') |
|
287 | 307 | |
|
288 | 308 | val = defaults[attr] |
|
289 | 309 | if item['k'] == 'repo_landing_rev': |
|
290 | 310 | val = ':'.join(defaults[attr]) |
|
291 | 311 | defaults[item['k']] = val |
|
292 | 312 | if item['k'] == 'clone_uri': |
|
293 | 313 | defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden |
|
294 | 314 | |
|
295 | 315 | # fill owner |
|
296 | 316 | if repo_info.user: |
|
297 | 317 | defaults.update({'user': repo_info.user.username}) |
|
298 | 318 | else: |
|
299 | 319 | replacement_user = User.get_first_super_admin().username |
|
300 | 320 | defaults.update({'user': replacement_user}) |
|
301 | 321 | |
|
302 | 322 | return defaults |
|
303 | 323 | |
|
304 | 324 | def update(self, repo, **kwargs): |
|
305 | 325 | try: |
|
306 | 326 | cur_repo = self._get_repo(repo) |
|
307 | 327 | source_repo_name = cur_repo.repo_name |
|
308 | 328 | if 'user' in kwargs: |
|
309 | 329 | cur_repo.user = User.get_by_username(kwargs['user']) |
|
310 | 330 | |
|
311 | 331 | if 'repo_group' in kwargs: |
|
312 | 332 | cur_repo.group = RepoGroup.get(kwargs['repo_group']) |
|
313 | 333 | log.debug('Updating repo %s with params:%s', cur_repo, kwargs) |
|
314 | 334 | |
|
315 | 335 | update_keys = [ |
|
316 | 336 | (1, 'repo_description'), |
|
317 | 337 | (1, 'repo_landing_rev'), |
|
318 | 338 | (1, 'repo_private'), |
|
319 | 339 | (1, 'repo_enable_downloads'), |
|
320 | 340 | (1, 'repo_enable_locking'), |
|
321 | 341 | (1, 'repo_enable_statistics'), |
|
322 | 342 | (0, 'clone_uri'), |
|
323 | 343 | (0, 'fork_id') |
|
324 | 344 | ] |
|
325 | 345 | for strip, k in update_keys: |
|
326 | 346 | if k in kwargs: |
|
327 | 347 | val = kwargs[k] |
|
328 | 348 | if strip: |
|
329 | 349 | k = remove_prefix(k, 'repo_') |
|
330 | 350 | |
|
331 | 351 | setattr(cur_repo, k, val) |
|
332 | 352 | |
|
333 | 353 | new_name = cur_repo.get_new_name(kwargs['repo_name']) |
|
334 | 354 | cur_repo.repo_name = new_name |
|
335 | 355 | |
|
336 | 356 | # if private flag is set, reset default permission to NONE |
|
337 | 357 | if kwargs.get('repo_private'): |
|
338 | 358 | EMPTY_PERM = 'repository.none' |
|
339 | 359 | RepoModel().grant_user_permission( |
|
340 | 360 | repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM |
|
341 | 361 | ) |
|
342 | 362 | |
|
343 | 363 | # handle extra fields |
|
344 | 364 | for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), |
|
345 | 365 | kwargs): |
|
346 | 366 | k = RepositoryField.un_prefix_key(field) |
|
347 | 367 | ex_field = RepositoryField.get_by_key_name( |
|
348 | 368 | key=k, repo=cur_repo) |
|
349 | 369 | if ex_field: |
|
350 | 370 | ex_field.field_value = kwargs[field] |
|
351 | 371 | self.sa.add(ex_field) |
|
352 | 372 | self.sa.add(cur_repo) |
|
353 | 373 | |
|
354 | 374 | if source_repo_name != new_name: |
|
355 | 375 | # rename repository |
|
356 | 376 | self._rename_filesystem_repo( |
|
357 | 377 | old=source_repo_name, new=new_name) |
|
358 | 378 | |
|
359 | 379 | return cur_repo |
|
360 | 380 | except Exception: |
|
361 | 381 | log.error(traceback.format_exc()) |
|
362 | 382 | raise |
|
363 | 383 | |
|
364 | 384 | def _create_repo(self, repo_name, repo_type, description, owner, |
|
365 | 385 | private=False, clone_uri=None, repo_group=None, |
|
366 | 386 | landing_rev='rev:tip', fork_of=None, |
|
367 | 387 | copy_fork_permissions=False, enable_statistics=False, |
|
368 | 388 | enable_locking=False, enable_downloads=False, |
|
369 | 389 | copy_group_permissions=False, |
|
370 | 390 | state=Repository.STATE_PENDING): |
|
371 | 391 | """ |
|
372 | 392 | Create a repository inside the database with PENDING state. This should

373 | 393 | only be executed by create(), with the exception of importing existing

374 | 394 | repos
|
375 | 395 | """ |
|
376 | 396 | from rhodecode.model.scm import ScmModel |
|
377 | 397 | |
|
378 | 398 | owner = self._get_user(owner) |
|
379 | 399 | fork_of = self._get_repo(fork_of) |
|
380 | 400 | repo_group = self._get_repo_group(safe_int(repo_group)) |
|
381 | 401 | |
|
382 | 402 | try: |
|
383 | 403 | repo_name = safe_unicode(repo_name) |
|
384 | 404 | description = safe_unicode(description) |
|
385 | 405 | # repo name is just a name of repository |
|
386 | 406 | # while repo_name_full is a full qualified name that is combined |
|
387 | 407 | # with name and path of group |
|
388 | 408 | repo_name_full = repo_name |
|
389 | 409 | repo_name = repo_name.split(Repository.NAME_SEP)[-1] |
|
390 | 410 | |
|
391 | 411 | new_repo = Repository() |
|
392 | 412 | new_repo.repo_state = state |
|
393 | 413 | new_repo.enable_statistics = False |
|
394 | 414 | new_repo.repo_name = repo_name_full |
|
395 | 415 | new_repo.repo_type = repo_type |
|
396 | 416 | new_repo.user = owner |
|
397 | 417 | new_repo.group = repo_group |
|
398 | 418 | new_repo.description = description or repo_name |
|
399 | 419 | new_repo.private = private |
|
400 | 420 | new_repo.clone_uri = clone_uri |
|
401 | 421 | new_repo.landing_rev = landing_rev |
|
402 | 422 | |
|
403 | 423 | new_repo.enable_statistics = enable_statistics |
|
404 | 424 | new_repo.enable_locking = enable_locking |
|
405 | 425 | new_repo.enable_downloads = enable_downloads |
|
406 | 426 | |
|
407 | 427 | if repo_group: |
|
408 | 428 | new_repo.enable_locking = repo_group.enable_locking |
|
409 | 429 | |
|
410 | 430 | if fork_of: |
|
411 | 431 | parent_repo = fork_of |
|
412 | 432 | new_repo.fork = parent_repo |
|
413 | 433 | |
|
414 | 434 | events.trigger(events.RepoPreCreateEvent(new_repo)) |
|
415 | 435 | |
|
416 | 436 | self.sa.add(new_repo) |
|
417 | 437 | |
|
418 | 438 | EMPTY_PERM = 'repository.none' |
|
419 | 439 | if fork_of and copy_fork_permissions: |
|
420 | 440 | repo = fork_of |
|
421 | 441 | user_perms = UserRepoToPerm.query() \ |
|
422 | 442 | .filter(UserRepoToPerm.repository == repo).all() |
|
423 | 443 | group_perms = UserGroupRepoToPerm.query() \ |
|
424 | 444 | .filter(UserGroupRepoToPerm.repository == repo).all() |
|
425 | 445 | |
|
426 | 446 | for perm in user_perms: |
|
427 | 447 | UserRepoToPerm.create( |
|
428 | 448 | perm.user, new_repo, perm.permission) |
|
429 | 449 | |
|
430 | 450 | for perm in group_perms: |
|
431 | 451 | UserGroupRepoToPerm.create( |
|
432 | 452 | perm.users_group, new_repo, perm.permission) |
|
433 | 453 | # in case we copy permissions and also set this repo to private |
|
434 | 454 | # override the default user permission to make it a private |
|
435 | 455 | # repo |
|
436 | 456 | if private: |
|
437 | 457 | RepoModel(self.sa).grant_user_permission( |
|
438 | 458 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) |
|
439 | 459 | |
|
440 | 460 | elif repo_group and copy_group_permissions: |
|
441 | 461 | user_perms = UserRepoGroupToPerm.query() \ |
|
442 | 462 | .filter(UserRepoGroupToPerm.group == repo_group).all() |
|
443 | 463 | |
|
444 | 464 | group_perms = UserGroupRepoGroupToPerm.query() \ |
|
445 | 465 | .filter(UserGroupRepoGroupToPerm.group == repo_group).all() |
|
446 | 466 | |
|
447 | 467 | for perm in user_perms: |
|
448 | 468 | perm_name = perm.permission.permission_name.replace( |
|
449 | 469 | 'group.', 'repository.') |
|
450 | 470 | perm_obj = Permission.get_by_key(perm_name) |
|
451 | 471 | UserRepoToPerm.create(perm.user, new_repo, perm_obj) |
|
452 | 472 | |
|
453 | 473 | for perm in group_perms: |
|
454 | 474 | perm_name = perm.permission.permission_name.replace( |
|
455 | 475 | 'group.', 'repository.') |
|
456 | 476 | perm_obj = Permission.get_by_key(perm_name) |
|
457 | 477 | UserGroupRepoToPerm.create( |
|
458 | 478 | perm.users_group, new_repo, perm_obj) |
|
459 | 479 | |
|
460 | 480 | if private: |
|
461 | 481 | RepoModel(self.sa).grant_user_permission( |
|
462 | 482 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) |
|
463 | 483 | |
|
464 | 484 | else: |
|
465 | 485 | perm_obj = self._create_default_perms(new_repo, private) |
|
466 | 486 | self.sa.add(perm_obj) |
|
467 | 487 | |
|
468 | 488 | # now automatically start following this repository as owner |
|
469 | 489 | ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, |
|
470 | 490 | owner.user_id) |
|
471 | 491 | |
|
472 | 492 | # we need to flush here, in order to check if database won't |
|
473 | 493 | # throw any exceptions, create filesystem dirs at the very end |
|
474 | 494 | self.sa.flush() |
|
475 | 495 | events.trigger(events.RepoCreateEvent(new_repo)) |
|
476 | 496 | return new_repo |
|
477 | 497 | |
|
478 | 498 | except Exception: |
|
479 | 499 | log.error(traceback.format_exc()) |
|
480 | 500 | raise |
|
481 | 501 | |
|
482 | 502 | def create(self, form_data, cur_user): |
|
483 | 503 | """ |
|
484 | 504 | Create repository using celery tasks |
|
485 | 505 | |
|
486 | 506 | :param form_data: |
|
487 | 507 | :param cur_user: |
|
488 | 508 | """ |
|
489 | 509 | from rhodecode.lib.celerylib import tasks, run_task |
|
490 | 510 | return run_task(tasks.create_repo, form_data, cur_user) |
|
491 | 511 | |
|
492 | 512 | def update_permissions(self, repo, perm_additions=None, perm_updates=None, |
|
493 | 513 | perm_deletions=None, check_perms=True, |
|
494 | 514 | cur_user=None): |
|
495 | 515 | if not perm_additions: |
|
496 | 516 | perm_additions = [] |
|
497 | 517 | if not perm_updates: |
|
498 | 518 | perm_updates = [] |
|
499 | 519 | if not perm_deletions: |
|
500 | 520 | perm_deletions = [] |
|
501 | 521 | |
|
502 | 522 | req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin') |
|
503 | 523 | |
|
504 | 524 | changes = { |
|
505 | 525 | 'added': [], |
|
506 | 526 | 'updated': [], |
|
507 | 527 | 'deleted': [] |
|
508 | 528 | } |
|
509 | 529 | # update permissions |
|
510 | 530 | for member_id, perm, member_type in perm_updates: |
|
511 | 531 | member_id = int(member_id) |
|
512 | 532 | if member_type == 'user': |
|
513 | 533 | member_name = User.get(member_id).username |
|
514 | 534 | # this updates also current one if found |
|
515 | 535 | self.grant_user_permission( |
|
516 | 536 | repo=repo, user=member_id, perm=perm) |
|
517 | 537 | else: # set for user group |
|
518 | 538 | # check if we have permissions to alter this usergroup |
|
519 | 539 | member_name = UserGroup.get(member_id).users_group_name |
|
520 | 540 | if not check_perms or HasUserGroupPermissionAny( |
|
521 | 541 | *req_perms)(member_name, user=cur_user): |
|
522 | 542 | self.grant_user_group_permission( |
|
523 | 543 | repo=repo, group_name=member_id, perm=perm) |
|
524 | 544 | |
|
525 | 545 | changes['updated'].append({'type': member_type, 'id': member_id, |
|
526 | 546 | 'name': member_name, 'new_perm': perm}) |
|
527 | 547 | |
|
528 | 548 | # set new permissions |
|
529 | 549 | for member_id, perm, member_type in perm_additions: |
|
530 | 550 | member_id = int(member_id) |
|
531 | 551 | if member_type == 'user': |
|
532 | 552 | member_name = User.get(member_id).username |
|
533 | 553 | self.grant_user_permission( |
|
534 | 554 | repo=repo, user=member_id, perm=perm) |
|
535 | 555 | else: # set for user group |
|
536 | 556 | # check if we have permissions to alter this usergroup |
|
537 | 557 | member_name = UserGroup.get(member_id).users_group_name |
|
538 | 558 | if not check_perms or HasUserGroupPermissionAny( |
|
539 | 559 | *req_perms)(member_name, user=cur_user): |
|
540 | 560 | self.grant_user_group_permission( |
|
541 | 561 | repo=repo, group_name=member_id, perm=perm) |
|
542 | 562 | changes['added'].append({'type': member_type, 'id': member_id, |
|
543 | 563 | 'name': member_name, 'new_perm': perm}) |
|
544 | 564 | # delete permissions |
|
545 | 565 | for member_id, perm, member_type in perm_deletions: |
|
546 | 566 | member_id = int(member_id) |
|
547 | 567 | if member_type == 'user': |
|
548 | 568 | member_name = User.get(member_id).username |
|
549 | 569 | self.revoke_user_permission(repo=repo, user=member_id) |
|
550 | 570 | else: # set for user group |
|
551 | 571 | # check if we have permissions to alter this usergroup |
|
552 | 572 | member_name = UserGroup.get(member_id).users_group_name |
|
553 | 573 | if not check_perms or HasUserGroupPermissionAny( |
|
554 | 574 | *req_perms)(member_name, user=cur_user): |
|
555 | 575 | self.revoke_user_group_permission( |
|
556 | 576 | repo=repo, group_name=member_id) |
|
557 | 577 | |
|
558 | 578 | changes['deleted'].append({'type': member_type, 'id': member_id, |
|
559 | 579 | 'name': member_name, 'new_perm': perm}) |
|
560 | 580 | return changes |
|
561 | 581 | |
|
562 | 582 | def create_fork(self, form_data, cur_user): |
|
563 | 583 | """ |
|
564 | 584 | Simple wrapper for executing the celery task for fork creation
|
565 | 585 | |
|
566 | 586 | :param form_data: |
|
567 | 587 | :param cur_user: |
|
568 | 588 | """ |
|
569 | 589 | from rhodecode.lib.celerylib import tasks, run_task |
|
570 | 590 | return run_task(tasks.create_repo_fork, form_data, cur_user) |
|
571 | 591 | |
|
572 | 592 | def delete(self, repo, forks=None, fs_remove=True, cur_user=None): |
|
573 | 593 | """ |
|
574 | 594 | Delete the given repository; the forks parameter defines what to do with
|
575 | 595 | attached forks. Throws AttachedForksError if deleted repo has attached |
|
576 | 596 | forks |
|
577 | 597 | |
|
578 | 598 | :param repo: |
|
579 | 599 | :param forks: str 'delete' or 'detach' |
|
580 | 600 | :param fs_remove: remove(archive) repo from filesystem |
|
581 | 601 | """ |
|
582 | 602 | if not cur_user: |
|
583 | 603 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) |
|
584 | 604 | repo = self._get_repo(repo) |
|
585 | 605 | if repo: |
|
586 | 606 | if forks == 'detach': |
|
587 | 607 | for r in repo.forks: |
|
588 | 608 | r.fork = None |
|
589 | 609 | self.sa.add(r) |
|
590 | 610 | elif forks == 'delete': |
|
591 | 611 | for r in repo.forks: |
|
592 | 612 | self.delete(r, forks='delete') |
|
593 | 613 | elif [f for f in repo.forks]: |
|
594 | 614 | raise AttachedForksError() |
|
595 | 615 | |
|
596 | 616 | old_repo_dict = repo.get_dict() |
|
597 | 617 | events.trigger(events.RepoPreDeleteEvent(repo)) |
|
598 | 618 | try: |
|
599 | 619 | self.sa.delete(repo) |
|
600 | 620 | if fs_remove: |
|
601 | 621 | self._delete_filesystem_repo(repo) |
|
602 | 622 | else: |
|
603 | 623 | log.debug('skipping removal from filesystem') |
|
604 | 624 | old_repo_dict.update({ |
|
605 | 625 | 'deleted_by': cur_user, |
|
606 | 626 | 'deleted_on': time.time(), |
|
607 | 627 | }) |
|
608 | 628 | log_delete_repository(**old_repo_dict) |
|
609 | 629 | events.trigger(events.RepoDeleteEvent(repo)) |
|
610 | 630 | except Exception: |
|
611 | 631 | log.error(traceback.format_exc()) |
|
612 | 632 | raise |
|
613 | 633 | |
|
614 | 634 | def grant_user_permission(self, repo, user, perm): |
|
615 | 635 | """ |
|
616 | 636 | Grant permission for user on given repository, or update existing one |
|
617 | 637 | if found |
|
618 | 638 | |
|
619 | 639 | :param repo: Instance of Repository, repository_id, or repository name |
|
620 | 640 | :param user: Instance of User, user_id or username |
|
621 | 641 | :param perm: Instance of Permission, or permission_name |
|
622 | 642 | """ |
|
623 | 643 | user = self._get_user(user) |
|
624 | 644 | repo = self._get_repo(repo) |
|
625 | 645 | permission = self._get_perm(perm) |
|
626 | 646 | |
|
627 | 647 | # check if we have that permission already |
|
628 | 648 | obj = self.sa.query(UserRepoToPerm) \ |
|
629 | 649 | .filter(UserRepoToPerm.user == user) \ |
|
630 | 650 | .filter(UserRepoToPerm.repository == repo) \ |
|
631 | 651 | .scalar() |
|
632 | 652 | if obj is None: |
|
633 | 653 | # create new ! |
|
634 | 654 | obj = UserRepoToPerm() |
|
635 | 655 | obj.repository = repo |
|
636 | 656 | obj.user = user |
|
637 | 657 | obj.permission = permission |
|
638 | 658 | self.sa.add(obj) |
|
639 | 659 | log.debug('Granted perm %s to %s on %s', perm, user, repo) |
|
640 | 660 | action_logger_generic( |
|
641 | 661 | 'granted permission: {} to user: {} on repo: {}'.format( |
|
642 | 662 | perm, user, repo), namespace='security.repo') |
|
643 | 663 | return obj |
|
644 | 664 | |
|
645 | 665 | def revoke_user_permission(self, repo, user): |
|
646 | 666 | """ |
|
647 | 667 | Revoke permission for user on given repository |
|
648 | 668 | |
|
649 | 669 | :param repo: Instance of Repository, repository_id, or repository name |
|
650 | 670 | :param user: Instance of User, user_id or username |
|
651 | 671 | """ |
|
652 | 672 | |
|
653 | 673 | user = self._get_user(user) |
|
654 | 674 | repo = self._get_repo(repo) |
|
655 | 675 | |
|
656 | 676 | obj = self.sa.query(UserRepoToPerm) \ |
|
657 | 677 | .filter(UserRepoToPerm.repository == repo) \ |
|
658 | 678 | .filter(UserRepoToPerm.user == user) \ |
|
659 | 679 | .scalar() |
|
660 | 680 | if obj: |
|
661 | 681 | self.sa.delete(obj) |
|
662 | 682 | log.debug('Revoked perm on %s on %s', repo, user) |
|
663 | 683 | action_logger_generic( |
|
664 | 684 | 'revoked permission from user: {} on repo: {}'.format( |
|
665 | 685 | user, repo), namespace='security.repo') |
|
666 | 686 | |
|
667 | 687 | def grant_user_group_permission(self, repo, group_name, perm): |
|
668 | 688 | """ |
|
669 | 689 | Grant permission for user group on given repository, or update |
|
670 | 690 | existing one if found |
|
671 | 691 | |
|
672 | 692 | :param repo: Instance of Repository, repository_id, or repository name |
|
673 | 693 | :param group_name: Instance of UserGroup, users_group_id, |
|
674 | 694 | or user group name |
|
675 | 695 | :param perm: Instance of Permission, or permission_name |
|
676 | 696 | """ |
|
677 | 697 | repo = self._get_repo(repo) |
|
678 | 698 | group_name = self._get_user_group(group_name) |
|
679 | 699 | permission = self._get_perm(perm) |
|
680 | 700 | |
|
681 | 701 | # check if we have that permission already |
|
682 | 702 | obj = self.sa.query(UserGroupRepoToPerm) \ |
|
683 | 703 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
684 | 704 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
685 | 705 | .scalar() |
|
686 | 706 | |
|
687 | 707 | if obj is None: |
|
688 | 708 | # create new |
|
689 | 709 | obj = UserGroupRepoToPerm() |
|
690 | 710 | |
|
691 | 711 | obj.repository = repo |
|
692 | 712 | obj.users_group = group_name |
|
693 | 713 | obj.permission = permission |
|
694 | 714 | self.sa.add(obj) |
|
695 | 715 | log.debug('Granted perm %s to %s on %s', perm, group_name, repo) |
|
696 | 716 | action_logger_generic( |
|
697 | 717 | 'granted permission: {} to usergroup: {} on repo: {}'.format( |
|
698 | 718 | perm, group_name, repo), namespace='security.repo') |
|
699 | 719 | |
|
700 | 720 | return obj |
|
701 | 721 | |
|
702 | 722 | def revoke_user_group_permission(self, repo, group_name): |
|
703 | 723 | """ |
|
704 | 724 | Revoke permission for user group on given repository |
|
705 | 725 | |
|
706 | 726 | :param repo: Instance of Repository, repository_id, or repository name |
|
707 | 727 | :param group_name: Instance of UserGroup, users_group_id, |
|
708 | 728 | or user group name |
|
709 | 729 | """ |
|
710 | 730 | repo = self._get_repo(repo) |
|
711 | 731 | group_name = self._get_user_group(group_name) |
|
712 | 732 | |
|
713 | 733 | obj = self.sa.query(UserGroupRepoToPerm) \ |
|
714 | 734 | .filter(UserGroupRepoToPerm.repository == repo) \ |
|
715 | 735 | .filter(UserGroupRepoToPerm.users_group == group_name) \ |
|
716 | 736 | .scalar() |
|
717 | 737 | if obj: |
|
718 | 738 | self.sa.delete(obj) |
|
719 | 739 | log.debug('Revoked perm to %s on %s', repo, group_name) |
|
720 | 740 | action_logger_generic( |
|
721 | 741 | 'revoked permission from usergroup: {} on repo: {}'.format( |
|
722 | 742 | group_name, repo), namespace='security.repo') |
|
723 | 743 | |
|
724 | 744 | def delete_stats(self, repo_name): |
|
725 | 745 | """ |
|
726 | 746 | Removes statistics for the given repo
|
727 | 747 | |
|
728 | 748 | :param repo_name: |
|
729 | 749 | """ |
|
730 | 750 | repo = self._get_repo(repo_name) |
|
731 | 751 | try: |
|
732 | 752 | obj = self.sa.query(Statistics) \ |
|
733 | 753 | .filter(Statistics.repository == repo).scalar() |
|
734 | 754 | if obj: |
|
735 | 755 | self.sa.delete(obj) |
|
736 | 756 | except Exception: |
|
737 | 757 | log.error(traceback.format_exc()) |
|
738 | 758 | raise |
|
739 | 759 | |
|
740 | 760 | def add_repo_field(self, repo_name, field_key, field_label, field_value='', |
|
741 | 761 | field_type='str', field_desc=''): |
|
742 | 762 | |
|
743 | 763 | repo = self._get_repo(repo_name) |
|
744 | 764 | |
|
745 | 765 | new_field = RepositoryField() |
|
746 | 766 | new_field.repository = repo |
|
747 | 767 | new_field.field_key = field_key |
|
748 | 768 | new_field.field_type = field_type # python type |
|
749 | 769 | new_field.field_value = field_value |
|
750 | 770 | new_field.field_desc = field_desc |
|
751 | 771 | new_field.field_label = field_label |
|
752 | 772 | self.sa.add(new_field) |
|
753 | 773 | return new_field |
|
754 | 774 | |
|
755 | 775 | def delete_repo_field(self, repo_name, field_key): |
|
756 | 776 | repo = self._get_repo(repo_name) |
|
757 | 777 | field = RepositoryField.get_by_key_name(field_key, repo) |
|
758 | 778 | if field: |
|
759 | 779 | self.sa.delete(field) |
|
760 | 780 | |
|
761 | 781 | def _create_filesystem_repo(self, repo_name, repo_type, repo_group, |
|
762 | 782 | clone_uri=None, repo_store_location=None, |
|
763 | 783 | use_global_config=False): |
|
764 | 784 | """ |
|
765 | 785 | Makes a repository on the filesystem. It is group aware, meaning it will

766 | 786 | create the repository within a group and alter the paths accordingly

767 | 787 | based on the group location
|
768 | 788 | |
|
769 | 789 | :param repo_name: |
|
770 | 790 | :param repo_type:

771 | 791 | :param repo_group:
|
772 | 792 | :param clone_uri: |
|
773 | 793 | :param repo_store_location: |
|
774 | 794 | """ |
|
775 | 795 | from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group |
|
776 | 796 | from rhodecode.model.scm import ScmModel |
|
777 | 797 | |
|
778 | 798 | if Repository.NAME_SEP in repo_name: |
|
779 | 799 | raise ValueError( |
|
780 | 800 | 'repo_name must not contain groups, got `%s`' % repo_name)
|
781 | 801 | |
|
782 | 802 | if isinstance(repo_group, RepoGroup): |
|
783 | 803 | new_parent_path = os.sep.join(repo_group.full_path_splitted) |
|
784 | 804 | else: |
|
785 | 805 | new_parent_path = repo_group or '' |
|
786 | 806 | |
|
787 | 807 | if repo_store_location: |
|
788 | 808 | _paths = [repo_store_location] |
|
789 | 809 | else: |
|
790 | 810 | _paths = [self.repos_path, new_parent_path, repo_name] |
|
791 | 811 | # we need to make it str for mercurial |
|
792 | 812 | repo_path = os.path.join(*map(lambda x: safe_str(x), _paths)) |
|
793 | 813 | |
|
794 | 814 | # check if this path is not a repository |
|
795 | 815 | if is_valid_repo(repo_path, self.repos_path): |
|
796 | 816 | raise Exception('This path %s is already a valid repository' % repo_path)
|
797 | 817 | |
|
798 | 818 | # check if this path is a group |
|
799 | 819 | if is_valid_repo_group(repo_path, self.repos_path): |
|
800 | 820 | raise Exception('This path %s is already a valid repository group' % repo_path)
|
801 | 821 | |
|
802 | 822 | log.info('creating repo %s in %s from url: `%s`', |
|
803 | 823 | repo_name, safe_unicode(repo_path), |
|
804 | 824 | obfuscate_url_pw(clone_uri)) |
|
805 | 825 | |
|
806 | 826 | backend = get_backend(repo_type) |
|
807 | 827 | |
|
808 | 828 | config_repo = None if use_global_config else repo_name |
|
809 | 829 | if config_repo and new_parent_path: |
|
810 | 830 | config_repo = Repository.NAME_SEP.join( |
|
811 | 831 | (new_parent_path, config_repo)) |
|
812 | 832 | config = make_db_config(clear_session=False, repo=config_repo) |
|
813 | 833 | config.set('extensions', 'largefiles', '') |
|
814 | 834 | |
|
815 | 835 | # patch and reset hooks section of UI config to not run any |
|
816 | 836 | # hooks on creating remote repo |
|
817 | 837 | config.clear_section('hooks') |
|
818 | 838 | |
|
819 | 839 | # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice |
|
820 | 840 | if repo_type == 'git': |
|
821 | 841 | repo = backend( |
|
822 | 842 | repo_path, config=config, create=True, src_url=clone_uri, |
|
823 | 843 | bare=True) |
|
824 | 844 | else: |
|
825 | 845 | repo = backend( |
|
826 | 846 | repo_path, config=config, create=True, src_url=clone_uri) |
|
827 | 847 | |
|
828 | 848 | ScmModel().install_hooks(repo, repo_type=repo_type) |
|
829 | 849 | |
|
830 | 850 | log.debug('Created repo %s with %s backend', |
|
831 | 851 | safe_unicode(repo_name), safe_unicode(repo_type)) |
|
832 | 852 | return repo |
|
833 | 853 | |
|
834 | 854 | def _rename_filesystem_repo(self, old, new): |
|
835 | 855 | """ |
|
836 | 856 | renames repository on filesystem |
|
837 | 857 | |
|
838 | 858 | :param old: old name |
|
839 | 859 | :param new: new name |
|
840 | 860 | """ |
|
841 | 861 | log.info('renaming repo from %s to %s', old, new) |
|
842 | 862 | |
|
843 | 863 | old_path = os.path.join(self.repos_path, old) |
|
844 | 864 | new_path = os.path.join(self.repos_path, new) |
|
845 | 865 | if os.path.isdir(new_path): |
|
846 | 866 | raise Exception( |
|
847 | 867 | 'Was trying to rename to already existing dir %s' % new_path |
|
848 | 868 | ) |
|
849 | 869 | shutil.move(old_path, new_path) |
|
850 | 870 | |
|
851 | 871 | def _delete_filesystem_repo(self, repo): |
|
852 | 872 | """ |
|
853 | 873 | Removes the repo from the filesystem. The removal is actually done by

854 | 874 | adding a rm__ prefix to the dir and renaming the internal .hg/.git dirs so this

855 | 875 | repository is no longer valid for rhodecode; it can be undeleted later on

856 | 876 | by reverting the renames on this repository
|
857 | 877 | |
|
858 | 878 | :param repo: repo object |
|
859 | 879 | """ |
|
860 | 880 | rm_path = os.path.join(self.repos_path, repo.repo_name) |
|
861 | 881 | repo_group = repo.group |
|
862 | 882 | log.info("Removing repository %s", rm_path) |
|
863 | 883 | # disable hg/git internals so that it doesn't get detected as a repo
|
864 | 884 | alias = repo.repo_type |
|
865 | 885 | |
|
866 | 886 | config = make_db_config(clear_session=False) |
|
867 | 887 | config.set('extensions', 'largefiles', '') |
|
868 | 888 | bare = getattr(repo.scm_instance(config=config), 'bare', False) |
|
869 | 889 | |
|
870 | 890 | # skip this for bare git repos |
|
871 | 891 | if not bare: |
|
872 | 892 | # disable VCS repo |
|
873 | 893 | vcs_path = os.path.join(rm_path, '.%s' % alias) |
|
874 | 894 | if os.path.exists(vcs_path): |
|
875 | 895 | shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias)) |
|
876 | 896 | |
|
877 | 897 | _now = datetime.now() |
|
878 | 898 | _ms = str(_now.microsecond).rjust(6, '0') |
|
879 | 899 | _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), |
|
880 | 900 | repo.just_name) |
|
881 | 901 | if repo_group: |
|
882 | 902 | # if repository is in group, prefix the removal path with the group |
|
883 | 903 | args = repo_group.full_path_splitted + [_d] |
|
884 | 904 | _d = os.path.join(*args) |
|
885 | 905 | |
|
886 | 906 | if os.path.isdir(rm_path): |
|
887 | 907 | shutil.move(rm_path, os.path.join(self.repos_path, _d)) |
|
888 | 908 | |
|
889 | 909 | |
|
890 | 910 | class ReadmeFinder: |
|
891 | 911 | """ |
|
892 | 912 | Utility which knows how to find a readme for a specific commit. |
|
893 | 913 | |
|
894 | 914 | The main idea is that this is a configurable algorithm. When creating an |
|
895 | 915 | instance you can define parameters, currently only the `default_renderer`. |
|
896 | 916 | Based on this configuration the method :meth:`search` behaves slightly |
|
897 | 917 | differently.
|
898 | 918 | """ |
|
899 | 919 | |
|
900 | 920 | readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE) |
|
901 | 921 | path_re = re.compile(r'^docs?', re.IGNORECASE) |
|
902 | 922 | |
|
903 | 923 | default_priorities = { |
|
904 | 924 | None: 0, |
|
905 | 925 | '.text': 2, |
|
906 | 926 | '.txt': 3, |
|
907 | 927 | '.rst': 1, |
|
908 | 928 | '.rest': 2, |
|
909 | 929 | '.md': 1, |
|
910 | 930 | '.mkdn': 2, |
|
911 | 931 | '.mdown': 3, |
|
912 | 932 | '.markdown': 4, |
|
913 | 933 | } |
|
914 | 934 | |
|
915 | 935 | path_priority = { |
|
916 | 936 | 'doc': 0, |
|
917 | 937 | 'docs': 1, |
|
918 | 938 | } |
|
919 | 939 | |
|
920 | 940 | FALLBACK_PRIORITY = 99 |
|
921 | 941 | |
|
922 | 942 | RENDERER_TO_EXTENSION = { |
|
923 | 943 | 'rst': ['.rst', '.rest'], |
|
924 | 944 | 'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
|
925 | 945 | } |
|
926 | 946 | |
|
927 | 947 | def __init__(self, default_renderer=None): |
|
928 | 948 | self._default_renderer = default_renderer |
|
929 | 949 | self._renderer_extensions = self.RENDERER_TO_EXTENSION.get( |
|
930 | 950 | default_renderer, []) |
|
931 | 951 | |
|
932 | 952 | def search(self, commit, path='/'): |
|
933 | 953 | """ |
|
934 | 954 | Find a readme in the given `commit`. |
|
935 | 955 | """ |
|
936 | 956 | nodes = commit.get_nodes(path) |
|
937 | 957 | matches = self._match_readmes(nodes) |
|
938 | 958 | matches = self._sort_according_to_priority(matches) |
|
939 | 959 | if matches: |
|
940 | 960 | return matches[0].node |
|
941 | 961 | |
|
942 | 962 | paths = self._match_paths(nodes) |
|
943 | 963 | paths = self._sort_paths_according_to_priority(paths) |
|
944 | 964 | for path in paths: |
|
945 | 965 | match = self.search(commit, path=path) |
|
946 | 966 | if match: |
|
947 | 967 | return match |
|
948 | 968 | |
|
949 | 969 | return None |
|
950 | 970 | |
|
951 | 971 | def _match_readmes(self, nodes): |
|
952 | 972 | for node in nodes: |
|
953 | 973 | if not node.is_file(): |
|
954 | 974 | continue |
|
955 | 975 | path = node.path.rsplit('/', 1)[-1] |
|
956 | 976 | match = self.readme_re.match(path) |
|
957 | 977 | if match: |
|
958 | 978 | extension = match.group(1) |
|
959 | 979 | yield ReadmeMatch(node, match, self._priority(extension)) |
|
960 | 980 | |
|
961 | 981 | def _match_paths(self, nodes): |
|
962 | 982 | for node in nodes: |
|
963 | 983 | if not node.is_dir(): |
|
964 | 984 | continue |
|
965 | 985 | match = self.path_re.match(node.path) |
|
966 | 986 | if match: |
|
967 | 987 | yield node.path |
|
968 | 988 | |
|
969 | 989 | def _priority(self, extension): |
|
970 | 990 | renderer_priority = ( |
|
971 | 991 | 0 if extension in self._renderer_extensions else 1) |
|
972 | 992 | extension_priority = self.default_priorities.get( |
|
973 | 993 | extension, self.FALLBACK_PRIORITY) |
|
974 | 994 | return (renderer_priority, extension_priority) |
|
975 | 995 | |
|
976 | 996 | def _sort_according_to_priority(self, matches): |
|
977 | 997 | |
|
978 | 998 | def priority_and_path(match): |
|
979 | 999 | return (match.priority, match.path) |
|
980 | 1000 | |
|
981 | 1001 | return sorted(matches, key=priority_and_path) |
|
982 | 1002 | |
|
983 | 1003 | def _sort_paths_according_to_priority(self, paths): |
|
984 | 1004 | |
|
985 | 1005 | def priority_and_path(path): |
|
986 | 1006 | return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path) |
|
987 | 1007 | |
|
988 | 1008 | return sorted(paths, key=priority_and_path) |
|
989 | 1009 | |
|
990 | 1010 | |
|
991 | 1011 | class ReadmeMatch: |
|
992 | 1012 | |
|
993 | 1013 | def __init__(self, node, match, priority): |
|
994 | 1014 | self.node = node |
|
995 | 1015 | self._match = match |
|
996 | 1016 | self.priority = priority |
|
997 | 1017 | |
|
998 | 1018 | @property |
|
999 | 1019 | def path(self): |
|
1000 | 1020 | return self.node.path |
|
1001 | 1021 | |
|
1002 | 1022 | def __repr__(self): |
|
1003 | 1023 | return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority)
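
For readers skimming the `ReadmeFinder` hunk above, the ordering it produces can be summarised as: files whose extension belongs to the configured default renderer always win, ties are broken by the extension priority table, and the path only acts as a final tie-breaker. The following small, self-contained Python sketch (not part of this changeset; the candidate file names are made up) reproduces that two-level priority:

import re

# Same matching rule and priority tables as ReadmeFinder above.
readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
default_priorities = {
    None: 0, '.text': 2, '.txt': 3, '.rst': 1, '.rest': 2,
    '.md': 1, '.mkdn': 2, '.mdown': 3, '.markdown': 4,
}
FALLBACK_PRIORITY = 99
# Extensions of the configured default renderer (here: markdown).
renderer_extensions = ['.md', '.mkdn', '.mdown', '.markdown']


def priority(filename):
    """Return the (renderer, extension) priority tuple, or None if not a readme."""
    match = readme_re.match(filename)
    if not match:
        return None
    extension = match.group(1)
    renderer_priority = 0 if extension in renderer_extensions else 1
    extension_priority = default_priorities.get(extension, FALLBACK_PRIORITY)
    return (renderer_priority, extension_priority)


# Hypothetical candidates, purely for illustration.
candidates = ['README.rst', 'readme.md', 'README', 'readme.xyz']
ranked = sorted(
    (name for name in candidates if priority(name) is not None),
    key=lambda name: (priority(name), name))
print(ranked)   # ['readme.md', 'README', 'README.rst', 'readme.xyz']

Note that a bare `README` (extension `None`) still outranks any extension the configured renderer does not know, which is why `default_priorities` maps `None` to 0.
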
@@ -1,95 +1,97 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import pytest |
|
22 | 22 | |
|
23 | 23 | from rhodecode.tests.events.conftest import EventCatcher |
|
24 | 24 | |
|
25 | 25 | from rhodecode.model.comment import CommentsModel |
|
26 | 26 | from rhodecode.model.pull_request import PullRequestModel |
|
27 | 27 | from rhodecode.events import ( |
|
28 | 28 | PullRequestCreateEvent, |
|
29 | 29 | PullRequestUpdateEvent, |
|
30 | 30 | PullRequestCommentEvent, |
|
31 | 31 | PullRequestReviewEvent, |
|
32 | 32 | PullRequestMergeEvent, |
|
33 | 33 | PullRequestCloseEvent, |
|
34 | 34 | ) |
|
35 | 35 | |
|
36 | 36 | # TODO: dan: make the serialization tests complete json comparisons |
|
37 | 37 | @pytest.mark.backends("git", "hg") |
|
38 | 38 | @pytest.mark.parametrize('EventClass', [ |
|
39 | 39 | PullRequestCreateEvent, |
|
40 | 40 | PullRequestUpdateEvent, |
|
41 | 41 | PullRequestReviewEvent, |
|
42 | 42 | PullRequestMergeEvent, |
|
43 | 43 | PullRequestCloseEvent, |
|
44 | 44 | ]) |
|
45 | 45 | def test_pullrequest_events_serialized(EventClass, pr_util, config_stub): |
|
46 | 46 | pr = pr_util.create_pull_request() |
|
47 | 47 | event = EventClass(pr) |
|
48 | 48 | data = event.as_dict() |
|
49 | 49 | assert data['name'] == EventClass.name |
|
50 | 50 | assert data['repo']['repo_name'] == pr.target_repo.repo_name |
|
51 | 51 | assert data['pullrequest']['pull_request_id'] == pr.pull_request_id |
|
52 | 52 | assert data['pullrequest']['url'] |
|
53 | assert data['pullrequest']['permalink_url'] | |
|
53 | 54 | |
|
54 | 55 | |
|
55 | 56 | @pytest.mark.backends("git", "hg") |
|
56 | 57 | def test_create_pull_request_events(pr_util, config_stub): |
|
57 | 58 | with EventCatcher() as event_catcher: |
|
58 | 59 | pr_util.create_pull_request() |
|
59 | 60 | |
|
60 | 61 | assert PullRequestCreateEvent in event_catcher.events_types |
|
61 | 62 | |
|
62 | 63 | |
|
63 | 64 | @pytest.mark.backends("git", "hg") |
|
64 | 65 | def test_pullrequest_comment_events_serialized(pr_util, config_stub): |
|
65 | 66 | pr = pr_util.create_pull_request() |
|
66 | 67 | comment = CommentsModel().get_comments( |
|
67 | 68 | pr.target_repo.repo_id, pull_request=pr)[0] |
|
68 | 69 | event = PullRequestCommentEvent(pr, comment) |
|
69 | 70 | data = event.as_dict() |
|
70 | 71 | assert data['name'] == PullRequestCommentEvent.name |
|
71 | 72 | assert data['repo']['repo_name'] == pr.target_repo.repo_name |
|
72 | 73 | assert data['pullrequest']['pull_request_id'] == pr.pull_request_id |
|
73 | 74 | assert data['pullrequest']['url'] |
|
75 | assert data['pullrequest']['permalink_url'] | |
|
74 | 76 | assert data['comment']['text'] == comment.text |
|
75 | 77 | |
|
76 | 78 | |
|
77 | 79 | @pytest.mark.backends("git", "hg") |
|
78 | 80 | def test_close_pull_request_events(pr_util, user_admin, config_stub): |
|
79 | 81 | pr = pr_util.create_pull_request() |
|
80 | 82 | |
|
81 | 83 | with EventCatcher() as event_catcher: |
|
82 | 84 | PullRequestModel().close_pull_request(pr, user_admin) |
|
83 | 85 | |
|
84 | 86 | assert PullRequestCloseEvent in event_catcher.events_types |
|
85 | 87 | |
|
86 | 88 | |
|
87 | 89 | @pytest.mark.backends("git", "hg") |
|
88 | 90 | def test_close_pull_request_with_comment_events(pr_util, user_admin, config_stub): |
|
89 | 91 | pr = pr_util.create_pull_request() |
|
90 | 92 | |
|
91 | 93 | with EventCatcher() as event_catcher: |
|
92 | 94 | PullRequestModel().close_pull_request_with_comment( |
|
93 | 95 | pr, user_admin, pr.target_repo) |
|
94 | 96 | |
|
95 | 97 | assert PullRequestCloseEvent in event_catcher.events_types |
@@ -1,117 +1,120 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import pytest |
|
22 | 22 | |
|
23 | 23 | from rhodecode.tests.events.conftest import EventCatcher |
|
24 | 24 | |
|
25 | 25 | from rhodecode.lib import hooks_base, utils2 |
|
26 | 26 | from rhodecode.model.repo import RepoModel |
|
27 | 27 | from rhodecode.events.repo import ( |
|
28 | 28 | RepoPrePullEvent, RepoPullEvent, |
|
29 | 29 | RepoPrePushEvent, RepoPushEvent, |
|
30 | 30 | RepoPreCreateEvent, RepoCreateEvent, |
|
31 | 31 | RepoPreDeleteEvent, RepoDeleteEvent, |
|
32 | 32 | ) |
|
33 | 33 | |
|
34 | 34 | |
|
35 | 35 | @pytest.fixture |
|
36 | 36 | def scm_extras(user_regular, repo_stub): |
|
37 | 37 | extras = utils2.AttributeDict({ |
|
38 | 38 | 'ip': '127.0.0.1', |
|
39 | 39 | 'username': user_regular.username, |
|
40 | 40 | 'action': '', |
|
41 | 41 | 'repository': repo_stub.repo_name, |
|
42 | 42 | 'scm': repo_stub.scm_instance().alias, |
|
43 | 43 | 'config': '', |
|
44 | 44 | 'server_url': 'http://example.com', |
|
45 | 45 | 'make_lock': None, |
|
46 | 46 | 'user-agent': 'some-client', |
|
47 | 47 | 'locked_by': [None], |
|
48 | 48 | 'commit_ids': ['a' * 40] * 3, |
|
49 | 49 | 'is_shadow_repo': False, |
|
50 | 50 | }) |
|
51 | 51 | return extras |
|
52 | 52 | |
|
53 | 53 | |
|
54 | 54 | # TODO: dan: make the serialization tests complete json comparisons |
|
55 | 55 | @pytest.mark.parametrize('EventClass', [ |
|
56 | 56 | RepoPreCreateEvent, RepoCreateEvent, |
|
57 | 57 | RepoPreDeleteEvent, RepoDeleteEvent, |
|
58 | 58 | ]) |
|
59 | 59 | def test_repo_events_serialized(config_stub, repo_stub, EventClass): |
|
60 | 60 | event = EventClass(repo_stub) |
|
61 | 61 | data = event.as_dict() |
|
62 | 62 | assert data['name'] == EventClass.name |
|
63 | 63 | assert data['repo']['repo_name'] == repo_stub.repo_name |
|
64 | 64 | assert data['repo']['url'] |
|
65 | assert data['repo']['permalink_url'] | |
|
65 | 66 | |
|
66 | 67 | |
|
67 | 68 | @pytest.mark.parametrize('EventClass', [ |
|
68 | 69 | RepoPrePullEvent, RepoPullEvent, RepoPrePushEvent |
|
69 | 70 | ]) |
|
70 | 71 | def test_vcs_repo_events_serialize(config_stub, repo_stub, scm_extras, EventClass): |
|
71 | 72 | event = EventClass(repo_name=repo_stub.repo_name, extras=scm_extras) |
|
72 | 73 | data = event.as_dict() |
|
73 | 74 | assert data['name'] == EventClass.name |
|
74 | 75 | assert data['repo']['repo_name'] == repo_stub.repo_name |
|
75 | 76 | assert data['repo']['url'] |
|
77 | assert data['repo']['permalink_url'] | |
|
76 | 78 | |
|
77 | 79 | |
|
78 | 80 | @pytest.mark.parametrize('EventClass', [RepoPushEvent]) |
|
79 | 81 | def test_vcs_repo_push_event_serialize(config_stub, repo_stub, scm_extras, EventClass): |
|
80 | 82 | event = EventClass(repo_name=repo_stub.repo_name, |
|
81 | 83 | pushed_commit_ids=scm_extras['commit_ids'], |
|
82 | 84 | extras=scm_extras) |
|
83 | 85 | data = event.as_dict() |
|
84 | 86 | assert data['name'] == EventClass.name |
|
85 | 87 | assert data['repo']['repo_name'] == repo_stub.repo_name |
|
86 | 88 | assert data['repo']['url'] |
|
89 | assert data['repo']['permalink_url'] | |
|
87 | 90 | |
|
88 | 91 | |
|
89 | 92 | def test_create_delete_repo_fires_events(backend): |
|
90 | 93 | with EventCatcher() as event_catcher: |
|
91 | 94 | repo = backend.create_repo() |
|
92 | 95 | assert event_catcher.events_types == [RepoPreCreateEvent, RepoCreateEvent] |
|
93 | 96 | |
|
94 | 97 | with EventCatcher() as event_catcher: |
|
95 | 98 | RepoModel().delete(repo) |
|
96 | 99 | assert event_catcher.events_types == [RepoPreDeleteEvent, RepoDeleteEvent] |
|
97 | 100 | |
|
98 | 101 | |
|
99 | 102 | def test_push_fires_events(scm_extras):
|
100 | 103 | with EventCatcher() as event_catcher: |
|
101 | 104 | hooks_base.pre_push(scm_extras) |
|
102 | 105 | assert event_catcher.events_types == [RepoPrePushEvent] |
|
103 | 106 | |
|
104 | 107 | with EventCatcher() as event_catcher: |
|
105 | 108 | hooks_base.post_push(scm_extras) |
|
106 | 109 | assert event_catcher.events_types == [RepoPushEvent] |
|
107 | 110 | |
|
108 | 111 | |
|
109 | 112 | def test_pull_fires_events(scm_extras):
|
110 | 113 | with EventCatcher() as event_catcher: |
|
111 | 114 | hooks_base.pre_pull(scm_extras) |
|
112 | 115 | assert event_catcher.events_types == [RepoPrePullEvent] |
|
113 | 116 | |
|
114 | 117 | with EventCatcher() as event_catcher: |
|
115 | 118 | hooks_base.post_pull(scm_extras) |
|
116 | 119 | assert event_catcher.events_types == [RepoPullEvent] |
|
117 | 120 |
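
The assertions above compare `event_catcher.events_types` against lists of event classes. `EventCatcher` itself comes from `rhodecode.tests.events.conftest` and is not part of this changeset; purely as an illustration of the shape those assertions expect (the real fixture hooks into RhodeCode's event dispatch and may look quite different), a minimal stand-in could be:

# Illustrative stand-in only: a tiny in-process dispatcher plus a catcher
# that records the *types* of fired events, matching the events_types checks.
_subscribers = []


def fire(event):
    # Stand-in for whatever actually dispatches events in the application.
    for callback in list(_subscribers):
        callback(event)


class EventCatcher(object):

    def __init__(self):
        self.events = []
        self._callback = self.events.append

    @property
    def events_types(self):
        return [type(event) for event in self.events]

    def __enter__(self):
        _subscribers.append(self._callback)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        _subscribers.remove(self._callback)
        return False


class PretendEvent(object):
    pass


with EventCatcher() as catcher:
    fire(PretendEvent())
assert catcher.events_types == [PretendEvent]
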
@@ -1,1090 +1,1088 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import mock |
|
22 | 22 | import pytest |
|
23 | 23 | from webob.exc import HTTPNotFound |
|
24 | 24 | |
|
25 | 25 | import rhodecode |
|
26 | 26 | from rhodecode.lib.vcs.nodes import FileNode |
|
27 | 27 | from rhodecode.lib import helpers as h |
|
28 | 28 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
29 | 29 | from rhodecode.model.db import ( |
|
30 | 30 | PullRequest, ChangesetStatus, UserLog, Notification) |
|
31 | 31 | from rhodecode.model.meta import Session |
|
32 | 32 | from rhodecode.model.pull_request import PullRequestModel |
|
33 | 33 | from rhodecode.model.user import UserModel |
|
34 | 34 | from rhodecode.tests import ( |
|
35 | 35 | assert_session_flash, url, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN) |
|
36 | 36 | from rhodecode.tests.utils import AssertResponse |
|
37 | 37 | |
|
38 | 38 | |
|
39 | 39 | @pytest.mark.usefixtures('app', 'autologin_user') |
|
40 | 40 | @pytest.mark.backends("git", "hg") |
|
41 | 41 | class TestPullrequestsController(object): |
|
42 | 42 | |
|
43 | 43 | def test_index(self, backend): |
|
44 | 44 | self.app.get(url( |
|
45 | 45 | controller='pullrequests', action='index', |
|
46 | 46 | repo_name=backend.repo_name)) |
|
47 | 47 | |
|
48 | 48 | def test_option_menu_create_pull_request_exists(self, backend): |
|
49 | 49 | repo_name = backend.repo_name |
|
50 | 50 | response = self.app.get(h.route_path('repo_summary', repo_name=repo_name)) |
|
51 | 51 | |
|
52 | 52 | create_pr_link = '<a href="%s">Create Pull Request</a>' % url( |
|
53 | 53 | 'pullrequest', repo_name=repo_name) |
|
54 | 54 | response.mustcontain(create_pr_link) |
|
55 | 55 | |
|
56 | 56 | def test_create_pr_form_with_raw_commit_id(self, backend): |
|
57 | 57 | repo = backend.repo |
|
58 | 58 | |
|
59 | 59 | self.app.get( |
|
60 | 60 | url(controller='pullrequests', action='index', |
|
61 | 61 | repo_name=repo.repo_name, |
|
62 | 62 | commit=repo.get_commit().raw_id), |
|
63 | 63 | status=200) |
|
64 | 64 | |
|
65 | 65 | @pytest.mark.parametrize('pr_merge_enabled', [True, False]) |
|
66 | 66 | def test_show(self, pr_util, pr_merge_enabled): |
|
67 | 67 | pull_request = pr_util.create_pull_request( |
|
68 | 68 | mergeable=pr_merge_enabled, enable_notifications=False) |
|
69 | 69 | |
|
70 | 70 | response = self.app.get(url( |
|
71 | 71 | controller='pullrequests', action='show', |
|
72 | 72 | repo_name=pull_request.target_repo.scm_instance().name, |
|
73 | 73 | pull_request_id=str(pull_request.pull_request_id))) |
|
74 | 74 | |
|
75 | 75 | for commit_id in pull_request.revisions: |
|
76 | 76 | response.mustcontain(commit_id) |
|
77 | 77 | |
|
78 | 78 | assert pull_request.target_ref_parts.type in response |
|
79 | 79 | assert pull_request.target_ref_parts.name in response |
|
80 | 80 | target_clone_url = pull_request.target_repo.clone_url() |
|
81 | 81 | assert target_clone_url in response |
|
82 | 82 | |
|
83 | 83 | assert 'class="pull-request-merge"' in response |
|
84 | 84 | assert ( |
|
85 | 85 | 'Server-side pull request merging is disabled.' |
|
86 | 86 | in response) != pr_merge_enabled |
|
87 | 87 | |
|
88 | 88 | def test_close_status_visibility(self, pr_util, user_util, csrf_token): |
|
89 | 89 | from rhodecode.tests.functional.test_login import login_url, logut_url |
|
90 | 90 | # Logout |
|
91 | 91 | response = self.app.post( |
|
92 | 92 | logut_url, |
|
93 | 93 | params={'csrf_token': csrf_token}) |
|
94 | 94 | # Login as regular user |
|
95 | 95 | response = self.app.post(login_url, |
|
96 | 96 | {'username': TEST_USER_REGULAR_LOGIN, |
|
97 | 97 | 'password': 'test12'}) |
|
98 | 98 | |
|
99 | 99 | pull_request = pr_util.create_pull_request( |
|
100 | 100 | author=TEST_USER_REGULAR_LOGIN) |
|
101 | 101 | |
|
102 | 102 | response = self.app.get(url( |
|
103 | 103 | controller='pullrequests', action='show', |
|
104 | 104 | repo_name=pull_request.target_repo.scm_instance().name, |
|
105 | 105 | pull_request_id=str(pull_request.pull_request_id))) |
|
106 | 106 | |
|
107 | 107 | response.mustcontain('Server-side pull request merging is disabled.') |
|
108 | 108 | |
|
109 | 109 | assert_response = response.assert_response() |
|
110 | 110 | # for regular user without a merge permissions, we don't see it |
|
111 | 111 | assert_response.no_element_exists('#close-pull-request-action') |
|
112 | 112 | |
|
113 | 113 | user_util.grant_user_permission_to_repo( |
|
114 | 114 | pull_request.target_repo, |
|
115 | 115 | UserModel().get_by_username(TEST_USER_REGULAR_LOGIN), |
|
116 | 116 | 'repository.write') |
|
117 | 117 | response = self.app.get(url( |
|
118 | 118 | controller='pullrequests', action='show', |
|
119 | 119 | repo_name=pull_request.target_repo.scm_instance().name, |
|
120 | 120 | pull_request_id=str(pull_request.pull_request_id))) |
|
121 | 121 | |
|
122 | 122 | response.mustcontain('Server-side pull request merging is disabled.') |
|
123 | 123 | |
|
124 | 124 | assert_response = response.assert_response() |
|
125 | 125 | # now regular user has a merge permissions, we have CLOSE button |
|
126 | 126 | assert_response.one_element_exists('#close-pull-request-action') |
|
127 | 127 | |
|
128 | 128 | def test_show_invalid_commit_id(self, pr_util): |
|
129 | 129 | # Simulating invalid revisions which will cause a lookup error |
|
130 | 130 | pull_request = pr_util.create_pull_request() |
|
131 | 131 | pull_request.revisions = ['invalid'] |
|
132 | 132 | Session().add(pull_request) |
|
133 | 133 | Session().commit() |
|
134 | 134 | |
|
135 | 135 | response = self.app.get(url( |
|
136 | 136 | controller='pullrequests', action='show', |
|
137 | 137 | repo_name=pull_request.target_repo.scm_instance().name, |
|
138 | 138 | pull_request_id=str(pull_request.pull_request_id))) |
|
139 | 139 | |
|
140 | 140 | for commit_id in pull_request.revisions: |
|
141 | 141 | response.mustcontain(commit_id) |
|
142 | 142 | |
|
143 | 143 | def test_show_invalid_source_reference(self, pr_util): |
|
144 | 144 | pull_request = pr_util.create_pull_request() |
|
145 | 145 | pull_request.source_ref = 'branch:b:invalid' |
|
146 | 146 | Session().add(pull_request) |
|
147 | 147 | Session().commit() |
|
148 | 148 | |
|
149 | 149 | self.app.get(url( |
|
150 | 150 | controller='pullrequests', action='show', |
|
151 | 151 | repo_name=pull_request.target_repo.scm_instance().name, |
|
152 | 152 | pull_request_id=str(pull_request.pull_request_id))) |
|
153 | 153 | |
|
154 | 154 | def test_edit_title_description(self, pr_util, csrf_token): |
|
155 | 155 | pull_request = pr_util.create_pull_request() |
|
156 | 156 | pull_request_id = pull_request.pull_request_id |
|
157 | 157 | |
|
158 | 158 | response = self.app.post( |
|
159 | 159 | url(controller='pullrequests', action='update', |
|
160 | 160 | repo_name=pull_request.target_repo.repo_name, |
|
161 | 161 | pull_request_id=str(pull_request_id)), |
|
162 | 162 | params={ |
|
163 | 163 | 'edit_pull_request': 'true', |
|
164 | 164 | '_method': 'put', |
|
165 | 165 | 'title': 'New title', |
|
166 | 166 | 'description': 'New description', |
|
167 | 167 | 'csrf_token': csrf_token}) |
|
168 | 168 | |
|
169 | 169 | assert_session_flash( |
|
170 | 170 | response, u'Pull request title & description updated.', |
|
171 | 171 | category='success') |
|
172 | 172 | |
|
173 | 173 | pull_request = PullRequest.get(pull_request_id) |
|
174 | 174 | assert pull_request.title == 'New title' |
|
175 | 175 | assert pull_request.description == 'New description' |
|
176 | 176 | |
|
177 | 177 | def test_edit_title_description_closed(self, pr_util, csrf_token): |
|
178 | 178 | pull_request = pr_util.create_pull_request() |
|
179 | 179 | pull_request_id = pull_request.pull_request_id |
|
180 | 180 | pr_util.close() |
|
181 | 181 | |
|
182 | 182 | response = self.app.post( |
|
183 | 183 | url(controller='pullrequests', action='update', |
|
184 | 184 | repo_name=pull_request.target_repo.repo_name, |
|
185 | 185 | pull_request_id=str(pull_request_id)), |
|
186 | 186 | params={ |
|
187 | 187 | 'edit_pull_request': 'true', |
|
188 | 188 | '_method': 'put', |
|
189 | 189 | 'title': 'New title', |
|
190 | 190 | 'description': 'New description', |
|
191 | 191 | 'csrf_token': csrf_token}) |
|
192 | 192 | |
|
193 | 193 | assert_session_flash( |
|
194 | 194 | response, u'Cannot update closed pull requests.', |
|
195 | 195 | category='error') |
|
196 | 196 | |
|
197 | 197 | def test_update_invalid_source_reference(self, pr_util, csrf_token): |
|
198 | 198 | from rhodecode.lib.vcs.backends.base import UpdateFailureReason |
|
199 | 199 | |
|
200 | 200 | pull_request = pr_util.create_pull_request() |
|
201 | 201 | pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id' |
|
202 | 202 | Session().add(pull_request) |
|
203 | 203 | Session().commit() |
|
204 | 204 | |
|
205 | 205 | pull_request_id = pull_request.pull_request_id |
|
206 | 206 | |
|
207 | 207 | response = self.app.post( |
|
208 | 208 | url(controller='pullrequests', action='update', |
|
209 | 209 | repo_name=pull_request.target_repo.repo_name, |
|
210 | 210 | pull_request_id=str(pull_request_id)), |
|
211 | 211 | params={'update_commits': 'true', '_method': 'put', |
|
212 | 212 | 'csrf_token': csrf_token}) |
|
213 | 213 | |
|
214 | 214 | expected_msg = PullRequestModel.UPDATE_STATUS_MESSAGES[ |
|
215 | 215 | UpdateFailureReason.MISSING_SOURCE_REF] |
|
216 | 216 | assert_session_flash(response, expected_msg, category='error') |
|
217 | 217 | |
|
218 | 218 | def test_missing_target_reference(self, pr_util, csrf_token): |
|
219 | 219 | from rhodecode.lib.vcs.backends.base import MergeFailureReason |
|
220 | 220 | pull_request = pr_util.create_pull_request( |
|
221 | 221 | approved=True, mergeable=True) |
|
222 | 222 | pull_request.target_ref = 'branch:invalid-branch:invalid-commit-id' |
|
223 | 223 | Session().add(pull_request) |
|
224 | 224 | Session().commit() |
|
225 | 225 | |
|
226 | 226 | pull_request_id = pull_request.pull_request_id |
|
227 | 227 | pull_request_url = url( |
|
228 | 228 | controller='pullrequests', action='show', |
|
229 | 229 | repo_name=pull_request.target_repo.repo_name, |
|
230 | 230 | pull_request_id=str(pull_request_id)) |
|
231 | 231 | |
|
232 | 232 | response = self.app.get(pull_request_url) |
|
233 | 233 | |
|
234 | 234 | assertr = AssertResponse(response) |
|
235 | 235 | expected_msg = PullRequestModel.MERGE_STATUS_MESSAGES[ |
|
236 | 236 | MergeFailureReason.MISSING_TARGET_REF] |
|
237 | 237 | assertr.element_contains( |
|
238 | 238 | 'span[data-role="merge-message"]', str(expected_msg)) |
|
239 | 239 | |
|
240 | 240 | def test_comment_and_close_pull_request(self, pr_util, csrf_token): |
|
241 | 241 | pull_request = pr_util.create_pull_request(approved=True) |
|
242 | 242 | pull_request_id = pull_request.pull_request_id |
|
243 | 243 | author = pull_request.user_id |
|
244 | 244 | repo = pull_request.target_repo.repo_id |
|
245 | 245 | |
|
246 | 246 | self.app.post( |
|
247 | 247 | url(controller='pullrequests', |
|
248 | 248 | action='comment', |
|
249 | 249 | repo_name=pull_request.target_repo.scm_instance().name, |
|
250 | 250 | pull_request_id=str(pull_request_id)), |
|
251 | 251 | params={ |
|
252 | 252 | 'changeset_status': ChangesetStatus.STATUS_APPROVED, |
|
253 | 253 | 'close_pull_request': '1', |
|
254 | 254 | 'text': 'Closing a PR', |
|
255 | 255 | 'csrf_token': csrf_token}, |
|
256 | 256 | status=302) |
|
257 | 257 | |
|
258 | 258 | action = 'user_closed_pull_request:%d' % pull_request_id |
|
259 | 259 | journal = UserLog.query()\ |
|
260 | 260 | .filter(UserLog.user_id == author)\ |
|
261 | 261 | .filter(UserLog.repository_id == repo)\ |
|
262 | 262 | .filter(UserLog.action == action)\ |
|
263 | 263 | .all() |
|
264 | 264 | assert len(journal) == 1 |
|
265 | 265 | |
|
266 | 266 | pull_request = PullRequest.get(pull_request_id) |
|
267 | 267 | assert pull_request.is_closed() |
|
268 | 268 | |
|
269 | 269 | # check only the latest status, not the review status |
|
270 | 270 | status = ChangesetStatusModel().get_status( |
|
271 | 271 | pull_request.source_repo, pull_request=pull_request) |
|
272 | 272 | assert status == ChangesetStatus.STATUS_APPROVED |
|
273 | 273 | |
|
274 | 274 | def test_reject_and_close_pull_request(self, pr_util, csrf_token): |
|
275 | 275 | pull_request = pr_util.create_pull_request() |
|
276 | 276 | pull_request_id = pull_request.pull_request_id |
|
277 | 277 | response = self.app.post( |
|
278 | 278 | url(controller='pullrequests', |
|
279 | 279 | action='update', |
|
280 | 280 | repo_name=pull_request.target_repo.scm_instance().name, |
|
281 | 281 | pull_request_id=str(pull_request.pull_request_id)), |
|
282 | 282 | params={'close_pull_request': 'true', '_method': 'put', |
|
283 | 283 | 'csrf_token': csrf_token}) |
|
284 | 284 | |
|
285 | 285 | pull_request = PullRequest.get(pull_request_id) |
|
286 | 286 | |
|
287 | 287 | assert response.json is True |
|
288 | 288 | assert pull_request.is_closed() |
|
289 | 289 | |
|
290 | 290 | # check only the latest status, not the review status |
|
291 | 291 | status = ChangesetStatusModel().get_status( |
|
292 | 292 | pull_request.source_repo, pull_request=pull_request) |
|
293 | 293 | assert status == ChangesetStatus.STATUS_REJECTED |
|
294 | 294 | |
|
295 | 295 | def test_comment_force_close_pull_request(self, pr_util, csrf_token): |
|
296 | 296 | pull_request = pr_util.create_pull_request() |
|
297 | 297 | pull_request_id = pull_request.pull_request_id |
|
298 | 298 | PullRequestModel().update_reviewers( |
|
299 | 299 | pull_request_id, [(1, ['reason'], False), (2, ['reason2'], False)]) |
|
300 | 300 | author = pull_request.user_id |
|
301 | 301 | repo = pull_request.target_repo.repo_id |
|
302 | 302 | self.app.post( |
|
303 | 303 | url(controller='pullrequests', |
|
304 | 304 | action='comment', |
|
305 | 305 | repo_name=pull_request.target_repo.scm_instance().name, |
|
306 | 306 | pull_request_id=str(pull_request_id)), |
|
307 | 307 | params={ |
|
308 | 308 | 'changeset_status': 'rejected', |
|
309 | 309 | 'close_pull_request': '1', |
|
310 | 310 | 'csrf_token': csrf_token}, |
|
311 | 311 | status=302) |
|
312 | 312 | |
|
313 | 313 | pull_request = PullRequest.get(pull_request_id) |
|
314 | 314 | |
|
315 | 315 | action = 'user_closed_pull_request:%d' % pull_request_id |
|
316 | 316 | journal = UserLog.query().filter( |
|
317 | 317 | UserLog.user_id == author, |
|
318 | 318 | UserLog.repository_id == repo, |
|
319 | 319 | UserLog.action == action).all() |
|
320 | 320 | assert len(journal) == 1 |
|
321 | 321 | |
|
322 | 322 | # check only the latest status, not the review status |
|
323 | 323 | status = ChangesetStatusModel().get_status( |
|
324 | 324 | pull_request.source_repo, pull_request=pull_request) |
|
325 | 325 | assert status == ChangesetStatus.STATUS_REJECTED |
|
326 | 326 | |
|
327 | 327 | def test_create_pull_request(self, backend, csrf_token): |
|
328 | 328 | commits = [ |
|
329 | 329 | {'message': 'ancestor'}, |
|
330 | 330 | {'message': 'change'}, |
|
331 | 331 | {'message': 'change2'}, |
|
332 | 332 | ] |
|
333 | 333 | commit_ids = backend.create_master_repo(commits) |
|
334 | 334 | target = backend.create_repo(heads=['ancestor']) |
|
335 | 335 | source = backend.create_repo(heads=['change2']) |
|
336 | 336 | |
|
337 | 337 | response = self.app.post( |
|
338 | 338 | url( |
|
339 | 339 | controller='pullrequests', |
|
340 | 340 | action='create', |
|
341 | 341 | repo_name=source.repo_name |
|
342 | 342 | ), |
|
343 | 343 | [ |
|
344 | 344 | ('source_repo', source.repo_name), |
|
345 | 345 | ('source_ref', 'branch:default:' + commit_ids['change2']), |
|
346 | 346 | ('target_repo', target.repo_name), |
|
347 | 347 | ('target_ref', 'branch:default:' + commit_ids['ancestor']), |
|
348 | 348 | ('common_ancestor', commit_ids['ancestor']), |
|
349 | 349 | ('pullrequest_desc', 'Description'), |
|
350 | 350 | ('pullrequest_title', 'Title'), |
|
351 | 351 | ('__start__', 'review_members:sequence'), |
|
352 | 352 | ('__start__', 'reviewer:mapping'), |
|
353 | 353 | ('user_id', '1'), |
|
354 | 354 | ('__start__', 'reasons:sequence'), |
|
355 | 355 | ('reason', 'Some reason'), |
|
356 | 356 | ('__end__', 'reasons:sequence'), |
|
357 | 357 | ('mandatory', 'False'), |
|
358 | 358 | ('__end__', 'reviewer:mapping'), |
|
359 | 359 | ('__end__', 'review_members:sequence'), |
|
360 | 360 | ('__start__', 'revisions:sequence'), |
|
361 | 361 | ('revisions', commit_ids['change']), |
|
362 | 362 | ('revisions', commit_ids['change2']), |
|
363 | 363 | ('__end__', 'revisions:sequence'), |
|
364 | 364 | ('user', ''), |
|
365 | 365 | ('csrf_token', csrf_token), |
|
366 | 366 | ], |
|
367 | 367 | status=302) |
|
368 | 368 | |
|
369 | 369 | location = response.headers['Location'] |
|
370 | 370 | pull_request_id = location.rsplit('/', 1)[1] |
|
371 | 371 | assert pull_request_id != 'new' |
|
372 | 372 | pull_request = PullRequest.get(int(pull_request_id)) |
|
373 | 373 | |
|
374 | 374 | # check that we have now both revisions |
|
375 | 375 | assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']] |
|
376 | 376 | assert pull_request.source_ref == 'branch:default:' + commit_ids['change2'] |
|
377 | 377 | expected_target_ref = 'branch:default:' + commit_ids['ancestor'] |
|
378 | 378 | assert pull_request.target_ref == expected_target_ref |
|
379 | 379 | |
|
380 | 380 | def test_reviewer_notifications(self, backend, csrf_token): |
|
381 | 381 | # We have to use the app.post for this test so it will create the |
|
382 | 382 | # notifications properly with the new PR |
|
383 | 383 | commits = [ |
|
384 | 384 | {'message': 'ancestor', |
|
385 | 385 | 'added': [FileNode('file_A', content='content_of_ancestor')]}, |
|
386 | 386 | {'message': 'change', |
|
387 | 387 | 'added': [FileNode('file_a', content='content_of_change')]}, |
|
388 | 388 | {'message': 'change-child'}, |
|
389 | 389 | {'message': 'ancestor-child', 'parents': ['ancestor'], |
|
390 | 390 | 'added': [ |
|
391 | 391 | FileNode('file_B', content='content_of_ancestor_child')]}, |
|
392 | 392 | {'message': 'ancestor-child-2'}, |
|
393 | 393 | ] |
|
394 | 394 | commit_ids = backend.create_master_repo(commits) |
|
395 | 395 | target = backend.create_repo(heads=['ancestor-child']) |
|
396 | 396 | source = backend.create_repo(heads=['change']) |
|
397 | 397 | |
|
398 | 398 | response = self.app.post( |
|
399 | 399 | url( |
|
400 | 400 | controller='pullrequests', |
|
401 | 401 | action='create', |
|
402 | 402 | repo_name=source.repo_name |
|
403 | 403 | ), |
|
404 | 404 | [ |
|
405 | 405 | ('source_repo', source.repo_name), |
|
406 | 406 | ('source_ref', 'branch:default:' + commit_ids['change']), |
|
407 | 407 | ('target_repo', target.repo_name), |
|
408 | 408 | ('target_ref', 'branch:default:' + commit_ids['ancestor-child']), |
|
409 | 409 | ('common_ancestor', commit_ids['ancestor']), |
|
410 | 410 | ('pullrequest_desc', 'Description'), |
|
411 | 411 | ('pullrequest_title', 'Title'), |
|
412 | 412 | ('__start__', 'review_members:sequence'), |
|
413 | 413 | ('__start__', 'reviewer:mapping'), |
|
414 | 414 | ('user_id', '2'), |
|
415 | 415 | ('__start__', 'reasons:sequence'), |
|
416 | 416 | ('reason', 'Some reason'), |
|
417 | 417 | ('__end__', 'reasons:sequence'), |
|
418 | 418 | ('mandatory', 'False'), |
|
419 | 419 | ('__end__', 'reviewer:mapping'), |
|
420 | 420 | ('__end__', 'review_members:sequence'), |
|
421 | 421 | ('__start__', 'revisions:sequence'), |
|
422 | 422 | ('revisions', commit_ids['change']), |
|
423 | 423 | ('__end__', 'revisions:sequence'), |
|
424 | 424 | ('user', ''), |
|
425 | 425 | ('csrf_token', csrf_token), |
|
426 | 426 | ], |
|
427 | 427 | status=302) |
|
428 | 428 | |
|
429 | 429 | location = response.headers['Location'] |
|
430 | 430 | |
|
431 | 431 | pull_request_id = location.rsplit('/', 1)[1] |
|
432 | 432 | assert pull_request_id != 'new' |
|
433 | 433 | pull_request = PullRequest.get(int(pull_request_id)) |
|
434 | 434 | |
|
435 | 435 | # Check that a notification was made |
|
436 | 436 | notifications = Notification.query()\ |
|
437 | 437 | .filter(Notification.created_by == pull_request.author.user_id, |
|
438 | 438 | Notification.type_ == Notification.TYPE_PULL_REQUEST, |
|
439 | 439 | Notification.subject.contains( |
|
440 | 440 | "wants you to review pull request #%s" % pull_request_id)) |
|
441 | 441 | assert len(notifications.all()) == 1 |
|
442 | 442 | |
|
443 | 443 | # Change reviewers and check that a notification was made |
|
444 | 444 | PullRequestModel().update_reviewers( |
|
445 | 445 | pull_request.pull_request_id, [(1, [], False)]) |
|
446 | 446 | assert len(notifications.all()) == 2 |
|
447 | 447 | |
|
448 | 448 | def test_create_pull_request_stores_ancestor_commit_id(self, backend, |
|
449 | 449 | csrf_token): |
|
450 | 450 | commits = [ |
|
451 | 451 | {'message': 'ancestor', |
|
452 | 452 | 'added': [FileNode('file_A', content='content_of_ancestor')]}, |
|
453 | 453 | {'message': 'change', |
|
454 | 454 | 'added': [FileNode('file_a', content='content_of_change')]}, |
|
455 | 455 | {'message': 'change-child'}, |
|
456 | 456 | {'message': 'ancestor-child', 'parents': ['ancestor'], |
|
457 | 457 | 'added': [ |
|
458 | 458 | FileNode('file_B', content='content_of_ancestor_child')]}, |
|
459 | 459 | {'message': 'ancestor-child-2'}, |
|
460 | 460 | ] |
|
461 | 461 | commit_ids = backend.create_master_repo(commits) |
|
462 | 462 | target = backend.create_repo(heads=['ancestor-child']) |
|
463 | 463 | source = backend.create_repo(heads=['change']) |
|
464 | 464 | |
|
465 | 465 | response = self.app.post( |
|
466 | 466 | url( |
|
467 | 467 | controller='pullrequests', |
|
468 | 468 | action='create', |
|
469 | 469 | repo_name=source.repo_name |
|
470 | 470 | ), |
|
471 | 471 | [ |
|
472 | 472 | ('source_repo', source.repo_name), |
|
473 | 473 | ('source_ref', 'branch:default:' + commit_ids['change']), |
|
474 | 474 | ('target_repo', target.repo_name), |
|
475 | 475 | ('target_ref', 'branch:default:' + commit_ids['ancestor-child']), |
|
476 | 476 | ('common_ancestor', commit_ids['ancestor']), |
|
477 | 477 | ('pullrequest_desc', 'Description'), |
|
478 | 478 | ('pullrequest_title', 'Title'), |
|
479 | 479 | ('__start__', 'review_members:sequence'), |
|
480 | 480 | ('__start__', 'reviewer:mapping'), |
|
481 | 481 | ('user_id', '1'), |
|
482 | 482 | ('__start__', 'reasons:sequence'), |
|
483 | 483 | ('reason', 'Some reason'), |
|
484 | 484 | ('__end__', 'reasons:sequence'), |
|
485 | 485 | ('mandatory', 'False'), |
|
486 | 486 | ('__end__', 'reviewer:mapping'), |
|
487 | 487 | ('__end__', 'review_members:sequence'), |
|
488 | 488 | ('__start__', 'revisions:sequence'), |
|
489 | 489 | ('revisions', commit_ids['change']), |
|
490 | 490 | ('__end__', 'revisions:sequence'), |
|
491 | 491 | ('user', ''), |
|
492 | 492 | ('csrf_token', csrf_token), |
|
493 | 493 | ], |
|
494 | 494 | status=302) |
|
495 | 495 | |
|
496 | 496 | location = response.headers['Location'] |
|
497 | 497 | |
|
498 | 498 | pull_request_id = location.rsplit('/', 1)[1] |
|
499 | 499 | assert pull_request_id != 'new' |
|
500 | 500 | pull_request = PullRequest.get(int(pull_request_id)) |
|
501 | 501 | |
|
502 | 502 | # target_ref has to point to the ancestor's commit_id in order to |
|
503 | 503 | # show the correct diff |
|
504 | 504 | expected_target_ref = 'branch:default:' + commit_ids['ancestor'] |
|
505 | 505 | assert pull_request.target_ref == expected_target_ref |
|
506 | 506 | |
|
507 | 507 | # Check generated diff contents |
|
508 | 508 | response = response.follow() |
|
509 | 509 | assert 'content_of_ancestor' not in response.body |
|
510 | 510 | assert 'content_of_ancestor-child' not in response.body |
|
511 | 511 | assert 'content_of_change' in response.body |
|
512 | 512 | |
|
513 | 513 | def test_merge_pull_request_enabled(self, pr_util, csrf_token): |
|
514 | 514 | # Clear any previous calls to rcextensions |
|
515 | 515 | rhodecode.EXTENSIONS.calls.clear() |
|
516 | 516 | |
|
517 | 517 | pull_request = pr_util.create_pull_request( |
|
518 | 518 | approved=True, mergeable=True) |
|
519 | 519 | pull_request_id = pull_request.pull_request_id |
|
520 | 520 | repo_name = pull_request.target_repo.scm_instance().name, |
|
521 | 521 | |
|
522 | 522 | response = self.app.post( |
|
523 | 523 | url(controller='pullrequests', |
|
524 | 524 | action='merge', |
|
525 | 525 | repo_name=str(repo_name[0]), |
|
526 | 526 | pull_request_id=str(pull_request_id)), |
|
527 | 527 | params={'csrf_token': csrf_token}).follow() |
|
528 | 528 | |
|
529 | 529 | pull_request = PullRequest.get(pull_request_id) |
|
530 | 530 | |
|
531 | 531 | assert response.status_int == 200 |
|
532 | 532 | assert pull_request.is_closed() |
|
533 | 533 | assert_pull_request_status( |
|
534 | 534 | pull_request, ChangesetStatus.STATUS_APPROVED) |
|
535 | 535 | |
|
536 | 536 | # Check the relevant log entries were added |
|
537 | 537 | user_logs = UserLog.query() \ |
|
538 | 538 | .filter(UserLog.version == UserLog.VERSION_1) \ |
|
539 | 539 | .order_by('-user_log_id').limit(3) |
|
540 | 540 | actions = [log.action for log in user_logs] |
|
541 | 541 | pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request) |
|
542 | 542 | expected_actions = [ |
|
543 | 543 | u'user_closed_pull_request:%d' % pull_request_id, |
|
544 | 544 | u'user_merged_pull_request:%d' % pull_request_id, |
|
545 | 545 | # The action below reflect that the post push actions were executed |
|
546 | 546 | u'user_commented_pull_request:%d' % pull_request_id, |
|
547 | 547 | ] |
|
548 | 548 | assert actions == expected_actions |
|
549 | 549 | |
|
550 | 550 | user_logs = UserLog.query() \ |
|
551 | 551 | .filter(UserLog.version == UserLog.VERSION_2) \ |
|
552 | 552 | .order_by('-user_log_id').limit(1) |
|
553 | 553 | actions = [log.action for log in user_logs] |
|
554 | 554 | assert actions == ['user.push'] |
|
555 | 555 | assert user_logs[0].action_data['commit_ids'] == pr_commit_ids |
|
556 | 556 | |
|
557 | 557 | # Check post_push rcextension was really executed |
|
558 | 558 | push_calls = rhodecode.EXTENSIONS.calls['post_push'] |
|
559 | 559 | assert len(push_calls) == 1 |
|
560 | 560 | unused_last_call_args, last_call_kwargs = push_calls[0] |
|
561 | 561 | assert last_call_kwargs['action'] == 'push' |
|
562 | 562 | assert last_call_kwargs['pushed_revs'] == pr_commit_ids |
|
563 | 563 | |
|
564 | 564 | def test_merge_pull_request_disabled(self, pr_util, csrf_token): |
|
565 | 565 | pull_request = pr_util.create_pull_request(mergeable=False) |
|
566 | 566 | pull_request_id = pull_request.pull_request_id |
|
567 | 567 | pull_request = PullRequest.get(pull_request_id) |
|
568 | 568 | |
|
569 | 569 | response = self.app.post( |
|
570 | 570 | url(controller='pullrequests', |
|
571 | 571 | action='merge', |
|
572 | 572 | repo_name=pull_request.target_repo.scm_instance().name, |
|
573 | 573 | pull_request_id=str(pull_request.pull_request_id)), |
|
574 | 574 | params={'csrf_token': csrf_token}).follow() |
|
575 | 575 | |
|
576 | 576 | assert response.status_int == 200 |
|
577 | 577 | response.mustcontain( |
|
578 | 578 | 'Merge is not currently possible because of below failed checks.') |
|
579 | 579 | response.mustcontain('Server-side pull request merging is disabled.') |
|
580 | 580 | |
|
581 | 581 | @pytest.mark.skip_backends('svn') |
|
582 | 582 | def test_merge_pull_request_not_approved(self, pr_util, csrf_token): |
|
583 | 583 | pull_request = pr_util.create_pull_request(mergeable=True) |
|
584 | 584 | pull_request_id = pull_request.pull_request_id |
|
585 | 585 | repo_name = pull_request.target_repo.scm_instance().name, |
|
586 | 586 | |
|
587 | 587 | response = self.app.post( |
|
588 | 588 | url(controller='pullrequests', |
|
589 | 589 | action='merge', |
|
590 | 590 | repo_name=str(repo_name[0]), |
|
591 | 591 | pull_request_id=str(pull_request_id)), |
|
592 | 592 | params={'csrf_token': csrf_token}).follow() |
|
593 | 593 | |
|
594 | 594 | assert response.status_int == 200 |
|
595 | 595 | |
|
596 | 596 | response.mustcontain( |
|
597 | 597 | 'Merge is not currently possible because of below failed checks.') |
|
598 | 598 | response.mustcontain('Pull request reviewer approval is pending.') |
|
599 | 599 | |
|
600 | 600 | def test_update_source_revision(self, backend, csrf_token): |
|
601 | 601 | commits = [ |
|
602 | 602 | {'message': 'ancestor'}, |
|
603 | 603 | {'message': 'change'}, |
|
604 | 604 | {'message': 'change-2'}, |
|
605 | 605 | ] |
|
606 | 606 | commit_ids = backend.create_master_repo(commits) |
|
607 | 607 | target = backend.create_repo(heads=['ancestor']) |
|
608 | 608 | source = backend.create_repo(heads=['change']) |
|
609 | 609 | |
|
610 | 610 | # create pr from a in source to A in target |
|
611 | 611 | pull_request = PullRequest() |
|
612 | 612 | pull_request.source_repo = source |
|
613 | 613 | # TODO: johbo: Make sure that we write the source ref this way! |
|
614 | 614 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
615 | 615 | branch=backend.default_branch_name, commit_id=commit_ids['change']) |
|
616 | 616 | pull_request.target_repo = target |
|
617 | 617 | |
|
618 | 618 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
619 | 619 | branch=backend.default_branch_name, |
|
620 | 620 | commit_id=commit_ids['ancestor']) |
|
621 | 621 | pull_request.revisions = [commit_ids['change']] |
|
622 | 622 | pull_request.title = u"Test" |
|
623 | 623 | pull_request.description = u"Description" |
|
624 | 624 | pull_request.author = UserModel().get_by_username( |
|
625 | 625 | TEST_USER_ADMIN_LOGIN) |
|
626 | 626 | Session().add(pull_request) |
|
627 | 627 | Session().commit() |
|
628 | 628 | pull_request_id = pull_request.pull_request_id |
|
629 | 629 | |
|
630 | 630 | # source has ancestor - change - change-2 |
|
631 | 631 | backend.pull_heads(source, heads=['change-2']) |
|
632 | 632 | |
|
633 | 633 | # update PR |
|
634 | 634 | self.app.post( |
|
635 | 635 | url(controller='pullrequests', action='update', |
|
636 | 636 | repo_name=target.repo_name, |
|
637 | 637 | pull_request_id=str(pull_request_id)), |
|
638 | 638 | params={'update_commits': 'true', '_method': 'put', |
|
639 | 639 | 'csrf_token': csrf_token}) |
|
640 | 640 | |
|
641 | 641 | # check that we have now both revisions |
|
642 | 642 | pull_request = PullRequest.get(pull_request_id) |
|
643 | 643 | assert pull_request.revisions == [ |
|
644 | 644 | commit_ids['change-2'], commit_ids['change']] |
|
645 | 645 | |
|
646 | 646 | # TODO: johbo: this should be a test on its own |
|
647 | 647 | response = self.app.get(url( |
|
648 | 648 | controller='pullrequests', action='index', |
|
649 | 649 | repo_name=target.repo_name)) |
|
650 | 650 | assert response.status_int == 200 |
|
651 | 651 | assert 'Pull request updated to' in response.body |
|
652 | 652 | assert 'with 1 added, 0 removed commits.' in response.body |
|
653 | 653 | |
|
654 | 654 | def test_update_target_revision(self, backend, csrf_token): |
|
655 | 655 | commits = [ |
|
656 | 656 | {'message': 'ancestor'}, |
|
657 | 657 | {'message': 'change'}, |
|
658 | 658 | {'message': 'ancestor-new', 'parents': ['ancestor']}, |
|
659 | 659 | {'message': 'change-rebased'}, |
|
660 | 660 | ] |
|
661 | 661 | commit_ids = backend.create_master_repo(commits) |
|
662 | 662 | target = backend.create_repo(heads=['ancestor']) |
|
663 | 663 | source = backend.create_repo(heads=['change']) |
|
664 | 664 | |
|
665 | 665 | # create pr from a in source to A in target |
|
666 | 666 | pull_request = PullRequest() |
|
667 | 667 | pull_request.source_repo = source |
|
668 | 668 | # TODO: johbo: Make sure that we write the source ref this way! |
|
669 | 669 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
670 | 670 | branch=backend.default_branch_name, commit_id=commit_ids['change']) |
|
671 | 671 | pull_request.target_repo = target |
|
672 | 672 | # TODO: johbo: Target ref should be branch based, since tip can jump |
|
673 | 673 | # from branch to branch |
|
674 | 674 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
675 | 675 | branch=backend.default_branch_name, |
|
676 | 676 | commit_id=commit_ids['ancestor']) |
|
677 | 677 | pull_request.revisions = [commit_ids['change']] |
|
678 | 678 | pull_request.title = u"Test" |
|
679 | 679 | pull_request.description = u"Description" |
|
680 | 680 | pull_request.author = UserModel().get_by_username( |
|
681 | 681 | TEST_USER_ADMIN_LOGIN) |
|
682 | 682 | Session().add(pull_request) |
|
683 | 683 | Session().commit() |
|
684 | 684 | pull_request_id = pull_request.pull_request_id |
|
685 | 685 | |
|
686 | 686 | # target has ancestor - ancestor-new |
|
687 | 687 | # source has ancestor - ancestor-new - change-rebased |
|
688 | 688 | backend.pull_heads(target, heads=['ancestor-new']) |
|
689 | 689 | backend.pull_heads(source, heads=['change-rebased']) |
|
690 | 690 | |
|
691 | 691 | # update PR |
|
692 | 692 | self.app.post( |
|
693 | 693 | url(controller='pullrequests', action='update', |
|
694 | 694 | repo_name=target.repo_name, |
|
695 | 695 | pull_request_id=str(pull_request_id)), |
|
696 | 696 | params={'update_commits': 'true', '_method': 'put', |
|
697 | 697 | 'csrf_token': csrf_token}, |
|
698 | 698 | status=200) |
|
699 | 699 | |
|
700 | 700 | # check that we have now both revisions |
|
701 | 701 | pull_request = PullRequest.get(pull_request_id) |
|
702 | 702 | assert pull_request.revisions == [commit_ids['change-rebased']] |
|
703 | 703 | assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format( |
|
704 | 704 | branch=backend.default_branch_name, |
|
705 | 705 | commit_id=commit_ids['ancestor-new']) |
|
706 | 706 | |
|
707 | 707 | # TODO: johbo: This should be a test on its own |
|
708 | 708 | response = self.app.get(url( |
|
709 | 709 | controller='pullrequests', action='index', |
|
710 | 710 | repo_name=target.repo_name)) |
|
711 | 711 | assert response.status_int == 200 |
|
712 | 712 | assert 'Pull request updated to' in response.body |
|
713 | 713 | assert 'with 1 added, 1 removed commits.' in response.body |
|
714 | 714 | |
|
715 | 715 | def test_update_of_ancestor_reference(self, backend, csrf_token): |
|
716 | 716 | commits = [ |
|
717 | 717 | {'message': 'ancestor'}, |
|
718 | 718 | {'message': 'change'}, |
|
719 | 719 | {'message': 'change-2'}, |
|
720 | 720 | {'message': 'ancestor-new', 'parents': ['ancestor']}, |
|
721 | 721 | {'message': 'change-rebased'}, |
|
722 | 722 | ] |
|
723 | 723 | commit_ids = backend.create_master_repo(commits) |
|
724 | 724 | target = backend.create_repo(heads=['ancestor']) |
|
725 | 725 | source = backend.create_repo(heads=['change']) |
|
726 | 726 | |
|
727 | 727 | # create pr from a in source to A in target |
|
728 | 728 | pull_request = PullRequest() |
|
729 | 729 | pull_request.source_repo = source |
|
730 | 730 | # TODO: johbo: Make sure that we write the source ref this way! |
|
731 | 731 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
732 | 732 | branch=backend.default_branch_name, |
|
733 | 733 | commit_id=commit_ids['change']) |
|
734 | 734 | pull_request.target_repo = target |
|
735 | 735 | # TODO: johbo: Target ref should be branch based, since tip can jump |
|
736 | 736 | # from branch to branch |
|
737 | 737 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
738 | 738 | branch=backend.default_branch_name, |
|
739 | 739 | commit_id=commit_ids['ancestor']) |
|
740 | 740 | pull_request.revisions = [commit_ids['change']] |
|
741 | 741 | pull_request.title = u"Test" |
|
742 | 742 | pull_request.description = u"Description" |
|
743 | 743 | pull_request.author = UserModel().get_by_username( |
|
744 | 744 | TEST_USER_ADMIN_LOGIN) |
|
745 | 745 | Session().add(pull_request) |
|
746 | 746 | Session().commit() |
|
747 | 747 | pull_request_id = pull_request.pull_request_id |
|
748 | 748 | |
|
749 | 749 | # target has ancestor - ancestor-new |
|
750 | 750 | # source has ancestor - ancestor-new - change-rebased |
|
751 | 751 | backend.pull_heads(target, heads=['ancestor-new']) |
|
752 | 752 | backend.pull_heads(source, heads=['change-rebased']) |
|
753 | 753 | |
|
754 | 754 | # update PR |
|
755 | 755 | self.app.post( |
|
756 | 756 | url(controller='pullrequests', action='update', |
|
757 | 757 | repo_name=target.repo_name, |
|
758 | 758 | pull_request_id=str(pull_request_id)), |
|
759 | 759 | params={'update_commits': 'true', '_method': 'put', |
|
760 | 760 | 'csrf_token': csrf_token}, |
|
761 | 761 | status=200) |
|
762 | 762 | |
|
763 | 763 | # Expect the target reference to be updated correctly |
|
764 | 764 | pull_request = PullRequest.get(pull_request_id) |
|
765 | 765 | assert pull_request.revisions == [commit_ids['change-rebased']] |
|
766 | 766 | expected_target_ref = 'branch:{branch}:{commit_id}'.format( |
|
767 | 767 | branch=backend.default_branch_name, |
|
768 | 768 | commit_id=commit_ids['ancestor-new']) |
|
769 | 769 | assert pull_request.target_ref == expected_target_ref |
|
770 | 770 | |
|
771 | 771 | def test_remove_pull_request_branch(self, backend_git, csrf_token): |
|
772 | 772 | branch_name = 'development' |
|
773 | 773 | commits = [ |
|
774 | 774 | {'message': 'initial-commit'}, |
|
775 | 775 | {'message': 'old-feature'}, |
|
776 | 776 | {'message': 'new-feature', 'branch': branch_name}, |
|
777 | 777 | ] |
|
778 | 778 | repo = backend_git.create_repo(commits) |
|
779 | 779 | commit_ids = backend_git.commit_ids |
|
780 | 780 | |
|
781 | 781 | pull_request = PullRequest() |
|
782 | 782 | pull_request.source_repo = repo |
|
783 | 783 | pull_request.target_repo = repo |
|
784 | 784 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
785 | 785 | branch=branch_name, commit_id=commit_ids['new-feature']) |
|
786 | 786 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
787 | 787 | branch=backend_git.default_branch_name, |
|
788 | 788 | commit_id=commit_ids['old-feature']) |
|
789 | 789 | pull_request.revisions = [commit_ids['new-feature']] |
|
790 | 790 | pull_request.title = u"Test" |
|
791 | 791 | pull_request.description = u"Description" |
|
792 | 792 | pull_request.author = UserModel().get_by_username( |
|
793 | 793 | TEST_USER_ADMIN_LOGIN) |
|
794 | 794 | Session().add(pull_request) |
|
795 | 795 | Session().commit() |
|
796 | 796 | |
|
797 | 797 | vcs = repo.scm_instance() |
|
798 | 798 | vcs.remove_ref('refs/heads/{}'.format(branch_name)) |
|
799 | 799 | |
|
800 | 800 | response = self.app.get(url( |
|
801 | 801 | controller='pullrequests', action='show', |
|
802 | 802 | repo_name=repo.repo_name, |
|
803 | 803 | pull_request_id=str(pull_request.pull_request_id))) |
|
804 | 804 | |
|
805 | 805 | assert response.status_int == 200 |
|
806 | 806 | assert_response = AssertResponse(response) |
|
807 | 807 | assert_response.element_contains( |
|
808 | 808 | '#changeset_compare_view_content .alert strong', |
|
809 | 809 | 'Missing commits') |
|
810 | 810 | assert_response.element_contains( |
|
811 | 811 | '#changeset_compare_view_content .alert', |
|
812 | 812 | 'This pull request cannot be displayed, because one or more' |
|
813 | 813 | ' commits no longer exist in the source repository.') |
|
814 | 814 | |
|
815 | 815 | def test_strip_commits_from_pull_request( |
|
816 | 816 | self, backend, pr_util, csrf_token): |
|
817 | 817 | commits = [ |
|
818 | 818 | {'message': 'initial-commit'}, |
|
819 | 819 | {'message': 'old-feature'}, |
|
820 | 820 | {'message': 'new-feature', 'parents': ['initial-commit']}, |
|
821 | 821 | ] |
|
822 | 822 | pull_request = pr_util.create_pull_request( |
|
823 | 823 | commits, target_head='initial-commit', source_head='new-feature', |
|
824 | 824 | revisions=['new-feature']) |
|
825 | 825 | |
|
826 | 826 | vcs = pr_util.source_repository.scm_instance() |
|
827 | 827 | if backend.alias == 'git': |
|
828 | 828 | vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master') |
|
829 | 829 | else: |
|
830 | 830 | vcs.strip(pr_util.commit_ids['new-feature']) |
|
831 | 831 | |
|
832 | 832 | response = self.app.get(url( |
|
833 | 833 | controller='pullrequests', action='show', |
|
834 | 834 | repo_name=pr_util.target_repository.repo_name, |
|
835 | 835 | pull_request_id=str(pull_request.pull_request_id))) |
|
836 | 836 | |
|
837 | 837 | assert response.status_int == 200 |
|
838 | 838 | assert_response = AssertResponse(response) |
|
839 | 839 | assert_response.element_contains( |
|
840 | 840 | '#changeset_compare_view_content .alert strong', |
|
841 | 841 | 'Missing commits') |
|
842 | 842 | assert_response.element_contains( |
|
843 | 843 | '#changeset_compare_view_content .alert', |
|
844 | 844 | 'This pull request cannot be displayed, because one or more' |
|
845 | 845 | ' commits no longer exist in the source repository.') |
|
846 | 846 | assert_response.element_contains( |
|
847 | 847 | '#update_commits', |
|
848 | 848 | 'Update commits') |
|
849 | 849 | |
|
850 | 850 | def test_strip_commits_and_update( |
|
851 | 851 | self, backend, pr_util, csrf_token): |
|
852 | 852 | commits = [ |
|
853 | 853 | {'message': 'initial-commit'}, |
|
854 | 854 | {'message': 'old-feature'}, |
|
855 | 855 | {'message': 'new-feature', 'parents': ['old-feature']}, |
|
856 | 856 | ] |
|
857 | 857 | pull_request = pr_util.create_pull_request( |
|
858 | 858 | commits, target_head='old-feature', source_head='new-feature', |
|
859 | 859 | revisions=['new-feature'], mergeable=True) |
|
860 | 860 | |
|
861 | 861 | vcs = pr_util.source_repository.scm_instance() |
|
862 | 862 | if backend.alias == 'git': |
|
863 | 863 | vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master') |
|
864 | 864 | else: |
|
865 | 865 | vcs.strip(pr_util.commit_ids['new-feature']) |
|
866 | 866 | |
|
867 | 867 | response = self.app.post( |
|
868 | 868 | url(controller='pullrequests', action='update', |
|
869 | 869 | repo_name=pull_request.target_repo.repo_name, |
|
870 | 870 | pull_request_id=str(pull_request.pull_request_id)), |
|
871 | 871 | params={'update_commits': 'true', '_method': 'put', |
|
872 | 872 | 'csrf_token': csrf_token}) |
|
873 | 873 | |
|
874 | 874 | assert response.status_int == 200 |
|
875 | 875 | assert response.body == 'true' |
|
876 | 876 | |
|
877 | 877 | # Make sure that after update, it won't raise 500 errors |
|
878 | 878 | response = self.app.get(url( |
|
879 | 879 | controller='pullrequests', action='show', |
|
880 | 880 | repo_name=pr_util.target_repository.repo_name, |
|
881 | 881 | pull_request_id=str(pull_request.pull_request_id))) |
|
882 | 882 | |
|
883 | 883 | assert response.status_int == 200 |
|
884 | 884 | assert_response = AssertResponse(response) |
|
885 | 885 | assert_response.element_contains( |
|
886 | 886 | '#changeset_compare_view_content .alert strong', |
|
887 | 887 | 'Missing commits') |
|
888 | 888 | |
|
889 | 889 | def test_branch_is_a_link(self, pr_util): |
|
890 | 890 | pull_request = pr_util.create_pull_request() |
|
891 | 891 | pull_request.source_ref = 'branch:origin:1234567890abcdef' |
|
892 | 892 | pull_request.target_ref = 'branch:target:abcdef1234567890' |
|
893 | 893 | Session().add(pull_request) |
|
894 | 894 | Session().commit() |
|
895 | 895 | |
|
896 | 896 | response = self.app.get(url( |
|
897 | 897 | controller='pullrequests', action='show', |
|
898 | 898 | repo_name=pull_request.target_repo.scm_instance().name, |
|
899 | 899 | pull_request_id=str(pull_request.pull_request_id))) |
|
900 | 900 | assert response.status_int == 200 |
|
901 | 901 | assert_response = AssertResponse(response) |
|
902 | 902 | |
|
903 | 903 | origin = assert_response.get_element('.pr-origininfo .tag') |
|
904 | 904 | origin_children = origin.getchildren() |
|
905 | 905 | assert len(origin_children) == 1 |
|
906 | 906 | target = assert_response.get_element('.pr-targetinfo .tag') |
|
907 | 907 | target_children = target.getchildren() |
|
908 | 908 | assert len(target_children) == 1 |
|
909 | 909 | |
|
910 | 910 | expected_origin_link = url( |
|
911 | 911 | 'changelog_home', |
|
912 | 912 | repo_name=pull_request.source_repo.scm_instance().name, |
|
913 | 913 | branch='origin') |
|
914 | 914 | expected_target_link = url( |
|
915 | 915 | 'changelog_home', |
|
916 | 916 | repo_name=pull_request.target_repo.scm_instance().name, |
|
917 | 917 | branch='target') |
|
918 | 918 | assert origin_children[0].attrib['href'] == expected_origin_link |
|
919 | 919 | assert origin_children[0].text == 'branch: origin' |
|
920 | 920 | assert target_children[0].attrib['href'] == expected_target_link |
|
921 | 921 | assert target_children[0].text == 'branch: target' |
|
922 | 922 | |
|
923 | 923 | def test_bookmark_is_not_a_link(self, pr_util): |
|
924 | 924 | pull_request = pr_util.create_pull_request() |
|
925 | 925 | pull_request.source_ref = 'bookmark:origin:1234567890abcdef' |
|
926 | 926 | pull_request.target_ref = 'bookmark:target:abcdef1234567890' |
|
927 | 927 | Session().add(pull_request) |
|
928 | 928 | Session().commit() |
|
929 | 929 | |
|
930 | 930 | response = self.app.get(url( |
|
931 | 931 | controller='pullrequests', action='show', |
|
932 | 932 | repo_name=pull_request.target_repo.scm_instance().name, |
|
933 | 933 | pull_request_id=str(pull_request.pull_request_id))) |
|
934 | 934 | assert response.status_int == 200 |
|
935 | 935 | assert_response = AssertResponse(response) |
|
936 | 936 | |
|
937 | 937 | origin = assert_response.get_element('.pr-origininfo .tag') |
|
938 | 938 | assert origin.text.strip() == 'bookmark: origin' |
|
939 | 939 | assert origin.getchildren() == [] |
|
940 | 940 | |
|
941 | 941 | target = assert_response.get_element('.pr-targetinfo .tag') |
|
942 | 942 | assert target.text.strip() == 'bookmark: target' |
|
943 | 943 | assert target.getchildren() == [] |
|
944 | 944 | |
|
945 | 945 | def test_tag_is_not_a_link(self, pr_util): |
|
946 | 946 | pull_request = pr_util.create_pull_request() |
|
947 | 947 | pull_request.source_ref = 'tag:origin:1234567890abcdef' |
|
948 | 948 | pull_request.target_ref = 'tag:target:abcdef1234567890' |
|
949 | 949 | Session().add(pull_request) |
|
950 | 950 | Session().commit() |
|
951 | 951 | |
|
952 | 952 | response = self.app.get(url( |
|
953 | 953 | controller='pullrequests', action='show', |
|
954 | 954 | repo_name=pull_request.target_repo.scm_instance().name, |
|
955 | 955 | pull_request_id=str(pull_request.pull_request_id))) |
|
956 | 956 | assert response.status_int == 200 |
|
957 | 957 | assert_response = AssertResponse(response) |
|
958 | 958 | |
|
959 | 959 | origin = assert_response.get_element('.pr-origininfo .tag') |
|
960 | 960 | assert origin.text.strip() == 'tag: origin' |
|
961 | 961 | assert origin.getchildren() == [] |
|
962 | 962 | |
|
963 | 963 | target = assert_response.get_element('.pr-targetinfo .tag') |
|
964 | 964 | assert target.text.strip() == 'tag: target' |
|
965 | 965 | assert target.getchildren() == [] |
|
966 | 966 | |
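The three ref-rendering tests above all build refs in the same `type:name:commit_id` layout; a small sketch of how such a ref string decomposes (values taken from the tests, the unpacking itself is only illustrative):

    ref_type, ref_name, commit_id = 'branch:origin:1234567890abcdef'.split(':')
    assert ref_type == 'branch' and ref_name == 'origin'
    # only 'branch' refs are rendered as links; 'bookmark' and 'tag' refs stay plain text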
|
967 | ||
|
968 | ||
|
969 | 967 | @pytest.mark.parametrize('mergeable', [True, False]) |
|
970 | 968 | def test_shadow_repository_link( |
|
971 | self, mergeable, pr_util, http_host_stub): | |
|
969 | self, mergeable, pr_util, http_host_only_stub): | |
|
972 | 970 | """ |
|
973 | 971 | Check that the pull request summary page displays a link to the shadow |
|
974 | 972 | repository if the pull request is mergeable. If it is not mergeable |
|
975 | 973 | the link should not be displayed. |
|
976 | 974 | """ |
|
977 | 975 | pull_request = pr_util.create_pull_request( |
|
978 | 976 | mergeable=mergeable, enable_notifications=False) |
|
979 | 977 | target_repo = pull_request.target_repo.scm_instance() |
|
980 | 978 | pr_id = pull_request.pull_request_id |
|
981 | 979 | shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format( |
|
982 | host=http_host_stub, repo=target_repo.name, pr_id=pr_id) | |
|
980 | host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id) | |
|
983 | 981 | |
|
984 | 982 | response = self.app.get(url( |
|
985 | 983 | controller='pullrequests', action='show', |
|
986 | 984 | repo_name=target_repo.name, |
|
987 | 985 | pull_request_id=str(pr_id))) |
|
988 | 986 | |
|
989 | 987 | assertr = AssertResponse(response) |
|
990 | 988 | if mergeable: |
|
991 | 989 | assertr.element_value_contains( |
|
992 | 990 | 'div.pr-mergeinfo input', shadow_url) |
|
993 | 991 | assertr.element_value_contains( |
|
994 | 992 | 'div.pr-mergeinfo input', 'pr-merge') |
|
995 | 993 | else: |
|
996 | 994 | assertr.no_element_exists('div.pr-mergeinfo') |
|
997 | 995 | |
|
998 | 996 | |
|
999 | 997 | @pytest.mark.usefixtures('app') |
|
1000 | 998 | @pytest.mark.backends("git", "hg") |
|
1001 | 999 | class TestPullrequestsControllerDelete(object): |
|
1002 | 1000 | def test_pull_request_delete_button_permissions_admin( |
|
1003 | 1001 | self, autologin_user, user_admin, pr_util): |
|
1004 | 1002 | pull_request = pr_util.create_pull_request( |
|
1005 | 1003 | author=user_admin.username, enable_notifications=False) |
|
1006 | 1004 | |
|
1007 | 1005 | response = self.app.get(url( |
|
1008 | 1006 | controller='pullrequests', action='show', |
|
1009 | 1007 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1010 | 1008 | pull_request_id=str(pull_request.pull_request_id))) |
|
1011 | 1009 | |
|
1012 | 1010 | response.mustcontain('id="delete_pullrequest"') |
|
1013 | 1011 | response.mustcontain('Confirm to delete this pull request') |
|
1014 | 1012 | |
|
1015 | 1013 | def test_pull_request_delete_button_permissions_owner( |
|
1016 | 1014 | self, autologin_regular_user, user_regular, pr_util): |
|
1017 | 1015 | pull_request = pr_util.create_pull_request( |
|
1018 | 1016 | author=user_regular.username, enable_notifications=False) |
|
1019 | 1017 | |
|
1020 | 1018 | response = self.app.get(url( |
|
1021 | 1019 | controller='pullrequests', action='show', |
|
1022 | 1020 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1023 | 1021 | pull_request_id=str(pull_request.pull_request_id))) |
|
1024 | 1022 | |
|
1025 | 1023 | response.mustcontain('id="delete_pullrequest"') |
|
1026 | 1024 | response.mustcontain('Confirm to delete this pull request') |
|
1027 | 1025 | |
|
1028 | 1026 | def test_pull_request_delete_button_permissions_forbidden( |
|
1029 | 1027 | self, autologin_regular_user, user_regular, user_admin, pr_util): |
|
1030 | 1028 | pull_request = pr_util.create_pull_request( |
|
1031 | 1029 | author=user_admin.username, enable_notifications=False) |
|
1032 | 1030 | |
|
1033 | 1031 | response = self.app.get(url( |
|
1034 | 1032 | controller='pullrequests', action='show', |
|
1035 | 1033 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1036 | 1034 | pull_request_id=str(pull_request.pull_request_id))) |
|
1037 | 1035 | response.mustcontain(no=['id="delete_pullrequest"']) |
|
1038 | 1036 | response.mustcontain(no=['Confirm to delete this pull request']) |
|
1039 | 1037 | |
|
1040 | 1038 | def test_pull_request_delete_button_permissions_can_update_cannot_delete( |
|
1041 | 1039 | self, autologin_regular_user, user_regular, user_admin, pr_util, |
|
1042 | 1040 | user_util): |
|
1043 | 1041 | |
|
1044 | 1042 | pull_request = pr_util.create_pull_request( |
|
1045 | 1043 | author=user_admin.username, enable_notifications=False) |
|
1046 | 1044 | |
|
1047 | 1045 | user_util.grant_user_permission_to_repo( |
|
1048 | 1046 | pull_request.target_repo, user_regular, |
|
1049 | 1047 | 'repository.write') |
|
1050 | 1048 | |
|
1051 | 1049 | response = self.app.get(url( |
|
1052 | 1050 | controller='pullrequests', action='show', |
|
1053 | 1051 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1054 | 1052 | pull_request_id=str(pull_request.pull_request_id))) |
|
1055 | 1053 | |
|
1056 | 1054 | response.mustcontain('id="open_edit_pullrequest"') |
|
1057 | 1055 | response.mustcontain('id="delete_pullrequest"') |
|
1058 | 1056 | response.mustcontain(no=['Confirm to delete this pull request']) |
|
1059 | 1057 | |
|
1060 | 1058 | |
|
1061 | 1059 | def assert_pull_request_status(pull_request, expected_status): |
|
1062 | 1060 | status = ChangesetStatusModel().calculated_review_status( |
|
1063 | 1061 | pull_request=pull_request) |
|
1064 | 1062 | assert status == expected_status |
|
1065 | 1063 | |
|
1066 | 1064 | |
|
1067 | 1065 | @pytest.mark.parametrize('action', ['index', 'create']) |
|
1068 | 1066 | @pytest.mark.usefixtures("autologin_user") |
|
1069 | 1067 | def test_redirects_to_repo_summary_for_svn_repositories(backend_svn, app, action): |
|
1070 | 1068 | response = app.get(url( |
|
1071 | 1069 | controller='pullrequests', action=action, |
|
1072 | 1070 | repo_name=backend_svn.repo_name)) |
|
1073 | 1071 | assert response.status_int == 302 |
|
1074 | 1072 | |
|
1075 | 1073 | # Not allowed, redirect to the summary |
|
1076 | 1074 | redirected = response.follow() |
|
1077 | 1075 | summary_url = h.route_path('repo_summary', repo_name=backend_svn.repo_name) |
|
1078 | 1076 | |
|
1079 | 1077 | # URL adds leading slash and path doesn't have it |
|
1080 | 1078 | assert redirected.request.path == summary_url |
|
1081 | 1079 | |
|
1082 | 1080 | |
|
1083 | 1081 | def test_delete_comment_returns_404_if_comment_does_not_exist(pylonsapp): |
|
1084 | 1082 | # TODO: johbo: Global import not possible because models.forms blows up |
|
1085 | 1083 | from rhodecode.controllers.pullrequests import PullrequestsController |
|
1086 | 1084 | controller = PullrequestsController() |
|
1087 | 1085 | patcher = mock.patch( |
|
1088 | 1086 | 'rhodecode.model.db.BaseModel.get', return_value=None) |
|
1089 | 1087 | with pytest.raises(HTTPNotFound), patcher: |
|
1090 | 1088 | controller._delete_comment(1) |
@@ -1,415 +1,423 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2017 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import threading |
|
22 | 22 | import time |
|
23 | 23 | import logging |
|
24 | 24 | import os.path |
|
25 | 25 | import subprocess32 |
|
26 | 26 | import tempfile |
|
27 | 27 | import urllib2 |
|
28 | 28 | from lxml.html import fromstring, tostring |
|
29 | 29 | from lxml.cssselect import CSSSelector |
|
30 | 30 | from urlparse import urlparse, parse_qsl |
|
31 | 31 | from urllib import unquote_plus |
|
32 | 32 | |
|
33 | 33 | from webtest.app import ( |
|
34 | 34 | Request, TestResponse, TestApp, print_stderr, string_types) |
|
35 | 35 | |
|
36 | 36 | import pytest |
|
37 | 37 | import rc_testdata |
|
38 | 38 | |
|
39 | 39 | from rhodecode.model.db import User, Repository |
|
40 | 40 | from rhodecode.model.meta import Session |
|
41 | 41 | from rhodecode.model.scm import ScmModel |
|
42 | 42 | from rhodecode.lib.vcs.backends.svn.repository import SubversionRepository |
|
43 | 43 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | log = logging.getLogger(__name__) |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | class CustomTestResponse(TestResponse): |
|
50 | 50 | def _save_output(self, out): |
|
51 | 51 | f = tempfile.NamedTemporaryFile( |
|
52 | 52 | delete=False, prefix='rc-test-', suffix='.html') |
|
53 | 53 | f.write(out) |
|
54 | 54 | return f.name |
|
55 | 55 | |
|
56 | 56 | def mustcontain(self, *strings, **kw): |
|
57 | 57 | """ |
|
58 | 58 | Assert that the response contains all of the strings passed |
|
59 | 59 | in as arguments. |
|
60 | 60 | |
|
61 | 61 | Equivalent to:: |
|
62 | 62 | |
|
63 | 63 | assert string in res |
|
64 | 64 | """ |
|
65 | 65 | if 'no' in kw: |
|
66 | 66 | no = kw['no'] |
|
67 | 67 | del kw['no'] |
|
68 | 68 | if isinstance(no, string_types): |
|
69 | 69 | no = [no] |
|
70 | 70 | else: |
|
71 | 71 | no = [] |
|
72 | 72 | if kw: |
|
73 | 73 | raise TypeError( |
|
74 | 74 | "The only keyword argument allowed is 'no'") |
|
75 | 75 | |
|
76 | 76 | f = self._save_output(str(self)) |
|
77 | 77 | |
|
78 | 78 | for s in strings: |
|
79 | 79 | if not s in self: |
|
80 | 80 | print_stderr("Actual response (no %r):" % s) |
|
81 | 81 | print_stderr(str(self)) |
|
82 | 82 | raise IndexError( |
|
83 | 83 | "Body does not contain string %r, output saved as %s" % ( |
|
84 | 84 | s, f)) |
|
85 | 85 | |
|
86 | 86 | for no_s in no: |
|
87 | 87 | if no_s in self: |
|
88 | 88 | print_stderr("Actual response (has %r)" % no_s) |
|
89 | 89 | print_stderr(str(self)) |
|
90 | 90 | raise IndexError( |
|
91 | 91 | "Body contains bad string %r, output saved as %s" % ( |
|
92 | 92 | no_s, f)) |
|
93 | 93 | |
|
94 | 94 | def assert_response(self): |
|
95 | 95 | return AssertResponse(self) |
|
96 | 96 | |
|
97 | 97 | def get_session_from_response(self): |
|
98 | 98 | """ |
|
99 | 99 | This returns the session from a response object. Pylons has some magic |
|
100 | 100 | to make the session available as `response.session`. But Pyramid |
|
101 | 101 | doesn't expose it. |
|
102 | 102 | """ |
|
103 | 103 | return self.request.environ['beaker.session'] |
|
104 | 104 | |
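A rough sketch of how the customised response class above might be exercised from a functional test (the URL and selector strings are illustrative, not taken from this change):

    # 'self.app' is the CustomTestApp instance used by the tests in this suite
    response = self.app.get('/some-repo/summary')
    # every positional string must be present and every 'no' string absent; the body is
    # saved to a temp file and its path is included in any failure message
    response.mustcontain('id="summary"', no=['Server Error'])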
|
105 | 105 | |
|
106 | 106 | class TestRequest(Request): |
|
107 | 107 | |
|
108 | 108 | # for py.test |
|
109 | 109 | disabled = True |
|
110 | 110 | ResponseClass = CustomTestResponse |
|
111 | 111 | |
|
112 | 112 | |
|
113 | 113 | class CustomTestApp(TestApp): |
|
114 | 114 | """ |
|
115 | 115 | Custom app to make mustcontain more useful |
|
116 | 116 | """ |
|
117 | 117 | RequestClass = TestRequest |
|
118 | 118 | |
|
119 | 119 | |
|
120 | 120 | def set_anonymous_access(enabled): |
|
121 | 121 | """(Dis)allows anonymous access depending on parameter `enabled`""" |
|
122 | 122 | user = User.get_default_user() |
|
123 | 123 | user.active = enabled |
|
124 | 124 | Session().add(user) |
|
125 | 125 | Session().commit() |
|
126 | 126 | time.sleep(1.5) # must sleep for cache (1s to expire) |
|
127 | 127 | log.info('anonymous access is now: %s', enabled) |
|
128 | 128 | assert enabled == User.get_default_user().active, ( |
|
129 | 129 | 'Cannot set anonymous access') |
|
130 | 130 | |
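A minimal sketch of toggling anonymous access around a block of assertions (assuming the helper above; the 1s cache delay is already handled inside it):

    set_anonymous_access(False)
    try:
        pass  # exercise views that should now require a login
    finally:
        set_anonymous_access(True)  # restore the default for the remaining tests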
|
131 | 131 | |
|
132 | 132 | def check_xfail_backends(node, backend_alias): |
|
133 | 133 | # Using "xfail_backends" here intentionally, since this marks work |
|
134 | 134 | # which is "to be done" soon. |
|
135 | 135 | skip_marker = node.get_marker('xfail_backends') |
|
136 | 136 | if skip_marker and backend_alias in skip_marker.args: |
|
137 | 137 | msg = "Support for backend %s to be developed." % (backend_alias, ) |
|
138 | 138 | msg = skip_marker.kwargs.get('reason', msg) |
|
139 | 139 | pytest.xfail(msg) |
|
140 | 140 | |
|
141 | 141 | |
|
142 | 142 | def check_skip_backends(node, backend_alias): |
|
143 | 143 | # Using "skip_backends" here intentionally, since this marks work which is |
|
144 | 144 | # not supported. |
|
145 | 145 | skip_marker = node.get_marker('skip_backends') |
|
146 | 146 | if skip_marker and backend_alias in skip_marker.args: |
|
147 | 147 | msg = "Feature not supported for backend %s." % (backend_alias, ) |
|
148 | 148 | msg = skip_marker.kwargs.get('reason', msg) |
|
149 | 149 | pytest.skip(msg) |
|
150 | 150 | |
|
151 | 151 | |
|
152 | 152 | def extract_git_repo_from_dump(dump_name, repo_name): |
|
153 | 153 | """Create git repo `repo_name` from dump `dump_name`.""" |
|
154 | 154 | repos_path = ScmModel().repos_path |
|
155 | 155 | target_path = os.path.join(repos_path, repo_name) |
|
156 | 156 | rc_testdata.extract_git_dump(dump_name, target_path) |
|
157 | 157 | return target_path |
|
158 | 158 | |
|
159 | 159 | |
|
160 | 160 | def extract_hg_repo_from_dump(dump_name, repo_name): |
|
161 | 161 | """Create hg repo `repo_name` from dump `dump_name`.""" |
|
162 | 162 | repos_path = ScmModel().repos_path |
|
163 | 163 | target_path = os.path.join(repos_path, repo_name) |
|
164 | 164 | rc_testdata.extract_hg_dump(dump_name, target_path) |
|
165 | 165 | return target_path |
|
166 | 166 | |
|
167 | 167 | |
|
168 | 168 | def extract_svn_repo_from_dump(dump_name, repo_name): |
|
169 | 169 | """Create an SVN repo `repo_name` from dump `dump_name`.""" |
|
170 | 170 | repos_path = ScmModel().repos_path |
|
171 | 171 | target_path = os.path.join(repos_path, repo_name) |
|
172 | 172 | SubversionRepository(target_path, create=True) |
|
173 | 173 | _load_svn_dump_into_repo(dump_name, target_path) |
|
174 | 174 | return target_path |
|
175 | 175 | |
|
176 | 176 | |
|
177 | 177 | def assert_message_in_log(log_records, message, levelno, module): |
|
178 | 178 | messages = [ |
|
179 | 179 | r.message for r in log_records |
|
180 | 180 | if r.module == module and r.levelno == levelno |
|
181 | 181 | ] |
|
182 | 182 | assert message in messages |
|
183 | 183 | |
|
184 | 184 | |
|
185 | 185 | def _load_svn_dump_into_repo(dump_name, repo_path): |
|
186 | 186 | """ |
|
187 | 187 | Utility to populate an SVN repository with a named dump |
|
188 | 188 | |
|
189 | 189 | Currently the dumps are in rc_testdata. They might later on be |
|
190 | 190 | integrated with the main repository once they stabilize more. |
|
191 | 191 | """ |
|
192 | 192 | dump = rc_testdata.load_svn_dump(dump_name) |
|
193 | 193 | load_dump = subprocess32.Popen( |
|
194 | 194 | ['svnadmin', 'load', repo_path], |
|
195 | 195 | stdin=subprocess32.PIPE, stdout=subprocess32.PIPE, |
|
196 | 196 | stderr=subprocess32.PIPE) |
|
197 | 197 | out, err = load_dump.communicate(dump) |
|
198 | 198 | if load_dump.returncode != 0: |
|
199 | 199 | log.error("Output of load_dump command: %s", out) |
|
200 | 200 | log.error("Error output of load_dump command: %s", err) |
|
201 | 201 | raise Exception( |
|
202 | 202 | 'Failed to load dump "%s" into repository at path "%s".' |
|
203 | 203 | % (dump_name, repo_path)) |
|
204 | 204 | |
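For orientation, the loader above shells out to `svnadmin load`; a hedged sketch of creating a throwaway SVN repo from a dump (the dump and repo names are placeholders, real dump names live in the rc_testdata package):

    repo_path = extract_svn_repo_from_dump('svn-dump-example', 'pytest-svn-repo')
    assert os.path.isdir(repo_path)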
|
205 | 205 | |
|
206 | 206 | class AssertResponse(object): |
|
207 | 207 | """ |
|
208 | 208 | Utility that helps to assert things about a given HTML response. |
|
209 | 209 | """ |
|
210 | 210 | |
|
211 | 211 | def __init__(self, response): |
|
212 | 212 | self.response = response |
|
213 | 213 | |
|
214 | 214 | def get_imports(self): |
|
215 | 215 | return fromstring, tostring, CSSSelector |
|
216 | 216 | |
|
217 | 217 | def one_element_exists(self, css_selector): |
|
218 | 218 | self.get_element(css_selector) |
|
219 | 219 | |
|
220 | 220 | def no_element_exists(self, css_selector): |
|
221 | 221 | assert not self._get_elements(css_selector) |
|
222 | 222 | |
|
223 | 223 | def element_equals_to(self, css_selector, expected_content): |
|
224 | 224 | element = self.get_element(css_selector) |
|
225 | 225 | element_text = self._element_to_string(element) |
|
226 | 226 | assert expected_content in element_text |
|
227 | 227 | |
|
228 | 228 | def element_contains(self, css_selector, expected_content): |
|
229 | 229 | element = self.get_element(css_selector) |
|
230 | 230 | assert expected_content in element.text_content() |
|
231 | 231 | |
|
232 | 232 | def element_value_contains(self, css_selector, expected_content): |
|
233 | 233 | element = self.get_element(css_selector) |
|
234 | 234 | assert expected_content in element.value |
|
235 | 235 | |
|
236 | 236 | def contains_one_link(self, link_text, href): |
|
237 | 237 | fromstring, tostring, CSSSelector = self.get_imports() |
|
238 | 238 | doc = fromstring(self.response.body) |
|
239 | 239 | sel = CSSSelector('a[href]') |
|
240 | 240 | elements = [ |
|
241 | 241 | e for e in sel(doc) if e.text_content().strip() == link_text] |
|
242 | 242 | assert len(elements) == 1, "Did not find link or found multiple links" |
|
243 | 243 | self._ensure_url_equal(elements[0].attrib.get('href'), href) |
|
244 | 244 | |
|
245 | 245 | def contains_one_anchor(self, anchor_id): |
|
246 | 246 | fromstring, tostring, CSSSelector = self.get_imports() |
|
247 | 247 | doc = fromstring(self.response.body) |
|
248 | 248 | sel = CSSSelector('#' + anchor_id) |
|
249 | 249 | elements = sel(doc) |
|
250 | 250 | assert len(elements) == 1, 'cannot find 1 element {}'.format(anchor_id) |
|
251 | 251 | |
|
252 | 252 | def _ensure_url_equal(self, found, expected): |
|
253 | 253 | assert _Url(found) == _Url(expected) |
|
254 | 254 | |
|
255 | 255 | def get_element(self, css_selector): |
|
256 | 256 | elements = self._get_elements(css_selector) |
|
257 | 257 | assert len(elements) == 1, 'cannot find 1 element {}'.format(css_selector) |
|
258 | 258 | return elements[0] |
|
259 | 259 | |
|
260 | 260 | def get_elements(self, css_selector): |
|
261 | 261 | return self._get_elements(css_selector) |
|
262 | 262 | |
|
263 | 263 | def _get_elements(self, css_selector): |
|
264 | 264 | fromstring, tostring, CSSSelector = self.get_imports() |
|
265 | 265 | doc = fromstring(self.response.body) |
|
266 | 266 | sel = CSSSelector(css_selector) |
|
267 | 267 | elements = sel(doc) |
|
268 | 268 | return elements |
|
269 | 269 | |
|
270 | 270 | def _element_to_string(self, element): |
|
271 | 271 | fromstring, tostring, CSSSelector = self.get_imports() |
|
272 | 272 | return tostring(element) |
|
273 | 273 | |
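The pull request tests earlier in this change use AssertResponse in exactly this pattern; a condensed sketch:

    assert_response = AssertResponse(response)
    # exactly one element may match the selector, and its text must contain the string
    assert_response.element_contains(
        '#changeset_compare_view_content .alert strong', 'Missing commits')
    # and conversely, assert that no merge-info box is rendered at all
    assert_response.no_element_exists('div.pr-mergeinfo')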
|
274 | 274 | |
|
275 | 275 | class _Url(object): |
|
276 | 276 | """ |
|
277 | 277 | A url object that can be compared with other url objects |
|
278 | 278 | without regard to the vagaries of encoding, escaping, and ordering |
|
279 | 279 | of parameters in query strings. |
|
280 | 280 | |
|
281 | 281 | Inspired by |
|
282 | 282 | http://stackoverflow.com/questions/5371992/comparing-two-urls-in-python |
|
283 | 283 | """ |
|
284 | 284 | |
|
285 | 285 | def __init__(self, url): |
|
286 | 286 | parts = urlparse(url) |
|
287 | 287 | _query = frozenset(parse_qsl(parts.query)) |
|
288 | 288 | _path = unquote_plus(parts.path) |
|
289 | 289 | parts = parts._replace(query=_query, path=_path) |
|
290 | 290 | self.parts = parts |
|
291 | 291 | |
|
292 | 292 | def __eq__(self, other): |
|
293 | 293 | return self.parts == other.parts |
|
294 | 294 | |
|
295 | 295 | def __hash__(self): |
|
296 | 296 | return hash(self.parts) |
|
297 | 297 | |
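A quick illustration of the equality semantics described in the docstring above (query-string ordering and escaping are ignored; the URLs are illustrative):

    assert _Url('/repo?a=1&b=2') == _Url('/repo?b=2&a=1')
    assert _Url('/some%20repo') == _Url('/some repo')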
|
298 | 298 | |
|
299 | 299 | def run_test_concurrently(times, raise_catched_exc=True): |
|
300 | 300 | """ |
|
301 | 301 | Add this decorator to small pieces of code that you want to test |
|
302 | 302 | concurrently |
|
303 | 303 | |
|
304 | 304 | ex: |
|
305 | 305 | |
|
306 | 306 | @run_test_concurrently(25) |
|
307 | 307 | def my_test_function(): |
|
308 | 308 | ... |
|
309 | 309 | """ |
|
310 | 310 | def test_concurrently_decorator(test_func): |
|
311 | 311 | def wrapper(*args, **kwargs): |
|
312 | 312 | exceptions = [] |
|
313 | 313 | |
|
314 | 314 | def call_test_func(): |
|
315 | 315 | try: |
|
316 | 316 | test_func(*args, **kwargs) |
|
317 | 317 | except Exception as e: |
|
318 | 318 | exceptions.append(e) |
|
319 | 319 | if raise_catched_exc: |
|
320 | 320 | raise |
|
321 | 321 | threads = [] |
|
322 | 322 | for i in range(times): |
|
323 | 323 | threads.append(threading.Thread(target=call_test_func)) |
|
324 | 324 | for t in threads: |
|
325 | 325 | t.start() |
|
326 | 326 | for t in threads: |
|
327 | 327 | t.join() |
|
328 | 328 | if exceptions: |
|
329 | 329 | raise Exception( |
|
330 | 330 | 'test_concurrently intercepted %s exceptions: %s' % ( |
|
331 | 331 | len(exceptions), exceptions)) |
|
332 | 332 | return wrapper |
|
333 | 333 | return test_concurrently_decorator |
|
334 | 334 | |
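A sketch of applying the decorator above; the test body is illustrative and only meant to show the shape of the call:

    @run_test_concurrently(25)
    def test_session_factory_is_thread_safe():
        # each of the 25 threads runs this body once; exceptions raised in any thread
        # are collected and reported together once all threads have joined
        assert Session() is not None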
|
335 | 335 | |
|
336 | 336 | def wait_for_url(url, timeout=10): |
|
337 | 337 | """ |
|
338 | 338 | Wait until URL becomes reachable. |
|
339 | 339 | |
|
340 | 340 | It polls the URL until the timeout is reached or it becomes reachable. |

341 | 341 | It will call `py.test.fail` if the URL does not become reachable in time. |
|
342 | 342 | """ |
|
343 | 343 | timeout = time.time() + timeout |
|
344 | 344 | last = 0 |
|
345 | 345 | wait = 0.1 |
|
346 | 346 | |
|
347 | 347 | while timeout > last: |
|
348 | 348 | last = time.time() |
|
349 | 349 | if is_url_reachable(url): |
|
350 | 350 | break |
|
351 | 351 | elif (last + wait) > time.time(): |
|
352 | 352 | # Go to sleep because not enough time has passed since last check. |
|
353 | 353 | time.sleep(wait) |
|
354 | 354 | else: |
|
355 | 355 | pytest.fail("Timeout while waiting for URL {}".format(url)) |
|
356 | 356 | |
|
357 | 357 | |
|
358 | 358 | def is_url_reachable(url): |
|
359 | 359 | try: |
|
360 | 360 | urllib2.urlopen(url) |
|
361 | 361 | except urllib2.URLError: |
|
362 | 362 | return False |
|
363 | 363 | return True |
|
364 | 364 | |
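A hedged example of the polling helper above, e.g. while waiting for a test server to come up (the URL and port are placeholders):

    wait_for_url('http://127.0.0.1:9090/', timeout=10)  # calls pytest.fail on timeout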
|
365 | 365 | |
|
366 | 366 | def repo_on_filesystem(repo_name): |
|
367 | 367 | from rhodecode.lib import vcs |
|
368 | 368 | from rhodecode.tests import TESTS_TMP_PATH |
|
369 | 369 | repo = vcs.get_vcs_instance( |
|
370 | 370 | os.path.join(TESTS_TMP_PATH, repo_name), create=False) |
|
371 | 371 | return repo is not None |
|
372 | 372 | |
|
373 | 373 | |
|
374 | 374 | def commit_change( |
|
375 | 375 | repo, filename, content, message, vcs_type, parent=None, newfile=False): |
|
376 | 376 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN |
|
377 | 377 | |
|
378 | 378 | repo = Repository.get_by_repo_name(repo) |
|
379 | 379 | _commit = parent |
|
380 | 380 | if not parent: |
|
381 | 381 | _commit = EmptyCommit(alias=vcs_type) |
|
382 | 382 | |
|
383 | 383 | if newfile: |
|
384 | 384 | nodes = { |
|
385 | 385 | filename: { |
|
386 | 386 | 'content': content |
|
387 | 387 | } |
|
388 | 388 | } |
|
389 | 389 | commit = ScmModel().create_nodes( |
|
390 | 390 | user=TEST_USER_ADMIN_LOGIN, repo=repo, |
|
391 | 391 | message=message, |
|
392 | 392 | nodes=nodes, |
|
393 | 393 | parent_commit=_commit, |
|
394 | 394 | author=TEST_USER_ADMIN_LOGIN, |
|
395 | 395 | ) |
|
396 | 396 | else: |
|
397 | 397 | commit = ScmModel().commit_change( |
|
398 | 398 | repo=repo.scm_instance(), repo_name=repo.repo_name, |
|
399 | 399 | commit=parent, user=TEST_USER_ADMIN_LOGIN, |
|
400 | 400 | author=TEST_USER_ADMIN_LOGIN, |
|
401 | 401 | message=message, |
|
402 | 402 | content=content, |
|
403 | 403 | f_path=filename |
|
404 | 404 | ) |
|
405 | 405 | return commit |
|
406 | 406 | |
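For context, a sketch of driving commit_change to add a brand-new file to a backend repository (repo name, path and content are illustrative):

    commit = commit_change(
        repo='vcs_test_git',          # illustrative repository name
        filename='docs/example.txt',
        content='example content\n',
        message='add example file',
        vcs_type='git',
        newfile=True)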
|
407 | 407 | |
|
408 | 408 | def add_test_routes(config): |
|
409 | 409 | """ |
|
410 | 410 | Adds test routing that can be used in different functional tests |
|
411 | 411 | |
|
412 | 412 | """ |
|
413 | 413 | config.add_route(name='home', pattern='/') |
|
414 | 414 | config.add_route(name='repo_summary', pattern='/{repo_name}') |
|
415 | config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary') | |
|
415 | 416 | config.add_route(name='repo_group_home', pattern='/{repo_group_name}') |
|
417 | ||
|
418 | config.add_route(name='pullrequest_show', | |
|
419 | pattern='/{repo_name}/pull-request/{pull_request_id}') | |
|
420 | config.add_route(name='pull_requests_global', | |
|
421 | pattern='/pull-request/{pull_request_id}') | |
|
422 | config.add_route(name='repo_commit', | |
|
423 | pattern='/{repo_name}/changeset/{commit_id}') |
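A minimal sketch of wiring these test routes into a bare Pyramid configurator when a test needs route generation without the full application (the setup itself is an assumption, not part of this change):

    from pyramid.config import Configurator

    config = Configurator()
    add_test_routes(config)
    app = config.make_wsgi_app()  # named routes such as 'repo_commit' are now registered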