Show More
@@ -1,144 +1,146 b'' | |||
|
1 | 1 | # Copyright (C) 2016-2018 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | import logging |
|
20 | 20 | |
|
21 | 21 | from rhodecode.translation import lazy_ugettext |
|
22 | 22 | from rhodecode.events.repo import ( |
|
23 | 23 | RepoEvent, _commits_as_dict, _issues_as_dict) |
|
24 | 24 | |
|
25 | 25 | log = logging.getLogger(__name__) |
|
26 | 26 | |
|
27 | 27 | |
|
28 | 28 | class PullRequestEvent(RepoEvent): |
|
29 | 29 | """ |
|
30 | 30 | Base class for pull request events. |
|
31 | 31 | |
|
32 | 32 | :param pullrequest: a :class:`PullRequest` instance |
|
33 | 33 | """ |
|
34 | 34 | |
|
35 | 35 | def __init__(self, pullrequest): |
|
36 | 36 | super(PullRequestEvent, self).__init__(pullrequest.target_repo) |
|
37 | 37 | self.pullrequest = pullrequest |
|
38 | 38 | |
|
39 | 39 | def as_dict(self): |
|
40 | 40 | from rhodecode.model.pull_request import PullRequestModel |
|
41 | 41 | data = super(PullRequestEvent, self).as_dict() |
|
42 | 42 | |
|
43 | 43 | commits = _commits_as_dict( |
|
44 | 44 | self, |
|
45 | 45 | commit_ids=self.pullrequest.revisions, |
|
46 | 46 | repos=[self.pullrequest.source_repo] |
|
47 | 47 | ) |
|
48 | 48 | issues = _issues_as_dict(commits) |
|
49 | 49 | |
|
50 | 50 | data.update({ |
|
51 | 51 | 'pullrequest': { |
|
52 | 52 | 'title': self.pullrequest.title, |
|
53 | 53 | 'issues': issues, |
|
54 | 54 | 'pull_request_id': self.pullrequest.pull_request_id, |
|
55 | 55 | 'url': PullRequestModel().get_url( |
|
56 | 56 | self.pullrequest, request=self.request), |
|
57 | 57 | 'permalink_url': PullRequestModel().get_url( |
|
58 | 58 | self.pullrequest, request=self.request, permalink=True), |
|
59 | 'shadow_url': PullRequestModel().get_shadow_clone_url( | |
|
60 | self.pullrequest, request=self.request), | |
|
59 | 61 | 'status': self.pullrequest.calculated_review_status(), |
|
60 | 62 | 'commits': commits, |
|
61 | 63 | } |
|
62 | 64 | }) |
|
63 | 65 | return data |
|
64 | 66 | |
|
65 | 67 | |
|
66 | 68 | class PullRequestCreateEvent(PullRequestEvent): |
|
67 | 69 | """ |
|
68 | 70 | An instance of this class is emitted as an :term:`event` after a pull |
|
69 | 71 | request is created. |
|
70 | 72 | """ |
|
71 | 73 | name = 'pullrequest-create' |
|
72 | 74 | display_name = lazy_ugettext('pullrequest created') |
|
73 | 75 | |
|
74 | 76 | |
|
75 | 77 | class PullRequestCloseEvent(PullRequestEvent): |
|
76 | 78 | """ |
|
77 | 79 | An instance of this class is emitted as an :term:`event` after a pull |
|
78 | 80 | request is closed. |
|
79 | 81 | """ |
|
80 | 82 | name = 'pullrequest-close' |
|
81 | 83 | display_name = lazy_ugettext('pullrequest closed') |
|
82 | 84 | |
|
83 | 85 | |
|
84 | 86 | class PullRequestUpdateEvent(PullRequestEvent): |
|
85 | 87 | """ |
|
86 | 88 | An instance of this class is emitted as an :term:`event` after a pull |
|
87 | 89 | request's commits have been updated. |
|
88 | 90 | """ |
|
89 | 91 | name = 'pullrequest-update' |
|
90 | 92 | display_name = lazy_ugettext('pullrequest commits updated') |
|
91 | 93 | |
|
92 | 94 | |
|
93 | 95 | class PullRequestReviewEvent(PullRequestEvent): |
|
94 | 96 | """ |
|
95 | 97 | An instance of this class is emitted as an :term:`event` after a pull |
|
96 | 98 | request review has changed. |
|
97 | 99 | """ |
|
98 | 100 | name = 'pullrequest-review' |
|
99 | 101 | display_name = lazy_ugettext('pullrequest review changed') |
|
100 | 102 | |
|
101 | 103 | |
|
102 | 104 | class PullRequestMergeEvent(PullRequestEvent): |
|
103 | 105 | """ |
|
104 | 106 | An instance of this class is emitted as an :term:`event` after a pull |
|
105 | 107 | request is merged. |
|
106 | 108 | """ |
|
107 | 109 | name = 'pullrequest-merge' |
|
108 | 110 | display_name = lazy_ugettext('pullrequest merged') |
|
109 | 111 | |
|
110 | 112 | |
|
111 | 113 | class PullRequestCommentEvent(PullRequestEvent): |
|
112 | 114 | """ |
|
113 | 115 | An instance of this class is emitted as an :term:`event` after a pull |
|
114 | 116 | request comment is created. |
|
115 | 117 | """ |
|
116 | 118 | name = 'pullrequest-comment' |
|
117 | 119 | display_name = lazy_ugettext('pullrequest commented') |
|
118 | 120 | |
|
119 | 121 | def __init__(self, pullrequest, comment): |
|
120 | 122 | super(PullRequestCommentEvent, self).__init__(pullrequest) |
|
121 | 123 | self.comment = comment |
|
122 | 124 | |
|
123 | 125 | def as_dict(self): |
|
124 | 126 | from rhodecode.model.comment import CommentsModel |
|
125 | 127 | data = super(PullRequestCommentEvent, self).as_dict() |
|
126 | 128 | |
|
127 | 129 | status = None |
|
128 | 130 | if self.comment.status_change: |
|
129 | 131 | status = self.comment.status_change[0].status |
|
130 | 132 | |
|
131 | 133 | data.update({ |
|
132 | 134 | 'comment': { |
|
133 | 135 | 'status': status, |
|
134 | 136 | 'text': self.comment.text, |
|
135 | 137 | 'type': self.comment.comment_type, |
|
136 | 138 | 'file': self.comment.f_path, |
|
137 | 139 | 'line': self.comment.line_no, |
|
138 | 140 | 'url': CommentsModel().get_url( |
|
139 | 141 | self.comment, request=self.request), |
|
140 | 142 | 'permalink_url': CommentsModel().get_url( |
|
141 | 143 | self.comment, request=self.request, permalink=True), |
|
142 | 144 | } |
|
143 | 145 | }) |
|
144 | 146 | return data |
@@ -1,153 +1,154 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import colander |
|
22 | 22 | from rhodecode.translation import _ |
|
23 | 23 | |
|
24 | 24 | |
|
25 | 25 | class IntegrationTypeBase(object): |
|
26 | 26 | """ Base class for IntegrationType plugins """ |
|
27 | 27 | is_dummy = False |
|
28 | 28 | description = '' |
|
29 | 29 | |
|
30 | 30 | @classmethod |
|
31 | 31 | def icon(cls): |
|
32 | 32 | return ''' |
|
33 | 33 | <?xml version="1.0" encoding="UTF-8" standalone="no"?> |
|
34 | 34 | <svg |
|
35 | 35 | xmlns:dc="http://purl.org/dc/elements/1.1/" |
|
36 | 36 | xmlns:cc="http://creativecommons.org/ns#" |
|
37 | 37 | xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" |
|
38 | 38 | xmlns:svg="http://www.w3.org/2000/svg" |
|
39 | 39 | xmlns="http://www.w3.org/2000/svg" |
|
40 | 40 | xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" |
|
41 | 41 | xmlns:inkscape="http://setwww.inkscape.org/namespaces/inkscape" |
|
42 | 42 | viewBox="0 -256 1792 1792" |
|
43 | 43 | id="svg3025" |
|
44 | 44 | version="1.1" |
|
45 | 45 | inkscape:version="0.48.3.1 r9886" |
|
46 | 46 | width="100%" |
|
47 | 47 | height="100%" |
|
48 | 48 | sodipodi:docname="cog_font_awesome.svg"> |
|
49 | 49 | <metadata |
|
50 | 50 | id="metadata3035"> |
|
51 | 51 | <rdf:RDF> |
|
52 | 52 | <cc:Work |
|
53 | 53 | rdf:about=""> |
|
54 | 54 | <dc:format>image/svg+xml</dc:format> |
|
55 | 55 | <dc:type |
|
56 | 56 | rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> |
|
57 | 57 | </cc:Work> |
|
58 | 58 | </rdf:RDF> |
|
59 | 59 | </metadata> |
|
60 | 60 | <defs |
|
61 | 61 | id="defs3033" /> |
|
62 | 62 | <sodipodi:namedview |
|
63 | 63 | pagecolor="#ffffff" |
|
64 | 64 | bordercolor="#666666" |
|
65 | 65 | borderopacity="1" |
|
66 | 66 | objecttolerance="10" |
|
67 | 67 | gridtolerance="10" |
|
68 | 68 | guidetolerance="10" |
|
69 | 69 | inkscape:pageopacity="0" |
|
70 | 70 | inkscape:pageshadow="2" |
|
71 | 71 | inkscape:window-width="640" |
|
72 | 72 | inkscape:window-height="480" |
|
73 | 73 | id="namedview3031" |
|
74 | 74 | showgrid="false" |
|
75 | 75 | inkscape:zoom="0.13169643" |
|
76 | 76 | inkscape:cx="896" |
|
77 | 77 | inkscape:cy="896" |
|
78 | 78 | inkscape:window-x="0" |
|
79 | 79 | inkscape:window-y="25" |
|
80 | 80 | inkscape:window-maximized="0" |
|
81 | 81 | inkscape:current-layer="svg3025" /> |
|
82 | 82 | <g |
|
83 | 83 | transform="matrix(1,0,0,-1,121.49153,1285.4237)" |
|
84 | 84 | id="g3027"> |
|
85 | 85 | <path |
|
86 | 86 | d="m 1024,640 q 0,106 -75,181 -75,75 -181,75 -106,0 -181,-75 -75,-75 -75,-181 0,-106 75,-181 75,-75 181,-75 106,0 181,75 75,75 75,181 z m 512,109 V 527 q 0,-12 -8,-23 -8,-11 -20,-13 l -185,-28 q -19,-54 -39,-91 35,-50 107,-138 10,-12 10,-25 0,-13 -9,-23 -27,-37 -99,-108 -72,-71 -94,-71 -12,0 -26,9 l -138,108 q -44,-23 -91,-38 -16,-136 -29,-186 -7,-28 -36,-28 H 657 q -14,0 -24.5,8.5 Q 622,-111 621,-98 L 593,86 q -49,16 -90,37 L 362,16 Q 352,7 337,7 323,7 312,18 186,132 147,186 q -7,10 -7,23 0,12 8,23 15,21 51,66.5 36,45.5 54,70.5 -27,50 -41,99 L 29,495 Q 16,497 8,507.5 0,518 0,531 v 222 q 0,12 8,23 8,11 19,13 l 186,28 q 14,46 39,92 -40,57 -107,138 -10,12 -10,24 0,10 9,23 26,36 98.5,107.5 72.5,71.5 94.5,71.5 13,0 26,-10 l 138,-107 q 44,23 91,38 16,136 29,186 7,28 36,28 h 222 q 14,0 24.5,-8.5 Q 914,1391 915,1378 l 28,-184 q 49,-16 90,-37 l 142,107 q 9,9 24,9 13,0 25,-10 129,-119 165,-170 7,-8 7,-22 0,-12 -8,-23 -15,-21 -51,-66.5 -36,-45.5 -54,-70.5 26,-50 41,-98 l 183,-28 q 13,-2 21,-12.5 8,-10.5 8,-23.5 z" |
|
87 | 87 | id="path3029" |
|
88 | 88 | inkscape:connector-curvature="0" |
|
89 | 89 | style="fill:currentColor" /> |
|
90 | 90 | </g> |
|
91 | 91 | </svg> |
|
92 | 92 | ''' |
|
93 | 93 | |
|
94 | 94 | def __init__(self, settings): |
|
95 | 95 | """ |
|
96 | 96 | :param settings: dict of settings to be used for the integration |
|
97 | 97 | """ |
|
98 | 98 | self.settings = settings |
|
99 | 99 | |
|
100 | 100 | def settings_schema(self): |
|
101 | 101 | """ |
|
102 | 102 | A colander schema of settings for the integration type |
|
103 | 103 | """ |
|
104 | 104 | return colander.Schema() |
|
105 | 105 | |
|
106 | 106 | |
|
107 | 107 | class EEIntegration(IntegrationTypeBase): |
|
108 | 108 | description = 'Integration available in RhodeCode EE edition.' |
|
109 | 109 | is_dummy = True |
|
110 | 110 | |
|
111 | 111 | def __init__(self, name, key, settings=None): |
|
112 | 112 | self.display_name = name |
|
113 | 113 | self.key = key |
|
114 | 114 | super(EEIntegration, self).__init__(settings) |
|
115 | 115 | |
|
116 | 116 | |
|
117 | 117 | # Helpers # |
|
118 | 118 | |
|
119 | 119 | # common vars for url template |
|
120 | 120 | CI_URL_VARS = [ |
|
121 | 121 | 'repo_name', |
|
122 | 122 | 'repo_type', |
|
123 | 123 | 'repo_id', |
|
124 | 124 | 'repo_url', |
|
125 | 125 | # extra repo fields |
|
126 | 126 | 'extra:<extra_key_name>', |
|
127 | 127 | |
|
128 | 128 | # special attrs below that we handle, using multi-call |
|
129 | 129 | 'branch', |
|
130 | 130 | 'commit_id', |
|
131 | 131 | |
|
132 | 132 | # pr events vars |
|
133 | 133 | 'pull_request_id', |
|
134 | 134 | 'pull_request_url', |
|
135 | 'pull_request_shadow_url', | |
|
135 | 136 | |
|
136 | 137 | # user who triggers the call |
|
137 | 138 | 'username', |
|
138 | 139 | 'user_id', |
|
139 | 140 | |
|
140 | 141 | ] |
|
141 | 142 | |
|
142 | 143 | |
|
143 | 144 | def get_auth(settings): |
|
144 | 145 | from requests.auth import HTTPBasicAuth |
|
145 | 146 | username = settings.get('username') |
|
146 | 147 | password = settings.get('password') |
|
147 | 148 | if username and password: |
|
148 | 149 | return HTTPBasicAuth(username, password) |
|
149 | 150 | return None |
|
150 | 151 | |
|
151 | 152 | |
|
152 | 153 | def get_url_vars(url_vars): |
|
153 | 154 | return ', '.join('${' + x + '}' for x in url_vars) |
@@ -1,394 +1,396 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | from __future__ import unicode_literals |
|
22 | 22 | import string |
|
23 | 23 | from collections import OrderedDict |
|
24 | 24 | |
|
25 | 25 | import deform |
|
26 | 26 | import deform.widget |
|
27 | 27 | import logging |
|
28 | 28 | import requests |
|
29 | 29 | import requests.adapters |
|
30 | 30 | import colander |
|
31 | 31 | from requests.packages.urllib3.util.retry import Retry |
|
32 | 32 | |
|
33 | 33 | import rhodecode |
|
34 | 34 | from rhodecode import events |
|
35 | 35 | from rhodecode.translation import _ |
|
36 | 36 | from rhodecode.integrations.types.base import ( |
|
37 | 37 | IntegrationTypeBase, get_auth, get_url_vars) |
|
38 | 38 | from rhodecode.lib.celerylib import run_task, async_task, RequestContextTask |
|
39 | 39 | |
|
40 | 40 | log = logging.getLogger(__name__) |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | # updating this required to update the `common_vars` passed in url calling func |
|
44 | 44 | WEBHOOK_URL_VARS = [ |
|
45 | 45 | 'repo_name', |
|
46 | 46 | 'repo_type', |
|
47 | 47 | 'repo_id', |
|
48 | 48 | 'repo_url', |
|
49 | 49 | # extra repo fields |
|
50 | 50 | 'extra:<extra_key_name>', |
|
51 | 51 | |
|
52 | 52 | # special attrs below that we handle, using multi-call |
|
53 | 53 | 'branch', |
|
54 | 54 | 'commit_id', |
|
55 | 55 | |
|
56 | 56 | # pr events vars |
|
57 | 57 | 'pull_request_id', |
|
58 | 58 | 'pull_request_url', |
|
59 | 'pull_request_shadow_url', | |
|
59 | 60 | |
|
60 | 61 | # user who triggers the call |
|
61 | 62 | 'username', |
|
62 | 63 | 'user_id', |
|
63 | 64 | |
|
64 | 65 | ] |
|
65 | 66 | URL_VARS = get_url_vars(WEBHOOK_URL_VARS) |
|
66 | 67 | |
|
67 | 68 | |
|
68 | 69 | class WebhookHandler(object): |
|
69 | 70 | def __init__(self, template_url, secret_token, headers): |
|
70 | 71 | self.template_url = template_url |
|
71 | 72 | self.secret_token = secret_token |
|
72 | 73 | self.headers = headers |
|
73 | 74 | |
|
74 | 75 | def get_base_parsed_template(self, data): |
|
75 | 76 | """ |
|
76 | 77 | initially parses the passed in template with some common variables |
|
77 | 78 | available on ALL calls |
|
78 | 79 | """ |
|
79 | 80 | # note: make sure to update the `WEBHOOK_URL_VARS` if this changes |
|
80 | 81 | common_vars = { |
|
81 | 82 | 'repo_name': data['repo']['repo_name'], |
|
82 | 83 | 'repo_type': data['repo']['repo_type'], |
|
83 | 84 | 'repo_id': data['repo']['repo_id'], |
|
84 | 85 | 'repo_url': data['repo']['url'], |
|
85 | 86 | 'username': data['actor']['username'], |
|
86 | 87 | 'user_id': data['actor']['user_id'] |
|
87 | 88 | } |
|
88 | 89 | |
|
89 | 90 | extra_vars = {} |
|
90 | 91 | for extra_key, extra_val in data['repo']['extra_fields'].items(): |
|
91 | 92 | extra_vars['extra__{}'.format(extra_key)] = extra_val |
|
92 | 93 | common_vars.update(extra_vars) |
|
93 | 94 | |
|
94 | 95 | template_url = self.template_url.replace('${extra:', '${extra__') |
|
95 | 96 | return string.Template(template_url).safe_substitute(**common_vars) |
|
96 | 97 | |
|
97 | 98 | def repo_push_event_handler(self, event, data): |
|
98 | 99 | url = self.get_base_parsed_template(data) |
|
99 | 100 | url_cals = [] |
|
100 | 101 | branch_data = OrderedDict() |
|
101 | 102 | for obj in data['push']['branches']: |
|
102 | 103 | branch_data[obj['name']] = obj |
|
103 | 104 | |
|
104 | 105 | branches_commits = OrderedDict() |
|
105 | 106 | for commit in data['push']['commits']: |
|
106 | 107 | if commit.get('git_ref_change'): |
|
107 | 108 | # special case for GIT that allows creating tags, |
|
108 | 109 | # deleting branches without associated commit |
|
109 | 110 | continue |
|
110 | 111 | |
|
111 | 112 | if commit['branch'] not in branches_commits: |
|
112 | 113 | branch_commits = {'branch': branch_data[commit['branch']], |
|
113 | 114 | 'commits': []} |
|
114 | 115 | branches_commits[commit['branch']] = branch_commits |
|
115 | 116 | |
|
116 | 117 | branch_commits = branches_commits[commit['branch']] |
|
117 | 118 | branch_commits['commits'].append(commit) |
|
118 | 119 | |
|
119 | 120 | if '${branch}' in url: |
|
120 | 121 | # call it multiple times, for each branch if used in variables |
|
121 | 122 | for branch, commit_ids in branches_commits.items(): |
|
122 | 123 | branch_url = string.Template(url).safe_substitute(branch=branch) |
|
123 | 124 | # call further down for each commit if used |
|
124 | 125 | if '${commit_id}' in branch_url: |
|
125 | 126 | for commit_data in commit_ids['commits']: |
|
126 | 127 | commit_id = commit_data['raw_id'] |
|
127 | 128 | commit_url = string.Template(branch_url).safe_substitute( |
|
128 | 129 | commit_id=commit_id) |
|
129 | 130 | # register per-commit call |
|
130 | 131 | log.debug( |
|
131 | 132 | 'register webhook call(%s) to url %s', event, commit_url) |
|
132 | 133 | url_cals.append((commit_url, self.secret_token, self.headers, data)) |
|
133 | 134 | |
|
134 | 135 | else: |
|
135 | 136 | # register per-branch call |
|
136 | 137 | log.debug( |
|
137 | 138 | 'register webhook call(%s) to url %s', event, branch_url) |
|
138 | 139 | url_cals.append((branch_url, self.secret_token, self.headers, data)) |
|
139 | 140 | |
|
140 | 141 | else: |
|
141 | 142 | log.debug( |
|
142 | 143 | 'register webhook call(%s) to url %s', event, url) |
|
143 | 144 | url_cals.append((url, self.secret_token, self.headers, data)) |
|
144 | 145 | |
|
145 | 146 | return url_cals |
|
146 | 147 | |
|
147 | 148 | def repo_create_event_handler(self, event, data): |
|
148 | 149 | url = self.get_base_parsed_template(data) |
|
149 | 150 | log.debug( |
|
150 | 151 | 'register webhook call(%s) to url %s', event, url) |
|
151 | 152 | return [(url, self.secret_token, self.headers, data)] |
|
152 | 153 | |
|
153 | 154 | def pull_request_event_handler(self, event, data): |
|
154 | 155 | url = self.get_base_parsed_template(data) |
|
155 | 156 | log.debug( |
|
156 | 157 | 'register webhook call(%s) to url %s', event, url) |
|
157 | 158 | url = string.Template(url).safe_substitute( |
|
158 | 159 | pull_request_id=data['pullrequest']['pull_request_id'], |
|
159 |
pull_request_url=data['pullrequest']['url'] |
|
|
160 | pull_request_url=data['pullrequest']['url'], | |
|
161 | pull_request_shadow_url=data['pullrequest']['shadow_url'],) | |
|
160 | 162 | return [(url, self.secret_token, self.headers, data)] |
|
161 | 163 | |
|
162 | 164 | def __call__(self, event, data): |
|
163 | 165 | if isinstance(event, events.RepoPushEvent): |
|
164 | 166 | return self.repo_push_event_handler(event, data) |
|
165 | 167 | elif isinstance(event, events.RepoCreateEvent): |
|
166 | 168 | return self.repo_create_event_handler(event, data) |
|
167 | 169 | elif isinstance(event, events.PullRequestEvent): |
|
168 | 170 | return self.pull_request_event_handler(event, data) |
|
169 | 171 | else: |
|
170 | 172 | raise ValueError('event type not supported: %s' % events) |
|
171 | 173 | |
|
172 | 174 | |
|
173 | 175 | class WebhookSettingsSchema(colander.Schema): |
|
174 | 176 | url = colander.SchemaNode( |
|
175 | 177 | colander.String(), |
|
176 | 178 | title=_('Webhook URL'), |
|
177 | 179 | description= |
|
178 | 180 | _('URL to which Webhook should submit data. Following variables ' |
|
179 | 181 | 'are allowed to be used: {vars}. Some of the variables would ' |
|
180 | 182 | 'trigger multiple calls, like ${{branch}} or ${{commit_id}}. ' |
|
181 | 183 | 'Webhook will be called as many times as unique objects in ' |
|
182 | 184 | 'data in such cases.').format(vars=URL_VARS), |
|
183 | 185 | missing=colander.required, |
|
184 | 186 | required=True, |
|
185 | 187 | validator=colander.url, |
|
186 | 188 | widget=deform.widget.TextInputWidget( |
|
187 | 189 | placeholder='https://www.example.com/webhook' |
|
188 | 190 | ), |
|
189 | 191 | ) |
|
190 | 192 | secret_token = colander.SchemaNode( |
|
191 | 193 | colander.String(), |
|
192 | 194 | title=_('Secret Token'), |
|
193 | 195 | description=_('Optional string used to validate received payloads. ' |
|
194 | 196 | 'It will be sent together with event data in JSON'), |
|
195 | 197 | default='', |
|
196 | 198 | missing='', |
|
197 | 199 | widget=deform.widget.TextInputWidget( |
|
198 | 200 | placeholder='e.g. secret_token' |
|
199 | 201 | ), |
|
200 | 202 | ) |
|
201 | 203 | username = colander.SchemaNode( |
|
202 | 204 | colander.String(), |
|
203 | 205 | title=_('Username'), |
|
204 | 206 | description=_('Optional username to authenticate the call.'), |
|
205 | 207 | default='', |
|
206 | 208 | missing='', |
|
207 | 209 | widget=deform.widget.TextInputWidget( |
|
208 | 210 | placeholder='e.g. admin' |
|
209 | 211 | ), |
|
210 | 212 | ) |
|
211 | 213 | password = colander.SchemaNode( |
|
212 | 214 | colander.String(), |
|
213 | 215 | title=_('Password'), |
|
214 | 216 | description=_('Optional password to authenticate the call.'), |
|
215 | 217 | default='', |
|
216 | 218 | missing='', |
|
217 | 219 | widget=deform.widget.PasswordWidget( |
|
218 | 220 | placeholder='e.g. secret.', |
|
219 | 221 | redisplay=True, |
|
220 | 222 | ), |
|
221 | 223 | ) |
|
222 | 224 | custom_header_key = colander.SchemaNode( |
|
223 | 225 | colander.String(), |
|
224 | 226 | title=_('Custom Header Key'), |
|
225 | 227 | description=_('Custom Header name to be set when calling endpoint.'), |
|
226 | 228 | default='', |
|
227 | 229 | missing='', |
|
228 | 230 | widget=deform.widget.TextInputWidget( |
|
229 | 231 | placeholder='e.g.Authorization' |
|
230 | 232 | ), |
|
231 | 233 | ) |
|
232 | 234 | custom_header_val = colander.SchemaNode( |
|
233 | 235 | colander.String(), |
|
234 | 236 | title=_('Custom Header Value'), |
|
235 | 237 | description=_('Custom Header value to be set when calling endpoint.'), |
|
236 | 238 | default='', |
|
237 | 239 | missing='', |
|
238 | 240 | widget=deform.widget.TextInputWidget( |
|
239 | 241 | placeholder='e.g. RcLogin auth=xxxx' |
|
240 | 242 | ), |
|
241 | 243 | ) |
|
242 | 244 | method_type = colander.SchemaNode( |
|
243 | 245 | colander.String(), |
|
244 | 246 | title=_('Call Method'), |
|
245 | 247 | description=_('Select if the Webhook call should be made ' |
|
246 | 248 | 'with POST or GET.'), |
|
247 | 249 | default='post', |
|
248 | 250 | missing='', |
|
249 | 251 | widget=deform.widget.RadioChoiceWidget( |
|
250 | 252 | values=[('get', 'GET'), ('post', 'POST')], |
|
251 | 253 | inline=True |
|
252 | 254 | ), |
|
253 | 255 | ) |
|
254 | 256 | |
|
255 | 257 | |
|
256 | 258 | class WebhookIntegrationType(IntegrationTypeBase): |
|
257 | 259 | key = 'webhook' |
|
258 | 260 | display_name = _('Webhook') |
|
259 | 261 | description = _('Post json events to a Webhook endpoint') |
|
260 | 262 | |
|
261 | 263 | @classmethod |
|
262 | 264 | def icon(cls): |
|
263 | 265 | return '''<?xml version="1.0" encoding="UTF-8" standalone="no"?><svg viewBox="0 0 256 239" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid"><g><path d="M119.540432,100.502743 C108.930124,118.338815 98.7646301,135.611455 88.3876025,152.753617 C85.7226696,157.154315 84.4040417,160.738531 86.5332204,166.333309 C92.4107024,181.787152 84.1193605,196.825836 68.5350381,200.908244 C53.8383677,204.759349 39.5192953,195.099955 36.6032893,179.365384 C34.0194114,165.437749 44.8274148,151.78491 60.1824106,149.608284 C61.4694072,149.424428 62.7821041,149.402681 64.944891,149.240571 C72.469175,136.623655 80.1773157,123.700312 88.3025935,110.073173 C73.611854,95.4654658 64.8677898,78.3885437 66.803227,57.2292132 C68.1712787,42.2715849 74.0527146,29.3462646 84.8033863,18.7517722 C105.393354,-1.53572199 136.805164,-4.82141828 161.048542,10.7510424 C184.333097,25.7086706 194.996783,54.8450075 185.906752,79.7822957 C179.052655,77.9239597 172.151111,76.049808 164.563565,73.9917997 C167.418285,60.1274266 165.306899,47.6765751 155.95591,37.0109123 C149.777932,29.9690049 141.850349,26.2780332 132.835442,24.9178894 C114.764113,22.1877169 97.0209573,33.7983633 91.7563309,51.5355878 C85.7800012,71.6669027 94.8245623,88.1111998 119.540432,100.502743 L119.540432,100.502743 Z" fill="#C73A63"></path><path d="M149.841194,79.4106285 C157.316054,92.5969067 164.905578,105.982857 172.427885,119.246236 C210.44865,107.483365 239.114472,128.530009 249.398582,151.063322 C261.81978,178.282014 253.328765,210.520191 228.933162,227.312431 C203.893073,244.551464 172.226236,241.605803 150.040866,219.46195 C155.694953,214.729124 161.376716,209.974552 167.44794,204.895759 C189.360489,219.088306 208.525074,218.420096 222.753207,201.614016 C234.885769,187.277151 234.622834,165.900356 222.138374,151.863988 C207.730339,135.66681 188.431321,135.172572 165.103273,150.721309 C155.426087,133.553447 145.58086,116.521995 
136.210101,99.2295848 C133.05093,93.4015266 129.561608,90.0209366 122.440622,88.7873178 C110.547271,86.7253555 102.868785,76.5124151 102.408155,65.0698097 C101.955433,53.7537294 108.621719,43.5249733 119.04224,39.5394355 C129.363912,35.5914599 141.476705,38.7783085 148.419765,47.554004 C154.093621,54.7244134 155.896602,62.7943365 152.911402,71.6372484 C152.081082,74.1025091 151.00562,76.4886916 149.841194,79.4106285 L149.841194,79.4106285 Z" fill="#4B4B4B"></path><path d="M167.706921,187.209935 L121.936499,187.209935 C117.54964,205.253587 108.074103,219.821756 91.7464461,229.085759 C79.0544063,236.285822 65.3738898,238.72736 50.8136292,236.376762 C24.0061432,232.053165 2.08568567,207.920497 0.156179306,180.745298 C-2.02835403,149.962159 19.1309765,122.599149 47.3341915,116.452801 C49.2814904,123.524363 51.2485589,130.663141 53.1958579,137.716911 C27.3195169,150.919004 18.3639187,167.553089 25.6054984,188.352614 C31.9811726,206.657224 50.0900643,216.690262 69.7528413,212.809503 C89.8327554,208.847688 99.9567329,192.160226 98.7211371,165.37844 C117.75722,165.37844 136.809118,165.180745 155.847178,165.475311 C163.280522,165.591951 169.019617,164.820939 174.620326,158.267339 C183.840836,147.48306 200.811003,148.455721 210.741239,158.640984 C220.88894,169.049642 220.402609,185.79839 209.663799,195.768166 C199.302587,205.38802 182.933414,204.874012 173.240413,194.508846 C171.247644,192.37176 169.677943,189.835329 167.706921,187.209935 L167.706921,187.209935 Z" fill="#4A4A4A"></path></g></svg>''' |
|
264 | 266 | |
|
265 | 267 | valid_events = [ |
|
266 | 268 | events.PullRequestCloseEvent, |
|
267 | 269 | events.PullRequestMergeEvent, |
|
268 | 270 | events.PullRequestUpdateEvent, |
|
269 | 271 | events.PullRequestCommentEvent, |
|
270 | 272 | events.PullRequestReviewEvent, |
|
271 | 273 | events.PullRequestCreateEvent, |
|
272 | 274 | events.RepoPushEvent, |
|
273 | 275 | events.RepoCreateEvent, |
|
274 | 276 | ] |
|
275 | 277 | |
|
276 | 278 | def settings_schema(self): |
|
277 | 279 | schema = WebhookSettingsSchema() |
|
278 | 280 | schema.add(colander.SchemaNode( |
|
279 | 281 | colander.Set(), |
|
280 | 282 | widget=deform.widget.CheckboxChoiceWidget( |
|
281 | 283 | values=sorted( |
|
282 | 284 | [(e.name, e.display_name) for e in self.valid_events] |
|
283 | 285 | ) |
|
284 | 286 | ), |
|
285 | 287 | description="Events activated for this integration", |
|
286 | 288 | name='events' |
|
287 | 289 | )) |
|
288 | 290 | return schema |
|
289 | 291 | |
|
290 | 292 | def send_event(self, event): |
|
291 | 293 | log.debug('handling event %s with Webhook integration %s', |
|
292 | 294 | event.name, self) |
|
293 | 295 | |
|
294 | 296 | if event.__class__ not in self.valid_events: |
|
295 | 297 | log.debug('event not valid: %r' % event) |
|
296 | 298 | return |
|
297 | 299 | |
|
298 | 300 | if event.name not in self.settings['events']: |
|
299 | 301 | log.debug('event ignored: %r' % event) |
|
300 | 302 | return |
|
301 | 303 | |
|
302 | 304 | data = event.as_dict() |
|
303 | 305 | template_url = self.settings['url'] |
|
304 | 306 | |
|
305 | 307 | headers = {} |
|
306 | 308 | head_key = self.settings.get('custom_header_key') |
|
307 | 309 | head_val = self.settings.get('custom_header_val') |
|
308 | 310 | if head_key and head_val: |
|
309 | 311 | headers = {head_key: head_val} |
|
310 | 312 | |
|
311 | 313 | handler = WebhookHandler( |
|
312 | 314 | template_url, self.settings['secret_token'], headers) |
|
313 | 315 | |
|
314 | 316 | url_calls = handler(event, data) |
|
315 | 317 | log.debug('webhook: calling following urls: %s', |
|
316 | 318 | [x[0] for x in url_calls]) |
|
317 | 319 | |
|
318 | 320 | run_task(post_to_webhook, url_calls, self.settings) |
|
319 | 321 | |
|
320 | 322 | |
|
321 | 323 | @async_task(ignore_result=True, base=RequestContextTask) |
|
322 | 324 | def post_to_webhook(url_calls, settings): |
|
323 | 325 | """ |
|
324 | 326 | Example data:: |
|
325 | 327 | |
|
326 | 328 | {'actor': {'user_id': 2, 'username': u'admin'}, |
|
327 | 329 | 'actor_ip': u'192.168.157.1', |
|
328 | 330 | 'name': 'repo-push', |
|
329 | 331 | 'push': {'branches': [{'name': u'default', |
|
330 | 332 | 'url': 'http://rc.local:8080/hg-repo/changelog?branch=default'}], |
|
331 | 333 | 'commits': [{'author': u'Marcin Kuzminski <marcin@rhodecode.com>', |
|
332 | 334 | 'branch': u'default', |
|
333 | 335 | 'date': datetime.datetime(2017, 11, 30, 12, 59, 48), |
|
334 | 336 | 'issues': [], |
|
335 | 337 | 'mentions': [], |
|
336 | 338 | 'message': u'commit Thu 30 Nov 2017 13:59:48 CET', |
|
337 | 339 | 'message_html': u'commit Thu 30 Nov 2017 13:59:48 CET', |
|
338 | 340 | 'message_html_title': u'commit Thu 30 Nov 2017 13:59:48 CET', |
|
339 | 341 | 'parents': [{'raw_id': '431b772a5353dad9974b810dd3707d79e3a7f6e0'}], |
|
340 | 342 | 'permalink_url': u'http://rc.local:8080/_7/changeset/a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf', |
|
341 | 343 | 'raw_id': 'a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf', |
|
342 | 344 | 'refs': {'bookmarks': [], 'branches': [u'default'], 'tags': [u'tip']}, |
|
343 | 345 | 'reviewers': [], |
|
344 | 346 | 'revision': 9L, |
|
345 | 347 | 'short_id': 'a815cc738b96', |
|
346 | 348 | 'url': u'http://rc.local:8080/hg-repo/changeset/a815cc738b9651eb5ffbcfb1ce6ccd7c701a5ddf'}], |
|
347 | 349 | 'issues': {}}, |
|
348 | 350 | 'repo': {'extra_fields': '', |
|
349 | 351 | 'permalink_url': u'http://rc.local:8080/_7', |
|
350 | 352 | 'repo_id': 7, |
|
351 | 353 | 'repo_name': u'hg-repo', |
|
352 | 354 | 'repo_type': u'hg', |
|
353 | 355 | 'url': u'http://rc.local:8080/hg-repo'}, |
|
354 | 356 | 'server_url': u'http://rc.local:8080', |
|
355 | 357 | 'utc_timestamp': datetime.datetime(2017, 11, 30, 13, 0, 1, 569276) |
|
356 | 358 | |
|
357 | 359 | """ |
|
358 | 360 | max_retries = 3 |
|
359 | 361 | retries = Retry( |
|
360 | 362 | total=max_retries, |
|
361 | 363 | backoff_factor=0.15, |
|
362 | 364 | status_forcelist=[500, 502, 503, 504]) |
|
363 | 365 | call_headers = { |
|
364 | 366 | 'User-Agent': 'RhodeCode-webhook-caller/{}'.format( |
|
365 | 367 | rhodecode.__version__) |
|
366 | 368 | } # updated below with custom ones, allows override |
|
367 | 369 | |
|
370 | auth = get_auth(settings) | |
|
368 | 371 | for url, token, headers, data in url_calls: |
|
369 | 372 | req_session = requests.Session() |
|
370 | 373 | req_session.mount( # retry max N times |
|
371 | 374 | 'http://', requests.adapters.HTTPAdapter(max_retries=retries)) |
|
372 | 375 | |
|
373 | 376 | method = settings.get('method_type') or 'post' |
|
374 | 377 | call_method = getattr(req_session, method) |
|
375 | 378 | |
|
376 | 379 | headers = headers or {} |
|
377 | 380 | call_headers.update(headers) |
|
378 | auth = get_auth(settings) | |
|
379 | 381 | |
|
380 | 382 | log.debug('calling Webhook with method: %s, and auth:%s', |
|
381 | 383 | call_method, auth) |
|
382 | 384 | if settings.get('log_data'): |
|
383 | 385 | log.debug('calling webhook with data: %s', data) |
|
384 | 386 | resp = call_method(url, json={ |
|
385 | 387 | 'token': token, |
|
386 | 388 | 'event': data |
|
387 | 389 | }, headers=call_headers, auth=auth) |
|
388 | 390 | log.debug('Got Webhook response: %s', resp) |
|
389 | 391 | |
|
390 | 392 | try: |
|
391 | 393 | resp.raise_for_status() # raise exception on a failed request |
|
392 | 394 | except Exception: |
|
393 | 395 | log.error(resp.text) |
|
394 | 396 | raise |
@@ -1,1681 +1,1681 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2012-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | """ |
|
23 | 23 | pull request model for RhodeCode |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | import json |
|
28 | 28 | import logging |
|
29 | 29 | import datetime |
|
30 | 30 | import urllib |
|
31 | 31 | import collections |
|
32 | 32 | |
|
33 | 33 | from pyramid.threadlocal import get_current_request |
|
34 | 34 | |
|
35 | 35 | from rhodecode import events |
|
36 | 36 | from rhodecode.translation import lazy_ugettext#, _ |
|
37 | 37 | from rhodecode.lib import helpers as h, hooks_utils, diffs |
|
38 | 38 | from rhodecode.lib import audit_logger |
|
39 | 39 | from rhodecode.lib.compat import OrderedDict |
|
40 | 40 | from rhodecode.lib.hooks_daemon import prepare_callback_daemon |
|
41 | 41 | from rhodecode.lib.markup_renderer import ( |
|
42 | 42 | DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer) |
|
43 | 43 | from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe |
|
44 | 44 | from rhodecode.lib.vcs.backends.base import ( |
|
45 | 45 | Reference, MergeResponse, MergeFailureReason, UpdateFailureReason) |
|
46 | 46 | from rhodecode.lib.vcs.conf import settings as vcs_settings |
|
47 | 47 | from rhodecode.lib.vcs.exceptions import ( |
|
48 | 48 | CommitDoesNotExistError, EmptyRepositoryError) |
|
49 | 49 | from rhodecode.model import BaseModel |
|
50 | 50 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
51 | 51 | from rhodecode.model.comment import CommentsModel |
|
52 | 52 | from rhodecode.model.db import ( |
|
53 | 53 | or_, PullRequest, PullRequestReviewers, ChangesetStatus, |
|
54 | 54 | PullRequestVersion, ChangesetComment, Repository, RepoReviewRule) |
|
55 | 55 | from rhodecode.model.meta import Session |
|
56 | 56 | from rhodecode.model.notification import NotificationModel, \ |
|
57 | 57 | EmailNotificationModel |
|
58 | 58 | from rhodecode.model.scm import ScmModel |
|
59 | 59 | from rhodecode.model.settings import VcsSettingsModel |
|
60 | 60 | |
|
61 | 61 | |
|
62 | 62 | log = logging.getLogger(__name__) |
|
63 | 63 | |
|
64 | 64 | |
|
65 | 65 | # Data structure to hold the response data when updating commits during a pull |
|
66 | 66 | # request update. |
|
67 | 67 | UpdateResponse = collections.namedtuple('UpdateResponse', [ |
|
68 | 68 | 'executed', 'reason', 'new', 'old', 'changes', |
|
69 | 69 | 'source_changed', 'target_changed']) |
|
70 | 70 | |
|
71 | 71 | |
|
72 | 72 | class PullRequestModel(BaseModel): |
|
73 | 73 | |
|
74 | 74 | cls = PullRequest |
|
75 | 75 | |
|
76 | 76 | DIFF_CONTEXT = 3 |
|
77 | 77 | |
|
78 | 78 | MERGE_STATUS_MESSAGES = { |
|
79 | 79 | MergeFailureReason.NONE: lazy_ugettext( |
|
80 | 80 | 'This pull request can be automatically merged.'), |
|
81 | 81 | MergeFailureReason.UNKNOWN: lazy_ugettext( |
|
82 | 82 | 'This pull request cannot be merged because of an unhandled' |
|
83 | 83 | ' exception.'), |
|
84 | 84 | MergeFailureReason.MERGE_FAILED: lazy_ugettext( |
|
85 | 85 | 'This pull request cannot be merged because of merge conflicts.'), |
|
86 | 86 | MergeFailureReason.PUSH_FAILED: lazy_ugettext( |
|
87 | 87 | 'This pull request could not be merged because push to target' |
|
88 | 88 | ' failed.'), |
|
89 | 89 | MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext( |
|
90 | 90 | 'This pull request cannot be merged because the target is not a' |
|
91 | 91 | ' head.'), |
|
92 | 92 | MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext( |
|
93 | 93 | 'This pull request cannot be merged because the source contains' |
|
94 | 94 | ' more branches than the target.'), |
|
95 | 95 | MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext( |
|
96 | 96 | 'This pull request cannot be merged because the target has' |
|
97 | 97 | ' multiple heads.'), |
|
98 | 98 | MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext( |
|
99 | 99 | 'This pull request cannot be merged because the target repository' |
|
100 | 100 | ' is locked.'), |
|
101 | 101 | MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext( |
|
102 | 102 | 'This pull request cannot be merged because the target or the ' |
|
103 | 103 | 'source reference is missing.'), |
|
104 | 104 | MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext( |
|
105 | 105 | 'This pull request cannot be merged because the target ' |
|
106 | 106 | 'reference is missing.'), |
|
107 | 107 | MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext( |
|
108 | 108 | 'This pull request cannot be merged because the source ' |
|
109 | 109 | 'reference is missing.'), |
|
110 | 110 | MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext( |
|
111 | 111 | 'This pull request cannot be merged because of conflicts related ' |
|
112 | 112 | 'to sub repositories.'), |
|
113 | 113 | } |
|
114 | 114 | |
|
115 | 115 | UPDATE_STATUS_MESSAGES = { |
|
116 | 116 | UpdateFailureReason.NONE: lazy_ugettext( |
|
117 | 117 | 'Pull request update successful.'), |
|
118 | 118 | UpdateFailureReason.UNKNOWN: lazy_ugettext( |
|
119 | 119 | 'Pull request update failed because of an unknown error.'), |
|
120 | 120 | UpdateFailureReason.NO_CHANGE: lazy_ugettext( |
|
121 | 121 | 'No update needed because the source and target have not changed.'), |
|
122 | 122 | UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext( |
|
123 | 123 | 'Pull request cannot be updated because the reference type is ' |
|
124 | 124 | 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'), |
|
125 | 125 | UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext( |
|
126 | 126 | 'This pull request cannot be updated because the target ' |
|
127 | 127 | 'reference is missing.'), |
|
128 | 128 | UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext( |
|
129 | 129 | 'This pull request cannot be updated because the source ' |
|
130 | 130 | 'reference is missing.'), |
|
131 | 131 | } |
|
132 | 132 | |
|
133 | 133 | def __get_pull_request(self, pull_request): |
|
134 | 134 | return self._get_instance(( |
|
135 | 135 | PullRequest, PullRequestVersion), pull_request) |
|
136 | 136 | |
|
137 | 137 | def _check_perms(self, perms, pull_request, user, api=False): |
|
138 | 138 | if not api: |
|
139 | 139 | return h.HasRepoPermissionAny(*perms)( |
|
140 | 140 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
141 | 141 | else: |
|
142 | 142 | return h.HasRepoPermissionAnyApi(*perms)( |
|
143 | 143 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
144 | 144 | |
|
145 | 145 | def check_user_read(self, pull_request, user, api=False): |
|
146 | 146 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
147 | 147 | return self._check_perms(_perms, pull_request, user, api) |
|
148 | 148 | |
|
149 | 149 | def check_user_merge(self, pull_request, user, api=False): |
|
150 | 150 | _perms = ('repository.admin', 'repository.write', 'hg.admin',) |
|
151 | 151 | return self._check_perms(_perms, pull_request, user, api) |
|
152 | 152 | |
|
153 | 153 | def check_user_update(self, pull_request, user, api=False): |
|
154 | 154 | owner = user.user_id == pull_request.user_id |
|
155 | 155 | return self.check_user_merge(pull_request, user, api) or owner |
|
156 | 156 | |
|
157 | 157 | def check_user_delete(self, pull_request, user): |
|
158 | 158 | owner = user.user_id == pull_request.user_id |
|
159 | 159 | _perms = ('repository.admin',) |
|
160 | 160 | return self._check_perms(_perms, pull_request, user) or owner |
|
161 | 161 | |
|
162 | 162 | def check_user_change_status(self, pull_request, user, api=False): |
|
163 | 163 | reviewer = user.user_id in [x.user_id for x in |
|
164 | 164 | pull_request.reviewers] |
|
165 | 165 | return self.check_user_update(pull_request, user, api) or reviewer |
|
166 | 166 | |
|
167 | 167 | def check_user_comment(self, pull_request, user): |
|
168 | 168 | owner = user.user_id == pull_request.user_id |
|
169 | 169 | return self.check_user_read(pull_request, user) or owner |
|
170 | 170 | |
|
171 | 171 | def get(self, pull_request): |
|
172 | 172 | return self.__get_pull_request(pull_request) |
|
173 | 173 | |
|
174 | 174 | def _prepare_get_all_query(self, repo_name, source=False, statuses=None, |
|
175 | 175 | opened_by=None, order_by=None, |
|
176 | 176 | order_dir='desc'): |
|
177 | 177 | repo = None |
|
178 | 178 | if repo_name: |
|
179 | 179 | repo = self._get_repo(repo_name) |
|
180 | 180 | |
|
181 | 181 | q = PullRequest.query() |
|
182 | 182 | |
|
183 | 183 | # source or target |
|
184 | 184 | if repo and source: |
|
185 | 185 | q = q.filter(PullRequest.source_repo == repo) |
|
186 | 186 | elif repo: |
|
187 | 187 | q = q.filter(PullRequest.target_repo == repo) |
|
188 | 188 | |
|
189 | 189 | # closed,opened |
|
190 | 190 | if statuses: |
|
191 | 191 | q = q.filter(PullRequest.status.in_(statuses)) |
|
192 | 192 | |
|
193 | 193 | # opened by filter |
|
194 | 194 | if opened_by: |
|
195 | 195 | q = q.filter(PullRequest.user_id.in_(opened_by)) |
|
196 | 196 | |
|
197 | 197 | if order_by: |
|
198 | 198 | order_map = { |
|
199 | 199 | 'name_raw': PullRequest.pull_request_id, |
|
200 | 200 | 'title': PullRequest.title, |
|
201 | 201 | 'updated_on_raw': PullRequest.updated_on, |
|
202 | 202 | 'target_repo': PullRequest.target_repo_id |
|
203 | 203 | } |
|
204 | 204 | if order_dir == 'asc': |
|
205 | 205 | q = q.order_by(order_map[order_by].asc()) |
|
206 | 206 | else: |
|
207 | 207 | q = q.order_by(order_map[order_by].desc()) |
|
208 | 208 | |
|
209 | 209 | return q |
|
210 | 210 | |
|
211 | 211 | def count_all(self, repo_name, source=False, statuses=None, |
|
212 | 212 | opened_by=None): |
|
213 | 213 | """ |
|
214 | 214 | Count the number of pull requests for a specific repository. |
|
215 | 215 | |
|
216 | 216 | :param repo_name: target or source repo |
|
217 | 217 | :param source: boolean flag to specify if repo_name refers to source |
|
218 | 218 | :param statuses: list of pull request statuses |
|
219 | 219 | :param opened_by: author user of the pull request |
|
220 | 220 | :returns: int number of pull requests |
|
221 | 221 | """ |
|
222 | 222 | q = self._prepare_get_all_query( |
|
223 | 223 | repo_name, source=source, statuses=statuses, opened_by=opened_by) |
|
224 | 224 | |
|
225 | 225 | return q.count() |
|
226 | 226 | |
|
227 | 227 | def get_all(self, repo_name, source=False, statuses=None, opened_by=None, |
|
228 | 228 | offset=0, length=None, order_by=None, order_dir='desc'): |
|
229 | 229 | """ |
|
230 | 230 | Get all pull requests for a specific repository. |
|
231 | 231 | |
|
232 | 232 | :param repo_name: target or source repo |
|
233 | 233 | :param source: boolean flag to specify if repo_name refers to source |
|
234 | 234 | :param statuses: list of pull request statuses |
|
235 | 235 | :param opened_by: author user of the pull request |
|
236 | 236 | :param offset: pagination offset |
|
237 | 237 | :param length: length of returned list |
|
238 | 238 | :param order_by: order of the returned list |
|
239 | 239 | :param order_dir: 'asc' or 'desc' ordering direction |
|
240 | 240 | :returns: list of pull requests |
|
241 | 241 | """ |
|
242 | 242 | q = self._prepare_get_all_query( |
|
243 | 243 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
244 | 244 | order_by=order_by, order_dir=order_dir) |
|
245 | 245 | |
|
246 | 246 | if length: |
|
247 | 247 | pull_requests = q.limit(length).offset(offset).all() |
|
248 | 248 | else: |
|
249 | 249 | pull_requests = q.all() |
|
250 | 250 | |
|
251 | 251 | return pull_requests |
|
252 | 252 | |
|
253 | 253 | def count_awaiting_review(self, repo_name, source=False, statuses=None, |
|
254 | 254 | opened_by=None): |
|
255 | 255 | """ |
|
256 | 256 | Count the number of pull requests for a specific repository that are |
|
257 | 257 | awaiting review. |
|
258 | 258 | |
|
259 | 259 | :param repo_name: target or source repo |
|
260 | 260 | :param source: boolean flag to specify if repo_name refers to source |
|
261 | 261 | :param statuses: list of pull request statuses |
|
262 | 262 | :param opened_by: author user of the pull request |
|
263 | 263 | :returns: int number of pull requests |
|
264 | 264 | """ |
|
265 | 265 | pull_requests = self.get_awaiting_review( |
|
266 | 266 | repo_name, source=source, statuses=statuses, opened_by=opened_by) |
|
267 | 267 | |
|
268 | 268 | return len(pull_requests) |
|
269 | 269 | |
|
270 | 270 | def get_awaiting_review(self, repo_name, source=False, statuses=None, |
|
271 | 271 | opened_by=None, offset=0, length=None, |
|
272 | 272 | order_by=None, order_dir='desc'): |
|
273 | 273 | """ |
|
274 | 274 | Get all pull requests for a specific repository that are awaiting |
|
275 | 275 | review. |
|
276 | 276 | |
|
277 | 277 | :param repo_name: target or source repo |
|
278 | 278 | :param source: boolean flag to specify if repo_name refers to source |
|
279 | 279 | :param statuses: list of pull request statuses |
|
280 | 280 | :param opened_by: author user of the pull request |
|
281 | 281 | :param offset: pagination offset |
|
282 | 282 | :param length: length of returned list |
|
283 | 283 | :param order_by: order of the returned list |
|
284 | 284 | :param order_dir: 'asc' or 'desc' ordering direction |
|
285 | 285 | :returns: list of pull requests |
|
286 | 286 | """ |
|
287 | 287 | pull_requests = self.get_all( |
|
288 | 288 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
289 | 289 | order_by=order_by, order_dir=order_dir) |
|
290 | 290 | |
|
291 | 291 | _filtered_pull_requests = [] |
|
292 | 292 | for pr in pull_requests: |
|
293 | 293 | status = pr.calculated_review_status() |
|
294 | 294 | if status in [ChangesetStatus.STATUS_NOT_REVIEWED, |
|
295 | 295 | ChangesetStatus.STATUS_UNDER_REVIEW]: |
|
296 | 296 | _filtered_pull_requests.append(pr) |
|
297 | 297 | if length: |
|
298 | 298 | return _filtered_pull_requests[offset:offset+length] |
|
299 | 299 | else: |
|
300 | 300 | return _filtered_pull_requests |
|
301 | 301 | |
|
302 | 302 | def count_awaiting_my_review(self, repo_name, source=False, statuses=None, |
|
303 | 303 | opened_by=None, user_id=None): |
|
304 | 304 | """ |
|
305 | 305 | Count the number of pull requests for a specific repository that are |
|
306 | 306 | awaiting review from a specific user. |
|
307 | 307 | |
|
308 | 308 | :param repo_name: target or source repo |
|
309 | 309 | :param source: boolean flag to specify if repo_name refers to source |
|
310 | 310 | :param statuses: list of pull request statuses |
|
311 | 311 | :param opened_by: author user of the pull request |
|
312 | 312 | :param user_id: reviewer user of the pull request |
|
313 | 313 | :returns: int number of pull requests |
|
314 | 314 | """ |
|
315 | 315 | pull_requests = self.get_awaiting_my_review( |
|
316 | 316 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
317 | 317 | user_id=user_id) |
|
318 | 318 | |
|
319 | 319 | return len(pull_requests) |
|
320 | 320 | |
|
321 | 321 | def get_awaiting_my_review(self, repo_name, source=False, statuses=None, |
|
322 | 322 | opened_by=None, user_id=None, offset=0, |
|
323 | 323 | length=None, order_by=None, order_dir='desc'): |
|
324 | 324 | """ |
|
325 | 325 | Get all pull requests for a specific repository that are awaiting |
|
326 | 326 | review from a specific user. |
|
327 | 327 | |
|
328 | 328 | :param repo_name: target or source repo |
|
329 | 329 | :param source: boolean flag to specify if repo_name refers to source |
|
330 | 330 | :param statuses: list of pull request statuses |
|
331 | 331 | :param opened_by: author user of the pull request |
|
332 | 332 | :param user_id: reviewer user of the pull request |
|
333 | 333 | :param offset: pagination offset |
|
334 | 334 | :param length: length of returned list |
|
335 | 335 | :param order_by: order of the returned list |
|
336 | 336 | :param order_dir: 'asc' or 'desc' ordering direction |
|
337 | 337 | :returns: list of pull requests |
|
338 | 338 | """ |
|
339 | 339 | pull_requests = self.get_all( |
|
340 | 340 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
341 | 341 | order_by=order_by, order_dir=order_dir) |
|
342 | 342 | |
|
343 | 343 | _my = PullRequestModel().get_not_reviewed(user_id) |
|
344 | 344 | my_participation = [] |
|
345 | 345 | for pr in pull_requests: |
|
346 | 346 | if pr in _my: |
|
347 | 347 | my_participation.append(pr) |
|
348 | 348 | _filtered_pull_requests = my_participation |
|
349 | 349 | if length: |
|
350 | 350 | return _filtered_pull_requests[offset:offset+length] |
|
351 | 351 | else: |
|
352 | 352 | return _filtered_pull_requests |
|
353 | 353 | |
|
354 | 354 | def get_not_reviewed(self, user_id): |
|
355 | 355 | return [ |
|
356 | 356 | x.pull_request for x in PullRequestReviewers.query().filter( |
|
357 | 357 | PullRequestReviewers.user_id == user_id).all() |
|
358 | 358 | ] |
|
359 | 359 | |
|
360 | 360 | def _prepare_participating_query(self, user_id=None, statuses=None, |
|
361 | 361 | order_by=None, order_dir='desc'): |
|
362 | 362 | q = PullRequest.query() |
|
363 | 363 | if user_id: |
|
364 | 364 | reviewers_subquery = Session().query( |
|
365 | 365 | PullRequestReviewers.pull_request_id).filter( |
|
366 | 366 | PullRequestReviewers.user_id == user_id).subquery() |
|
367 | 367 | user_filter = or_( |
|
368 | 368 | PullRequest.user_id == user_id, |
|
369 | 369 | PullRequest.pull_request_id.in_(reviewers_subquery) |
|
370 | 370 | ) |
|
371 | 371 | q = PullRequest.query().filter(user_filter) |
|
372 | 372 | |
|
373 | 373 | # closed,opened |
|
374 | 374 | if statuses: |
|
375 | 375 | q = q.filter(PullRequest.status.in_(statuses)) |
|
376 | 376 | |
|
377 | 377 | if order_by: |
|
378 | 378 | order_map = { |
|
379 | 379 | 'name_raw': PullRequest.pull_request_id, |
|
380 | 380 | 'title': PullRequest.title, |
|
381 | 381 | 'updated_on_raw': PullRequest.updated_on, |
|
382 | 382 | 'target_repo': PullRequest.target_repo_id |
|
383 | 383 | } |
|
384 | 384 | if order_dir == 'asc': |
|
385 | 385 | q = q.order_by(order_map[order_by].asc()) |
|
386 | 386 | else: |
|
387 | 387 | q = q.order_by(order_map[order_by].desc()) |
|
388 | 388 | |
|
389 | 389 | return q |
|
390 | 390 | |
|
391 | 391 | def count_im_participating_in(self, user_id=None, statuses=None): |
|
392 | 392 | q = self._prepare_participating_query(user_id, statuses=statuses) |
|
393 | 393 | return q.count() |
|
394 | 394 | |
|
395 | 395 | def get_im_participating_in( |
|
396 | 396 | self, user_id=None, statuses=None, offset=0, |
|
397 | 397 | length=None, order_by=None, order_dir='desc'): |
|
398 | 398 | """ |
|
399 | 399 | Get all Pull requests that i'm participating in, or i have opened |
|
400 | 400 | """ |
|
401 | 401 | |
|
402 | 402 | q = self._prepare_participating_query( |
|
403 | 403 | user_id, statuses=statuses, order_by=order_by, |
|
404 | 404 | order_dir=order_dir) |
|
405 | 405 | |
|
406 | 406 | if length: |
|
407 | 407 | pull_requests = q.limit(length).offset(offset).all() |
|
408 | 408 | else: |
|
409 | 409 | pull_requests = q.all() |
|
410 | 410 | |
|
411 | 411 | return pull_requests |
|
412 | 412 | |
|
413 | 413 | def get_versions(self, pull_request): |
|
414 | 414 | """ |
|
415 | 415 | returns version of pull request sorted by ID descending |
|
416 | 416 | """ |
|
417 | 417 | return PullRequestVersion.query()\ |
|
418 | 418 | .filter(PullRequestVersion.pull_request == pull_request)\ |
|
419 | 419 | .order_by(PullRequestVersion.pull_request_version_id.asc())\ |
|
420 | 420 | .all() |
|
421 | 421 | |
|
422 | 422 | def get_pr_version(self, pull_request_id, version=None): |
|
423 | 423 | at_version = None |
|
424 | 424 | |
|
425 | 425 | if version and version == 'latest': |
|
426 | 426 | pull_request_ver = PullRequest.get(pull_request_id) |
|
427 | 427 | pull_request_obj = pull_request_ver |
|
428 | 428 | _org_pull_request_obj = pull_request_obj |
|
429 | 429 | at_version = 'latest' |
|
430 | 430 | elif version: |
|
431 | 431 | pull_request_ver = PullRequestVersion.get_or_404(version) |
|
432 | 432 | pull_request_obj = pull_request_ver |
|
433 | 433 | _org_pull_request_obj = pull_request_ver.pull_request |
|
434 | 434 | at_version = pull_request_ver.pull_request_version_id |
|
435 | 435 | else: |
|
436 | 436 | _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404( |
|
437 | 437 | pull_request_id) |
|
438 | 438 | |
|
439 | 439 | pull_request_display_obj = PullRequest.get_pr_display_object( |
|
440 | 440 | pull_request_obj, _org_pull_request_obj) |
|
441 | 441 | |
|
442 | 442 | return _org_pull_request_obj, pull_request_obj, \ |
|
443 | 443 | pull_request_display_obj, at_version |
|
444 | 444 | |
|
445 | 445 | def create(self, created_by, source_repo, source_ref, target_repo, |
|
446 | 446 | target_ref, revisions, reviewers, title, description=None, |
|
447 | 447 | reviewer_data=None, translator=None): |
|
448 | 448 | translator = translator or get_current_request().translate |
|
449 | 449 | |
|
450 | 450 | created_by_user = self._get_user(created_by) |
|
451 | 451 | source_repo = self._get_repo(source_repo) |
|
452 | 452 | target_repo = self._get_repo(target_repo) |
|
453 | 453 | |
|
454 | 454 | pull_request = PullRequest() |
|
455 | 455 | pull_request.source_repo = source_repo |
|
456 | 456 | pull_request.source_ref = source_ref |
|
457 | 457 | pull_request.target_repo = target_repo |
|
458 | 458 | pull_request.target_ref = target_ref |
|
459 | 459 | pull_request.revisions = revisions |
|
460 | 460 | pull_request.title = title |
|
461 | 461 | pull_request.description = description |
|
462 | 462 | pull_request.author = created_by_user |
|
463 | 463 | pull_request.reviewer_data = reviewer_data |
|
464 | 464 | |
|
465 | 465 | Session().add(pull_request) |
|
466 | 466 | Session().flush() |
|
467 | 467 | |
|
468 | 468 | reviewer_ids = set() |
|
469 | 469 | # members / reviewers |
|
470 | 470 | for reviewer_object in reviewers: |
|
471 | 471 | user_id, reasons, mandatory, rules = reviewer_object |
|
472 | 472 | user = self._get_user(user_id) |
|
473 | 473 | |
|
474 | 474 | # skip duplicates |
|
475 | 475 | if user.user_id in reviewer_ids: |
|
476 | 476 | continue |
|
477 | 477 | |
|
478 | 478 | reviewer_ids.add(user.user_id) |
|
479 | 479 | |
|
480 | 480 | reviewer = PullRequestReviewers() |
|
481 | 481 | reviewer.user = user |
|
482 | 482 | reviewer.pull_request = pull_request |
|
483 | 483 | reviewer.reasons = reasons |
|
484 | 484 | reviewer.mandatory = mandatory |
|
485 | 485 | |
|
486 | 486 | # NOTE(marcink): pick only first rule for now |
|
487 | 487 | rule_id = rules[0] if rules else None |
|
488 | 488 | rule = RepoReviewRule.get(rule_id) if rule_id else None |
|
489 | 489 | if rule: |
|
490 | 490 | review_group = rule.user_group_vote_rule() |
|
491 | 491 | if review_group: |
|
492 | 492 | # NOTE(marcink): |
|
493 | 493 | # again, can be that user is member of more, |
|
494 | 494 | # but we pick the first same, as default reviewers algo |
|
495 | 495 | review_group = review_group[0] |
|
496 | 496 | |
|
497 | 497 | rule_data = { |
|
498 | 498 | 'rule_name': |
|
499 | 499 | rule.review_rule_name, |
|
500 | 500 | 'rule_user_group_entry_id': |
|
501 | 501 | review_group.repo_review_rule_users_group_id, |
|
502 | 502 | 'rule_user_group_name': |
|
503 | 503 | review_group.users_group.users_group_name, |
|
504 | 504 | 'rule_user_group_members': |
|
505 | 505 | [x.user.username for x in review_group.users_group.members], |
|
506 | 506 | } |
|
507 | 507 | # e.g {'vote_rule': -1, 'mandatory': True} |
|
508 | 508 | rule_data.update(review_group.rule_data()) |
|
509 | 509 | |
|
510 | 510 | reviewer.rule_data = rule_data |
|
511 | 511 | |
|
512 | 512 | Session().add(reviewer) |
|
513 | 513 | |
|
514 | 514 | # Set approval status to "Under Review" for all commits which are |
|
515 | 515 | # part of this pull request. |
|
516 | 516 | ChangesetStatusModel().set_status( |
|
517 | 517 | repo=target_repo, |
|
518 | 518 | status=ChangesetStatus.STATUS_UNDER_REVIEW, |
|
519 | 519 | user=created_by_user, |
|
520 | 520 | pull_request=pull_request |
|
521 | 521 | ) |
|
522 | 522 | |
|
523 | 523 | MergeCheck.validate( |
|
524 | 524 | pull_request, user=created_by_user, translator=translator) |
|
525 | 525 | |
|
526 | 526 | self.notify_reviewers(pull_request, reviewer_ids) |
|
527 | 527 | self._trigger_pull_request_hook( |
|
528 | 528 | pull_request, created_by_user, 'create') |
|
529 | 529 | |
|
530 | 530 | creation_data = pull_request.get_api_data(with_merge_state=False) |
|
531 | 531 | self._log_audit_action( |
|
532 | 532 | 'repo.pull_request.create', {'data': creation_data}, |
|
533 | 533 | created_by_user, pull_request) |
|
534 | 534 | |
|
535 | 535 | return pull_request |
|
536 | 536 | |
|
537 | 537 | def _trigger_pull_request_hook(self, pull_request, user, action): |
|
538 | 538 | pull_request = self.__get_pull_request(pull_request) |
|
539 | 539 | target_scm = pull_request.target_repo.scm_instance() |
|
540 | 540 | if action == 'create': |
|
541 | 541 | trigger_hook = hooks_utils.trigger_log_create_pull_request_hook |
|
542 | 542 | elif action == 'merge': |
|
543 | 543 | trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook |
|
544 | 544 | elif action == 'close': |
|
545 | 545 | trigger_hook = hooks_utils.trigger_log_close_pull_request_hook |
|
546 | 546 | elif action == 'review_status_change': |
|
547 | 547 | trigger_hook = hooks_utils.trigger_log_review_pull_request_hook |
|
548 | 548 | elif action == 'update': |
|
549 | 549 | trigger_hook = hooks_utils.trigger_log_update_pull_request_hook |
|
550 | 550 | else: |
|
551 | 551 | return |
|
552 | 552 | |
|
553 | 553 | trigger_hook( |
|
554 | 554 | username=user.username, |
|
555 | 555 | repo_name=pull_request.target_repo.repo_name, |
|
556 | 556 | repo_alias=target_scm.alias, |
|
557 | 557 | pull_request=pull_request) |
|
558 | 558 | |
|
559 | 559 | def _get_commit_ids(self, pull_request): |
|
560 | 560 | """ |
|
561 | 561 | Return the commit ids of the merged pull request. |
|
562 | 562 | |
|
563 | 563 | This method is not dealing correctly yet with the lack of autoupdates |
|
564 | 564 | nor with the implicit target updates. |
|
565 | 565 | For example: if a commit in the source repo is already in the target it |
|
566 | 566 | will be reported anyways. |
|
567 | 567 | """ |
|
568 | 568 | merge_rev = pull_request.merge_rev |
|
569 | 569 | if merge_rev is None: |
|
570 | 570 | raise ValueError('This pull request was not merged yet') |
|
571 | 571 | |
|
572 | 572 | commit_ids = list(pull_request.revisions) |
|
573 | 573 | if merge_rev not in commit_ids: |
|
574 | 574 | commit_ids.append(merge_rev) |
|
575 | 575 | |
|
576 | 576 | return commit_ids |
|
577 | 577 | |
|
578 | 578 | def merge(self, pull_request, user, extras): |
|
579 | 579 | log.debug("Merging pull request %s", pull_request.pull_request_id) |
|
580 | 580 | merge_state = self._merge_pull_request(pull_request, user, extras) |
|
581 | 581 | if merge_state.executed: |
|
582 | 582 | log.debug( |
|
583 | 583 | "Merge was successful, updating the pull request comments.") |
|
584 | 584 | self._comment_and_close_pr(pull_request, user, merge_state) |
|
585 | 585 | |
|
586 | 586 | self._log_audit_action( |
|
587 | 587 | 'repo.pull_request.merge', |
|
588 | 588 | {'merge_state': merge_state.__dict__}, |
|
589 | 589 | user, pull_request) |
|
590 | 590 | |
|
591 | 591 | else: |
|
592 | 592 | log.warn("Merge failed, not updating the pull request.") |
|
593 | 593 | return merge_state |
|
594 | 594 | |
|
595 | 595 | def _merge_pull_request(self, pull_request, user, extras, merge_msg=None): |
|
596 | 596 | target_vcs = pull_request.target_repo.scm_instance() |
|
597 | 597 | source_vcs = pull_request.source_repo.scm_instance() |
|
598 | 598 | target_ref = self._refresh_reference( |
|
599 | 599 | pull_request.target_ref_parts, target_vcs) |
|
600 | 600 | |
|
601 | 601 | message = merge_msg or ( |
|
602 | 602 | 'Merge pull request #%(pr_id)s from ' |
|
603 | 603 | '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % { |
|
604 | 604 | 'pr_id': pull_request.pull_request_id, |
|
605 | 605 | 'source_repo': source_vcs.name, |
|
606 | 606 | 'source_ref_name': pull_request.source_ref_parts.name, |
|
607 | 607 | 'pr_title': pull_request.title |
|
608 | 608 | } |
|
609 | 609 | |
|
610 | 610 | workspace_id = self._workspace_id(pull_request) |
|
611 | 611 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
612 | 612 | close_branch = self._close_branch_before_merging(pull_request) |
|
613 | 613 | |
|
614 | 614 | callback_daemon, extras = prepare_callback_daemon( |
|
615 | 615 | extras, protocol=vcs_settings.HOOKS_PROTOCOL, |
|
616 | 616 | use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS) |
|
617 | 617 | |
|
618 | 618 | with callback_daemon: |
|
619 | 619 | # TODO: johbo: Implement a clean way to run a config_override |
|
620 | 620 | # for a single call. |
|
621 | 621 | target_vcs.config.set( |
|
622 | 622 | 'rhodecode', 'RC_SCM_DATA', json.dumps(extras)) |
|
623 | 623 | merge_state = target_vcs.merge( |
|
624 | 624 | target_ref, source_vcs, pull_request.source_ref_parts, |
|
625 | 625 | workspace_id, user_name=user.username, |
|
626 | 626 | user_email=user.email, message=message, use_rebase=use_rebase, |
|
627 | 627 | close_branch=close_branch) |
|
628 | 628 | return merge_state |
|
629 | 629 | |
|
630 | 630 | def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None): |
|
631 | 631 | pull_request.merge_rev = merge_state.merge_ref.commit_id |
|
632 | 632 | pull_request.updated_on = datetime.datetime.now() |
|
633 | 633 | close_msg = close_msg or 'Pull request merged and closed' |
|
634 | 634 | |
|
635 | 635 | CommentsModel().create( |
|
636 | 636 | text=safe_unicode(close_msg), |
|
637 | 637 | repo=pull_request.target_repo.repo_id, |
|
638 | 638 | user=user.user_id, |
|
639 | 639 | pull_request=pull_request.pull_request_id, |
|
640 | 640 | f_path=None, |
|
641 | 641 | line_no=None, |
|
642 | 642 | closing_pr=True |
|
643 | 643 | ) |
|
644 | 644 | |
|
645 | 645 | Session().add(pull_request) |
|
646 | 646 | Session().flush() |
|
647 | 647 | # TODO: paris: replace invalidation with less radical solution |
|
648 | 648 | ScmModel().mark_for_invalidation( |
|
649 | 649 | pull_request.target_repo.repo_name) |
|
650 | 650 | self._trigger_pull_request_hook(pull_request, user, 'merge') |
|
651 | 651 | |
|
652 | 652 | def has_valid_update_type(self, pull_request): |
|
653 | 653 | source_ref_type = pull_request.source_ref_parts.type |
|
654 | 654 | return source_ref_type in ['book', 'branch', 'tag'] |
|
655 | 655 | |
|
656 | 656 | def update_commits(self, pull_request): |
|
657 | 657 | """ |
|
658 | 658 | Get the updated list of commits for the pull request |
|
659 | 659 | and return the new pull request version and the list |
|
660 | 660 | of commits processed by this update action |
|
661 | 661 | """ |
|
662 | 662 | pull_request = self.__get_pull_request(pull_request) |
|
663 | 663 | source_ref_type = pull_request.source_ref_parts.type |
|
664 | 664 | source_ref_name = pull_request.source_ref_parts.name |
|
665 | 665 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
666 | 666 | |
|
667 | 667 | target_ref_type = pull_request.target_ref_parts.type |
|
668 | 668 | target_ref_name = pull_request.target_ref_parts.name |
|
669 | 669 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
670 | 670 | |
|
671 | 671 | if not self.has_valid_update_type(pull_request): |
|
672 | 672 | log.debug( |
|
673 | 673 | "Skipping update of pull request %s due to ref type: %s", |
|
674 | 674 | pull_request, source_ref_type) |
|
675 | 675 | return UpdateResponse( |
|
676 | 676 | executed=False, |
|
677 | 677 | reason=UpdateFailureReason.WRONG_REF_TYPE, |
|
678 | 678 | old=pull_request, new=None, changes=None, |
|
679 | 679 | source_changed=False, target_changed=False) |
|
680 | 680 | |
|
681 | 681 | # source repo |
|
682 | 682 | source_repo = pull_request.source_repo.scm_instance() |
|
683 | 683 | try: |
|
684 | 684 | source_commit = source_repo.get_commit(commit_id=source_ref_name) |
|
685 | 685 | except CommitDoesNotExistError: |
|
686 | 686 | return UpdateResponse( |
|
687 | 687 | executed=False, |
|
688 | 688 | reason=UpdateFailureReason.MISSING_SOURCE_REF, |
|
689 | 689 | old=pull_request, new=None, changes=None, |
|
690 | 690 | source_changed=False, target_changed=False) |
|
691 | 691 | |
|
692 | 692 | source_changed = source_ref_id != source_commit.raw_id |
|
693 | 693 | |
|
694 | 694 | # target repo |
|
695 | 695 | target_repo = pull_request.target_repo.scm_instance() |
|
696 | 696 | try: |
|
697 | 697 | target_commit = target_repo.get_commit(commit_id=target_ref_name) |
|
698 | 698 | except CommitDoesNotExistError: |
|
699 | 699 | return UpdateResponse( |
|
700 | 700 | executed=False, |
|
701 | 701 | reason=UpdateFailureReason.MISSING_TARGET_REF, |
|
702 | 702 | old=pull_request, new=None, changes=None, |
|
703 | 703 | source_changed=False, target_changed=False) |
|
704 | 704 | target_changed = target_ref_id != target_commit.raw_id |
|
705 | 705 | |
|
706 | 706 | if not (source_changed or target_changed): |
|
707 | 707 | log.debug("Nothing changed in pull request %s", pull_request) |
|
708 | 708 | return UpdateResponse( |
|
709 | 709 | executed=False, |
|
710 | 710 | reason=UpdateFailureReason.NO_CHANGE, |
|
711 | 711 | old=pull_request, new=None, changes=None, |
|
712 | 712 | source_changed=target_changed, target_changed=source_changed) |
|
713 | 713 | |
|
714 | 714 | change_in_found = 'target repo' if target_changed else 'source repo' |
|
715 | 715 | log.debug('Updating pull request because of change in %s detected', |
|
716 | 716 | change_in_found) |
|
717 | 717 | |
|
718 | 718 | # Finally there is a need for an update, in case of source change |
|
719 | 719 | # we create a new version, else just an update |
|
720 | 720 | if source_changed: |
|
721 | 721 | pull_request_version = self._create_version_from_snapshot(pull_request) |
|
722 | 722 | self._link_comments_to_version(pull_request_version) |
|
723 | 723 | else: |
|
724 | 724 | try: |
|
725 | 725 | ver = pull_request.versions[-1] |
|
726 | 726 | except IndexError: |
|
727 | 727 | ver = None |
|
728 | 728 | |
|
729 | 729 | pull_request.pull_request_version_id = \ |
|
730 | 730 | ver.pull_request_version_id if ver else None |
|
731 | 731 | pull_request_version = pull_request |
|
732 | 732 | |
|
733 | 733 | try: |
|
734 | 734 | if target_ref_type in ('tag', 'branch', 'book'): |
|
735 | 735 | target_commit = target_repo.get_commit(target_ref_name) |
|
736 | 736 | else: |
|
737 | 737 | target_commit = target_repo.get_commit(target_ref_id) |
|
738 | 738 | except CommitDoesNotExistError: |
|
739 | 739 | return UpdateResponse( |
|
740 | 740 | executed=False, |
|
741 | 741 | reason=UpdateFailureReason.MISSING_TARGET_REF, |
|
742 | 742 | old=pull_request, new=None, changes=None, |
|
743 | 743 | source_changed=source_changed, target_changed=target_changed) |
|
744 | 744 | |
|
745 | 745 | # re-compute commit ids |
|
746 | 746 | old_commit_ids = pull_request.revisions |
|
747 | 747 | pre_load = ["author", "branch", "date", "message"] |
|
748 | 748 | commit_ranges = target_repo.compare( |
|
749 | 749 | target_commit.raw_id, source_commit.raw_id, source_repo, merge=True, |
|
750 | 750 | pre_load=pre_load) |
|
751 | 751 | |
|
752 | 752 | ancestor = target_repo.get_common_ancestor( |
|
753 | 753 | target_commit.raw_id, source_commit.raw_id, source_repo) |
|
754 | 754 | |
|
755 | 755 | pull_request.source_ref = '%s:%s:%s' % ( |
|
756 | 756 | source_ref_type, source_ref_name, source_commit.raw_id) |
|
757 | 757 | pull_request.target_ref = '%s:%s:%s' % ( |
|
758 | 758 | target_ref_type, target_ref_name, ancestor) |
|
759 | 759 | |
|
760 | 760 | pull_request.revisions = [ |
|
761 | 761 | commit.raw_id for commit in reversed(commit_ranges)] |
|
762 | 762 | pull_request.updated_on = datetime.datetime.now() |
|
763 | 763 | Session().add(pull_request) |
|
764 | 764 | new_commit_ids = pull_request.revisions |
|
765 | 765 | |
|
766 | 766 | old_diff_data, new_diff_data = self._generate_update_diffs( |
|
767 | 767 | pull_request, pull_request_version) |
|
768 | 768 | |
|
769 | 769 | # calculate commit and file changes |
|
770 | 770 | changes = self._calculate_commit_id_changes( |
|
771 | 771 | old_commit_ids, new_commit_ids) |
|
772 | 772 | file_changes = self._calculate_file_changes( |
|
773 | 773 | old_diff_data, new_diff_data) |
|
774 | 774 | |
|
775 | 775 | # set comments as outdated if DIFFS changed |
|
776 | 776 | CommentsModel().outdate_comments( |
|
777 | 777 | pull_request, old_diff_data=old_diff_data, |
|
778 | 778 | new_diff_data=new_diff_data) |
|
779 | 779 | |
|
780 | 780 | commit_changes = (changes.added or changes.removed) |
|
781 | 781 | file_node_changes = ( |
|
782 | 782 | file_changes.added or file_changes.modified or file_changes.removed) |
|
783 | 783 | pr_has_changes = commit_changes or file_node_changes |
|
784 | 784 | |
|
785 | 785 | # Add an automatic comment to the pull request, in case |
|
786 | 786 | # anything has changed |
|
787 | 787 | if pr_has_changes: |
|
788 | 788 | update_comment = CommentsModel().create( |
|
789 | 789 | text=self._render_update_message(changes, file_changes), |
|
790 | 790 | repo=pull_request.target_repo, |
|
791 | 791 | user=pull_request.author, |
|
792 | 792 | pull_request=pull_request, |
|
793 | 793 | send_email=False, renderer=DEFAULT_COMMENTS_RENDERER) |
|
794 | 794 | |
|
795 | 795 | # Update status to "Under Review" for added commits |
|
796 | 796 | for commit_id in changes.added: |
|
797 | 797 | ChangesetStatusModel().set_status( |
|
798 | 798 | repo=pull_request.source_repo, |
|
799 | 799 | status=ChangesetStatus.STATUS_UNDER_REVIEW, |
|
800 | 800 | comment=update_comment, |
|
801 | 801 | user=pull_request.author, |
|
802 | 802 | pull_request=pull_request, |
|
803 | 803 | revision=commit_id) |
|
804 | 804 | |
|
805 | 805 | log.debug( |
|
806 | 806 | 'Updated pull request %s, added_ids: %s, common_ids: %s, ' |
|
807 | 807 | 'removed_ids: %s', pull_request.pull_request_id, |
|
808 | 808 | changes.added, changes.common, changes.removed) |
|
809 | 809 | log.debug( |
|
810 | 810 | 'Updated pull request with the following file changes: %s', |
|
811 | 811 | file_changes) |
|
812 | 812 | |
|
813 | 813 | log.info( |
|
814 | 814 | "Updated pull request %s from commit %s to commit %s, " |
|
815 | 815 | "stored new version %s of this pull request.", |
|
816 | 816 | pull_request.pull_request_id, source_ref_id, |
|
817 | 817 | pull_request.source_ref_parts.commit_id, |
|
818 | 818 | pull_request_version.pull_request_version_id) |
|
819 | 819 | Session().commit() |
|
820 | 820 | self._trigger_pull_request_hook( |
|
821 | 821 | pull_request, pull_request.author, 'update') |
|
822 | 822 | |
|
823 | 823 | return UpdateResponse( |
|
824 | 824 | executed=True, reason=UpdateFailureReason.NONE, |
|
825 | 825 | old=pull_request, new=pull_request_version, changes=changes, |
|
826 | 826 | source_changed=source_changed, target_changed=target_changed) |
|
827 | 827 | |
|
    def _create_version_from_snapshot(self, pull_request):
        """
        Persist a `PullRequestVersion` that is a frozen copy of the current
        state of `pull_request` (title, refs, revisions, merge state, ...).

        :return: the flushed `PullRequestVersion` instance
        """
        version = PullRequestVersion()
        version.title = pull_request.title
        version.description = pull_request.description
        version.status = pull_request.status
        version.created_on = datetime.datetime.now()
        version.updated_on = pull_request.updated_on
        version.user_id = pull_request.user_id
        version.source_repo = pull_request.source_repo
        version.source_ref = pull_request.source_ref
        version.target_repo = pull_request.target_repo
        version.target_ref = pull_request.target_ref

        # snapshot the cached merge-state fields as well
        version._last_merge_source_rev = pull_request._last_merge_source_rev
        version._last_merge_target_rev = pull_request._last_merge_target_rev
        version.last_merge_status = pull_request.last_merge_status
        version.shadow_merge_ref = pull_request.shadow_merge_ref
        version.merge_rev = pull_request.merge_rev
        version.reviewer_data = pull_request.reviewer_data

        version.revisions = pull_request.revisions
        version.pull_request = pull_request
        Session().add(version)
        # flush so version.pull_request_version_id is available to callers
        Session().flush()

        return version
|
854 | 854 | |
|
    def _generate_update_diffs(self, pull_request, pull_request_version):
        """
        Build the (old, new) pair of prepared `DiffProcessor` objects
        comparing the previous pull request version with the current state.

        :param pull_request: pull request in its updated state
        :param pull_request_version: previous version snapshot (or the pull
            request itself when no new version was created)
        :return: tuple (old_diff_data, new_diff_data), both prepared
        """
        # widen the diff context so comment out-dating has enough
        # surrounding lines to match against
        diff_context = (
            self.DIFF_CONTEXT +
            CommentsModel.needed_extra_diff_context())

        # diff of the previous (versioned) state
        source_repo = pull_request_version.source_repo
        source_ref_id = pull_request_version.source_ref_parts.commit_id
        target_ref_id = pull_request_version.target_ref_parts.commit_id
        old_diff = self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id, context=diff_context)

        # diff of the current state
        source_repo = pull_request.source_repo
        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id

        new_diff = self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id, context=diff_context)

        old_diff_data = diffs.DiffProcessor(old_diff)
        old_diff_data.prepare()
        new_diff_data = diffs.DiffProcessor(new_diff)
        new_diff_data.prepare()

        return old_diff_data, new_diff_data
|
880 | 880 | |
|
    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        comments = ChangesetComment.query()\
            .filter(
                # TODO: johbo: Should we query for the repo at all here?
                # Pending decision on how comments of PRs are to be related
                # to either the source repo, the target repo or no repo at all.
                ChangesetComment.repo_id == pull_request.target_repo.repo_id,
                ChangesetComment.pull_request == pull_request,
                # NOTE: "== None" is intentional; SQLAlchemy turns it into
                # an IS NULL clause — do not replace with "is None".
                ChangesetComment.pull_request_version == None)\
            .order_by(ChangesetComment.comment_id.asc())

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)
|
906 | 906 | |
|
907 | 907 | def _calculate_commit_id_changes(self, old_ids, new_ids): |
|
908 | 908 | added = [x for x in new_ids if x not in old_ids] |
|
909 | 909 | common = [x for x in new_ids if x in old_ids] |
|
910 | 910 | removed = [x for x in old_ids if x not in new_ids] |
|
911 | 911 | total = new_ids |
|
912 | 912 | return ChangeTuple(added, common, removed, total) |
|
913 | 913 | |
|
914 | 914 | def _calculate_file_changes(self, old_diff_data, new_diff_data): |
|
915 | 915 | |
|
916 | 916 | old_files = OrderedDict() |
|
917 | 917 | for diff_data in old_diff_data.parsed_diff: |
|
918 | 918 | old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff']) |
|
919 | 919 | |
|
920 | 920 | added_files = [] |
|
921 | 921 | modified_files = [] |
|
922 | 922 | removed_files = [] |
|
923 | 923 | for diff_data in new_diff_data.parsed_diff: |
|
924 | 924 | new_filename = diff_data['filename'] |
|
925 | 925 | new_hash = md5_safe(diff_data['raw_diff']) |
|
926 | 926 | |
|
927 | 927 | old_hash = old_files.get(new_filename) |
|
928 | 928 | if not old_hash: |
|
929 | 929 | # file is not present in old diff, means it's added |
|
930 | 930 | added_files.append(new_filename) |
|
931 | 931 | else: |
|
932 | 932 | if new_hash != old_hash: |
|
933 | 933 | modified_files.append(new_filename) |
|
934 | 934 | # now remove a file from old, since we have seen it already |
|
935 | 935 | del old_files[new_filename] |
|
936 | 936 | |
|
937 | 937 | # removed files is when there are present in old, but not in NEW, |
|
938 | 938 | # since we remove old files that are present in new diff, left-overs |
|
939 | 939 | # if any should be the removed files |
|
940 | 940 | removed_files.extend(old_files.keys()) |
|
941 | 941 | |
|
942 | 942 | return FileChangeTuple(added_files, modified_files, removed_files) |
|
943 | 943 | |
|
944 | 944 | def _render_update_message(self, changes, file_changes): |
|
945 | 945 | """ |
|
946 | 946 | render the message using DEFAULT_COMMENTS_RENDERER (RST renderer), |
|
947 | 947 | so it's always looking the same disregarding on which default |
|
948 | 948 | renderer system is using. |
|
949 | 949 | |
|
950 | 950 | :param changes: changes named tuple |
|
951 | 951 | :param file_changes: file changes named tuple |
|
952 | 952 | |
|
953 | 953 | """ |
|
954 | 954 | new_status = ChangesetStatus.get_status_lbl( |
|
955 | 955 | ChangesetStatus.STATUS_UNDER_REVIEW) |
|
956 | 956 | |
|
957 | 957 | changed_files = ( |
|
958 | 958 | file_changes.added + file_changes.modified + file_changes.removed) |
|
959 | 959 | |
|
960 | 960 | params = { |
|
961 | 961 | 'under_review_label': new_status, |
|
962 | 962 | 'added_commits': changes.added, |
|
963 | 963 | 'removed_commits': changes.removed, |
|
964 | 964 | 'changed_files': changed_files, |
|
965 | 965 | 'added_files': file_changes.added, |
|
966 | 966 | 'modified_files': file_changes.modified, |
|
967 | 967 | 'removed_files': file_changes.removed, |
|
968 | 968 | } |
|
969 | 969 | renderer = RstTemplateRenderer() |
|
970 | 970 | return renderer.render('pull_request_update.mako', **params) |
|
971 | 971 | |
|
    def edit(self, pull_request, title, description, user):
        """
        Update title and description of a pull request and write an audit
        log entry containing the previous data.

        :param title: new title; only applied when non-empty
        :param description: new description; always applied
        :raises ValueError: if the pull request is already closed
        """
        pull_request = self.__get_pull_request(pull_request)
        # snapshot before mutating, for the audit entry below
        old_data = pull_request.get_api_data(with_merge_state=False)
        if pull_request.is_closed():
            raise ValueError('This pull request is closed')
        if title:
            pull_request.title = title
        pull_request.description = description
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        self._log_audit_action(
            'repo.pull_request.edit', {'old_data': old_data},
            user, pull_request)
|
985 | 985 | |
|
    def update_reviewers(self, pull_request, reviewer_data, user):
        """
        Update the reviewers in the pull request

        :param pull_request: the pr to update
        :param reviewer_data: list of tuples
            [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
        :raises ValueError: if the pull request is closed
        :return: tuple (ids_to_add, ids_to_remove) of reviewer user ids
        """
        pull_request = self.__get_pull_request(pull_request)
        if pull_request.is_closed():
            raise ValueError('This pull request is closed')

        reviewers = {}
        for user_id, reasons, mandatory, rules in reviewer_data:
            # entries may carry an id or a username; normalize to user_id
            if isinstance(user_id, (int, basestring)):
                user_id = self._get_user(user_id).user_id
            reviewers[user_id] = {
                'reasons': reasons, 'mandatory': mandatory}

        reviewers_ids = set(reviewers.keys())
        current_reviewers = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request ==
                    pull_request).all()
        current_reviewers_ids = set([x.user.user_id for x in current_reviewers])

        ids_to_add = reviewers_ids.difference(current_reviewers_ids)
        ids_to_remove = current_reviewers_ids.difference(reviewers_ids)

        log.debug("Adding %s reviewers", ids_to_add)
        log.debug("Removing %s reviewers", ids_to_remove)
        changed = False
        for uid in ids_to_add:
            changed = True
            _usr = self._get_user(uid)
            reviewer = PullRequestReviewers()
            reviewer.user = _usr
            reviewer.pull_request = pull_request
            reviewer.reasons = reviewers[uid]['reasons']
            # NOTE(marcink): mandatory shouldn't be changed now
            # reviewer.mandatory = reviewers[uid]['reasons']
            Session().add(reviewer)
            self._log_audit_action(
                'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
                user, pull_request)

        for uid in ids_to_remove:
            changed = True
            # NOTE(review): this rebinds the `reviewers` mapping built above;
            # it is no longer read past this point, but keep that in mind
            # when editing.
            reviewers = PullRequestReviewers.query()\
                .filter(PullRequestReviewers.user_id == uid,
                        PullRequestReviewers.pull_request == pull_request)\
                .all()
            # use .all() in case we accidentally added the same person twice
            # this CAN happen due to the lack of DB checks
            for obj in reviewers:
                old_data = obj.get_dict()
                Session().delete(obj)
                self._log_audit_action(
                    'repo.pull_request.reviewer.delete',
                    {'old_data': old_data}, user, pull_request)

        if changed:
            pull_request.updated_on = datetime.datetime.now()
            Session().add(pull_request)

        self.notify_reviewers(pull_request, ids_to_add)
        return ids_to_add, ids_to_remove
|
1052 | 1052 | |
|
1053 | 1053 | def get_url(self, pull_request, request=None, permalink=False): |
|
1054 | 1054 | if not request: |
|
1055 | 1055 | request = get_current_request() |
|
1056 | 1056 | |
|
1057 | 1057 | if permalink: |
|
1058 | 1058 | return request.route_url( |
|
1059 | 1059 | 'pull_requests_global', |
|
1060 | 1060 | pull_request_id=pull_request.pull_request_id,) |
|
1061 | 1061 | else: |
|
1062 | 1062 | return request.route_url('pullrequest_show', |
|
1063 | 1063 | repo_name=safe_str(pull_request.target_repo.repo_name), |
|
1064 | 1064 | pull_request_id=pull_request.pull_request_id,) |
|
1065 | 1065 | |
|
    def get_shadow_clone_url(self, pull_request, request=None):
        """
        Returns qualified url pointing to the shadow repository. If this pull
        request is closed there is no shadow repository and ``None`` will be
        returned.

        :param request: optional pyramid request, forwarded to `get_url`;
            the current request is used when omitted.
        """
        if pull_request.is_closed():
            return None
        else:
            # un-escape the PR url before appending the clone path
            pr_url = urllib.unquote(self.get_url(pull_request, request=request))
            return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
|
1077 | 1077 | |
|
    def notify_reviewers(self, pull_request, reviewers_ids):
        """
        Create notification objects (and emails) informing the given
        reviewers about this pull request. No-op for an empty id list.

        :param pull_request: the `PullRequest` the notification is about
        :param reviewers_ids: iterable of reviewer user ids to notify
        """
        # notification to reviewers
        if not reviewers_ids:
            return

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = reviewers_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        kwargs = {
            'user': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
        }

        # pre-generate the subject for notification itself
        (subject,
         _h, _e,  # we don't care about those
         body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=pull_request.author,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
|
1136 | 1136 | |
|
    def delete(self, pull_request, user):
        """
        Delete a pull request: clean up its merge workspace, write an audit
        entry with the pre-deletion data, then remove the record.
        """
        pull_request = self.__get_pull_request(pull_request)
        # capture data before deletion for the audit log
        old_data = pull_request.get_api_data(with_merge_state=False)
        self._cleanup_merge_workspace(pull_request)
        self._log_audit_action(
            'repo.pull_request.delete', {'old_data': old_data},
            user, pull_request)
        Session().delete(pull_request)
|
1145 | 1145 | |
|
    def close_pull_request(self, pull_request, user):
        """
        Close a pull request without merging: drop its merge workspace, set
        the status to closed, fire the 'close' hook and audit-log the action.
        """
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        pull_request.status = PullRequest.STATUS_CLOSED
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        self._trigger_pull_request_hook(
            pull_request, pull_request.author, 'close')

        pr_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.close', {'data': pr_data}, user, pull_request)
|
1158 | 1158 | |
|
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None):
        """
        Close a pull request, leaving a status-changing comment.

        The recorded status is approved only when the calculated review
        status is approved; otherwise rejected.

        :param message: optional comment text; a default transition message
            is generated when omitted
        :return: tuple (comment, status) — the created comment and the
            status that was set
        """
        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        Session().flush()
        events.trigger(events.PullRequestCommentEvent(pull_request, comment))
        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self._trigger_pull_request_hook(
                pull_request, user, 'review_status_change')

        # finally close the PR
        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, user)

        return comment, status
|
1213 | 1213 | |
|
    def merge_status(self, pull_request, translator=None):
        """
        Check whether the pull request can currently be merged server-side.

        :param translator: optional translation function; defaults to the
            current request's translator
        :return: tuple (bool merge_possible, status message)
        """
        _ = translator or get_current_request().translate

        # cheap pre-checks before attempting an actual (dry-run) merge
        if not self._is_merge_enabled(pull_request):
            return False, _('Server-side pull request merging is disabled.')
        if pull_request.is_closed():
            return False, _('This pull request is closed.')
        merge_possible, msg = self._check_repo_requirements(
            target=pull_request.target_repo, source=pull_request.source_repo,
            translator=_)
        if not merge_possible:
            return merge_possible, msg

        try:
            resp = self._try_merge(pull_request)
            log.debug("Merge response: %s", resp)
            status = resp.possible, self.merge_status_message(
                resp.failure_reason)
        except NotImplementedError:
            # backend does not support merging at all
            status = False, _('Pull request merging is not supported.')

        return status
|
1236 | 1236 | |
|
1237 | 1237 | def _check_repo_requirements(self, target, source, translator): |
|
1238 | 1238 | """ |
|
1239 | 1239 | Check if `target` and `source` have compatible requirements. |
|
1240 | 1240 | |
|
1241 | 1241 | Currently this is just checking for largefiles. |
|
1242 | 1242 | """ |
|
1243 | 1243 | _ = translator |
|
1244 | 1244 | target_has_largefiles = self._has_largefiles(target) |
|
1245 | 1245 | source_has_largefiles = self._has_largefiles(source) |
|
1246 | 1246 | merge_possible = True |
|
1247 | 1247 | message = u'' |
|
1248 | 1248 | |
|
1249 | 1249 | if target_has_largefiles != source_has_largefiles: |
|
1250 | 1250 | merge_possible = False |
|
1251 | 1251 | if source_has_largefiles: |
|
1252 | 1252 | message = _( |
|
1253 | 1253 | 'Target repository large files support is disabled.') |
|
1254 | 1254 | else: |
|
1255 | 1255 | message = _( |
|
1256 | 1256 | 'Source repository large files support is disabled.') |
|
1257 | 1257 | |
|
1258 | 1258 | return merge_possible, message |
|
1259 | 1259 | |
|
1260 | 1260 | def _has_largefiles(self, repo): |
|
1261 | 1261 | largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings( |
|
1262 | 1262 | 'extensions', 'largefiles') |
|
1263 | 1263 | return largefiles_ui and largefiles_ui[0].active |
|
1264 | 1264 | |
|
    def _try_merge(self, pull_request):
        """
        Try to merge the pull request and return the merge status.

        Uses the cached merge state when it is still fresh; otherwise runs
        a dry-run merge to refresh it.

        :return: a `MergeResponse` describing whether the merge is possible
        """
        log.debug(
            "Trying out if the pull request %s can be merged.",
            pull_request.pull_request_id)
        target_vcs = pull_request.target_repo.scm_instance()

        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF)
            return merge_state

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            log.debug("The target repository is locked.")
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
        elif self._needs_merge_state_refresh(pull_request, target_ref):
            # cached merge state is stale -> re-run a dry-run merge
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # cached state is still valid -> answer from the cache
            possible = pull_request.\
                last_merge_status == MergeFailureReason.NONE
            merge_state = MergeResponse(
                possible, False, None, pull_request.last_merge_status)

        return merge_state
|
1299 | 1299 | |
|
1300 | 1300 | def _refresh_reference(self, reference, vcs_repository): |
|
1301 | 1301 | if reference.type in ('branch', 'book'): |
|
1302 | 1302 | name_or_id = reference.name |
|
1303 | 1303 | else: |
|
1304 | 1304 | name_or_id = reference.commit_id |
|
1305 | 1305 | refreshed_commit = vcs_repository.get_commit(name_or_id) |
|
1306 | 1306 | refreshed_reference = Reference( |
|
1307 | 1307 | reference.type, reference.name, refreshed_commit.raw_id) |
|
1308 | 1308 | return refreshed_reference |
|
1309 | 1309 | |
|
1310 | 1310 | def _needs_merge_state_refresh(self, pull_request, target_reference): |
|
1311 | 1311 | return not( |
|
1312 | 1312 | pull_request.revisions and |
|
1313 | 1313 | pull_request.revisions[0] == pull_request._last_merge_source_rev and |
|
1314 | 1314 | target_reference.commit_id == pull_request._last_merge_target_rev) |
|
1315 | 1315 | |
|
1316 | 1316 | def _refresh_merge_state(self, pull_request, target_vcs, target_reference): |
|
1317 | 1317 | workspace_id = self._workspace_id(pull_request) |
|
1318 | 1318 | source_vcs = pull_request.source_repo.scm_instance() |
|
1319 | 1319 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
1320 | 1320 | close_branch = self._close_branch_before_merging(pull_request) |
|
1321 | 1321 | merge_state = target_vcs.merge( |
|
1322 | 1322 | target_reference, source_vcs, pull_request.source_ref_parts, |
|
1323 | 1323 | workspace_id, dry_run=True, use_rebase=use_rebase, |
|
1324 | 1324 | close_branch=close_branch) |
|
1325 | 1325 | |
|
1326 | 1326 | # Do not store the response if there was an unknown error. |
|
1327 | 1327 | if merge_state.failure_reason != MergeFailureReason.UNKNOWN: |
|
1328 | 1328 | pull_request._last_merge_source_rev = \ |
|
1329 | 1329 | pull_request.source_ref_parts.commit_id |
|
1330 | 1330 | pull_request._last_merge_target_rev = target_reference.commit_id |
|
1331 | 1331 | pull_request.last_merge_status = merge_state.failure_reason |
|
1332 | 1332 | pull_request.shadow_merge_ref = merge_state.merge_ref |
|
1333 | 1333 | Session().add(pull_request) |
|
1334 | 1334 | Session().commit() |
|
1335 | 1335 | |
|
1336 | 1336 | return merge_state |
|
1337 | 1337 | |
|
1338 | 1338 | def _workspace_id(self, pull_request): |
|
1339 | 1339 | workspace_id = 'pr-%s' % pull_request.pull_request_id |
|
1340 | 1340 | return workspace_id |
|
1341 | 1341 | |
|
1342 | 1342 | def merge_status_message(self, status_code): |
|
1343 | 1343 | """ |
|
1344 | 1344 | Return a human friendly error message for the given merge status code. |
|
1345 | 1345 | """ |
|
1346 | 1346 | return self.MERGE_STATUS_MESSAGES[status_code] |
|
1347 | 1347 | |
|
1348 | 1348 | def generate_repo_data(self, repo, commit_id=None, branch=None, |
|
1349 | 1349 | bookmark=None, translator=None): |
|
1350 | 1350 | from rhodecode.model.repo import RepoModel |
|
1351 | 1351 | |
|
1352 | 1352 | all_refs, selected_ref = \ |
|
1353 | 1353 | self._get_repo_pullrequest_sources( |
|
1354 | 1354 | repo.scm_instance(), commit_id=commit_id, |
|
1355 | 1355 | branch=branch, bookmark=bookmark, translator=translator) |
|
1356 | 1356 | |
|
1357 | 1357 | refs_select2 = [] |
|
1358 | 1358 | for element in all_refs: |
|
1359 | 1359 | children = [{'id': x[0], 'text': x[1]} for x in element[0]] |
|
1360 | 1360 | refs_select2.append({'text': element[1], 'children': children}) |
|
1361 | 1361 | |
|
1362 | 1362 | return { |
|
1363 | 1363 | 'user': { |
|
1364 | 1364 | 'user_id': repo.user.user_id, |
|
1365 | 1365 | 'username': repo.user.username, |
|
1366 | 1366 | 'firstname': repo.user.first_name, |
|
1367 | 1367 | 'lastname': repo.user.last_name, |
|
1368 | 1368 | 'gravatar_link': h.gravatar_url(repo.user.email, 14), |
|
1369 | 1369 | }, |
|
1370 | 1370 | 'name': repo.repo_name, |
|
1371 | 1371 | 'link': RepoModel().get_url(repo), |
|
1372 | 1372 | 'description': h.chop_at_smart(repo.description_safe, '\n'), |
|
1373 | 1373 | 'refs': { |
|
1374 | 1374 | 'all_refs': all_refs, |
|
1375 | 1375 | 'selected_ref': selected_ref, |
|
1376 | 1376 | 'select2_refs': refs_select2 |
|
1377 | 1377 | } |
|
1378 | 1378 | } |
|
1379 | 1379 | |
|
1380 | 1380 | def generate_pullrequest_title(self, source, source_ref, target): |
|
1381 | 1381 | return u'{source}#{at_ref} to {target}'.format( |
|
1382 | 1382 | source=source, |
|
1383 | 1383 | at_ref=source_ref, |
|
1384 | 1384 | target=target, |
|
1385 | 1385 | ) |
|
1386 | 1386 | |
|
1387 | 1387 | def _cleanup_merge_workspace(self, pull_request): |
|
1388 | 1388 | # Merging related cleanup |
|
1389 | 1389 | target_scm = pull_request.target_repo.scm_instance() |
|
1390 | 1390 | workspace_id = 'pr-%s' % pull_request.pull_request_id |
|
1391 | 1391 | |
|
1392 | 1392 | try: |
|
1393 | 1393 | target_scm.cleanup_merge_workspace(workspace_id) |
|
1394 | 1394 | except NotImplementedError: |
|
1395 | 1395 | pass |
|
1396 | 1396 | |
|
1397 | 1397 | def _get_repo_pullrequest_sources( |
|
1398 | 1398 | self, repo, commit_id=None, branch=None, bookmark=None, |
|
1399 | 1399 | translator=None): |
|
1400 | 1400 | """ |
|
1401 | 1401 | Return a structure with repo's interesting commits, suitable for |
|
1402 | 1402 | the selectors in pullrequest controller |
|
1403 | 1403 | |
|
1404 | 1404 | :param commit_id: a commit that must be in the list somehow |
|
1405 | 1405 | and selected by default |
|
1406 | 1406 | :param branch: a branch that must be in the list and selected |
|
1407 | 1407 | by default - even if closed |
|
1408 | 1408 | :param bookmark: a bookmark that must be in the list and selected |
|
1409 | 1409 | """ |
|
1410 | 1410 | _ = translator or get_current_request().translate |
|
1411 | 1411 | |
|
1412 | 1412 | commit_id = safe_str(commit_id) if commit_id else None |
|
1413 | 1413 | branch = safe_str(branch) if branch else None |
|
1414 | 1414 | bookmark = safe_str(bookmark) if bookmark else None |
|
1415 | 1415 | |
|
1416 | 1416 | selected = None |
|
1417 | 1417 | |
|
1418 | 1418 | # order matters: first source that has commit_id in it will be selected |
|
1419 | 1419 | sources = [] |
|
1420 | 1420 | sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark)) |
|
1421 | 1421 | sources.append(('branch', repo.branches.items(), _('Branches'), branch)) |
|
1422 | 1422 | |
|
1423 | 1423 | if commit_id: |
|
1424 | 1424 | ref_commit = (h.short_id(commit_id), commit_id) |
|
1425 | 1425 | sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id)) |
|
1426 | 1426 | |
|
1427 | 1427 | sources.append( |
|
1428 | 1428 | ('branch', repo.branches_closed.items(), _('Closed Branches'), branch), |
|
1429 | 1429 | ) |
|
1430 | 1430 | |
|
1431 | 1431 | groups = [] |
|
1432 | 1432 | for group_key, ref_list, group_name, match in sources: |
|
1433 | 1433 | group_refs = [] |
|
1434 | 1434 | for ref_name, ref_id in ref_list: |
|
1435 | 1435 | ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id) |
|
1436 | 1436 | group_refs.append((ref_key, ref_name)) |
|
1437 | 1437 | |
|
1438 | 1438 | if not selected: |
|
1439 | 1439 | if set([commit_id, match]) & set([ref_id, ref_name]): |
|
1440 | 1440 | selected = ref_key |
|
1441 | 1441 | |
|
1442 | 1442 | if group_refs: |
|
1443 | 1443 | groups.append((group_refs, group_name)) |
|
1444 | 1444 | |
|
1445 | 1445 | if not selected: |
|
1446 | 1446 | ref = commit_id or branch or bookmark |
|
1447 | 1447 | if ref: |
|
1448 | 1448 | raise CommitDoesNotExistError( |
|
1449 | 1449 | 'No commit refs could be found matching: %s' % ref) |
|
1450 | 1450 | elif repo.DEFAULT_BRANCH_NAME in repo.branches: |
|
1451 | 1451 | selected = 'branch:%s:%s' % ( |
|
1452 | 1452 | repo.DEFAULT_BRANCH_NAME, |
|
1453 | 1453 | repo.branches[repo.DEFAULT_BRANCH_NAME] |
|
1454 | 1454 | ) |
|
1455 | 1455 | elif repo.commit_ids: |
|
1456 | 1456 | # make the user select in this case |
|
1457 | 1457 | selected = None |
|
1458 | 1458 | else: |
|
1459 | 1459 | raise EmptyRepositoryError() |
|
1460 | 1460 | return groups, selected |
|
1461 | 1461 | |
|
    def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
        """
        Return the vcs diff between two refs of *source_repo*.

        :param context: number of context lines around each hunk.
        """
        return self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id, context=context)
|
1465 | 1465 | |
|
1466 | 1466 | def _get_diff_from_pr_or_version( |
|
1467 | 1467 | self, source_repo, source_ref_id, target_ref_id, context): |
|
1468 | 1468 | target_commit = source_repo.get_commit( |
|
1469 | 1469 | commit_id=safe_str(target_ref_id)) |
|
1470 | 1470 | source_commit = source_repo.get_commit( |
|
1471 | 1471 | commit_id=safe_str(source_ref_id)) |
|
1472 | 1472 | if isinstance(source_repo, Repository): |
|
1473 | 1473 | vcs_repo = source_repo.scm_instance() |
|
1474 | 1474 | else: |
|
1475 | 1475 | vcs_repo = source_repo |
|
1476 | 1476 | |
|
1477 | 1477 | # TODO: johbo: In the context of an update, we cannot reach |
|
1478 | 1478 | # the old commit anymore with our normal mechanisms. It needs |
|
1479 | 1479 | # some sort of special support in the vcs layer to avoid this |
|
1480 | 1480 | # workaround. |
|
1481 | 1481 | if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and |
|
1482 | 1482 | vcs_repo.alias == 'git'): |
|
1483 | 1483 | source_commit.raw_id = safe_str(source_ref_id) |
|
1484 | 1484 | |
|
1485 | 1485 | log.debug('calculating diff between ' |
|
1486 | 1486 | 'source_ref:%s and target_ref:%s for repo `%s`', |
|
1487 | 1487 | target_ref_id, source_ref_id, |
|
1488 | 1488 | safe_unicode(vcs_repo.path)) |
|
1489 | 1489 | |
|
1490 | 1490 | vcs_diff = vcs_repo.get_diff( |
|
1491 | 1491 | commit1=target_commit, commit2=source_commit, context=context) |
|
1492 | 1492 | return vcs_diff |
|
1493 | 1493 | |
|
    def _is_merge_enabled(self, pull_request):
        # Controlled by the 'rhodecode_pr_merge_enabled' general vcs setting
        # of the pull request's target repository.
        return self._get_general_setting(
            pull_request, 'rhodecode_pr_merge_enabled')
|
1497 | 1497 | |
|
1498 | 1498 | def _use_rebase_for_merging(self, pull_request): |
|
1499 | 1499 | repo_type = pull_request.target_repo.repo_type |
|
1500 | 1500 | if repo_type == 'hg': |
|
1501 | 1501 | return self._get_general_setting( |
|
1502 | 1502 | pull_request, 'rhodecode_hg_use_rebase_for_merging') |
|
1503 | 1503 | elif repo_type == 'git': |
|
1504 | 1504 | return self._get_general_setting( |
|
1505 | 1505 | pull_request, 'rhodecode_git_use_rebase_for_merging') |
|
1506 | 1506 | |
|
1507 | 1507 | return False |
|
1508 | 1508 | |
|
1509 | 1509 | def _close_branch_before_merging(self, pull_request): |
|
1510 | 1510 | repo_type = pull_request.target_repo.repo_type |
|
1511 | 1511 | if repo_type == 'hg': |
|
1512 | 1512 | return self._get_general_setting( |
|
1513 | 1513 | pull_request, 'rhodecode_hg_close_branch_before_merging') |
|
1514 | 1514 | elif repo_type == 'git': |
|
1515 | 1515 | return self._get_general_setting( |
|
1516 | 1516 | pull_request, 'rhodecode_git_close_branch_before_merging') |
|
1517 | 1517 | |
|
1518 | 1518 | return False |
|
1519 | 1519 | |
|
1520 | 1520 | def _get_general_setting(self, pull_request, settings_key, default=False): |
|
1521 | 1521 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
1522 | 1522 | settings = settings_model.get_general_settings() |
|
1523 | 1523 | return settings.get(settings_key, default) |
|
1524 | 1524 | |
|
    def _log_audit_action(self, action, action_data, user, pull_request):
        """
        Persist an audit log entry for *action*, attributed to *user* and
        recorded against the pull request's target repository.
        """
        audit_logger.store(
            action=action,
            action_data=action_data,
            user=user,
            repo=pull_request.target_repo)
|
1531 | 1531 | |
|
1532 | 1532 | def get_reviewer_functions(self): |
|
1533 | 1533 | """ |
|
1534 | 1534 | Fetches functions for validation and fetching default reviewers. |
|
1535 | 1535 | If available we use the EE package, else we fallback to CE |
|
1536 | 1536 | package functions |
|
1537 | 1537 | """ |
|
1538 | 1538 | try: |
|
1539 | 1539 | from rc_reviewers.utils import get_default_reviewers_data |
|
1540 | 1540 | from rc_reviewers.utils import validate_default_reviewers |
|
1541 | 1541 | except ImportError: |
|
1542 | 1542 | from rhodecode.apps.repository.utils import \ |
|
1543 | 1543 | get_default_reviewers_data |
|
1544 | 1544 | from rhodecode.apps.repository.utils import \ |
|
1545 | 1545 | validate_default_reviewers |
|
1546 | 1546 | |
|
1547 | 1547 | return get_default_reviewers_data, validate_default_reviewers |
|
1548 | 1548 | |
|
1549 | 1549 | |
|
class MergeCheck(object):
    """
    Perform Merge Checks and returns a check object which stores information
    about merge errors, and merge conditions
    """
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'

    def __init__(self):
        # overall review status of the PR, set by validate()
        self.review_status = None
        # whether the backend reported the merge as possible
        self.merge_possible = None
        self.merge_msg = ''
        # None until the first error is pushed, then True
        self.failed = None
        # list of [error_type, message] pairs, in check order
        self.errors = []
        # error_key -> {details, error_type, message}; insertion-ordered
        self.error_details = OrderedDict()

    def push_error(self, error_type, message, error_key, details):
        """Record one failed check and mark the whole merge check failed."""
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, user, translator, fail_early=False):
        """
        Run all merge checks (permission, review status, unresolved TODOs,
        backend merge status) and return the populated check object.

        :param fail_early: return at the first failing check instead of
            collecting all failures.
        """
        _ = translator
        merge_check = cls()

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(
            pull_request, user)
        if not user_allowed_to_merge:
            # Fix: this branch previously logged the copy-pasted
            # "approval is pending" message from the review check below.
            log.debug("MergeCheck: cannot merge, no permission to merge.")

            msg = _('User `{}` not allowed to perform merge.').format(user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error(
                'warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, {} "
                      "unresolved todos left.".format(len(todos)))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, as reported by the vcs backend (dry-run state)
        merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator)
        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        if not merge_status:
            log.debug(
                "MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        """
        Return a dict describing how the merge will be performed
        (merge strategy, whether the source branch will be closed).
        """
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            # Fix: default the message so an unexpected repo_type cannot
            # raise UnboundLocalError (close_msg was previously unset for
            # anything other than hg/git).
            close_msg = _('Source branch will be deleted after merge.')
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed after merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details
|
1676 | 1676 | |
|
# Revision bookkeeping for a pull request update: counts/lists of revisions
# that were added, kept in common, and removed, plus the total.
# NOTE(review): presumably produced when a PR is updated with new commits —
# confirm against callers outside this chunk.
ChangeTuple = collections.namedtuple(
    'ChangeTuple', ['added', 'common', 'removed', 'total'])

# File-level counterpart: files added, modified and removed by an update.
FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', ['added', 'modified', 'removed'])
General Comments 0
You need to be logged in to leave comments.
Login now