@@ -0,0 +1,29 @@
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+
+<html>
+<head></head>
+
+<body>
+
+SUBJECT:
+<pre>${c.subject}</pre>
+
+HEADERS:
+<pre>
+${c.headers}
+</pre>
+
+PLAINTEXT:
+<pre>
+${c.email_body_plaintext|n}
+</pre>
+
+</body>
+</html>
+<br/><br/>
+
+HTML:
+
+${c.email_body|n}
+
+
@@ -0,0 +1,49 @@
+## -*- coding: utf-8 -*-
+<%inherit file="/base/base.mako"/>
+
+<%def name="title()">
+${_('Show notification')} ${c.rhodecode_user.username}
+%if c.rhodecode_name:
+· ${h.branding(c.rhodecode_name)}
+%endif
+</%def>
+
+<%def name="breadcrumbs_links()">
+${h.link_to(_('My Notifications'), h.route_path('notifications_show_all'))}
+»
+${_('Show notification')}
+</%def>
+
+<%def name="menu_bar_nav()">
+${self.menu_items(active='admin')}
+</%def>
+
+<%def name="main()">
+<div class="box">
+
+<!-- box / title -->
+<div class="title">
+Rendered plain text using markup renderer
+</div>
+<div class="table">
+<div >
+<div class="notification-header">
+GRAVATAR
+<div class="desc">
+DESC
+</div>
+</div>
+<div class="notification-body">
+<div class="notification-subject">
+<h3>${_('Subject')}: ${c.subject}</h3>
+</div>
+${c.email_body|n}
+</div>
+</div>
+</div>
+</div>
+
+</%def>
+
+
+
@@ -0,0 +1,34 @@
+## -*- coding: utf-8 -*-
+<%inherit file="/debug_style/index.html"/>
+
+<%def name="breadcrumbs_links()">
+${h.link_to(_('Style'), h.route_path('debug_style_home'))}
+»
+${c.active}
+</%def>
+
+
+<%def name="real_main()">
+<div class="box">
+<div class="title">
+${self.breadcrumbs()}
+</div>
+
+<div class='sidebar-col-wrapper'>
+${self.sidebar()}
+<div class="main-content">
+<h2>Emails</h2>
+<ul>
+% for elem in sorted(c.email_types.keys()):
+<li>
+<a href="${request.route_path('debug_style_email', email_id=elem, _query={'user':c.rhodecode_user.username})}">${elem}</a>
+|
+<a href="${request.route_path('debug_style_email_plain_rendered', email_id=elem, _query={'user':c.rhodecode_user.username})}">plain rendered</a>
+</li>
+% endfor
+</ul>
+
+</div> <!-- .main-content -->
+</div>
+</div> <!-- .box -->
+</%def>
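
The loop above drives the new email-debug index page. For orientation, a sketch (not part of the changeset) of where c.email_types comes from, based on the debug-style view class later in this changeset: a few synthetic '+file'/'+status' variants layered over whatever EmailNotificationModel.email_types provides.

    # Sketch only: how c.email_types is assembled for this page (see the view code below).
    c.email_types = {
        'cs_comment+file': {},
        'cs_comment+status': {},
        'pull_request_comment+file': {},
        'pull_request_comment+status': {},
    }
    c.email_types.update(EmailNotificationModel.email_types)
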
@@ -1,51 +1,59 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2016-2019 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 from rhodecode.apps._base import ADMIN_PREFIX
 from rhodecode.lib.utils2 import str2bool


 class DebugStylePredicate(object):
     def __init__(self, val, config):
         self.val = val

     def text(self):
         return 'debug style route = %s' % self.val

     phash = text

     def __call__(self, info, request):
         return str2bool(request.registry.settings.get('debug_style'))


 def includeme(config):
     config.add_route_predicate(
         'debug_style', DebugStylePredicate)

     config.add_route(
         name='debug_style_home',
         pattern=ADMIN_PREFIX + '/debug_style',
         debug_style=True)
     config.add_route(
+        name='debug_style_email',
+        pattern=ADMIN_PREFIX + '/debug_style/email/{email_id}',
+        debug_style=True)
+    config.add_route(
+        name='debug_style_email_plain_rendered',
+        pattern=ADMIN_PREFIX + '/debug_style/email-rendered/{email_id}',
+        debug_style=True)
+    config.add_route(
         name='debug_style_template',
         pattern=ADMIN_PREFIX + '/debug_style/t/{t_path}',
         debug_style=True)

     # Scan module for configuration decorators.
     config.scan('.views', ignore='.tests')
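
Together with the templates above, the two new routes give every email type a pair of debug URLs. A sketch of what they resolve to; the '/_admin' value of ADMIN_PREFIX is an assumption, since the constant is imported rather than defined in this file:

    # Sketch only, values illustrative:
    #   debug_style_email                 -> /_admin/debug_style/email/{email_id}
    #   debug_style_email_plain_rendered  -> /_admin/debug_style/email-rendered/{email_id}
    # e.g. /_admin/debug_style/email/pull_request_comment+status?user=admin
    # Both routes carry debug_style=True, so DebugStylePredicate only exposes them
    # when the 'debug_style' setting is enabled.
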
@@ -1,59 +1,338 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2016-2019 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 import os
 import logging
+import datetime

 from pyramid.view import view_config
 from pyramid.renderers import render_to_response
 from rhodecode.apps._base import BaseAppView
+from rhodecode.lib.celerylib import run_task, tasks
+from rhodecode.lib.utils2 import AttributeDict
+from rhodecode.model.db import User
+from rhodecode.model.notification import EmailNotificationModel

 log = logging.getLogger(__name__)


 class DebugStyleView(BaseAppView):
     def load_default_context(self):
         c = self._get_local_tmpl_context()

         return c

     @view_config(
         route_name='debug_style_home', request_method='GET',
         renderer=None)
     def index(self):
         c = self.load_default_context()
         c.active = 'index'

         return render_to_response(
             'debug_style/index.html', self._get_template_context(c),
             request=self.request)

     @view_config(
+        route_name='debug_style_email', request_method='GET',
+        renderer=None)
+    @view_config(
+        route_name='debug_style_email_plain_rendered', request_method='GET',
+        renderer=None)
+    def render_email(self):
+        c = self.load_default_context()
+        email_id = self.request.matchdict['email_id']
+        c.active = 'emails'
+
+        pr = AttributeDict(
+            pull_request_id=123,
+            title='digital_ocean: fix redis, elastic search start on boot, '
+                  'fix fd limits on supervisor, set postgres 11 version',
+            description='''
+Check if we should use full-topic or mini-topic.
+
+- full topic produces some problems with merge states etc
+- server-mini-topic needs probably tweeks.
+''',
+            repo_name='foobar',
+            source_ref_parts=AttributeDict(type='branch', name='fix-ticket-2000'),
+            target_ref_parts=AttributeDict(type='branch', name='master'),
+        )
+        target_repo = AttributeDict(repo_name='repo_group/target_repo')
+        source_repo = AttributeDict(repo_name='repo_group/source_repo')
+        user = User.get_by_username(self.request.GET.get('user')) or self._rhodecode_db_user
+
+        email_kwargs = {
+            'test': {},
+            'message': {
+                'body': 'message body !'
+            },
+            'email_test': {
+                'user': user,
+                'date': datetime.datetime.now(),
+                'rhodecode_version': c.rhodecode_version
+            },
+            'password_reset': {
+                'password_reset_url': 'http://example.com/reset-rhodecode-password/token',
+
+                'user': user,
+                'date': datetime.datetime.now(),
+                'email': 'test@rhodecode.com',
+                'first_admin_email': User.get_first_super_admin().email
+            },
+            'password_reset_confirmation': {
+                'new_password': 'new-password-example',
+                'user': user,
+                'date': datetime.datetime.now(),
+                'email': 'test@rhodecode.com',
+                'first_admin_email': User.get_first_super_admin().email
+            },
+            'registration': {
+                'user': user,
+                'date': datetime.datetime.now(),
+            },
+
+            'pull_request_comment': {
+                'user': user,
+
+                'status_change': None,
+                'status_change_type': None,
+
+                'pull_request': pr,
+                'pull_request_commits': [],
+
+                'pull_request_target_repo': target_repo,
+                'pull_request_target_repo_url': 'http://target-repo/url',
+
+                'pull_request_source_repo': source_repo,
+                'pull_request_source_repo_url': 'http://source-repo/url',
+
+                'pull_request_url': 'http://localhost/pr1',
+                'pr_comment_url': 'http://comment-url',
+
+                'comment_file': None,
+                'comment_line': None,
+                'comment_type': 'note',
+                'comment_body': 'This is my comment body. *I like !*',
+
+                'renderer_type': 'markdown',
+                'mention': True,
+
+            },
+            'pull_request_comment+status': {
+                'user': user,
+
+                'status_change': 'approved',
+                'status_change_type': 'approved',
+
+                'pull_request': pr,
+                'pull_request_commits': [],
+
+                'pull_request_target_repo': target_repo,
+                'pull_request_target_repo_url': 'http://target-repo/url',
+
+                'pull_request_source_repo': source_repo,
+                'pull_request_source_repo_url': 'http://source-repo/url',
+
+                'pull_request_url': 'http://localhost/pr1',
+                'pr_comment_url': 'http://comment-url',
+
+                'comment_type': 'todo',
+                'comment_file': None,
+                'comment_line': None,
+                'comment_body': '''
+I think something like this would be better
+
+```py
+
+def db():
+    global connection
+    return connection
+
+```
+
+''',
+
+                'renderer_type': 'markdown',
+                'mention': True,
+
+            },
+            'pull_request_comment+file': {
+                'user': user,
+
+                'status_change': None,
+                'status_change_type': None,
+
+                'pull_request': pr,
+                'pull_request_commits': [],
+
+                'pull_request_target_repo': target_repo,
+                'pull_request_target_repo_url': 'http://target-repo/url',
+
+                'pull_request_source_repo': source_repo,
+                'pull_request_source_repo_url': 'http://source-repo/url',
+
+                'pull_request_url': 'http://localhost/pr1',
+
+                'pr_comment_url': 'http://comment-url',
+
+                'comment_file': 'rhodecode/model/db.py',
+                'comment_line': 'o1210',
+                'comment_type': 'todo',
+                'comment_body': '''
+I like this !
+
+But please check this code::
+
+    def main():
+        print 'ok'
+
+This should work better !
+''',
+
+                'renderer_type': 'rst',
+                'mention': True,
+
+            },
+
+            'cs_comment': {
+                'user': user,
+                'commit': AttributeDict(idx=123, raw_id='a'*40, message='Commit message'),
+                'status_change': None,
+                'status_change_type': None,
+
+                'commit_target_repo_url': 'http://foo.example.com/#comment1',
+                'repo_name': 'test-repo',
+                'comment_type': 'note',
+                'comment_file': None,
+                'comment_line': None,
+                'commit_comment_url': 'http://comment-url',
+                'comment_body': 'This is my comment body. *I like !*',
+                'renderer_type': 'markdown',
+                'mention': True,
+            },
+            'cs_comment+status': {
+                'user': user,
+                'commit': AttributeDict(idx=123, raw_id='a' * 40, message='Commit message'),
+                'status_change': 'approved',
+                'status_change_type': 'approved',
+
+                'commit_target_repo_url': 'http://foo.example.com/#comment1',
+                'repo_name': 'test-repo',
+                'comment_type': 'note',
+                'comment_file': None,
+                'comment_line': None,
+                'commit_comment_url': 'http://comment-url',
+                'comment_body': '''
+Hello **world**
+
+This is a multiline comment :)
+
+- list
+- list2
+''',
+                'renderer_type': 'markdown',
+                'mention': True,
+            },
+            'cs_comment+file': {
+                'user': user,
+                'commit': AttributeDict(idx=123, raw_id='a' * 40, message='Commit message'),
+                'status_change': None,
+                'status_change_type': None,
+
+                'commit_target_repo_url': 'http://foo.example.com/#comment1',
+                'repo_name': 'test-repo',
+
+                'comment_type': 'note',
+                'comment_file': 'test-file.py',
+                'comment_line': 'n100',
+
+                'commit_comment_url': 'http://comment-url',
+                'comment_body': 'This is my comment body. *I like !*',
+                'renderer_type': 'markdown',
+                'mention': True,
+            },
+
+            'pull_request': {
+                'user': user,
+                'pull_request': pr,
+                'pull_request_commits': [
+                    ('472d1df03bf7206e278fcedc6ac92b46b01c4e21', '''\
+my-account: moved email closer to profile as it's similar data just moved outside.
+'''),
+                    ('cbfa3061b6de2696c7161ed15ba5c6a0045f90a7', '''\
+users: description edit fixes
+
+- tests
+- added metatags info
+'''),
+                ],
+
+                'pull_request_target_repo': target_repo,
+                'pull_request_target_repo_url': 'http://target-repo/url',
+
+                'pull_request_source_repo': source_repo,
+                'pull_request_source_repo_url': 'http://source-repo/url',
+
+                'pull_request_url': 'http://code.rhodecode.com/_pull-request/123',
+            }
+
+        }
+
+        template_type = email_id.split('+')[0]
+        (c.subject, c.headers, c.email_body,
+         c.email_body_plaintext) = EmailNotificationModel().render_email(
+            template_type, **email_kwargs.get(email_id, {}))
+
+        test_email = self.request.GET.get('email')
+        if test_email:
+            recipients = [test_email]
+            run_task(tasks.send_email, recipients, c.subject,
+                     c.email_body_plaintext, c.email_body)
+
+        if self.request.matched_route.name == 'debug_style_email_plain_rendered':
+            template = 'debug_style/email_plain_rendered.mako'
+        else:
+            template = 'debug_style/email.mako'
+        return render_to_response(
+            template, self._get_template_context(c),
+            request=self.request)
+
+    @view_config(
         route_name='debug_style_template', request_method='GET',
         renderer=None)
     def template(self):
         t_path = self.request.matchdict['t_path']
         c = self.load_default_context()
         c.active = os.path.splitext(t_path)[0]
         c.came_from = ''
+        c.email_types = {
+            'cs_comment+file': {},
+            'cs_comment+status': {},
+
+            'pull_request_comment+file': {},
+            'pull_request_comment+status': {},
+        }
+        c.email_types.update(EmailNotificationModel.email_types)

         return render_to_response(
             'debug_style/' + t_path, self._get_template_context(c),
-            request=self.request)
\ No newline at end of file
+            request=self.request)
+
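
A short sketch (not part of the changeset) of how render_email() above turns a requested email_id into a template and its kwargs:

    # email_id comes from the URL, e.g. 'pull_request_comment+status'
    email_id = 'pull_request_comment+status'
    template_type = email_id.split('+')[0]   # -> 'pull_request_comment'
    # The '+status' / '+file' suffix only picks a different preset out of email_kwargs;
    # EmailNotificationModel().render_email(template_type, ...) selects the Mako template
    # and returns (subject, headers, email_body, email_body_plaintext).
    # Adding ?email=<address> to the URL also sends the result via tasks.send_email.
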
@@ -1,320 +1,320 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2010-2019 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 import pytest

 from rhodecode.tests import TestController

 from rhodecode.model.db import (
     ChangesetComment, Notification, UserNotification)
 from rhodecode.model.meta import Session
 from rhodecode.lib import helpers as h


 def route_path(name, params=None, **kwargs):
     import urllib

     base_url = {
         'repo_commit': '/{repo_name}/changeset/{commit_id}',
         'repo_commit_comment_create': '/{repo_name}/changeset/{commit_id}/comment/create',
         'repo_commit_comment_preview': '/{repo_name}/changeset/{commit_id}/comment/preview',
         'repo_commit_comment_delete': '/{repo_name}/changeset/{commit_id}/comment/{comment_id}/delete',
     }[name].format(**kwargs)

     if params:
         base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
     return base_url


 @pytest.mark.backends("git", "hg", "svn")
 class TestRepoCommitCommentsView(TestController):

     @pytest.fixture(autouse=True)
     def prepare(self, request, baseapp):
         for x in ChangesetComment.query().all():
             Session().delete(x)
         Session().commit()

         for x in Notification.query().all():
             Session().delete(x)
         Session().commit()

         request.addfinalizer(self.cleanup)

     def cleanup(self):
         for x in ChangesetComment.query().all():
             Session().delete(x)
         Session().commit()

         for x in Notification.query().all():
             Session().delete(x)
         Session().commit()

     @pytest.mark.parametrize('comment_type', ChangesetComment.COMMENT_TYPES)
     def test_create(self, comment_type, backend):
         self.log_user()
         commit = backend.repo.get_commit('300')
         commit_id = commit.raw_id
         text = u'CommentOnCommit'

         params = {'text': text, 'csrf_token': self.csrf_token,
                   'comment_type': comment_type}
         self.app.post(
             route_path('repo_commit_comment_create',
                        repo_name=backend.repo_name, commit_id=commit_id),
             params=params)

         response = self.app.get(
             route_path('repo_commit',
                        repo_name=backend.repo_name, commit_id=commit_id))

         # test DB
         assert ChangesetComment.query().count() == 1
         assert_comment_links(response, ChangesetComment.query().count(), 0)

         assert Notification.query().count() == 1
         assert ChangesetComment.query().count() == 1

         notification = Notification.query().all()[0]

         comment_id = ChangesetComment.query().first().comment_id
         assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT

         author = notification.created_by_user.username_and_name
-        sbj = '{0} left a {1} on commit `{2}` in the {3} repository'.format(
+        sbj = '@{0} left a {1} on commit `{2}` in the `{3}` repository'.format(
             author, comment_type, h.show_id(commit), backend.repo_name)
         assert sbj == notification.subject

         lnk = (u'/{0}/changeset/{1}#comment-{2}'.format(
             backend.repo_name, commit_id, comment_id))
         assert lnk in notification.body

     @pytest.mark.parametrize('comment_type', ChangesetComment.COMMENT_TYPES)
     def test_create_inline(self, comment_type, backend):
         self.log_user()
         commit = backend.repo.get_commit('300')
         commit_id = commit.raw_id
         text = u'CommentOnCommit'
         f_path = 'vcs/web/simplevcs/views/repository.py'
         line = 'n1'

         params = {'text': text, 'f_path': f_path, 'line': line,
                   'comment_type': comment_type,
                   'csrf_token': self.csrf_token}

         self.app.post(
             route_path('repo_commit_comment_create',
                        repo_name=backend.repo_name, commit_id=commit_id),
             params=params)

         response = self.app.get(
             route_path('repo_commit',
                        repo_name=backend.repo_name, commit_id=commit_id))

         # test DB
         assert ChangesetComment.query().count() == 1
         assert_comment_links(response, 0, ChangesetComment.query().count())

         if backend.alias == 'svn':
             response.mustcontain(
                 '''data-f-path="vcs/commands/summary.py" '''
                 '''data-anchor-id="c-300-ad05457a43f8"'''
             )
         if backend.alias == 'git':
             response.mustcontain(
                 '''data-f-path="vcs/backends/hg.py" '''
                 '''data-anchor-id="c-883e775e89ea-9c390eb52cd6"'''
             )

         if backend.alias == 'hg':
             response.mustcontain(
                 '''data-f-path="vcs/backends/hg.py" '''
                 '''data-anchor-id="c-e58d85a3973b-9c390eb52cd6"'''
             )

         assert Notification.query().count() == 1
         assert ChangesetComment.query().count() == 1

         notification = Notification.query().all()[0]
         comment = ChangesetComment.query().first()
         assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT

         assert comment.revision == commit_id

         author = notification.created_by_user.username_and_name
-        sbj = '{0} left a {1} on file `{2}` in commit `{3}` in the {4} repository'.format(
+        sbj = '@{0} left a {1} on file `{2}` in commit `{3}` in the `{4}` repository'.format(
             author, comment_type, f_path, h.show_id(commit), backend.repo_name)

         assert sbj == notification.subject

         lnk = (u'/{0}/changeset/{1}#comment-{2}'.format(
             backend.repo_name, commit_id, comment.comment_id))
         assert lnk in notification.body
         assert 'on line n1' in notification.body

     def test_create_with_mention(self, backend):
         self.log_user()

         commit_id = backend.repo.get_commit('300').raw_id
         text = u'@test_regular check CommentOnCommit'

         params = {'text': text, 'csrf_token': self.csrf_token}
         self.app.post(
             route_path('repo_commit_comment_create',
                        repo_name=backend.repo_name, commit_id=commit_id),
             params=params)

         response = self.app.get(
             route_path('repo_commit',
                        repo_name=backend.repo_name, commit_id=commit_id))
         # test DB
         assert ChangesetComment.query().count() == 1
         assert_comment_links(response, ChangesetComment.query().count(), 0)

         notification = Notification.query().one()

         assert len(notification.recipients) == 2
         users = [x.username for x in notification.recipients]

         # test_regular gets notification by @mention
         assert sorted(users) == [u'test_admin', u'test_regular']

     def test_create_with_status_change(self, backend):
         self.log_user()
         commit = backend.repo.get_commit('300')
         commit_id = commit.raw_id
         text = u'CommentOnCommit'
         f_path = 'vcs/web/simplevcs/views/repository.py'
         line = 'n1'

         params = {'text': text, 'changeset_status': 'approved',
                   'csrf_token': self.csrf_token}

         self.app.post(
             route_path(
                 'repo_commit_comment_create',
                 repo_name=backend.repo_name, commit_id=commit_id),
             params=params)

         response = self.app.get(
             route_path('repo_commit',
                        repo_name=backend.repo_name, commit_id=commit_id))

         # test DB
         assert ChangesetComment.query().count() == 1
         assert_comment_links(response, ChangesetComment.query().count(), 0)

         assert Notification.query().count() == 1
         assert ChangesetComment.query().count() == 1

         notification = Notification.query().all()[0]

         comment_id = ChangesetComment.query().first().comment_id
         assert notification.type_ == Notification.TYPE_CHANGESET_COMMENT

         author = notification.created_by_user.username_and_name
-        sbj = '[status: Approved] {0} left a note on commit `{1}` in the {2} repository'.format(
+        sbj = '[status: Approved] @{0} left a note on commit `{1}` in the `{2}` repository'.format(
             author, h.show_id(commit), backend.repo_name)
         assert sbj == notification.subject

         lnk = (u'/{0}/changeset/{1}#comment-{2}'.format(
             backend.repo_name, commit_id, comment_id))
         assert lnk in notification.body

     def test_delete(self, backend):
         self.log_user()
         commit_id = backend.repo.get_commit('300').raw_id
         text = u'CommentOnCommit'

         params = {'text': text, 'csrf_token': self.csrf_token}
         self.app.post(
             route_path(
                 'repo_commit_comment_create',
                 repo_name=backend.repo_name, commit_id=commit_id),
             params=params)

         comments = ChangesetComment.query().all()
         assert len(comments) == 1
         comment_id = comments[0].comment_id

         self.app.post(
             route_path('repo_commit_comment_delete',
                        repo_name=backend.repo_name,
                        commit_id=commit_id,
                        comment_id=comment_id),
             params={'csrf_token': self.csrf_token})

         comments = ChangesetComment.query().all()
         assert len(comments) == 0

         response = self.app.get(
             route_path('repo_commit',
                        repo_name=backend.repo_name, commit_id=commit_id))
         assert_comment_links(response, 0, 0)

     @pytest.mark.parametrize('renderer, input, output', [
         ('rst', 'plain text', '<p>plain text</p>'),
         ('rst', 'header\n======', '<h1 class="title">header</h1>'),
         ('rst', '*italics*', '<em>italics</em>'),
         ('rst', '**bold**', '<strong>bold</strong>'),
         ('markdown', 'plain text', '<p>plain text</p>'),
         ('markdown', '# header', '<h1>header</h1>'),
         ('markdown', '*italics*', '<em>italics</em>'),
         ('markdown', '**bold**', '<strong>bold</strong>'),
     ], ids=['rst-plain', 'rst-header', 'rst-italics', 'rst-bold', 'md-plain',
             'md-header', 'md-italics', 'md-bold', ])
     def test_preview(self, renderer, input, output, backend, xhr_header):
         self.log_user()
         params = {
             'renderer': renderer,
             'text': input,
             'csrf_token': self.csrf_token
         }
         commit_id = '0' * 16  # fake this for tests
         response = self.app.post(
             route_path('repo_commit_comment_preview',
                        repo_name=backend.repo_name, commit_id=commit_id,),
             params=params,
             extra_environ=xhr_header)

         response.mustcontain(output)


 def assert_comment_links(response, comments, inline_comments):
     if comments == 1:
         comments_text = "%d General" % comments
     else:
         comments_text = "%d General" % comments

     if inline_comments == 1:
         inline_comments_text = "%d Inline" % inline_comments
     else:
         inline_comments_text = "%d Inline" % inline_comments

     if comments:
         response.mustcontain('<a href="#comments">%s</a>,' % comments_text)
     else:
         response.mustcontain(comments_text)

     if inline_comments:
         response.mustcontain(
             'id="inline-comments-counter">%s' % inline_comments_text)
     else:
         response.mustcontain(inline_comments_text)
@@ -1,1221 +1,1217 @@
 # -*- coding: utf-8 -*-

 # Copyright (C) 2010-2019 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
 import mock
 import pytest

 import rhodecode
 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
 from rhodecode.lib.vcs.nodes import FileNode
 from rhodecode.lib import helpers as h
 from rhodecode.model.changeset_status import ChangesetStatusModel
 from rhodecode.model.db import (
     PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
 from rhodecode.model.meta import Session
 from rhodecode.model.pull_request import PullRequestModel
 from rhodecode.model.user import UserModel
 from rhodecode.tests import (
     assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)


 def route_path(name, params=None, **kwargs):
     import urllib

     base_url = {
         'repo_changelog': '/{repo_name}/changelog',
         'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
         'repo_commits': '/{repo_name}/commits',
         'repo_commits_file': '/{repo_name}/commits/{commit_id}/{f_path}',
         'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
         'pullrequest_show_all': '/{repo_name}/pull-request',
         'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
         'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
         'pullrequest_repo_targets': '/{repo_name}/pull-request/repo-destinations',
         'pullrequest_new': '/{repo_name}/pull-request/new',
         'pullrequest_create': '/{repo_name}/pull-request/create',
         'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
         'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
         'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
         'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
         'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
     }[name].format(**kwargs)

     if params:
         base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
     return base_url


 @pytest.mark.usefixtures('app', 'autologin_user')
 @pytest.mark.backends("git", "hg")
 class TestPullrequestsView(object):

     def test_index(self, backend):
         self.app.get(route_path(
             'pullrequest_new',
             repo_name=backend.repo_name))

     def test_option_menu_create_pull_request_exists(self, backend):
         repo_name = backend.repo_name
         response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))

         create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
             'pullrequest_new', repo_name=repo_name)
         response.mustcontain(create_pr_link)

     def test_create_pr_form_with_raw_commit_id(self, backend):
         repo = backend.repo

         self.app.get(
             route_path('pullrequest_new', repo_name=repo.repo_name,
                        commit=repo.get_commit().raw_id),
             status=200)

     @pytest.mark.parametrize('pr_merge_enabled', [True, False])
     @pytest.mark.parametrize('range_diff', ["0", "1"])
     def test_show(self, pr_util, pr_merge_enabled, range_diff):
         pull_request = pr_util.create_pull_request(
             mergeable=pr_merge_enabled, enable_notifications=False)

         response = self.app.get(route_path(
             'pullrequest_show',
             repo_name=pull_request.target_repo.scm_instance().name,
             pull_request_id=pull_request.pull_request_id,
             params={'range-diff': range_diff}))

         for commit_id in pull_request.revisions:
             response.mustcontain(commit_id)

         assert pull_request.target_ref_parts.type in response
         assert pull_request.target_ref_parts.name in response
         target_clone_url = pull_request.target_repo.clone_url()
         assert target_clone_url in response

         assert 'class="pull-request-merge"' in response
         if pr_merge_enabled:
             response.mustcontain('Pull request reviewer approval is pending')
         else:
             response.mustcontain('Server-side pull request merging is disabled.')

         if range_diff == "1":
             response.mustcontain('Turn off: Show the diff as commit range')

     def test_close_status_visibility(self, pr_util, user_util, csrf_token):
         # Logout
         response = self.app.post(
             h.route_path('logout'),
             params={'csrf_token': csrf_token})
         # Login as regular user
         response = self.app.post(h.route_path('login'),
                                  {'username': TEST_USER_REGULAR_LOGIN,
                                   'password': 'test12'})

         pull_request = pr_util.create_pull_request(
|
128 | pull_request = pr_util.create_pull_request( | |
129 | author=TEST_USER_REGULAR_LOGIN) |
|
129 | author=TEST_USER_REGULAR_LOGIN) | |
130 |
|
130 | |||
131 | response = self.app.get(route_path( |
|
131 | response = self.app.get(route_path( | |
132 | 'pullrequest_show', |
|
132 | 'pullrequest_show', | |
133 | repo_name=pull_request.target_repo.scm_instance().name, |
|
133 | repo_name=pull_request.target_repo.scm_instance().name, | |
134 | pull_request_id=pull_request.pull_request_id)) |
|
134 | pull_request_id=pull_request.pull_request_id)) | |
135 |
|
135 | |||
136 | response.mustcontain('Server-side pull request merging is disabled.') |
|
136 | response.mustcontain('Server-side pull request merging is disabled.') | |
137 |
|
137 | |||
138 | assert_response = response.assert_response() |
|
138 | assert_response = response.assert_response() | |
139 | # for a regular user without merge permissions, we don't see it |
|
139 | # for a regular user without merge permissions, we don't see it | |
140 | assert_response.no_element_exists('#close-pull-request-action') |
|
140 | assert_response.no_element_exists('#close-pull-request-action') | |
141 |
|
141 | |||
142 | user_util.grant_user_permission_to_repo( |
|
142 | user_util.grant_user_permission_to_repo( | |
143 | pull_request.target_repo, |
|
143 | pull_request.target_repo, | |
144 | UserModel().get_by_username(TEST_USER_REGULAR_LOGIN), |
|
144 | UserModel().get_by_username(TEST_USER_REGULAR_LOGIN), | |
145 | 'repository.write') |
|
145 | 'repository.write') | |
146 | response = self.app.get(route_path( |
|
146 | response = self.app.get(route_path( | |
147 | 'pullrequest_show', |
|
147 | 'pullrequest_show', | |
148 | repo_name=pull_request.target_repo.scm_instance().name, |
|
148 | repo_name=pull_request.target_repo.scm_instance().name, | |
149 | pull_request_id=pull_request.pull_request_id)) |
|
149 | pull_request_id=pull_request.pull_request_id)) | |
150 |
|
150 | |||
151 | response.mustcontain('Server-side pull request merging is disabled.') |
|
151 | response.mustcontain('Server-side pull request merging is disabled.') | |
152 |
|
152 | |||
153 | assert_response = response.assert_response() |
|
153 | assert_response = response.assert_response() | |
154 | # now the regular user has merge permissions, we have the CLOSE button |
|
154 | # now the regular user has merge permissions, we have the CLOSE button | |
155 | assert_response.one_element_exists('#close-pull-request-action') |
|
155 | assert_response.one_element_exists('#close-pull-request-action') | |
156 |
|
156 | |||
157 | def test_show_invalid_commit_id(self, pr_util): |
|
157 | def test_show_invalid_commit_id(self, pr_util): | |
158 | # Simulating invalid revisions which will cause a lookup error |
|
158 | # Simulating invalid revisions which will cause a lookup error | |
159 | pull_request = pr_util.create_pull_request() |
|
159 | pull_request = pr_util.create_pull_request() | |
160 | pull_request.revisions = ['invalid'] |
|
160 | pull_request.revisions = ['invalid'] | |
161 | Session().add(pull_request) |
|
161 | Session().add(pull_request) | |
162 | Session().commit() |
|
162 | Session().commit() | |
163 |
|
163 | |||
164 | response = self.app.get(route_path( |
|
164 | response = self.app.get(route_path( | |
165 | 'pullrequest_show', |
|
165 | 'pullrequest_show', | |
166 | repo_name=pull_request.target_repo.scm_instance().name, |
|
166 | repo_name=pull_request.target_repo.scm_instance().name, | |
167 | pull_request_id=pull_request.pull_request_id)) |
|
167 | pull_request_id=pull_request.pull_request_id)) | |
168 |
|
168 | |||
169 | for commit_id in pull_request.revisions: |
|
169 | for commit_id in pull_request.revisions: | |
170 | response.mustcontain(commit_id) |
|
170 | response.mustcontain(commit_id) | |
171 |
|
171 | |||
172 | def test_show_invalid_source_reference(self, pr_util): |
|
172 | def test_show_invalid_source_reference(self, pr_util): | |
173 | pull_request = pr_util.create_pull_request() |
|
173 | pull_request = pr_util.create_pull_request() | |
174 | pull_request.source_ref = 'branch:b:invalid' |
|
174 | pull_request.source_ref = 'branch:b:invalid' | |
175 | Session().add(pull_request) |
|
175 | Session().add(pull_request) | |
176 | Session().commit() |
|
176 | Session().commit() | |
177 |
|
177 | |||
178 | self.app.get(route_path( |
|
178 | self.app.get(route_path( | |
179 | 'pullrequest_show', |
|
179 | 'pullrequest_show', | |
180 | repo_name=pull_request.target_repo.scm_instance().name, |
|
180 | repo_name=pull_request.target_repo.scm_instance().name, | |
181 | pull_request_id=pull_request.pull_request_id)) |
|
181 | pull_request_id=pull_request.pull_request_id)) | |
182 |
|
182 | |||
183 | def test_edit_title_description(self, pr_util, csrf_token): |
|
183 | def test_edit_title_description(self, pr_util, csrf_token): | |
184 | pull_request = pr_util.create_pull_request() |
|
184 | pull_request = pr_util.create_pull_request() | |
185 | pull_request_id = pull_request.pull_request_id |
|
185 | pull_request_id = pull_request.pull_request_id | |
186 |
|
186 | |||
187 | response = self.app.post( |
|
187 | response = self.app.post( | |
188 | route_path('pullrequest_update', |
|
188 | route_path('pullrequest_update', | |
189 | repo_name=pull_request.target_repo.repo_name, |
|
189 | repo_name=pull_request.target_repo.repo_name, | |
190 | pull_request_id=pull_request_id), |
|
190 | pull_request_id=pull_request_id), | |
191 | params={ |
|
191 | params={ | |
192 | 'edit_pull_request': 'true', |
|
192 | 'edit_pull_request': 'true', | |
193 | 'title': 'New title', |
|
193 | 'title': 'New title', | |
194 | 'description': 'New description', |
|
194 | 'description': 'New description', | |
195 | 'csrf_token': csrf_token}) |
|
195 | 'csrf_token': csrf_token}) | |
196 |
|
196 | |||
197 | assert_session_flash( |
|
197 | assert_session_flash( | |
198 | response, u'Pull request title & description updated.', |
|
198 | response, u'Pull request title & description updated.', | |
199 | category='success') |
|
199 | category='success') | |
200 |
|
200 | |||
201 | pull_request = PullRequest.get(pull_request_id) |
|
201 | pull_request = PullRequest.get(pull_request_id) | |
202 | assert pull_request.title == 'New title' |
|
202 | assert pull_request.title == 'New title' | |
203 | assert pull_request.description == 'New description' |
|
203 | assert pull_request.description == 'New description' | |
204 |
|
204 | |||
205 | def test_edit_title_description_closed(self, pr_util, csrf_token): |
|
205 | def test_edit_title_description_closed(self, pr_util, csrf_token): | |
206 | pull_request = pr_util.create_pull_request() |
|
206 | pull_request = pr_util.create_pull_request() | |
207 | pull_request_id = pull_request.pull_request_id |
|
207 | pull_request_id = pull_request.pull_request_id | |
208 | repo_name = pull_request.target_repo.repo_name |
|
208 | repo_name = pull_request.target_repo.repo_name | |
209 | pr_util.close() |
|
209 | pr_util.close() | |
210 |
|
210 | |||
211 | response = self.app.post( |
|
211 | response = self.app.post( | |
212 | route_path('pullrequest_update', |
|
212 | route_path('pullrequest_update', | |
213 | repo_name=repo_name, pull_request_id=pull_request_id), |
|
213 | repo_name=repo_name, pull_request_id=pull_request_id), | |
214 | params={ |
|
214 | params={ | |
215 | 'edit_pull_request': 'true', |
|
215 | 'edit_pull_request': 'true', | |
216 | 'title': 'New title', |
|
216 | 'title': 'New title', | |
217 | 'description': 'New description', |
|
217 | 'description': 'New description', | |
218 | 'csrf_token': csrf_token}, status=200) |
|
218 | 'csrf_token': csrf_token}, status=200) | |
219 | assert_session_flash( |
|
219 | assert_session_flash( | |
220 | response, u'Cannot update closed pull requests.', |
|
220 | response, u'Cannot update closed pull requests.', | |
221 | category='error') |
|
221 | category='error') | |
222 |
|
222 | |||
223 | def test_update_invalid_source_reference(self, pr_util, csrf_token): |
|
223 | def test_update_invalid_source_reference(self, pr_util, csrf_token): | |
224 | from rhodecode.lib.vcs.backends.base import UpdateFailureReason |
|
224 | from rhodecode.lib.vcs.backends.base import UpdateFailureReason | |
225 |
|
225 | |||
226 | pull_request = pr_util.create_pull_request() |
|
226 | pull_request = pr_util.create_pull_request() | |
227 | pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id' |
|
227 | pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id' | |
228 | Session().add(pull_request) |
|
228 | Session().add(pull_request) | |
229 | Session().commit() |
|
229 | Session().commit() | |
230 |
|
230 | |||
231 | pull_request_id = pull_request.pull_request_id |
|
231 | pull_request_id = pull_request.pull_request_id | |
232 |
|
232 | |||
233 | response = self.app.post( |
|
233 | response = self.app.post( | |
234 | route_path('pullrequest_update', |
|
234 | route_path('pullrequest_update', | |
235 | repo_name=pull_request.target_repo.repo_name, |
|
235 | repo_name=pull_request.target_repo.repo_name, | |
236 | pull_request_id=pull_request_id), |
|
236 | pull_request_id=pull_request_id), | |
237 | params={'update_commits': 'true', 'csrf_token': csrf_token}) |
|
237 | params={'update_commits': 'true', 'csrf_token': csrf_token}) | |
238 |
|
238 | |||
239 | expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[ |
|
239 | expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[ | |
240 | UpdateFailureReason.MISSING_SOURCE_REF]) |
|
240 | UpdateFailureReason.MISSING_SOURCE_REF]) | |
241 | assert_session_flash(response, expected_msg, category='error') |
|
241 | assert_session_flash(response, expected_msg, category='error') | |
242 |
|
242 | |||
243 | def test_missing_target_reference(self, pr_util, csrf_token): |
|
243 | def test_missing_target_reference(self, pr_util, csrf_token): | |
244 | from rhodecode.lib.vcs.backends.base import MergeFailureReason |
|
244 | from rhodecode.lib.vcs.backends.base import MergeFailureReason | |
245 | pull_request = pr_util.create_pull_request( |
|
245 | pull_request = pr_util.create_pull_request( | |
246 | approved=True, mergeable=True) |
|
246 | approved=True, mergeable=True) | |
247 | unicode_reference = u'branch:invalid-branch:invalid-commit-id' |
|
247 | unicode_reference = u'branch:invalid-branch:invalid-commit-id' | |
248 | pull_request.target_ref = unicode_reference |
|
248 | pull_request.target_ref = unicode_reference | |
249 | Session().add(pull_request) |
|
249 | Session().add(pull_request) | |
250 | Session().commit() |
|
250 | Session().commit() | |
251 |
|
251 | |||
252 | pull_request_id = pull_request.pull_request_id |
|
252 | pull_request_id = pull_request.pull_request_id | |
253 | pull_request_url = route_path( |
|
253 | pull_request_url = route_path( | |
254 | 'pullrequest_show', |
|
254 | 'pullrequest_show', | |
255 | repo_name=pull_request.target_repo.repo_name, |
|
255 | repo_name=pull_request.target_repo.repo_name, | |
256 | pull_request_id=pull_request_id) |
|
256 | pull_request_id=pull_request_id) | |
257 |
|
257 | |||
258 | response = self.app.get(pull_request_url) |
|
258 | response = self.app.get(pull_request_url) | |
259 | target_ref_id = 'invalid-branch' |
|
259 | target_ref_id = 'invalid-branch' | |
260 | merge_resp = MergeResponse( |
|
260 | merge_resp = MergeResponse( | |
261 | True, True, '', MergeFailureReason.MISSING_TARGET_REF, |
|
261 | True, True, '', MergeFailureReason.MISSING_TARGET_REF, | |
262 | metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)}) |
|
262 | metadata={'target_ref': PullRequest.unicode_to_reference(unicode_reference)}) | |
263 | response.assert_response().element_contains( |
|
263 | response.assert_response().element_contains( | |
264 | 'span[data-role="merge-message"]', merge_resp.merge_status_message) |
|
264 | 'span[data-role="merge-message"]', merge_resp.merge_status_message) | |
265 |
|
265 | |||
266 | def test_comment_and_close_pull_request_custom_message_approved( |
|
266 | def test_comment_and_close_pull_request_custom_message_approved( | |
267 | self, pr_util, csrf_token, xhr_header): |
|
267 | self, pr_util, csrf_token, xhr_header): | |
268 |
|
268 | |||
269 | pull_request = pr_util.create_pull_request(approved=True) |
|
269 | pull_request = pr_util.create_pull_request(approved=True) | |
270 | pull_request_id = pull_request.pull_request_id |
|
270 | pull_request_id = pull_request.pull_request_id | |
271 | author = pull_request.user_id |
|
271 | author = pull_request.user_id | |
272 | repo = pull_request.target_repo.repo_id |
|
272 | repo = pull_request.target_repo.repo_id | |
273 |
|
273 | |||
274 | self.app.post( |
|
274 | self.app.post( | |
275 | route_path('pullrequest_comment_create', |
|
275 | route_path('pullrequest_comment_create', | |
276 | repo_name=pull_request.target_repo.scm_instance().name, |
|
276 | repo_name=pull_request.target_repo.scm_instance().name, | |
277 | pull_request_id=pull_request_id), |
|
277 | pull_request_id=pull_request_id), | |
278 | params={ |
|
278 | params={ | |
279 | 'close_pull_request': '1', |
|
279 | 'close_pull_request': '1', | |
280 | 'text': 'Closing a PR', |
|
280 | 'text': 'Closing a PR', | |
281 | 'csrf_token': csrf_token}, |
|
281 | 'csrf_token': csrf_token}, | |
282 | extra_environ=xhr_header,) |
|
282 | extra_environ=xhr_header,) | |
283 |
|
283 | |||
284 | journal = UserLog.query()\ |
|
284 | journal = UserLog.query()\ | |
285 | .filter(UserLog.user_id == author)\ |
|
285 | .filter(UserLog.user_id == author)\ | |
286 | .filter(UserLog.repository_id == repo) \ |
|
286 | .filter(UserLog.repository_id == repo) \ | |
287 | .order_by(UserLog.user_log_id.asc()) \ |
|
287 | .order_by(UserLog.user_log_id.asc()) \ | |
288 | .all() |
|
288 | .all() | |
289 | assert journal[-1].action == 'repo.pull_request.close' |
|
289 | assert journal[-1].action == 'repo.pull_request.close' | |
290 |
|
290 | |||
291 | pull_request = PullRequest.get(pull_request_id) |
|
291 | pull_request = PullRequest.get(pull_request_id) | |
292 | assert pull_request.is_closed() |
|
292 | assert pull_request.is_closed() | |
293 |
|
293 | |||
294 | status = ChangesetStatusModel().get_status( |
|
294 | status = ChangesetStatusModel().get_status( | |
295 | pull_request.source_repo, pull_request=pull_request) |
|
295 | pull_request.source_repo, pull_request=pull_request) | |
296 | assert status == ChangesetStatus.STATUS_APPROVED |
|
296 | assert status == ChangesetStatus.STATUS_APPROVED | |
297 | comments = ChangesetComment().query() \ |
|
297 | comments = ChangesetComment().query() \ | |
298 | .filter(ChangesetComment.pull_request == pull_request) \ |
|
298 | .filter(ChangesetComment.pull_request == pull_request) \ | |
299 | .order_by(ChangesetComment.comment_id.asc())\ |
|
299 | .order_by(ChangesetComment.comment_id.asc())\ | |
300 | .all() |
|
300 | .all() | |
301 | assert comments[-1].text == 'Closing a PR' |
|
301 | assert comments[-1].text == 'Closing a PR' | |
302 |
|
302 | |||
303 | def test_comment_force_close_pull_request_rejected( |
|
303 | def test_comment_force_close_pull_request_rejected( | |
304 | self, pr_util, csrf_token, xhr_header): |
|
304 | self, pr_util, csrf_token, xhr_header): | |
305 | pull_request = pr_util.create_pull_request() |
|
305 | pull_request = pr_util.create_pull_request() | |
306 | pull_request_id = pull_request.pull_request_id |
|
306 | pull_request_id = pull_request.pull_request_id | |
307 | PullRequestModel().update_reviewers( |
|
307 | PullRequestModel().update_reviewers( | |
308 | pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])], |
|
308 | pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])], | |
309 | pull_request.author) |
|
309 | pull_request.author) | |
310 | author = pull_request.user_id |
|
310 | author = pull_request.user_id | |
311 | repo = pull_request.target_repo.repo_id |
|
311 | repo = pull_request.target_repo.repo_id | |
312 |
|
312 | |||
313 | self.app.post( |
|
313 | self.app.post( | |
314 | route_path('pullrequest_comment_create', |
|
314 | route_path('pullrequest_comment_create', | |
315 | repo_name=pull_request.target_repo.scm_instance().name, |
|
315 | repo_name=pull_request.target_repo.scm_instance().name, | |
316 | pull_request_id=pull_request_id), |
|
316 | pull_request_id=pull_request_id), | |
317 | params={ |
|
317 | params={ | |
318 | 'close_pull_request': '1', |
|
318 | 'close_pull_request': '1', | |
319 | 'csrf_token': csrf_token}, |
|
319 | 'csrf_token': csrf_token}, | |
320 | extra_environ=xhr_header) |
|
320 | extra_environ=xhr_header) | |
321 |
|
321 | |||
322 | pull_request = PullRequest.get(pull_request_id) |
|
322 | pull_request = PullRequest.get(pull_request_id) | |
323 |
|
323 | |||
324 | journal = UserLog.query()\ |
|
324 | journal = UserLog.query()\ | |
325 | .filter(UserLog.user_id == author, UserLog.repository_id == repo) \ |
|
325 | .filter(UserLog.user_id == author, UserLog.repository_id == repo) \ | |
326 | .order_by(UserLog.user_log_id.asc()) \ |
|
326 | .order_by(UserLog.user_log_id.asc()) \ | |
327 | .all() |
|
327 | .all() | |
328 | assert journal[-1].action == 'repo.pull_request.close' |
|
328 | assert journal[-1].action == 'repo.pull_request.close' | |
329 |
|
329 | |||
330 | # check only the latest status, not the review status |
|
330 | # check only the latest status, not the review status | |
331 | status = ChangesetStatusModel().get_status( |
|
331 | status = ChangesetStatusModel().get_status( | |
332 | pull_request.source_repo, pull_request=pull_request) |
|
332 | pull_request.source_repo, pull_request=pull_request) | |
333 | assert status == ChangesetStatus.STATUS_REJECTED |
|
333 | assert status == ChangesetStatus.STATUS_REJECTED | |
334 |
|
334 | |||
335 | def test_comment_and_close_pull_request( |
|
335 | def test_comment_and_close_pull_request( | |
336 | self, pr_util, csrf_token, xhr_header): |
|
336 | self, pr_util, csrf_token, xhr_header): | |
337 | pull_request = pr_util.create_pull_request() |
|
337 | pull_request = pr_util.create_pull_request() | |
338 | pull_request_id = pull_request.pull_request_id |
|
338 | pull_request_id = pull_request.pull_request_id | |
339 |
|
339 | |||
340 | response = self.app.post( |
|
340 | response = self.app.post( | |
341 | route_path('pullrequest_comment_create', |
|
341 | route_path('pullrequest_comment_create', | |
342 | repo_name=pull_request.target_repo.scm_instance().name, |
|
342 | repo_name=pull_request.target_repo.scm_instance().name, | |
343 | pull_request_id=pull_request.pull_request_id), |
|
343 | pull_request_id=pull_request.pull_request_id), | |
344 | params={ |
|
344 | params={ | |
345 | 'close_pull_request': 'true', |
|
345 | 'close_pull_request': 'true', | |
346 | 'csrf_token': csrf_token}, |
|
346 | 'csrf_token': csrf_token}, | |
347 | extra_environ=xhr_header) |
|
347 | extra_environ=xhr_header) | |
348 |
|
348 | |||
349 | assert response.json |
|
349 | assert response.json | |
350 |
|
350 | |||
351 | pull_request = PullRequest.get(pull_request_id) |
|
351 | pull_request = PullRequest.get(pull_request_id) | |
352 | assert pull_request.is_closed() |
|
352 | assert pull_request.is_closed() | |
353 |
|
353 | |||
354 | # check only the latest status, not the review status |
|
354 | # check only the latest status, not the review status | |
355 | status = ChangesetStatusModel().get_status( |
|
355 | status = ChangesetStatusModel().get_status( | |
356 | pull_request.source_repo, pull_request=pull_request) |
|
356 | pull_request.source_repo, pull_request=pull_request) | |
357 | assert status == ChangesetStatus.STATUS_REJECTED |
|
357 | assert status == ChangesetStatus.STATUS_REJECTED | |
358 |
|
358 | |||
359 | def test_create_pull_request(self, backend, csrf_token): |
|
359 | def test_create_pull_request(self, backend, csrf_token): | |
360 | commits = [ |
|
360 | commits = [ | |
361 | {'message': 'ancestor'}, |
|
361 | {'message': 'ancestor'}, | |
362 | {'message': 'change'}, |
|
362 | {'message': 'change'}, | |
363 | {'message': 'change2'}, |
|
363 | {'message': 'change2'}, | |
364 | ] |
|
364 | ] | |
365 | commit_ids = backend.create_master_repo(commits) |
|
365 | commit_ids = backend.create_master_repo(commits) | |
366 | target = backend.create_repo(heads=['ancestor']) |
|
366 | target = backend.create_repo(heads=['ancestor']) | |
367 | source = backend.create_repo(heads=['change2']) |
|
367 | source = backend.create_repo(heads=['change2']) | |
368 |
|
368 | |||
369 | response = self.app.post( |
|
369 | response = self.app.post( | |
370 | route_path('pullrequest_create', repo_name=source.repo_name), |
|
370 | route_path('pullrequest_create', repo_name=source.repo_name), | |
371 | [ |
|
371 | [ | |
372 | ('source_repo', source.repo_name), |
|
372 | ('source_repo', source.repo_name), | |
373 | ('source_ref', 'branch:default:' + commit_ids['change2']), |
|
373 | ('source_ref', 'branch:default:' + commit_ids['change2']), | |
374 | ('target_repo', target.repo_name), |
|
374 | ('target_repo', target.repo_name), | |
375 | ('target_ref', 'branch:default:' + commit_ids['ancestor']), |
|
375 | ('target_ref', 'branch:default:' + commit_ids['ancestor']), | |
376 | ('common_ancestor', commit_ids['ancestor']), |
|
376 | ('common_ancestor', commit_ids['ancestor']), | |
377 | ('pullrequest_title', 'Title'), |
|
377 | ('pullrequest_title', 'Title'), | |
378 | ('pullrequest_desc', 'Description'), |
|
378 | ('pullrequest_desc', 'Description'), | |
379 | ('description_renderer', 'markdown'), |
|
379 | ('description_renderer', 'markdown'), | |
380 | ('__start__', 'review_members:sequence'), |
|
380 | ('__start__', 'review_members:sequence'), | |
381 | ('__start__', 'reviewer:mapping'), |
|
381 | ('__start__', 'reviewer:mapping'), | |
382 | ('user_id', '1'), |
|
382 | ('user_id', '1'), | |
383 | ('__start__', 'reasons:sequence'), |
|
383 | ('__start__', 'reasons:sequence'), | |
384 | ('reason', 'Some reason'), |
|
384 | ('reason', 'Some reason'), | |
385 | ('__end__', 'reasons:sequence'), |
|
385 | ('__end__', 'reasons:sequence'), | |
386 | ('__start__', 'rules:sequence'), |
|
386 | ('__start__', 'rules:sequence'), | |
387 | ('__end__', 'rules:sequence'), |
|
387 | ('__end__', 'rules:sequence'), | |
388 | ('mandatory', 'False'), |
|
388 | ('mandatory', 'False'), | |
389 | ('__end__', 'reviewer:mapping'), |
|
389 | ('__end__', 'reviewer:mapping'), | |
390 | ('__end__', 'review_members:sequence'), |
|
390 | ('__end__', 'review_members:sequence'), | |
391 | ('__start__', 'revisions:sequence'), |
|
391 | ('__start__', 'revisions:sequence'), | |
392 | ('revisions', commit_ids['change']), |
|
392 | ('revisions', commit_ids['change']), | |
393 | ('revisions', commit_ids['change2']), |
|
393 | ('revisions', commit_ids['change2']), | |
394 | ('__end__', 'revisions:sequence'), |
|
394 | ('__end__', 'revisions:sequence'), | |
395 | ('user', ''), |
|
395 | ('user', ''), | |
396 | ('csrf_token', csrf_token), |
|
396 | ('csrf_token', csrf_token), | |
397 | ], |
|
397 | ], | |
398 | status=302) |
|
398 | status=302) | |
399 |
|
399 | |||
400 | location = response.headers['Location'] |
|
400 | location = response.headers['Location'] | |
401 | pull_request_id = location.rsplit('/', 1)[1] |
|
401 | pull_request_id = location.rsplit('/', 1)[1] | |
402 | assert pull_request_id != 'new' |
|
402 | assert pull_request_id != 'new' | |
403 | pull_request = PullRequest.get(int(pull_request_id)) |
|
403 | pull_request = PullRequest.get(int(pull_request_id)) | |
404 |
|
404 | |||
405 | # check that we now have both revisions |
|
405 | # check that we now have both revisions | |
406 | assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']] |
|
406 | assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']] | |
407 | assert pull_request.source_ref == 'branch:default:' + commit_ids['change2'] |
|
407 | assert pull_request.source_ref == 'branch:default:' + commit_ids['change2'] | |
408 | expected_target_ref = 'branch:default:' + commit_ids['ancestor'] |
|
408 | expected_target_ref = 'branch:default:' + commit_ids['ancestor'] | |
409 | assert pull_request.target_ref == expected_target_ref |
|
409 | assert pull_request.target_ref == expected_target_ref | |
410 |
|
410 | |||
411 | def test_reviewer_notifications(self, backend, csrf_token): |
|
411 | def test_reviewer_notifications(self, backend, csrf_token): | |
412 | # We have to use the app.post for this test so it will create the |
|
412 | # We have to use the app.post for this test so it will create the | |
413 | # notifications properly with the new PR |
|
413 | # notifications properly with the new PR | |
414 | commits = [ |
|
414 | commits = [ | |
415 | {'message': 'ancestor', |
|
415 | {'message': 'ancestor', | |
416 | 'added': [FileNode('file_A', content='content_of_ancestor')]}, |
|
416 | 'added': [FileNode('file_A', content='content_of_ancestor')]}, | |
417 | {'message': 'change', |
|
417 | {'message': 'change', | |
418 | 'added': [FileNode('file_a', content='content_of_change')]}, |
|
418 | 'added': [FileNode('file_a', content='content_of_change')]}, | |
419 | {'message': 'change-child'}, |
|
419 | {'message': 'change-child'}, | |
420 | {'message': 'ancestor-child', 'parents': ['ancestor'], |
|
420 | {'message': 'ancestor-child', 'parents': ['ancestor'], | |
421 | 'added': [ |
|
421 | 'added': [ | |
422 | FileNode('file_B', content='content_of_ancestor_child')]}, |
|
422 | FileNode('file_B', content='content_of_ancestor_child')]}, | |
423 | {'message': 'ancestor-child-2'}, |
|
423 | {'message': 'ancestor-child-2'}, | |
424 | ] |
|
424 | ] | |
425 | commit_ids = backend.create_master_repo(commits) |
|
425 | commit_ids = backend.create_master_repo(commits) | |
426 | target = backend.create_repo(heads=['ancestor-child']) |
|
426 | target = backend.create_repo(heads=['ancestor-child']) | |
427 | source = backend.create_repo(heads=['change']) |
|
427 | source = backend.create_repo(heads=['change']) | |
428 |
|
428 | |||
429 | response = self.app.post( |
|
429 | response = self.app.post( | |
430 | route_path('pullrequest_create', repo_name=source.repo_name), |
|
430 | route_path('pullrequest_create', repo_name=source.repo_name), | |
431 | [ |
|
431 | [ | |
432 | ('source_repo', source.repo_name), |
|
432 | ('source_repo', source.repo_name), | |
433 | ('source_ref', 'branch:default:' + commit_ids['change']), |
|
433 | ('source_ref', 'branch:default:' + commit_ids['change']), | |
434 | ('target_repo', target.repo_name), |
|
434 | ('target_repo', target.repo_name), | |
435 | ('target_ref', 'branch:default:' + commit_ids['ancestor-child']), |
|
435 | ('target_ref', 'branch:default:' + commit_ids['ancestor-child']), | |
436 | ('common_ancestor', commit_ids['ancestor']), |
|
436 | ('common_ancestor', commit_ids['ancestor']), | |
437 | ('pullrequest_title', 'Title'), |
|
437 | ('pullrequest_title', 'Title'), | |
438 | ('pullrequest_desc', 'Description'), |
|
438 | ('pullrequest_desc', 'Description'), | |
439 | ('description_renderer', 'markdown'), |
|
439 | ('description_renderer', 'markdown'), | |
440 | ('__start__', 'review_members:sequence'), |
|
440 | ('__start__', 'review_members:sequence'), | |
441 | ('__start__', 'reviewer:mapping'), |
|
441 | ('__start__', 'reviewer:mapping'), | |
442 | ('user_id', '2'), |
|
442 | ('user_id', '2'), | |
443 | ('__start__', 'reasons:sequence'), |
|
443 | ('__start__', 'reasons:sequence'), | |
444 | ('reason', 'Some reason'), |
|
444 | ('reason', 'Some reason'), | |
445 | ('__end__', 'reasons:sequence'), |
|
445 | ('__end__', 'reasons:sequence'), | |
446 | ('__start__', 'rules:sequence'), |
|
446 | ('__start__', 'rules:sequence'), | |
447 | ('__end__', 'rules:sequence'), |
|
447 | ('__end__', 'rules:sequence'), | |
448 | ('mandatory', 'False'), |
|
448 | ('mandatory', 'False'), | |
449 | ('__end__', 'reviewer:mapping'), |
|
449 | ('__end__', 'reviewer:mapping'), | |
450 | ('__end__', 'review_members:sequence'), |
|
450 | ('__end__', 'review_members:sequence'), | |
451 | ('__start__', 'revisions:sequence'), |
|
451 | ('__start__', 'revisions:sequence'), | |
452 | ('revisions', commit_ids['change']), |
|
452 | ('revisions', commit_ids['change']), | |
453 | ('__end__', 'revisions:sequence'), |
|
453 | ('__end__', 'revisions:sequence'), | |
454 | ('user', ''), |
|
454 | ('user', ''), | |
455 | ('csrf_token', csrf_token), |
|
455 | ('csrf_token', csrf_token), | |
456 | ], |
|
456 | ], | |
457 | status=302) |
|
457 | status=302) | |
458 |
|
458 | |||
459 | location = response.headers['Location'] |
|
459 | location = response.headers['Location'] | |
460 |
|
460 | |||
461 | pull_request_id = location.rsplit('/', 1)[1] |
|
461 | pull_request_id = location.rsplit('/', 1)[1] | |
462 | assert pull_request_id != 'new' |
|
462 | assert pull_request_id != 'new' | |
463 | pull_request = PullRequest.get(int(pull_request_id)) |
|
463 | pull_request = PullRequest.get(int(pull_request_id)) | |
464 |
|
464 | |||
465 | # Check that a notification was made |
|
465 | # Check that a notification was made | |
466 | notifications = Notification.query()\ |
|
466 | notifications = Notification.query()\ | |
467 | .filter(Notification.created_by == pull_request.author.user_id, |
|
467 | .filter(Notification.created_by == pull_request.author.user_id, | |
468 | Notification.type_ == Notification.TYPE_PULL_REQUEST, |
|
468 | Notification.type_ == Notification.TYPE_PULL_REQUEST, | |
469 | Notification.subject.contains( |
|
469 | Notification.subject.contains( | |
470 | " |
|
470 | "requested a pull request review. !%s" % pull_request_id)) | |
471 | assert len(notifications.all()) == 1 |
|
471 | assert len(notifications.all()) == 1 | |
472 |
|
472 | |||
473 | # Change reviewers and check that a notification was made |
|
473 | # Change reviewers and check that a notification was made | |
474 | PullRequestModel().update_reviewers( |
|
474 | PullRequestModel().update_reviewers( | |
475 | pull_request.pull_request_id, [(1, [], False, [])], |
|
475 | pull_request.pull_request_id, [(1, [], False, [])], | |
476 | pull_request.author) |
|
476 | pull_request.author) | |
477 | assert len(notifications.all()) == 2 |
|
477 | assert len(notifications.all()) == 2 | |
478 |
|
478 | |||
479 | def test_create_pull_request_stores_ancestor_commit_id(self, backend, |
|
479 | def test_create_pull_request_stores_ancestor_commit_id(self, backend, | |
480 | csrf_token): |
|
480 | csrf_token): | |
481 | commits = [ |
|
481 | commits = [ | |
482 | {'message': 'ancestor', |
|
482 | {'message': 'ancestor', | |
483 | 'added': [FileNode('file_A', content='content_of_ancestor')]}, |
|
483 | 'added': [FileNode('file_A', content='content_of_ancestor')]}, | |
484 | {'message': 'change', |
|
484 | {'message': 'change', | |
485 | 'added': [FileNode('file_a', content='content_of_change')]}, |
|
485 | 'added': [FileNode('file_a', content='content_of_change')]}, | |
486 | {'message': 'change-child'}, |
|
486 | {'message': 'change-child'}, | |
487 | {'message': 'ancestor-child', 'parents': ['ancestor'], |
|
487 | {'message': 'ancestor-child', 'parents': ['ancestor'], | |
488 | 'added': [ |
|
488 | 'added': [ | |
489 | FileNode('file_B', content='content_of_ancestor_child')]}, |
|
489 | FileNode('file_B', content='content_of_ancestor_child')]}, | |
490 | {'message': 'ancestor-child-2'}, |
|
490 | {'message': 'ancestor-child-2'}, | |
491 | ] |
|
491 | ] | |
492 | commit_ids = backend.create_master_repo(commits) |
|
492 | commit_ids = backend.create_master_repo(commits) | |
493 | target = backend.create_repo(heads=['ancestor-child']) |
|
493 | target = backend.create_repo(heads=['ancestor-child']) | |
494 | source = backend.create_repo(heads=['change']) |
|
494 | source = backend.create_repo(heads=['change']) | |
495 |
|
495 | |||
496 | response = self.app.post( |
|
496 | response = self.app.post( | |
497 | route_path('pullrequest_create', repo_name=source.repo_name), |
|
497 | route_path('pullrequest_create', repo_name=source.repo_name), | |
498 | [ |
|
498 | [ | |
499 | ('source_repo', source.repo_name), |
|
499 | ('source_repo', source.repo_name), | |
500 | ('source_ref', 'branch:default:' + commit_ids['change']), |
|
500 | ('source_ref', 'branch:default:' + commit_ids['change']), | |
501 | ('target_repo', target.repo_name), |
|
501 | ('target_repo', target.repo_name), | |
502 | ('target_ref', 'branch:default:' + commit_ids['ancestor-child']), |
|
502 | ('target_ref', 'branch:default:' + commit_ids['ancestor-child']), | |
503 | ('common_ancestor', commit_ids['ancestor']), |
|
503 | ('common_ancestor', commit_ids['ancestor']), | |
504 | ('pullrequest_title', 'Title'), |
|
504 | ('pullrequest_title', 'Title'), | |
505 | ('pullrequest_desc', 'Description'), |
|
505 | ('pullrequest_desc', 'Description'), | |
506 | ('description_renderer', 'markdown'), |
|
506 | ('description_renderer', 'markdown'), | |
507 | ('__start__', 'review_members:sequence'), |
|
507 | ('__start__', 'review_members:sequence'), | |
508 | ('__start__', 'reviewer:mapping'), |
|
508 | ('__start__', 'reviewer:mapping'), | |
509 | ('user_id', '1'), |
|
509 | ('user_id', '1'), | |
510 | ('__start__', 'reasons:sequence'), |
|
510 | ('__start__', 'reasons:sequence'), | |
511 | ('reason', 'Some reason'), |
|
511 | ('reason', 'Some reason'), | |
512 | ('__end__', 'reasons:sequence'), |
|
512 | ('__end__', 'reasons:sequence'), | |
513 | ('__start__', 'rules:sequence'), |
|
513 | ('__start__', 'rules:sequence'), | |
514 | ('__end__', 'rules:sequence'), |
|
514 | ('__end__', 'rules:sequence'), | |
515 | ('mandatory', 'False'), |
|
515 | ('mandatory', 'False'), | |
516 | ('__end__', 'reviewer:mapping'), |
|
516 | ('__end__', 'reviewer:mapping'), | |
517 | ('__end__', 'review_members:sequence'), |
|
517 | ('__end__', 'review_members:sequence'), | |
518 | ('__start__', 'revisions:sequence'), |
|
518 | ('__start__', 'revisions:sequence'), | |
519 | ('revisions', commit_ids['change']), |
|
519 | ('revisions', commit_ids['change']), | |
520 | ('__end__', 'revisions:sequence'), |
|
520 | ('__end__', 'revisions:sequence'), | |
521 | ('user', ''), |
|
521 | ('user', ''), | |
522 | ('csrf_token', csrf_token), |
|
522 | ('csrf_token', csrf_token), | |
523 | ], |
|
523 | ], | |
524 | status=302) |
|
524 | status=302) | |
525 |
|
525 | |||
526 | location = response.headers['Location'] |
|
526 | location = response.headers['Location'] | |
527 |
|
527 | |||
528 | pull_request_id = location.rsplit('/', 1)[1] |
|
528 | pull_request_id = location.rsplit('/', 1)[1] | |
529 | assert pull_request_id != 'new' |
|
529 | assert pull_request_id != 'new' | |
530 | pull_request = PullRequest.get(int(pull_request_id)) |
|
530 | pull_request = PullRequest.get(int(pull_request_id)) | |
531 |
|
531 | |||
532 | # target_ref has to point to the ancestor's commit_id in order to |
|
532 | # target_ref has to point to the ancestor's commit_id in order to | |
533 | # show the correct diff |
|
533 | # show the correct diff | |
534 | expected_target_ref = 'branch:default:' + commit_ids['ancestor'] |
|
534 | expected_target_ref = 'branch:default:' + commit_ids['ancestor'] | |
535 | assert pull_request.target_ref == expected_target_ref |
|
535 | assert pull_request.target_ref == expected_target_ref | |
536 |
|
536 | |||
537 | # Check generated diff contents |
|
537 | # Check generated diff contents | |
538 | response = response.follow() |
|
538 | response = response.follow() | |
539 | assert 'content_of_ancestor' not in response.body |
|
539 | assert 'content_of_ancestor' not in response.body | |
540 | assert 'content_of_ancestor-child' not in response.body |
|
540 | assert 'content_of_ancestor-child' not in response.body | |
541 | assert 'content_of_change' in response.body |
|
541 | assert 'content_of_change' in response.body | |
542 |
|
542 | |||
543 | def test_merge_pull_request_enabled(self, pr_util, csrf_token): |
|
543 | def test_merge_pull_request_enabled(self, pr_util, csrf_token): | |
544 | # Clear any previous calls to rcextensions |
|
544 | # Clear any previous calls to rcextensions | |
545 | rhodecode.EXTENSIONS.calls.clear() |
|
545 | rhodecode.EXTENSIONS.calls.clear() | |
546 |
|
546 | |||
547 | pull_request = pr_util.create_pull_request( |
|
547 | pull_request = pr_util.create_pull_request( | |
548 | approved=True, mergeable=True) |
|
548 | approved=True, mergeable=True) | |
549 | pull_request_id = pull_request.pull_request_id |
|
549 | pull_request_id = pull_request.pull_request_id | |
550 | repo_name = pull_request.target_repo.scm_instance().name, |
|
550 | repo_name = pull_request.target_repo.scm_instance().name, | |
551 |
|
551 | |||
552 | response = self.app.post( |
|
552 | url = route_path('pullrequest_merge', | |
553 | route_path('pullrequest_merge', |
|
553 | repo_name=str(repo_name[0]), | |
554 | repo_name=str(repo_name[0]), |
|
554 | pull_request_id=pull_request_id) | |
555 | pull_request_id=pull_request_id), |
|
555 | response = self.app.post(url, params={'csrf_token': csrf_token}).follow() | |
556 | params={'csrf_token': csrf_token}).follow() |
|
|||
557 |
|
556 | |||
558 | pull_request = PullRequest.get(pull_request_id) |
|
557 | pull_request = PullRequest.get(pull_request_id) | |
559 |
|
558 | |||
560 | assert response.status_int == 200 |
|
559 | assert response.status_int == 200 | |
561 | assert pull_request.is_closed() |
|
560 | assert pull_request.is_closed() | |
562 | assert_pull_request_status( |
|
561 | assert_pull_request_status( | |
563 | pull_request, ChangesetStatus.STATUS_APPROVED) |
|
562 | pull_request, ChangesetStatus.STATUS_APPROVED) | |
564 |
|
563 | |||
565 | # Check the relevant log entries were added |
|
564 | # Check the relevant log entries were added | |
566 | user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3) |
|
565 | user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(3) | |
567 | actions = [log.action for log in user_logs] |
|
566 | actions = [log.action for log in user_logs] | |
568 | pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request) |
|
567 | pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request) | |
569 | expected_actions = [ |
|
568 | expected_actions = [ | |
570 | u'repo.pull_request.close', |
|
569 | u'repo.pull_request.close', | |
571 | u'repo.pull_request.merge', |
|
570 | u'repo.pull_request.merge', | |
572 | u'repo.pull_request.comment.create' |
|
571 | u'repo.pull_request.comment.create' | |
573 | ] |
|
572 | ] | |
574 | assert actions == expected_actions |
|
573 | assert actions == expected_actions | |
575 |
|
574 | |||
576 | user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4) |
|
575 | user_logs = UserLog.query().order_by(UserLog.user_log_id.desc()).limit(4) | |
577 | actions = [log for log in user_logs] |
|
576 | actions = [log for log in user_logs] | |
578 | assert actions[-1].action == 'user.push' |
|
577 | assert actions[-1].action == 'user.push' | |
579 | assert actions[-1].action_data['commit_ids'] == pr_commit_ids |
|
578 | assert actions[-1].action_data['commit_ids'] == pr_commit_ids | |
580 |
|
579 | |||
581 | # Check post_push rcextension was really executed |
|
580 | # Check post_push rcextension was really executed | |
582 | push_calls = rhodecode.EXTENSIONS.calls['_push_hook'] |
|
581 | push_calls = rhodecode.EXTENSIONS.calls['_push_hook'] | |
583 | assert len(push_calls) == 1 |
|
582 | assert len(push_calls) == 1 | |
584 | unused_last_call_args, last_call_kwargs = push_calls[0] |
|
583 | unused_last_call_args, last_call_kwargs = push_calls[0] | |
585 | assert last_call_kwargs['action'] == 'push' |
|
584 | assert last_call_kwargs['action'] == 'push' | |
586 | assert last_call_kwargs['commit_ids'] == pr_commit_ids |
|
585 | assert last_call_kwargs['commit_ids'] == pr_commit_ids | |
587 |
|
586 | |||
588 | def test_merge_pull_request_disabled(self, pr_util, csrf_token): |
|
587 | def test_merge_pull_request_disabled(self, pr_util, csrf_token): | |
589 | pull_request = pr_util.create_pull_request(mergeable=False) |
|
588 | pull_request = pr_util.create_pull_request(mergeable=False) | |
590 | pull_request_id = pull_request.pull_request_id |
|
589 | pull_request_id = pull_request.pull_request_id | |
591 | pull_request = PullRequest.get(pull_request_id) |
|
590 | pull_request = PullRequest.get(pull_request_id) | |
592 |
|
591 | |||
593 | response = self.app.post( |
|
592 | response = self.app.post( | |
594 | route_path('pullrequest_merge', |
|
593 | route_path('pullrequest_merge', | |
595 | repo_name=pull_request.target_repo.scm_instance().name, |
|
594 | repo_name=pull_request.target_repo.scm_instance().name, | |
596 | pull_request_id=pull_request.pull_request_id), |
|
595 | pull_request_id=pull_request.pull_request_id), | |
597 | params={'csrf_token': csrf_token}).follow() |
|
596 | params={'csrf_token': csrf_token}).follow() | |
598 |
|
597 | |||
599 | assert response.status_int == 200 |
|
598 | assert response.status_int == 200 | |
600 | response.mustcontain( |
|
599 | response.mustcontain( | |
601 | 'Merge is not currently possible because of below failed checks.') |
|
600 | 'Merge is not currently possible because of below failed checks.') | |
602 | response.mustcontain('Server-side pull request merging is disabled.') |
|
601 | response.mustcontain('Server-side pull request merging is disabled.') | |
603 |
|
602 | |||
604 | @pytest.mark.skip_backends('svn') |
|
603 | @pytest.mark.skip_backends('svn') | |
605 | def test_merge_pull_request_not_approved(self, pr_util, csrf_token): |
|
604 | def test_merge_pull_request_not_approved(self, pr_util, csrf_token): | |
606 | pull_request = pr_util.create_pull_request(mergeable=True) |
|
605 | pull_request = pr_util.create_pull_request(mergeable=True) | |
607 | pull_request_id = pull_request.pull_request_id |
|
606 | pull_request_id = pull_request.pull_request_id | |
608 | repo_name = pull_request.target_repo.scm_instance().name |
|
607 | repo_name = pull_request.target_repo.scm_instance().name | |
609 |
|
608 | |||
610 | response = self.app.post( |
|
609 | response = self.app.post( | |
611 | route_path('pullrequest_merge', |
|
610 | route_path('pullrequest_merge', | |
612 | repo_name=repo_name, pull_request_id=pull_request_id), |
|
611 | repo_name=repo_name, pull_request_id=pull_request_id), | |
613 | params={'csrf_token': csrf_token}).follow() |
|
612 | params={'csrf_token': csrf_token}).follow() | |
614 |
|
613 | |||
615 | assert response.status_int == 200 |
|
614 | assert response.status_int == 200 | |
616 |
|
615 | |||
617 | response.mustcontain( |
|
616 | response.mustcontain( | |
618 | 'Merge is not currently possible because of below failed checks.') |
|
617 | 'Merge is not currently possible because of below failed checks.') | |
619 | response.mustcontain('Pull request reviewer approval is pending.') |
|
618 | response.mustcontain('Pull request reviewer approval is pending.') | |
620 |
|
619 | |||
621 | def test_merge_pull_request_renders_failure_reason( |
|
620 | def test_merge_pull_request_renders_failure_reason( | |
622 | self, user_regular, csrf_token, pr_util): |
|
621 | self, user_regular, csrf_token, pr_util): | |
623 | pull_request = pr_util.create_pull_request(mergeable=True, approved=True) |
|
622 | pull_request = pr_util.create_pull_request(mergeable=True, approved=True) | |
624 | pull_request_id = pull_request.pull_request_id |
|
623 | pull_request_id = pull_request.pull_request_id | |
625 | repo_name = pull_request.target_repo.scm_instance().name |
|
624 | repo_name = pull_request.target_repo.scm_instance().name | |
626 |
|
625 | |||
627 | merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID', |
|
626 | merge_resp = MergeResponse(True, False, 'STUB_COMMIT_ID', | |
628 | MergeFailureReason.PUSH_FAILED, |
|
627 | MergeFailureReason.PUSH_FAILED, | |
629 | metadata={'target': 'shadow repo', |
|
628 | metadata={'target': 'shadow repo', | |
630 | 'merge_commit': 'xxx'}) |
|
629 | 'merge_commit': 'xxx'}) | |
631 | model_patcher = mock.patch.multiple( |
|
630 | model_patcher = mock.patch.multiple( | |
632 | PullRequestModel, |
|
631 | PullRequestModel, | |
633 | merge_repo=mock.Mock(return_value=merge_resp), |
|
632 | merge_repo=mock.Mock(return_value=merge_resp), | |
634 | merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE'))) |
|
633 | merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE'))) | |
635 |
|
634 | |||
636 | with model_patcher: |
|
635 | with model_patcher: | |
637 | response = self.app.post( |
|
636 | response = self.app.post( | |
638 | route_path('pullrequest_merge', |
|
637 | route_path('pullrequest_merge', | |
639 | repo_name=repo_name, |
|
638 | repo_name=repo_name, | |
640 | pull_request_id=pull_request_id), |
|
639 | pull_request_id=pull_request_id), | |
641 | params={'csrf_token': csrf_token}, status=302) |
|
640 | params={'csrf_token': csrf_token}, status=302) | |
642 |
|
641 | |||
643 | merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED, |
|
642 | merge_resp = MergeResponse(True, True, '', MergeFailureReason.PUSH_FAILED, | |
644 | metadata={'target': 'shadow repo', |
|
643 | metadata={'target': 'shadow repo', | |
645 | 'merge_commit': 'xxx'}) |
|
644 | 'merge_commit': 'xxx'}) | |
646 | assert_session_flash(response, merge_resp.merge_status_message) |
|
645 | assert_session_flash(response, merge_resp.merge_status_message) | |
647 |
|
646 | |||
648 | def test_update_source_revision(self, backend, csrf_token): |
|
647 | def test_update_source_revision(self, backend, csrf_token): | |
649 | commits = [ |
|
648 | commits = [ | |
650 | {'message': 'ancestor'}, |
|
649 | {'message': 'ancestor'}, | |
651 | {'message': 'change'}, |
|
650 | {'message': 'change'}, | |
652 | {'message': 'change-2'}, |
|
651 | {'message': 'change-2'}, | |
653 | ] |
|
652 | ] | |
654 | commit_ids = backend.create_master_repo(commits) |
|
653 | commit_ids = backend.create_master_repo(commits) | |
655 | target = backend.create_repo(heads=['ancestor']) |
|
654 | target = backend.create_repo(heads=['ancestor']) | |
656 | source = backend.create_repo(heads=['change']) |
|
655 | source = backend.create_repo(heads=['change']) | |
657 |
|
656 | |||
658 | # create pr from a in source to A in target |
|
657 | # create pr from a in source to A in target | |
659 | pull_request = PullRequest() |
|
658 | pull_request = PullRequest() | |
660 |
|
659 | |||
661 | pull_request.source_repo = source |
|
660 | pull_request.source_repo = source | |
662 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
661 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( | |
663 | branch=backend.default_branch_name, commit_id=commit_ids['change']) |
|
662 | branch=backend.default_branch_name, commit_id=commit_ids['change']) | |
664 |
|
663 | |||
665 | pull_request.target_repo = target |
|
664 | pull_request.target_repo = target | |
666 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
665 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( | |
667 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor']) |
|
666 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor']) | |
668 |
|
667 | |||
669 | pull_request.revisions = [commit_ids['change']] |
|
668 | pull_request.revisions = [commit_ids['change']] | |
670 | pull_request.title = u"Test" |
|
669 | pull_request.title = u"Test" | |
671 | pull_request.description = u"Description" |
|
670 | pull_request.description = u"Description" | |
672 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
671 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) | |
673 | pull_request.pull_request_state = PullRequest.STATE_CREATED |
|
672 | pull_request.pull_request_state = PullRequest.STATE_CREATED | |
674 | Session().add(pull_request) |
|
673 | Session().add(pull_request) | |
675 | Session().commit() |
|
674 | Session().commit() | |
676 | pull_request_id = pull_request.pull_request_id |
|
675 | pull_request_id = pull_request.pull_request_id | |
677 |
|
676 | |||
678 | # source has ancestor - change - change-2 |
|
677 | # source has ancestor - change - change-2 | |
679 | backend.pull_heads(source, heads=['change-2']) |
|
678 | backend.pull_heads(source, heads=['change-2']) | |
680 |
|
679 | |||
681 | # update PR |
|
680 | # update PR | |
682 | self.app.post( |
|
681 | self.app.post( | |
683 | route_path('pullrequest_update', |
|
682 | route_path('pullrequest_update', | |
684 | repo_name=target.repo_name, pull_request_id=pull_request_id), |
|
683 | repo_name=target.repo_name, pull_request_id=pull_request_id), | |
685 | params={'update_commits': 'true', 'csrf_token': csrf_token}) |
|
684 | params={'update_commits': 'true', 'csrf_token': csrf_token}) | |
686 |
|
685 | |||
687 | response = self.app.get( |
|
686 | response = self.app.get( | |
688 | route_path('pullrequest_show', |
|
687 | route_path('pullrequest_show', | |
689 | repo_name=target.repo_name, |
|
688 | repo_name=target.repo_name, | |
690 | pull_request_id=pull_request.pull_request_id)) |
|
689 | pull_request_id=pull_request.pull_request_id)) | |
691 |
|
690 | |||
692 | assert response.status_int == 200 |
|
691 | assert response.status_int == 200 | |
693 | assert 'Pull request updated to' in response.body |
|
692 | assert 'Pull request updated to' in response.body | |
694 | assert 'with 1 added, 0 removed commits.' in response.body |
|
693 | assert 'with 1 added, 0 removed commits.' in response.body | |
695 |
|
694 | |||
696 | # check that we now have both revisions |
|
695 | # check that we now have both revisions | |
697 | pull_request = PullRequest.get(pull_request_id) |
|
696 | pull_request = PullRequest.get(pull_request_id) | |
698 | assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']] |
|
697 | assert pull_request.revisions == [commit_ids['change-2'], commit_ids['change']] | |
699 |
|
698 | |||
700 | def test_update_target_revision(self, backend, csrf_token): |
|
699 | def test_update_target_revision(self, backend, csrf_token): | |
701 | commits = [ |
|
700 | commits = [ | |
702 | {'message': 'ancestor'}, |
|
701 | {'message': 'ancestor'}, | |
703 | {'message': 'change'}, |
|
702 | {'message': 'change'}, | |
704 | {'message': 'ancestor-new', 'parents': ['ancestor']}, |
|
703 | {'message': 'ancestor-new', 'parents': ['ancestor']}, | |
705 | {'message': 'change-rebased'}, |
|
704 | {'message': 'change-rebased'}, | |
706 | ] |
|
705 | ] | |
707 | commit_ids = backend.create_master_repo(commits) |
|
706 | commit_ids = backend.create_master_repo(commits) | |
708 | target = backend.create_repo(heads=['ancestor']) |
|
707 | target = backend.create_repo(heads=['ancestor']) | |
709 | source = backend.create_repo(heads=['change']) |
|
708 | source = backend.create_repo(heads=['change']) | |
710 |
|
709 | |||
711 | # create pr from a in source to A in target |
|
710 | # create pr from a in source to A in target | |
712 | pull_request = PullRequest() |
|
711 | pull_request = PullRequest() | |
713 |
|
712 | |||
714 | pull_request.source_repo = source |
|
713 | pull_request.source_repo = source | |
715 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
714 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( | |
716 | branch=backend.default_branch_name, commit_id=commit_ids['change']) |
|
715 | branch=backend.default_branch_name, commit_id=commit_ids['change']) | |
717 |
|
716 | |||
718 | pull_request.target_repo = target |
|
717 | pull_request.target_repo = target | |
719 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
718 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( | |
720 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor']) |
|
719 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor']) | |
721 |
|
720 | |||
722 | pull_request.revisions = [commit_ids['change']] |
|
721 | pull_request.revisions = [commit_ids['change']] | |
723 | pull_request.title = u"Test" |
|
722 | pull_request.title = u"Test" | |
724 | pull_request.description = u"Description" |
|
723 | pull_request.description = u"Description" | |
725 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
724 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) | |
726 | pull_request.pull_request_state = PullRequest.STATE_CREATED |
|
725 | pull_request.pull_request_state = PullRequest.STATE_CREATED | |
727 |
|
726 | |||
728 | Session().add(pull_request) |
|
727 | Session().add(pull_request) | |
729 | Session().commit() |
|
728 | Session().commit() | |
730 | pull_request_id = pull_request.pull_request_id |
|
729 | pull_request_id = pull_request.pull_request_id | |
731 |
|
730 | |||
732 | # target has ancestor - ancestor-new |
|
731 | # target has ancestor - ancestor-new | |
733 | # source has ancestor - ancestor-new - change-rebased |
|
732 | # source has ancestor - ancestor-new - change-rebased | |
734 | backend.pull_heads(target, heads=['ancestor-new']) |
|
733 | backend.pull_heads(target, heads=['ancestor-new']) | |
735 | backend.pull_heads(source, heads=['change-rebased']) |
|
734 | backend.pull_heads(source, heads=['change-rebased']) | |
736 |
|
735 | |||
737 | # update PR |
|
736 | # update PR | |
738 | self.app.post( |
|
737 | url = route_path('pullrequest_update', | |
739 | route_path('pullrequest_update', |
|
738 | repo_name=target.repo_name, | |
740 | repo_name=target.repo_name, |
|
739 | pull_request_id=pull_request_id) | |
741 | pull_request_id=pull_request_id), |
|
740 | self.app.post(url, | |
742 | params={'update_commits': 'true', 'csrf_token': csrf_token}, |
|
741 | params={'update_commits': 'true', 'csrf_token': csrf_token}, | |
743 | status=200) |
|
742 | status=200) | |
744 |
|
743 | |||
745 | # check that we now have both revisions |

744 | # check that we now have both revisions |
746 | pull_request = PullRequest.get(pull_request_id) |
|
745 | pull_request = PullRequest.get(pull_request_id) | |
747 | assert pull_request.revisions == [commit_ids['change-rebased']] |
|
746 | assert pull_request.revisions == [commit_ids['change-rebased']] | |
748 | assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format( |
|
747 | assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format( | |
749 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new']) |
|
748 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor-new']) | |
750 |
|
749 | |||
751 | response = self.app.get( |
|
750 | response = self.app.get( | |
752 | route_path('pullrequest_show', |
|
751 | route_path('pullrequest_show', | |
753 | repo_name=target.repo_name, |
|
752 | repo_name=target.repo_name, | |
754 | pull_request_id=pull_request.pull_request_id)) |
|
753 | pull_request_id=pull_request.pull_request_id)) | |
755 | assert response.status_int == 200 |
|
754 | assert response.status_int == 200 | |
756 | assert 'Pull request updated to' in response.body |
|
755 | assert 'Pull request updated to' in response.body | |
757 | assert 'with 1 added, 1 removed commits.' in response.body |
|
756 | assert 'with 1 added, 1 removed commits.' in response.body | |
758 |
|
757 | |||
759 | def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token): |
|
758 | def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token): | |
760 | backend = backend_git |
|
759 | backend = backend_git | |
761 | commits = [ |
|
760 | commits = [ | |
762 | {'message': 'master-commit-1'}, |
|
761 | {'message': 'master-commit-1'}, | |
763 | {'message': 'master-commit-2-change-1'}, |
|
762 | {'message': 'master-commit-2-change-1'}, | |
764 | {'message': 'master-commit-3-change-2'}, |
|
763 | {'message': 'master-commit-3-change-2'}, | |
765 |
|
764 | |||
766 | {'message': 'feat-commit-1', 'parents': ['master-commit-1']}, |
|
765 | {'message': 'feat-commit-1', 'parents': ['master-commit-1']}, | |
767 | {'message': 'feat-commit-2'}, |
|
766 | {'message': 'feat-commit-2'}, | |
768 | ] |
|
767 | ] | |
769 | commit_ids = backend.create_master_repo(commits) |
|
768 | commit_ids = backend.create_master_repo(commits) | |
770 | target = backend.create_repo(heads=['master-commit-3-change-2']) |
|
769 | target = backend.create_repo(heads=['master-commit-3-change-2']) | |
771 | source = backend.create_repo(heads=['feat-commit-2']) |
|
770 | source = backend.create_repo(heads=['feat-commit-2']) | |
772 |
|
771 | |||
773 | # create a PR from a commit in source to a commit in target |

772 | # create a PR from a commit in source to a commit in target |
774 | pull_request = PullRequest() |
|
773 | pull_request = PullRequest() | |
775 | pull_request.source_repo = source |
|
774 | pull_request.source_repo = source | |
776 |
|
775 | |||
777 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
776 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( | |
778 | branch=backend.default_branch_name, |
|
777 | branch=backend.default_branch_name, | |
779 | commit_id=commit_ids['master-commit-3-change-2']) |
|
778 | commit_id=commit_ids['master-commit-3-change-2']) | |
780 |
|
779 | |||
781 | pull_request.target_repo = target |
|
780 | pull_request.target_repo = target | |
782 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
781 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( | |
783 | branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2']) |
|
782 | branch=backend.default_branch_name, commit_id=commit_ids['feat-commit-2']) | |
784 |
|
783 | |||
785 | pull_request.revisions = [ |
|
784 | pull_request.revisions = [ | |
786 | commit_ids['feat-commit-1'], |
|
785 | commit_ids['feat-commit-1'], | |
787 | commit_ids['feat-commit-2'] |
|
786 | commit_ids['feat-commit-2'] | |
788 | ] |
|
787 | ] | |
789 | pull_request.title = u"Test" |
|
788 | pull_request.title = u"Test" | |
790 | pull_request.description = u"Description" |
|
789 | pull_request.description = u"Description" | |
791 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
790 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) | |
792 | pull_request.pull_request_state = PullRequest.STATE_CREATED |
|
791 | pull_request.pull_request_state = PullRequest.STATE_CREATED | |
793 | Session().add(pull_request) |
|
792 | Session().add(pull_request) | |
794 | Session().commit() |
|
793 | Session().commit() | |
795 | pull_request_id = pull_request.pull_request_id |
|
794 | pull_request_id = pull_request.pull_request_id | |
796 |
|
795 | |||
797 | # The PR is created; now we simulate a force-push into target |

796 | # The PR is created; now we simulate a force-push into target |
798 | # that drops the last 2 commits |

797 | # that drops the last 2 commits |
799 | vcsrepo = target.scm_instance() |
|
798 | vcsrepo = target.scm_instance() | |
800 | vcsrepo.config.clear_section('hooks') |
|
799 | vcsrepo.config.clear_section('hooks') | |
801 | vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2']) |
|
800 | vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2']) | |
802 |
|
801 | |||
803 | # update PR |
|
802 | # update PR | |
804 | self.app.post( |
|
803 | url = route_path('pullrequest_update', | |
805 | route_path('pullrequest_update', |
|
804 | repo_name=target.repo_name, | |
806 | repo_name=target.repo_name, |
|
805 | pull_request_id=pull_request_id) | |
807 | pull_request_id=pull_request_id), |
|
806 | self.app.post(url, | |
808 | params={'update_commits': 'true', 'csrf_token': csrf_token}, |
|
807 | params={'update_commits': 'true', 'csrf_token': csrf_token}, | |
809 | status=200) |
|
808 | status=200) | |
810 |
|
809 | |||
811 | response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name)) |
|
810 | response = self.app.get(route_path('pullrequest_new', repo_name=target.repo_name)) | |
812 | assert response.status_int == 200 |
|
811 | assert response.status_int == 200 | |
813 | response.mustcontain('Pull request updated to') |
|
812 | response.mustcontain('Pull request updated to') | |
814 | response.mustcontain('with 0 added, 0 removed commits.') |
|
813 | response.mustcontain('with 0 added, 0 removed commits.') | |
815 |
|
814 | |||
816 | def test_update_of_ancestor_reference(self, backend, csrf_token): |
|
815 | def test_update_of_ancestor_reference(self, backend, csrf_token): | |
817 | commits = [ |
|
816 | commits = [ | |
818 | {'message': 'ancestor'}, |
|
817 | {'message': 'ancestor'}, | |
819 | {'message': 'change'}, |
|
818 | {'message': 'change'}, | |
820 | {'message': 'change-2'}, |
|
819 | {'message': 'change-2'}, | |
821 | {'message': 'ancestor-new', 'parents': ['ancestor']}, |
|
820 | {'message': 'ancestor-new', 'parents': ['ancestor']}, | |
822 | {'message': 'change-rebased'}, |
|
821 | {'message': 'change-rebased'}, | |
823 | ] |
|
822 | ] | |
824 | commit_ids = backend.create_master_repo(commits) |
|
823 | commit_ids = backend.create_master_repo(commits) | |
825 | target = backend.create_repo(heads=['ancestor']) |
|
824 | target = backend.create_repo(heads=['ancestor']) | |
826 | source = backend.create_repo(heads=['change']) |
|
825 | source = backend.create_repo(heads=['change']) | |
827 |
|
826 | |||
828 | # create a PR from a commit in source to a commit in target |

827 | # create a PR from a commit in source to a commit in target |
829 | pull_request = PullRequest() |
|
828 | pull_request = PullRequest() | |
830 | pull_request.source_repo = source |
|
829 | pull_request.source_repo = source | |
831 |
|
830 | |||
832 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
831 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( | |
833 | branch=backend.default_branch_name, commit_id=commit_ids['change']) |
|
832 | branch=backend.default_branch_name, commit_id=commit_ids['change']) | |
834 | pull_request.target_repo = target |
|
833 | pull_request.target_repo = target | |
835 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
834 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( | |
836 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor']) |
|
835 | branch=backend.default_branch_name, commit_id=commit_ids['ancestor']) | |
837 | pull_request.revisions = [commit_ids['change']] |
|
836 | pull_request.revisions = [commit_ids['change']] | |
838 | pull_request.title = u"Test" |
|
837 | pull_request.title = u"Test" | |
839 | pull_request.description = u"Description" |
|
838 | pull_request.description = u"Description" | |
840 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
839 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) | |
841 | pull_request.pull_request_state = PullRequest.STATE_CREATED |
|
840 | pull_request.pull_request_state = PullRequest.STATE_CREATED | |
842 | Session().add(pull_request) |
|
841 | Session().add(pull_request) | |
843 | Session().commit() |
|
842 | Session().commit() | |
844 | pull_request_id = pull_request.pull_request_id |
|
843 | pull_request_id = pull_request.pull_request_id | |
845 |
|
844 | |||
846 | # target has ancestor - ancestor-new |
|
845 | # target has ancestor - ancestor-new | |
847 | # source has ancestor - ancestor-new - change-rebased |
|
846 | # source has ancestor - ancestor-new - change-rebased | |
848 | backend.pull_heads(target, heads=['ancestor-new']) |
|
847 | backend.pull_heads(target, heads=['ancestor-new']) | |
849 | backend.pull_heads(source, heads=['change-rebased']) |
|
848 | backend.pull_heads(source, heads=['change-rebased']) | |
850 |
|
849 | |||
851 | # update PR |
|
850 | # update PR | |
852 | self.app.post( |
|
851 | self.app.post( | |
853 | route_path('pullrequest_update', |
|
852 | route_path('pullrequest_update', | |
854 | repo_name=target.repo_name, pull_request_id=pull_request_id), |
|
853 | repo_name=target.repo_name, pull_request_id=pull_request_id), | |
855 | params={'update_commits': 'true', 'csrf_token': csrf_token}, |
|
854 | params={'update_commits': 'true', 'csrf_token': csrf_token}, | |
856 | status=200) |
|
855 | status=200) | |
857 |
|
856 | |||
858 | # Expect the target reference to be updated correctly |
|
857 | # Expect the target reference to be updated correctly | |
859 | pull_request = PullRequest.get(pull_request_id) |
|
858 | pull_request = PullRequest.get(pull_request_id) | |
860 | assert pull_request.revisions == [commit_ids['change-rebased']] |
|
859 | assert pull_request.revisions == [commit_ids['change-rebased']] | |
861 | expected_target_ref = 'branch:{branch}:{commit_id}'.format( |
|
860 | expected_target_ref = 'branch:{branch}:{commit_id}'.format( | |
862 | branch=backend.default_branch_name, |
|
861 | branch=backend.default_branch_name, | |
863 | commit_id=commit_ids['ancestor-new']) |
|
862 | commit_id=commit_ids['ancestor-new']) | |
864 | assert pull_request.target_ref == expected_target_ref |
|
863 | assert pull_request.target_ref == expected_target_ref | |
865 |
|
864 | |||
866 | def test_remove_pull_request_branch(self, backend_git, csrf_token): |
|
865 | def test_remove_pull_request_branch(self, backend_git, csrf_token): | |
867 | branch_name = 'development' |
|
866 | branch_name = 'development' | |
868 | commits = [ |
|
867 | commits = [ | |
869 | {'message': 'initial-commit'}, |
|
868 | {'message': 'initial-commit'}, | |
870 | {'message': 'old-feature'}, |
|
869 | {'message': 'old-feature'}, | |
871 | {'message': 'new-feature', 'branch': branch_name}, |
|
870 | {'message': 'new-feature', 'branch': branch_name}, | |
872 | ] |
|
871 | ] | |
873 | repo = backend_git.create_repo(commits) |
|
872 | repo = backend_git.create_repo(commits) | |
874 | repo_name = repo.repo_name |
|
873 | repo_name = repo.repo_name | |
875 | commit_ids = backend_git.commit_ids |
|
874 | commit_ids = backend_git.commit_ids | |
876 |
|
875 | |||
877 | pull_request = PullRequest() |
|
876 | pull_request = PullRequest() | |
878 | pull_request.source_repo = repo |
|
877 | pull_request.source_repo = repo | |
879 | pull_request.target_repo = repo |
|
878 | pull_request.target_repo = repo | |
880 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( |
|
879 | pull_request.source_ref = 'branch:{branch}:{commit_id}'.format( | |
881 | branch=branch_name, commit_id=commit_ids['new-feature']) |
|
880 | branch=branch_name, commit_id=commit_ids['new-feature']) | |
882 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( |
|
881 | pull_request.target_ref = 'branch:{branch}:{commit_id}'.format( | |
883 | branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature']) |
|
882 | branch=backend_git.default_branch_name, commit_id=commit_ids['old-feature']) | |
884 | pull_request.revisions = [commit_ids['new-feature']] |
|
883 | pull_request.revisions = [commit_ids['new-feature']] | |
885 | pull_request.title = u"Test" |
|
884 | pull_request.title = u"Test" | |
886 | pull_request.description = u"Description" |
|
885 | pull_request.description = u"Description" | |
887 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) |
|
886 | pull_request.author = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN) | |
888 | pull_request.pull_request_state = PullRequest.STATE_CREATED |
|
887 | pull_request.pull_request_state = PullRequest.STATE_CREATED | |
889 | Session().add(pull_request) |
|
888 | Session().add(pull_request) | |
890 | Session().commit() |
|
889 | Session().commit() | |
891 |
|
890 | |||
892 | pull_request_id = pull_request.pull_request_id |
|
891 | pull_request_id = pull_request.pull_request_id | |
893 |
|
892 | |||
894 | vcs = repo.scm_instance() |
|
893 | vcs = repo.scm_instance() | |
895 | vcs.remove_ref('refs/heads/{}'.format(branch_name)) |
|
894 | vcs.remove_ref('refs/heads/{}'.format(branch_name)) | |
896 |
|
895 | |||
897 | response = self.app.get(route_path( |
|
896 | response = self.app.get(route_path( | |
898 | 'pullrequest_show', |
|
897 | 'pullrequest_show', | |
899 | repo_name=repo_name, |
|
898 | repo_name=repo_name, | |
900 | pull_request_id=pull_request_id)) |
|
899 | pull_request_id=pull_request_id)) | |
901 |
|
900 | |||
902 | assert response.status_int == 200 |
|
901 | assert response.status_int == 200 | |
903 |
|
902 | |||
904 | response.assert_response().element_contains( |
|
903 | response.assert_response().element_contains( | |
905 | '#changeset_compare_view_content .alert strong', |
|
904 | '#changeset_compare_view_content .alert strong', | |
906 | 'Missing commits') |
|
905 | 'Missing commits') | |
907 | response.assert_response().element_contains( |
|
906 | response.assert_response().element_contains( | |
908 | '#changeset_compare_view_content .alert', |
|
907 | '#changeset_compare_view_content .alert', | |
909 | 'This pull request cannot be displayed, because one or more' |
|
908 | 'This pull request cannot be displayed, because one or more' | |
910 | ' commits no longer exist in the source repository.') |
|
909 | ' commits no longer exist in the source repository.') | |
911 |
|
910 | |||
912 | def test_strip_commits_from_pull_request( |
|
911 | def test_strip_commits_from_pull_request( | |
913 | self, backend, pr_util, csrf_token): |
|
912 | self, backend, pr_util, csrf_token): | |
914 | commits = [ |
|
913 | commits = [ | |
915 | {'message': 'initial-commit'}, |
|
914 | {'message': 'initial-commit'}, | |
916 | {'message': 'old-feature'}, |
|
915 | {'message': 'old-feature'}, | |
917 | {'message': 'new-feature', 'parents': ['initial-commit']}, |
|
916 | {'message': 'new-feature', 'parents': ['initial-commit']}, | |
918 | ] |
|
917 | ] | |
919 | pull_request = pr_util.create_pull_request( |
|
918 | pull_request = pr_util.create_pull_request( | |
920 | commits, target_head='initial-commit', source_head='new-feature', |
|
919 | commits, target_head='initial-commit', source_head='new-feature', | |
921 | revisions=['new-feature']) |
|
920 | revisions=['new-feature']) | |
922 |
|
921 | |||
923 | vcs = pr_util.source_repository.scm_instance() |
|
922 | vcs = pr_util.source_repository.scm_instance() | |
924 | if backend.alias == 'git': |
|
923 | if backend.alias == 'git': | |
925 | vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master') |
|
924 | vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master') | |
926 | else: |
|
925 | else: | |
927 | vcs.strip(pr_util.commit_ids['new-feature']) |
|
926 | vcs.strip(pr_util.commit_ids['new-feature']) | |
928 |
|
927 | |||
929 | response = self.app.get(route_path( |
|
928 | response = self.app.get(route_path( | |
930 | 'pullrequest_show', |
|
929 | 'pullrequest_show', | |
931 | repo_name=pr_util.target_repository.repo_name, |
|
930 | repo_name=pr_util.target_repository.repo_name, | |
932 | pull_request_id=pull_request.pull_request_id)) |
|
931 | pull_request_id=pull_request.pull_request_id)) | |
933 |
|
932 | |||
934 | assert response.status_int == 200 |
|
933 | assert response.status_int == 200 | |
935 |
|
934 | |||
936 | response.assert_response().element_contains( |
|
935 | response.assert_response().element_contains( | |
937 | '#changeset_compare_view_content .alert strong', |
|
936 | '#changeset_compare_view_content .alert strong', | |
938 | 'Missing commits') |
|
937 | 'Missing commits') | |
939 | response.assert_response().element_contains( |
|
938 | response.assert_response().element_contains( | |
940 | '#changeset_compare_view_content .alert', |
|
939 | '#changeset_compare_view_content .alert', | |
941 | 'This pull request cannot be displayed, because one or more' |
|
940 | 'This pull request cannot be displayed, because one or more' | |
942 | ' commits no longer exist in the source repository.') |
|
941 | ' commits no longer exist in the source repository.') | |
943 | response.assert_response().element_contains( |
|
942 | response.assert_response().element_contains( | |
944 | '#update_commits', |
|
943 | '#update_commits', | |
945 | 'Update commits') |
|
944 | 'Update commits') | |
946 |
|
945 | |||
947 | def test_strip_commits_and_update( |
|
946 | def test_strip_commits_and_update( | |
948 | self, backend, pr_util, csrf_token): |
|
947 | self, backend, pr_util, csrf_token): | |
949 | commits = [ |
|
948 | commits = [ | |
950 | {'message': 'initial-commit'}, |
|
949 | {'message': 'initial-commit'}, | |
951 | {'message': 'old-feature'}, |
|
950 | {'message': 'old-feature'}, | |
952 | {'message': 'new-feature', 'parents': ['old-feature']}, |
|
951 | {'message': 'new-feature', 'parents': ['old-feature']}, | |
953 | ] |
|
952 | ] | |
954 | pull_request = pr_util.create_pull_request( |
|
953 | pull_request = pr_util.create_pull_request( | |
955 | commits, target_head='old-feature', source_head='new-feature', |
|
954 | commits, target_head='old-feature', source_head='new-feature', | |
956 | revisions=['new-feature'], mergeable=True) |
|
955 | revisions=['new-feature'], mergeable=True) | |
957 |
|
956 | |||
958 | vcs = pr_util.source_repository.scm_instance() |
|
957 | vcs = pr_util.source_repository.scm_instance() | |
959 | if backend.alias == 'git': |
|
958 | if backend.alias == 'git': | |
960 | vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master') |
|
959 | vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master') | |
961 | else: |
|
960 | else: | |
962 | vcs.strip(pr_util.commit_ids['new-feature']) |
|
961 | vcs.strip(pr_util.commit_ids['new-feature']) | |
963 |
|
962 | |||
964 | response = self.app.post( |
|
963 | url = route_path('pullrequest_update', | |
965 | route_path('pullrequest_update', |
|
964 | repo_name=pull_request.target_repo.repo_name, | |
966 | repo_name=pull_request.target_repo.repo_name, |
|
965 | pull_request_id=pull_request.pull_request_id) | |
967 | pull_request_id=pull_request.pull_request_id), |
|
966 | response = self.app.post(url, | |
968 | params={'update_commits': 'true', |
|
967 | params={'update_commits': 'true', | |
969 | 'csrf_token': csrf_token}) |
|
968 | 'csrf_token': csrf_token}) | |
970 |
|
969 | |||
971 | assert response.status_int == 200 |
|
970 | assert response.status_int == 200 | |
972 | assert response.body == 'true' |
|
971 | assert response.body == 'true' | |
973 |
|
972 | |||
974 | # Make sure that after update, it won't raise 500 errors |
|
973 | # Make sure that after update, it won't raise 500 errors | |
975 | response = self.app.get(route_path( |
|
974 | response = self.app.get(route_path( | |
976 | 'pullrequest_show', |
|
975 | 'pullrequest_show', | |
977 | repo_name=pr_util.target_repository.repo_name, |
|
976 | repo_name=pr_util.target_repository.repo_name, | |
978 | pull_request_id=pull_request.pull_request_id)) |
|
977 | pull_request_id=pull_request.pull_request_id)) | |
979 |
|
978 | |||
980 | assert response.status_int == 200 |
|
979 | assert response.status_int == 200 | |
981 | response.assert_response().element_contains( |
|
980 | response.assert_response().element_contains( | |
982 | '#changeset_compare_view_content .alert strong', |
|
981 | '#changeset_compare_view_content .alert strong', | |
983 | 'Missing commits') |
|
982 | 'Missing commits') | |
984 |
|
983 | |||
985 | def test_branch_is_a_link(self, pr_util): |
|
984 | def test_branch_is_a_link(self, pr_util): | |
986 | pull_request = pr_util.create_pull_request() |
|
985 | pull_request = pr_util.create_pull_request() | |
987 | pull_request.source_ref = 'branch:origin:1234567890abcdef' |
|
986 | pull_request.source_ref = 'branch:origin:1234567890abcdef' | |
988 | pull_request.target_ref = 'branch:target:abcdef1234567890' |
|
987 | pull_request.target_ref = 'branch:target:abcdef1234567890' | |
989 | Session().add(pull_request) |
|
988 | Session().add(pull_request) | |
990 | Session().commit() |
|
989 | Session().commit() | |
991 |
|
990 | |||
992 | response = self.app.get(route_path( |
|
991 | response = self.app.get(route_path( | |
993 | 'pullrequest_show', |
|
992 | 'pullrequest_show', | |
994 | repo_name=pull_request.target_repo.scm_instance().name, |
|
993 | repo_name=pull_request.target_repo.scm_instance().name, | |
995 | pull_request_id=pull_request.pull_request_id)) |
|
994 | pull_request_id=pull_request.pull_request_id)) | |
996 | assert response.status_int == 200 |
|
995 | assert response.status_int == 200 | |
997 |
|
996 | |||
998 | origin = response.assert_response().get_element('.pr-origininfo .tag') |
|
997 | origin = response.assert_response().get_element('.pr-origininfo .tag') | |
999 | origin_children = origin.getchildren() |
|
998 | origin_children = origin.getchildren() | |
1000 | assert len(origin_children) == 1 |
|
999 | assert len(origin_children) == 1 | |
1001 | target = response.assert_response().get_element('.pr-targetinfo .tag') |
|
1000 | target = response.assert_response().get_element('.pr-targetinfo .tag') | |
1002 | target_children = target.getchildren() |
|
1001 | target_children = target.getchildren() | |
1003 | assert len(target_children) == 1 |
|
1002 | assert len(target_children) == 1 | |
1004 |
|
1003 | |||
1005 | expected_origin_link = route_path( |
|
1004 | expected_origin_link = route_path( | |
1006 | 'repo_commits', |
|
1005 | 'repo_commits', | |
1007 | repo_name=pull_request.source_repo.scm_instance().name, |
|
1006 | repo_name=pull_request.source_repo.scm_instance().name, | |
1008 | params=dict(branch='origin')) |
|
1007 | params=dict(branch='origin')) | |
1009 | expected_target_link = route_path( |
|
1008 | expected_target_link = route_path( | |
1010 | 'repo_commits', |
|
1009 | 'repo_commits', | |
1011 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1010 | repo_name=pull_request.target_repo.scm_instance().name, | |
1012 | params=dict(branch='target')) |
|
1011 | params=dict(branch='target')) | |
1013 | assert origin_children[0].attrib['href'] == expected_origin_link |
|
1012 | assert origin_children[0].attrib['href'] == expected_origin_link | |
1014 | assert origin_children[0].text == 'branch: origin' |
|
1013 | assert origin_children[0].text == 'branch: origin' | |
1015 | assert target_children[0].attrib['href'] == expected_target_link |
|
1014 | assert target_children[0].attrib['href'] == expected_target_link | |
1016 | assert target_children[0].text == 'branch: target' |
|
1015 | assert target_children[0].text == 'branch: target' | |
1017 |
|
1016 | |||
1018 | def test_bookmark_is_not_a_link(self, pr_util): |
|
1017 | def test_bookmark_is_not_a_link(self, pr_util): | |
1019 | pull_request = pr_util.create_pull_request() |
|
1018 | pull_request = pr_util.create_pull_request() | |
1020 | pull_request.source_ref = 'bookmark:origin:1234567890abcdef' |
|
1019 | pull_request.source_ref = 'bookmark:origin:1234567890abcdef' | |
1021 | pull_request.target_ref = 'bookmark:target:abcdef1234567890' |
|
1020 | pull_request.target_ref = 'bookmark:target:abcdef1234567890' | |
1022 | Session().add(pull_request) |
|
1021 | Session().add(pull_request) | |
1023 | Session().commit() |
|
1022 | Session().commit() | |
1024 |
|
1023 | |||
1025 | response = self.app.get(route_path( |
|
1024 | response = self.app.get(route_path( | |
1026 | 'pullrequest_show', |
|
1025 | 'pullrequest_show', | |
1027 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1026 | repo_name=pull_request.target_repo.scm_instance().name, | |
1028 | pull_request_id=pull_request.pull_request_id)) |
|
1027 | pull_request_id=pull_request.pull_request_id)) | |
1029 | assert response.status_int == 200 |
|
1028 | assert response.status_int == 200 | |
1030 |
|
1029 | |||
1031 | origin = response.assert_response().get_element('.pr-origininfo .tag') |
|
1030 | origin = response.assert_response().get_element('.pr-origininfo .tag') | |
1032 | assert origin.text.strip() == 'bookmark: origin' |
|
1031 | assert origin.text.strip() == 'bookmark: origin' | |
1033 | assert origin.getchildren() == [] |
|
1032 | assert origin.getchildren() == [] | |
1034 |
|
1033 | |||
1035 | target = response.assert_response().get_element('.pr-targetinfo .tag') |
|
1034 | target = response.assert_response().get_element('.pr-targetinfo .tag') | |
1036 | assert target.text.strip() == 'bookmark: target' |
|
1035 | assert target.text.strip() == 'bookmark: target' | |
1037 | assert target.getchildren() == [] |
|
1036 | assert target.getchildren() == [] | |
1038 |
|
1037 | |||
1039 | def test_tag_is_not_a_link(self, pr_util): |
|
1038 | def test_tag_is_not_a_link(self, pr_util): | |
1040 | pull_request = pr_util.create_pull_request() |
|
1039 | pull_request = pr_util.create_pull_request() | |
1041 | pull_request.source_ref = 'tag:origin:1234567890abcdef' |
|
1040 | pull_request.source_ref = 'tag:origin:1234567890abcdef' | |
1042 | pull_request.target_ref = 'tag:target:abcdef1234567890' |
|
1041 | pull_request.target_ref = 'tag:target:abcdef1234567890' | |
1043 | Session().add(pull_request) |
|
1042 | Session().add(pull_request) | |
1044 | Session().commit() |
|
1043 | Session().commit() | |
1045 |
|
1044 | |||
1046 | response = self.app.get(route_path( |
|
1045 | response = self.app.get(route_path( | |
1047 | 'pullrequest_show', |
|
1046 | 'pullrequest_show', | |
1048 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1047 | repo_name=pull_request.target_repo.scm_instance().name, | |
1049 | pull_request_id=pull_request.pull_request_id)) |
|
1048 | pull_request_id=pull_request.pull_request_id)) | |
1050 | assert response.status_int == 200 |
|
1049 | assert response.status_int == 200 | |
1051 |
|
1050 | |||
1052 | origin = response.assert_response().get_element('.pr-origininfo .tag') |
|
1051 | origin = response.assert_response().get_element('.pr-origininfo .tag') | |
1053 | assert origin.text.strip() == 'tag: origin' |
|
1052 | assert origin.text.strip() == 'tag: origin' | |
1054 | assert origin.getchildren() == [] |
|
1053 | assert origin.getchildren() == [] | |
1055 |
|
1054 | |||
1056 | target = response.assert_response().get_element('.pr-targetinfo .tag') |
|
1055 | target = response.assert_response().get_element('.pr-targetinfo .tag') | |
1057 | assert target.text.strip() == 'tag: target' |
|
1056 | assert target.text.strip() == 'tag: target' | |
1058 | assert target.getchildren() == [] |
|
1057 | assert target.getchildren() == [] | |
1059 |
|
1058 | |||
1060 | @pytest.mark.parametrize('mergeable', [True, False]) |
|
1059 | @pytest.mark.parametrize('mergeable', [True, False]) | |
1061 | def test_shadow_repository_link( |
|
1060 | def test_shadow_repository_link( | |
1062 | self, mergeable, pr_util, http_host_only_stub): |
|
1061 | self, mergeable, pr_util, http_host_only_stub): | |
1063 | """ |
|
1062 | """ | |
1064 | Check that the pull request summary page displays a link to the shadow |
|
1063 | Check that the pull request summary page displays a link to the shadow | |
1065 | repository if the pull request is mergeable. If it is not mergeable, |

1064 | repository if the pull request is mergeable. If it is not mergeable, |
1066 | the link should not be displayed. |
|
1065 | the link should not be displayed. | |
1067 | """ |
|
1066 | """ | |
1068 | pull_request = pr_util.create_pull_request( |
|
1067 | pull_request = pr_util.create_pull_request( | |
1069 | mergeable=mergeable, enable_notifications=False) |
|
1068 | mergeable=mergeable, enable_notifications=False) | |
1070 | target_repo = pull_request.target_repo.scm_instance() |
|
1069 | target_repo = pull_request.target_repo.scm_instance() | |
1071 | pr_id = pull_request.pull_request_id |
|
1070 | pr_id = pull_request.pull_request_id | |
1072 | shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format( |
|
1071 | shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format( | |
1073 | host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id) |
|
1072 | host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id) | |
1074 |
|
1073 | |||
1075 | response = self.app.get(route_path( |
|
1074 | response = self.app.get(route_path( | |
1076 | 'pullrequest_show', |
|
1075 | 'pullrequest_show', | |
1077 | repo_name=target_repo.name, |
|
1076 | repo_name=target_repo.name, | |
1078 | pull_request_id=pr_id)) |
|
1077 | pull_request_id=pr_id)) | |
1079 |
|
1078 | |||
1080 | if mergeable: |
|
1079 | if mergeable: | |
1081 | response.assert_response().element_value_contains( |
|
1080 | response.assert_response().element_value_contains( | |
1082 | 'input.pr-mergeinfo', shadow_url) |
|
1081 | 'input.pr-mergeinfo', shadow_url) | |
1083 | response.assert_response().element_value_contains( |
|
1082 | response.assert_response().element_value_contains( | |
1084 | 'input.pr-mergeinfo ', 'pr-merge') |
|
1083 | 'input.pr-mergeinfo ', 'pr-merge') | |
1085 | else: |
|
1084 | else: | |
1086 | response.assert_response().no_element_exists('.pr-mergeinfo') |
|
1085 | response.assert_response().no_element_exists('.pr-mergeinfo') | |
1087 |
|
1086 | |||
1088 |
|
1087 | |||
1089 | @pytest.mark.usefixtures('app') |
|
1088 | @pytest.mark.usefixtures('app') | |
1090 | @pytest.mark.backends("git", "hg") |
|
1089 | @pytest.mark.backends("git", "hg") | |
1091 | class TestPullrequestsControllerDelete(object): |
|
1090 | class TestPullrequestsControllerDelete(object): | |
1092 | def test_pull_request_delete_button_permissions_admin( |
|
1091 | def test_pull_request_delete_button_permissions_admin( | |
1093 | self, autologin_user, user_admin, pr_util): |
|
1092 | self, autologin_user, user_admin, pr_util): | |
1094 | pull_request = pr_util.create_pull_request( |
|
1093 | pull_request = pr_util.create_pull_request( | |
1095 | author=user_admin.username, enable_notifications=False) |
|
1094 | author=user_admin.username, enable_notifications=False) | |
1096 |
|
1095 | |||
1097 | response = self.app.get(route_path( |
|
1096 | response = self.app.get(route_path( | |
1098 | 'pullrequest_show', |
|
1097 | 'pullrequest_show', | |
1099 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1098 | repo_name=pull_request.target_repo.scm_instance().name, | |
1100 | pull_request_id=pull_request.pull_request_id)) |
|
1099 | pull_request_id=pull_request.pull_request_id)) | |
1101 |
|
1100 | |||
1102 | response.mustcontain('id="delete_pullrequest"') |
|
1101 | response.mustcontain('id="delete_pullrequest"') | |
1103 | response.mustcontain('Confirm to delete this pull request') |
|
1102 | response.mustcontain('Confirm to delete this pull request') | |
1104 |
|
1103 | |||
1105 | def test_pull_request_delete_button_permissions_owner( |
|
1104 | def test_pull_request_delete_button_permissions_owner( | |
1106 | self, autologin_regular_user, user_regular, pr_util): |
|
1105 | self, autologin_regular_user, user_regular, pr_util): | |
1107 | pull_request = pr_util.create_pull_request( |
|
1106 | pull_request = pr_util.create_pull_request( | |
1108 | author=user_regular.username, enable_notifications=False) |
|
1107 | author=user_regular.username, enable_notifications=False) | |
1109 |
|
1108 | |||
1110 | response = self.app.get(route_path( |
|
1109 | response = self.app.get(route_path( | |
1111 | 'pullrequest_show', |
|
1110 | 'pullrequest_show', | |
1112 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1111 | repo_name=pull_request.target_repo.scm_instance().name, | |
1113 | pull_request_id=pull_request.pull_request_id)) |
|
1112 | pull_request_id=pull_request.pull_request_id)) | |
1114 |
|
1113 | |||
1115 | response.mustcontain('id="delete_pullrequest"') |
|
1114 | response.mustcontain('id="delete_pullrequest"') | |
1116 | response.mustcontain('Confirm to delete this pull request') |
|
1115 | response.mustcontain('Confirm to delete this pull request') | |
1117 |
|
1116 | |||
1118 | def test_pull_request_delete_button_permissions_forbidden( |
|
1117 | def test_pull_request_delete_button_permissions_forbidden( | |
1119 | self, autologin_regular_user, user_regular, user_admin, pr_util): |
|
1118 | self, autologin_regular_user, user_regular, user_admin, pr_util): | |
1120 | pull_request = pr_util.create_pull_request( |
|
1119 | pull_request = pr_util.create_pull_request( | |
1121 | author=user_admin.username, enable_notifications=False) |
|
1120 | author=user_admin.username, enable_notifications=False) | |
1122 |
|
1121 | |||
1123 | response = self.app.get(route_path( |
|
1122 | response = self.app.get(route_path( | |
1124 | 'pullrequest_show', |
|
1123 | 'pullrequest_show', | |
1125 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1124 | repo_name=pull_request.target_repo.scm_instance().name, | |
1126 | pull_request_id=pull_request.pull_request_id)) |
|
1125 | pull_request_id=pull_request.pull_request_id)) | |
1127 | response.mustcontain(no=['id="delete_pullrequest"']) |
|
1126 | response.mustcontain(no=['id="delete_pullrequest"']) | |
1128 | response.mustcontain(no=['Confirm to delete this pull request']) |
|
1127 | response.mustcontain(no=['Confirm to delete this pull request']) | |
1129 |
|
1128 | |||
1130 | def test_pull_request_delete_button_permissions_can_update_cannot_delete( |
|
1129 | def test_pull_request_delete_button_permissions_can_update_cannot_delete( | |
1131 | self, autologin_regular_user, user_regular, user_admin, pr_util, |
|
1130 | self, autologin_regular_user, user_regular, user_admin, pr_util, | |
1132 | user_util): |
|
1131 | user_util): | |
1133 |
|
1132 | |||
1134 | pull_request = pr_util.create_pull_request( |
|
1133 | pull_request = pr_util.create_pull_request( | |
1135 | author=user_admin.username, enable_notifications=False) |
|
1134 | author=user_admin.username, enable_notifications=False) | |
1136 |
|
1135 | |||
1137 | user_util.grant_user_permission_to_repo( |
|
1136 | user_util.grant_user_permission_to_repo( | |
1138 | pull_request.target_repo, user_regular, |
|
1137 | pull_request.target_repo, user_regular, | |
1139 | 'repository.write') |
|
1138 | 'repository.write') | |
1140 |
|
1139 | |||
1141 | response = self.app.get(route_path( |
|
1140 | response = self.app.get(route_path( | |
1142 | 'pullrequest_show', |
|
1141 | 'pullrequest_show', | |
1143 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1142 | repo_name=pull_request.target_repo.scm_instance().name, | |
1144 | pull_request_id=pull_request.pull_request_id)) |
|
1143 | pull_request_id=pull_request.pull_request_id)) | |
1145 |
|
1144 | |||
1146 | response.mustcontain('id="open_edit_pullrequest"') |
|
1145 | response.mustcontain('id="open_edit_pullrequest"') | |
1147 | response.mustcontain('id="delete_pullrequest"') |
|
1146 | response.mustcontain('id="delete_pullrequest"') | |
1148 | response.mustcontain(no=['Confirm to delete this pull request']) |
|
1147 | response.mustcontain(no=['Confirm to delete this pull request']) | |
1149 |
|
1148 | |||
1150 | def test_delete_comment_returns_404_if_comment_does_not_exist( |
|
1149 | def test_delete_comment_returns_404_if_comment_does_not_exist( | |
1151 | self, autologin_user, pr_util, user_admin, csrf_token, xhr_header): |
|
1150 | self, autologin_user, pr_util, user_admin, csrf_token, xhr_header): | |
1152 |
|
1151 | |||
1153 | pull_request = pr_util.create_pull_request( |
|
1152 | pull_request = pr_util.create_pull_request( | |
1154 | author=user_admin.username, enable_notifications=False) |
|
1153 | author=user_admin.username, enable_notifications=False) | |
1155 |
|
1154 | |||
1156 | self.app.post( |
|
1155 | self.app.post( | |
1157 | route_path( |
|
1156 | route_path( | |
1158 | 'pullrequest_comment_delete', |
|
1157 | 'pullrequest_comment_delete', | |
1159 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1158 | repo_name=pull_request.target_repo.scm_instance().name, | |
1160 | pull_request_id=pull_request.pull_request_id, |
|
1159 | pull_request_id=pull_request.pull_request_id, | |
1161 | comment_id=1024404), |
|
1160 | comment_id=1024404), | |
1162 | extra_environ=xhr_header, |
|
1161 | extra_environ=xhr_header, | |
1163 | params={'csrf_token': csrf_token}, |
|
1162 | params={'csrf_token': csrf_token}, | |
1164 | status=404 |
|
1163 | status=404 | |
1165 | ) |
|
1164 | ) | |
1166 |
|
1165 | |||
1167 | def test_delete_comment( |
|
1166 | def test_delete_comment( | |
1168 | self, autologin_user, pr_util, user_admin, csrf_token, xhr_header): |
|
1167 | self, autologin_user, pr_util, user_admin, csrf_token, xhr_header): | |
1169 |
|
1168 | |||
1170 | pull_request = pr_util.create_pull_request( |
|
1169 | pull_request = pr_util.create_pull_request( | |
1171 | author=user_admin.username, enable_notifications=False) |
|
1170 | author=user_admin.username, enable_notifications=False) | |
1172 | comment = pr_util.create_comment() |
|
1171 | comment = pr_util.create_comment() | |
1173 | comment_id = comment.comment_id |
|
1172 | comment_id = comment.comment_id | |
1174 |
|
1173 | |||
1175 | response = self.app.post( |
|
1174 | response = self.app.post( | |
1176 | route_path( |
|
1175 | route_path( | |
1177 | 'pullrequest_comment_delete', |
|
1176 | 'pullrequest_comment_delete', | |
1178 | repo_name=pull_request.target_repo.scm_instance().name, |
|
1177 | repo_name=pull_request.target_repo.scm_instance().name, | |
1179 | pull_request_id=pull_request.pull_request_id, |
|
1178 | pull_request_id=pull_request.pull_request_id, | |
1180 | comment_id=comment_id), |
|
1179 | comment_id=comment_id), | |
1181 | extra_environ=xhr_header, |
|
1180 | extra_environ=xhr_header, | |
1182 | params={'csrf_token': csrf_token}, |
|
1181 | params={'csrf_token': csrf_token}, | |
1183 | status=200 |
|
1182 | status=200 | |
1184 | ) |
|
1183 | ) | |
1185 | assert response.body == 'true' |
|
1184 | assert response.body == 'true' | |
1186 |
|
1185 | |||
1187 | @pytest.mark.parametrize('url_type', [ |
|
1186 | @pytest.mark.parametrize('url_type', [ | |
1188 | 'pullrequest_new', |
|
1187 | 'pullrequest_new', | |
1189 | 'pullrequest_create', |
|
1188 | 'pullrequest_create', | |
1190 | 'pullrequest_update', |
|
1189 | 'pullrequest_update', | |
1191 | 'pullrequest_merge', |
|
1190 | 'pullrequest_merge', | |
1192 | ]) |
|
1191 | ]) | |
1193 | def test_pull_request_is_forbidden_on_archived_repo( |
|
1192 | def test_pull_request_is_forbidden_on_archived_repo( | |
1194 | self, autologin_user, backend, xhr_header, user_util, url_type): |
|
1193 | self, autologin_user, backend, xhr_header, user_util, url_type): | |
1195 |
|
1194 | |||
1196 | # create a temporary repo |
|
1195 | # create a temporary repo | |
1197 | source = user_util.create_repo(repo_type=backend.alias) |
|
1196 | source = user_util.create_repo(repo_type=backend.alias) | |
1198 | repo_name = source.repo_name |
|
1197 | repo_name = source.repo_name | |
1199 | repo = Repository.get_by_repo_name(repo_name) |
|
1198 | repo = Repository.get_by_repo_name(repo_name) | |
1200 | repo.archived = True |
|
1199 | repo.archived = True | |
1201 | Session().commit() |
|
1200 | Session().commit() | |
1202 |
|
1201 | |||
1203 | response = self.app.get( |
|
1202 | response = self.app.get( | |
1204 | route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302) |
|
1203 | route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302) | |
1205 |
|
1204 | |||
1206 | msg = 'Action not supported for archived repository.' |
|
1205 | msg = 'Action not supported for archived repository.' | |
1207 | assert_session_flash(response, msg) |
|
1206 | assert_session_flash(response, msg) | |
1208 |
|
1207 | |||
1209 |
|
1208 | |||
1210 | def assert_pull_request_status(pull_request, expected_status): |
|
1209 | def assert_pull_request_status(pull_request, expected_status): | |
1211 | status = ChangesetStatusModel().calculated_review_status( |
|
1210 | status = ChangesetStatusModel().calculated_review_status(pull_request=pull_request) | |
1212 | pull_request=pull_request) |
|
|||
1213 | assert status == expected_status |
|
1211 | assert status == expected_status | |
1214 |
|
1212 | |||
1215 |
|
1213 | |||
1216 | @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create']) |
|
1214 | @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create']) | |
1217 | @pytest.mark.usefixtures("autologin_user") |
|
1215 | @pytest.mark.usefixtures("autologin_user") | |
1218 | def test_forbidden_to_repo_summary_for_svn_repositories(backend_svn, app, route): |

1216 | def test_forbidden_to_repo_summary_for_svn_repositories(backend_svn, app, route): |
1219 | response = app.get( |
|
1217 | app.get(route_path(route, repo_name=backend_svn.repo_name), status=404) | |
1220 | route_path(route, repo_name=backend_svn.repo_name), status=404) |
|
|||
1221 |
|
@@ -1,734 +1,740 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2011-2019 RhodeCode GmbH |
|
3 | # Copyright (C) 2011-2019 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | comments model for RhodeCode |
|
22 | comments model for RhodeCode | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import logging |
|
25 | import logging | |
26 | import traceback |
|
26 | import traceback | |
27 | import collections |
|
27 | import collections | |
28 |
|
28 | |||
29 | from pyramid.threadlocal import get_current_registry, get_current_request |
|
29 | from pyramid.threadlocal import get_current_registry, get_current_request | |
30 | from sqlalchemy.sql.expression import null |
|
30 | from sqlalchemy.sql.expression import null | |
31 | from sqlalchemy.sql.functions import coalesce |
|
31 | from sqlalchemy.sql.functions import coalesce | |
32 |
|
32 | |||
33 | from rhodecode.lib import helpers as h, diffs, channelstream |
|
33 | from rhodecode.lib import helpers as h, diffs, channelstream | |
34 | from rhodecode.lib import audit_logger |
|
34 | from rhodecode.lib import audit_logger | |
35 | from rhodecode.lib.utils2 import extract_mentioned_users, safe_str |
|
35 | from rhodecode.lib.utils2 import extract_mentioned_users, safe_str | |
36 | from rhodecode.model import BaseModel |
|
36 | from rhodecode.model import BaseModel | |
37 | from rhodecode.model.db import ( |
|
37 | from rhodecode.model.db import ( | |
38 | ChangesetComment, User, Notification, PullRequest, AttributeDict) |
|
38 | ChangesetComment, User, Notification, PullRequest, AttributeDict) | |
39 | from rhodecode.model.notification import NotificationModel |
|
39 | from rhodecode.model.notification import NotificationModel | |
40 | from rhodecode.model.meta import Session |
|
40 | from rhodecode.model.meta import Session | |
41 | from rhodecode.model.settings import VcsSettingsModel |
|
41 | from rhodecode.model.settings import VcsSettingsModel | |
42 | from rhodecode.model.notification import EmailNotificationModel |
|
42 | from rhodecode.model.notification import EmailNotificationModel | |
43 | from rhodecode.model.validation_schema.schemas import comment_schema |
|
43 | from rhodecode.model.validation_schema.schemas import comment_schema | |
44 |
|
44 | |||
45 |
|
45 | |||
46 | log = logging.getLogger(__name__) |
|
46 | log = logging.getLogger(__name__) | |
47 |
|
47 | |||
48 |
|
48 | |||
49 | class CommentsModel(BaseModel): |
|
49 | class CommentsModel(BaseModel): | |
50 |
|
50 | |||
51 | cls = ChangesetComment |
|
51 | cls = ChangesetComment | |
52 |
|
52 | |||
53 | DIFF_CONTEXT_BEFORE = 3 |
|
53 | DIFF_CONTEXT_BEFORE = 3 | |
54 | DIFF_CONTEXT_AFTER = 3 |
|
54 | DIFF_CONTEXT_AFTER = 3 | |
55 |
|
55 | |||
56 | def __get_commit_comment(self, changeset_comment): |
|
56 | def __get_commit_comment(self, changeset_comment): | |
57 | return self._get_instance(ChangesetComment, changeset_comment) |
|
57 | return self._get_instance(ChangesetComment, changeset_comment) | |
58 |
|
58 | |||
59 | def __get_pull_request(self, pull_request): |
|
59 | def __get_pull_request(self, pull_request): | |
60 | return self._get_instance(PullRequest, pull_request) |
|
60 | return self._get_instance(PullRequest, pull_request) | |
61 |
|
61 | |||
62 | def _extract_mentions(self, s): |
|
62 | def _extract_mentions(self, s): | |
63 | user_objects = [] |
|
63 | user_objects = [] | |
64 | for username in extract_mentioned_users(s): |
|
64 | for username in extract_mentioned_users(s): | |
65 | user_obj = User.get_by_username(username, case_insensitive=True) |
|
65 | user_obj = User.get_by_username(username, case_insensitive=True) | |
66 | if user_obj: |
|
66 | if user_obj: | |
67 | user_objects.append(user_obj) |
|
67 | user_objects.append(user_obj) | |
68 | return user_objects |
|
68 | return user_objects | |
69 |
|
69 | |||
70 | def _get_renderer(self, global_renderer='rst', request=None): |
|
70 | def _get_renderer(self, global_renderer='rst', request=None): | |
71 | request = request or get_current_request() |
|
71 | request = request or get_current_request() | |
72 |
|
72 | |||
73 | try: |
|
73 | try: | |
74 | global_renderer = request.call_context.visual.default_renderer |
|
74 | global_renderer = request.call_context.visual.default_renderer | |
75 | except AttributeError: |
|
75 | except AttributeError: | |
76 | log.debug("Renderer not set, falling back " |
|
76 | log.debug("Renderer not set, falling back " | |
77 | "to default renderer '%s'", global_renderer) |
|
77 | "to default renderer '%s'", global_renderer) | |
78 | except Exception: |
|
78 | except Exception: | |
79 | log.error(traceback.format_exc()) |
|
79 | log.error(traceback.format_exc()) | |
80 | return global_renderer |
|
80 | return global_renderer | |
81 |
|
81 | |||
82 | def aggregate_comments(self, comments, versions, show_version, inline=False): |
|
82 | def aggregate_comments(self, comments, versions, show_version, inline=False): | |
83 | # group by versions, and count until, and display objects |
|
83 | # group by versions, and count until, and display objects | |
84 |
|
84 | |||
85 | comment_groups = collections.defaultdict(list) |
|
85 | comment_groups = collections.defaultdict(list) | |
86 | [comment_groups[ |
|
86 | [comment_groups[ | |
87 | _co.pull_request_version_id].append(_co) for _co in comments] |
|
87 | _co.pull_request_version_id].append(_co) for _co in comments] | |
88 |
|
88 | |||
89 | def yield_comments(pos): |
|
89 | def yield_comments(pos): | |
90 | for co in comment_groups[pos]: |
|
90 | for co in comment_groups[pos]: | |
91 | yield co |
|
91 | yield co | |
92 |
|
92 | |||
93 | comment_versions = collections.defaultdict( |
|
93 | comment_versions = collections.defaultdict( | |
94 | lambda: collections.defaultdict(list)) |
|
94 | lambda: collections.defaultdict(list)) | |
95 | prev_prvid = -1 |
|
95 | prev_prvid = -1 | |
96 | # fake last entry with None, to aggregate on "latest" version which |
|
96 | # fake last entry with None, to aggregate on "latest" version which | |
97 | # doesn't have a pull_request_version_id |

97 | # doesn't have a pull_request_version_id |
98 | for ver in versions + [AttributeDict({'pull_request_version_id': None})]: |
|
98 | for ver in versions + [AttributeDict({'pull_request_version_id': None})]: | |
99 | prvid = ver.pull_request_version_id |
|
99 | prvid = ver.pull_request_version_id | |
100 | if prev_prvid == -1: |
|
100 | if prev_prvid == -1: | |
101 | prev_prvid = prvid |
|
101 | prev_prvid = prvid | |
102 |
|
102 | |||
103 | for co in yield_comments(prvid): |
|
103 | for co in yield_comments(prvid): | |
104 | comment_versions[prvid]['at'].append(co) |
|
104 | comment_versions[prvid]['at'].append(co) | |
105 |
|
105 | |||
106 | # save until |
|
106 | # save until | |
107 | current = comment_versions[prvid]['at'] |
|
107 | current = comment_versions[prvid]['at'] | |
108 | prev_until = comment_versions[prev_prvid]['until'] |
|
108 | prev_until = comment_versions[prev_prvid]['until'] | |
109 | cur_until = prev_until + current |
|
109 | cur_until = prev_until + current | |
110 | comment_versions[prvid]['until'].extend(cur_until) |
|
110 | comment_versions[prvid]['until'].extend(cur_until) | |
111 |
|
111 | |||
112 | # save outdated |
|
112 | # save outdated | |
113 | if inline: |
|
113 | if inline: | |
114 | outdated = [x for x in cur_until |
|
114 | outdated = [x for x in cur_until | |
115 | if x.outdated_at_version(show_version)] |
|
115 | if x.outdated_at_version(show_version)] | |
116 | else: |
|
116 | else: | |
117 | outdated = [x for x in cur_until |
|
117 | outdated = [x for x in cur_until | |
118 | if x.older_than_version(show_version)] |
|
118 | if x.older_than_version(show_version)] | |
119 | display = [x for x in cur_until if x not in outdated] |
|
119 | display = [x for x in cur_until if x not in outdated] | |
120 |
|
120 | |||
121 | comment_versions[prvid]['outdated'] = outdated |
|
121 | comment_versions[prvid]['outdated'] = outdated | |
122 | comment_versions[prvid]['display'] = display |
|
122 | comment_versions[prvid]['display'] = display | |
123 |
|
123 | |||
124 | prev_prvid = prvid |
|
124 | prev_prvid = prvid | |
125 |
|
125 | |||
126 | return comment_versions |
|
126 | return comment_versions | |
127 |
|
127 | |||
128 | def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None): |
|
128 | def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None): | |
129 | qry = Session().query(ChangesetComment) \ |
|
129 | qry = Session().query(ChangesetComment) \ | |
130 | .filter(ChangesetComment.repo == repo) |
|
130 | .filter(ChangesetComment.repo == repo) | |
131 |
|
131 | |||
132 | if comment_type and comment_type in ChangesetComment.COMMENT_TYPES: |
|
132 | if comment_type and comment_type in ChangesetComment.COMMENT_TYPES: | |
133 | qry = qry.filter(ChangesetComment.comment_type == comment_type) |
|
133 | qry = qry.filter(ChangesetComment.comment_type == comment_type) | |
134 |
|
134 | |||
135 | if user: |
|
135 | if user: | |
136 | user = self._get_user(user) |
|
136 | user = self._get_user(user) | |
137 | if user: |
|
137 | if user: | |
138 | qry = qry.filter(ChangesetComment.user_id == user.user_id) |
|
138 | qry = qry.filter(ChangesetComment.user_id == user.user_id) | |
139 |
|
139 | |||
140 | if commit_id: |
|
140 | if commit_id: | |
141 | qry = qry.filter(ChangesetComment.revision == commit_id) |
|
141 | qry = qry.filter(ChangesetComment.revision == commit_id) | |
142 |
|
142 | |||
143 | qry = qry.order_by(ChangesetComment.created_on) |
|
143 | qry = qry.order_by(ChangesetComment.created_on) | |
144 | return qry.all() |
|
144 | return qry.all() | |
145 |
|
145 | |||
146 | def get_repository_unresolved_todos(self, repo): |
|
146 | def get_repository_unresolved_todos(self, repo): | |
147 | todos = Session().query(ChangesetComment) \ |
|
147 | todos = Session().query(ChangesetComment) \ | |
148 | .filter(ChangesetComment.repo == repo) \ |
|
148 | .filter(ChangesetComment.repo == repo) \ | |
149 | .filter(ChangesetComment.resolved_by == None) \ |
|
149 | .filter(ChangesetComment.resolved_by == None) \ | |
150 | .filter(ChangesetComment.comment_type |
|
150 | .filter(ChangesetComment.comment_type | |
151 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
151 | == ChangesetComment.COMMENT_TYPE_TODO) | |
152 | todos = todos.all() |
|
152 | todos = todos.all() | |
153 |
|
153 | |||
154 | return todos |
|
154 | return todos | |
155 |
|
155 | |||
156 | def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True): |
|
156 | def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True): | |
157 |
|
157 | |||
158 | todos = Session().query(ChangesetComment) \ |
|
158 | todos = Session().query(ChangesetComment) \ | |
159 | .filter(ChangesetComment.pull_request == pull_request) \ |
|
159 | .filter(ChangesetComment.pull_request == pull_request) \ | |
160 | .filter(ChangesetComment.resolved_by == None) \ |
|
160 | .filter(ChangesetComment.resolved_by == None) \ | |
161 | .filter(ChangesetComment.comment_type |
|
161 | .filter(ChangesetComment.comment_type | |
162 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
162 | == ChangesetComment.COMMENT_TYPE_TODO) | |
163 |
|
163 | |||
164 | if not show_outdated: |
|
164 | if not show_outdated: | |
165 | todos = todos.filter( |
|
165 | todos = todos.filter( | |
166 | coalesce(ChangesetComment.display_state, '') != |
|
166 | coalesce(ChangesetComment.display_state, '') != | |
167 | ChangesetComment.COMMENT_OUTDATED) |
|
167 | ChangesetComment.COMMENT_OUTDATED) | |
168 |
|
168 | |||
169 | todos = todos.all() |
|
169 | todos = todos.all() | |
170 |
|
170 | |||
171 | return todos |
|
171 | return todos | |
172 |
|
172 | |||
173 | def get_pull_request_resolved_todos(self, pull_request, show_outdated=True): |
|
173 | def get_pull_request_resolved_todos(self, pull_request, show_outdated=True): | |
174 |
|
174 | |||
175 | todos = Session().query(ChangesetComment) \ |
|
175 | todos = Session().query(ChangesetComment) \ | |
176 | .filter(ChangesetComment.pull_request == pull_request) \ |
|
176 | .filter(ChangesetComment.pull_request == pull_request) \ | |
177 | .filter(ChangesetComment.resolved_by != None) \ |
|
177 | .filter(ChangesetComment.resolved_by != None) \ | |
178 | .filter(ChangesetComment.comment_type |
|
178 | .filter(ChangesetComment.comment_type | |
179 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
179 | == ChangesetComment.COMMENT_TYPE_TODO) | |
180 |
|
180 | |||
181 | if not show_outdated: |
|
181 | if not show_outdated: | |
182 | todos = todos.filter( |
|
182 | todos = todos.filter( | |
183 | coalesce(ChangesetComment.display_state, '') != |
|
183 | coalesce(ChangesetComment.display_state, '') != | |
184 | ChangesetComment.COMMENT_OUTDATED) |
|
184 | ChangesetComment.COMMENT_OUTDATED) | |
185 |
|
185 | |||
186 | todos = todos.all() |
|
186 | todos = todos.all() | |
187 |
|
187 | |||
188 | return todos |
|
188 | return todos | |
189 |
|
189 | |||
190 | def get_commit_unresolved_todos(self, commit_id, show_outdated=True): |
|
190 | def get_commit_unresolved_todos(self, commit_id, show_outdated=True): | |
191 |
|
191 | |||
192 | todos = Session().query(ChangesetComment) \ |
|
192 | todos = Session().query(ChangesetComment) \ | |
193 | .filter(ChangesetComment.revision == commit_id) \ |
|
193 | .filter(ChangesetComment.revision == commit_id) \ | |
194 | .filter(ChangesetComment.resolved_by == None) \ |
|
194 | .filter(ChangesetComment.resolved_by == None) \ | |
195 | .filter(ChangesetComment.comment_type |
|
195 | .filter(ChangesetComment.comment_type | |
196 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
196 | == ChangesetComment.COMMENT_TYPE_TODO) | |
197 |
|
197 | |||
198 | if not show_outdated: |
|
198 | if not show_outdated: | |
199 | todos = todos.filter( |
|
199 | todos = todos.filter( | |
200 | coalesce(ChangesetComment.display_state, '') != |
|
200 | coalesce(ChangesetComment.display_state, '') != | |
201 | ChangesetComment.COMMENT_OUTDATED) |
|
201 | ChangesetComment.COMMENT_OUTDATED) | |
202 |
|
202 | |||
203 | todos = todos.all() |
|
203 | todos = todos.all() | |
204 |
|
204 | |||
205 | return todos |
|
205 | return todos | |
206 |
|
206 | |||
207 | def get_commit_resolved_todos(self, commit_id, show_outdated=True): |
|
207 | def get_commit_resolved_todos(self, commit_id, show_outdated=True): | |
208 |
|
208 | |||
209 | todos = Session().query(ChangesetComment) \ |
|
209 | todos = Session().query(ChangesetComment) \ | |
210 | .filter(ChangesetComment.revision == commit_id) \ |
|
210 | .filter(ChangesetComment.revision == commit_id) \ | |
211 | .filter(ChangesetComment.resolved_by != None) \ |
|
211 | .filter(ChangesetComment.resolved_by != None) \ | |
212 | .filter(ChangesetComment.comment_type |
|
212 | .filter(ChangesetComment.comment_type | |
213 | == ChangesetComment.COMMENT_TYPE_TODO) |
|
213 | == ChangesetComment.COMMENT_TYPE_TODO) | |
214 |
|
214 | |||
215 | if not show_outdated: |
|
215 | if not show_outdated: | |
216 | todos = todos.filter( |
|
216 | todos = todos.filter( | |
217 | coalesce(ChangesetComment.display_state, '') != |
|
217 | coalesce(ChangesetComment.display_state, '') != | |
218 | ChangesetComment.COMMENT_OUTDATED) |
|
218 | ChangesetComment.COMMENT_OUTDATED) | |
219 |
|
219 | |||
220 | todos = todos.all() |
|
220 | todos = todos.all() | |
221 |
|
221 | |||
222 | return todos |
|
222 | return todos | |
223 |
|
223 | |||
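The TODO queries above all return plain lists of ChangesetComment objects. A minimal sketch of how they might be consumed, e.g. to gate a merge on open TODOs (the pull_request object here is hypothetical):

    # unresolved TODO comments, ignoring ones already marked as outdated
    unresolved = CommentsModel().get_pull_request_unresolved_todos(
        pull_request, show_outdated=False)
    resolved = CommentsModel().get_pull_request_resolved_todos(pull_request)

    # e.g. only allow a merge once every TODO comment has been resolved
    todos_done = not unresolved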
224 | def _log_audit_action(self, action, action_data, auth_user, comment): |
|
224 | def _log_audit_action(self, action, action_data, auth_user, comment): | |
225 | audit_logger.store( |
|
225 | audit_logger.store( | |
226 | action=action, |
|
226 | action=action, | |
227 | action_data=action_data, |
|
227 | action_data=action_data, | |
228 | user=auth_user, |
|
228 | user=auth_user, | |
229 | repo=comment.repo) |
|
229 | repo=comment.repo) | |
230 |
|
230 | |||
231 | def create(self, text, repo, user, commit_id=None, pull_request=None, |
|
231 | def create(self, text, repo, user, commit_id=None, pull_request=None, | |
232 | f_path=None, line_no=None, status_change=None, |
|
232 | f_path=None, line_no=None, status_change=None, | |
233 | status_change_type=None, comment_type=None, |
|
233 | status_change_type=None, comment_type=None, | |
234 | resolves_comment_id=None, closing_pr=False, send_email=True, |
|
234 | resolves_comment_id=None, closing_pr=False, send_email=True, | |
235 | renderer=None, auth_user=None): |
|
235 | renderer=None, auth_user=None): | |
236 | """ |
|
236 | """ | |
237 | Creates a new comment for a commit or a pull request. |

237 | Creates a new comment for a commit or a pull request. | |
238 | If status_change is not None, this comment is associated with a |

238 | If status_change is not None, this comment is associated with a | |
239 | status change of the commit, or of a commit associated with the pull request. |

239 | status change of the commit, or of a commit associated with the pull request. | |
240 |
|
240 | |||
241 | :param text: |
|
241 | :param text: | |
242 | :param repo: |
|
242 | :param repo: | |
243 | :param user: |
|
243 | :param user: | |
244 | :param commit_id: |
|
244 | :param commit_id: | |
245 | :param pull_request: |
|
245 | :param pull_request: | |
246 | :param f_path: |
|
246 | :param f_path: | |
247 | :param line_no: |
|
247 | :param line_no: | |
248 | :param status_change: Label for status change |
|
248 | :param status_change: Label for status change | |
249 | :param comment_type: Type of comment |
|
249 | :param comment_type: Type of comment | |
250 | :param status_change_type: type of status change |
|
250 | :param status_change_type: type of status change | |
251 | :param closing_pr: |
|
251 | :param closing_pr: | |
252 | :param send_email: |
|
252 | :param send_email: | |
253 | :param renderer: pick renderer for this comment |
|
253 | :param renderer: pick renderer for this comment | |
254 | """ |
|
254 | """ | |
255 |
|
255 | |||
256 | if not text: |
|
256 | if not text: | |
257 | log.warning('Missing text for comment, skipping...') |
|
257 | log.warning('Missing text for comment, skipping...') | |
258 | return |
|
258 | return | |
259 | request = get_current_request() |
|
259 | request = get_current_request() | |
260 | _ = request.translate |
|
260 | _ = request.translate | |
261 |
|
261 | |||
262 | if not renderer: |
|
262 | if not renderer: | |
263 | renderer = self._get_renderer(request=request) |
|
263 | renderer = self._get_renderer(request=request) | |
264 |
|
264 | |||
265 | repo = self._get_repo(repo) |
|
265 | repo = self._get_repo(repo) | |
266 | user = self._get_user(user) |
|
266 | user = self._get_user(user) | |
267 | auth_user = auth_user or user |
|
267 | auth_user = auth_user or user | |
268 |
|
268 | |||
269 | schema = comment_schema.CommentSchema() |
|
269 | schema = comment_schema.CommentSchema() | |
270 | validated_kwargs = schema.deserialize(dict( |
|
270 | validated_kwargs = schema.deserialize(dict( | |
271 | comment_body=text, |
|
271 | comment_body=text, | |
272 | comment_type=comment_type, |
|
272 | comment_type=comment_type, | |
273 | comment_file=f_path, |
|
273 | comment_file=f_path, | |
274 | comment_line=line_no, |
|
274 | comment_line=line_no, | |
275 | renderer_type=renderer, |
|
275 | renderer_type=renderer, | |
276 | status_change=status_change_type, |
|
276 | status_change=status_change_type, | |
277 | resolves_comment_id=resolves_comment_id, |
|
277 | resolves_comment_id=resolves_comment_id, | |
278 | repo=repo.repo_id, |
|
278 | repo=repo.repo_id, | |
279 | user=user.user_id, |
|
279 | user=user.user_id, | |
280 | )) |
|
280 | )) | |
281 |
|
281 | |||
282 | comment = ChangesetComment() |
|
282 | comment = ChangesetComment() | |
283 | comment.renderer = validated_kwargs['renderer_type'] |
|
283 | comment.renderer = validated_kwargs['renderer_type'] | |
284 | comment.text = validated_kwargs['comment_body'] |
|
284 | comment.text = validated_kwargs['comment_body'] | |
285 | comment.f_path = validated_kwargs['comment_file'] |
|
285 | comment.f_path = validated_kwargs['comment_file'] | |
286 | comment.line_no = validated_kwargs['comment_line'] |
|
286 | comment.line_no = validated_kwargs['comment_line'] | |
287 | comment.comment_type = validated_kwargs['comment_type'] |
|
287 | comment.comment_type = validated_kwargs['comment_type'] | |
288 |
|
288 | |||
289 | comment.repo = repo |
|
289 | comment.repo = repo | |
290 | comment.author = user |
|
290 | comment.author = user | |
291 | resolved_comment = self.__get_commit_comment( |
|
291 | resolved_comment = self.__get_commit_comment( | |
292 | validated_kwargs['resolves_comment_id']) |
|
292 | validated_kwargs['resolves_comment_id']) | |
293 | # check if the comment actually belongs to this PR |
|
293 | # check if the comment actually belongs to this PR | |
294 | if resolved_comment and resolved_comment.pull_request and \ |
|
294 | if resolved_comment and resolved_comment.pull_request and \ | |
295 | resolved_comment.pull_request != pull_request: |
|
295 | resolved_comment.pull_request != pull_request: | |
296 | log.warning('Comment tried to resolve an unrelated todo comment: %s', |

296 | log.warning('Comment tried to resolve an unrelated todo comment: %s', | |
297 | resolved_comment) |
|
297 | resolved_comment) | |
298 | # comment not bound to this pull request, forbid |
|
298 | # comment not bound to this pull request, forbid | |
299 | resolved_comment = None |
|
299 | resolved_comment = None | |
300 |
|
300 | |||
301 | elif resolved_comment and resolved_comment.repo and \ |
|
301 | elif resolved_comment and resolved_comment.repo and \ | |
302 | resolved_comment.repo != repo: |
|
302 | resolved_comment.repo != repo: | |
303 | log.warning('Comment tried to resolve an unrelated todo comment: %s', |

303 | log.warning('Comment tried to resolve an unrelated todo comment: %s', | |
304 | resolved_comment) |
|
304 | resolved_comment) | |
305 | # comment not bound to this repo, forbid |
|
305 | # comment not bound to this repo, forbid | |
306 | resolved_comment = None |
|
306 | resolved_comment = None | |
307 |
|
307 | |||
308 | comment.resolved_comment = resolved_comment |
|
308 | comment.resolved_comment = resolved_comment | |
309 |
|
309 | |||
310 | pull_request_id = pull_request |
|
310 | pull_request_id = pull_request | |
311 |
|
311 | |||
312 | commit_obj = None |
|
312 | commit_obj = None | |
313 | pull_request_obj = None |
|
313 | pull_request_obj = None | |
314 |
|
314 | |||
315 | if commit_id: |
|
315 | if commit_id: | |
316 | notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT |
|
316 | notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT | |
317 | # do a lookup, so we don't pass something bad here |
|
317 | # do a lookup, so we don't pass something bad here | |
318 | commit_obj = repo.scm_instance().get_commit(commit_id=commit_id) |
|
318 | commit_obj = repo.scm_instance().get_commit(commit_id=commit_id) | |
319 | comment.revision = commit_obj.raw_id |
|
319 | comment.revision = commit_obj.raw_id | |
320 |
|
320 | |||
321 | elif pull_request_id: |
|
321 | elif pull_request_id: | |
322 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT |
|
322 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT | |
323 | pull_request_obj = self.__get_pull_request(pull_request_id) |
|
323 | pull_request_obj = self.__get_pull_request(pull_request_id) | |
324 | comment.pull_request = pull_request_obj |
|
324 | comment.pull_request = pull_request_obj | |
325 | else: |
|
325 | else: | |
326 | raise Exception('Please specify commit or pull_request_id') |
|
326 | raise Exception('Please specify commit or pull_request_id') | |
327 |
|
327 | |||
328 | Session().add(comment) |
|
328 | Session().add(comment) | |
329 | Session().flush() |
|
329 | Session().flush() | |
330 | kwargs = { |
|
330 | kwargs = { | |
331 | 'user': user, |
|
331 | 'user': user, | |
332 | 'renderer_type': renderer, |
|
332 | 'renderer_type': renderer, | |
333 | 'repo_name': repo.repo_name, |
|
333 | 'repo_name': repo.repo_name, | |
334 | 'status_change': status_change, |
|
334 | 'status_change': status_change, | |
335 | 'status_change_type': status_change_type, |
|
335 | 'status_change_type': status_change_type, | |
336 | 'comment_body': text, |
|
336 | 'comment_body': text, | |
337 | 'comment_file': f_path, |
|
337 | 'comment_file': f_path, | |
338 | 'comment_line': line_no, |
|
338 | 'comment_line': line_no, | |
339 | 'comment_type': comment_type or 'note' |
|
339 | 'comment_type': comment_type or 'note' | |
340 | } |
|
340 | } | |
341 |
|
341 | |||
342 | if commit_obj: |
|
342 | if commit_obj: | |
343 | recipients = ChangesetComment.get_users( |
|
343 | recipients = ChangesetComment.get_users( | |
344 | revision=commit_obj.raw_id) |
|
344 | revision=commit_obj.raw_id) | |
345 | # add commit author if it's in RhodeCode system |
|
345 | # add commit author if it's in RhodeCode system | |
346 | cs_author = User.get_from_cs_author(commit_obj.author) |
|
346 | cs_author = User.get_from_cs_author(commit_obj.author) | |
347 | if not cs_author: |
|
347 | if not cs_author: | |
348 | # use repo owner if we cannot extract the author correctly |
|
348 | # use repo owner if we cannot extract the author correctly | |
349 | cs_author = repo.user |
|
349 | cs_author = repo.user | |
350 | recipients += [cs_author] |
|
350 | recipients += [cs_author] | |
351 |
|
351 | |||
352 | commit_comment_url = self.get_url(comment, request=request) |
|
352 | commit_comment_url = self.get_url(comment, request=request) | |
353 |
|
353 | |||
354 | target_repo_url = h.link_to( |
|
354 | target_repo_url = h.link_to( | |
355 | repo.repo_name, |
|
355 | repo.repo_name, | |
356 | h.route_url('repo_summary', repo_name=repo.repo_name)) |
|
356 | h.route_url('repo_summary', repo_name=repo.repo_name)) | |
357 |
|
357 | |||
358 | # commit specifics |
|
358 | # commit specifics | |
359 | kwargs.update({ |
|
359 | kwargs.update({ | |
360 | 'commit': commit_obj, |
|
360 | 'commit': commit_obj, | |
361 | 'commit_message': commit_obj.message, |
|
361 | 'commit_message': commit_obj.message, | |
362 | 'commit_target_repo': target_repo_url, |
|
362 | 'commit_target_repo_url': target_repo_url, | |
363 | 'commit_comment_url': commit_comment_url, |
|
363 | 'commit_comment_url': commit_comment_url, | |
364 | }) |
|
364 | }) | |
365 |
|
365 | |||
366 | elif pull_request_obj: |
|
366 | elif pull_request_obj: | |
367 | # get the current participants of this pull request |
|
367 | # get the current participants of this pull request | |
368 | recipients = ChangesetComment.get_users( |
|
368 | recipients = ChangesetComment.get_users( | |
369 | pull_request_id=pull_request_obj.pull_request_id) |
|
369 | pull_request_id=pull_request_obj.pull_request_id) | |
370 | # add pull request author |
|
370 | # add pull request author | |
371 | recipients += [pull_request_obj.author] |
|
371 | recipients += [pull_request_obj.author] | |
372 |
|
372 | |||
373 | # add the reviewers to notification |
|
373 | # add the reviewers to notification | |
374 | recipients += [x.user for x in pull_request_obj.reviewers] |
|
374 | recipients += [x.user for x in pull_request_obj.reviewers] | |
375 |
|
375 | |||
376 | pr_target_repo = pull_request_obj.target_repo |
|
376 | pr_target_repo = pull_request_obj.target_repo | |
377 | pr_source_repo = pull_request_obj.source_repo |
|
377 | pr_source_repo = pull_request_obj.source_repo | |
378 |
|
378 | |||
379 | pr_comment_url = h.route_url( |
|
379 | pr_comment_url = h.route_url( | |
380 | 'pullrequest_show', |
|
380 | 'pullrequest_show', | |
381 | repo_name=pr_target_repo.repo_name, |
|
381 | repo_name=pr_target_repo.repo_name, | |
382 | pull_request_id=pull_request_obj.pull_request_id, |
|
382 | pull_request_id=pull_request_obj.pull_request_id, | |
383 | _anchor='comment-%s' % comment.comment_id) |
|
383 | _anchor='comment-%s' % comment.comment_id) | |
384 |
|
384 | |||
|
385 | pr_url = h.route_url( | |||
|
386 | 'pullrequest_show', | |||
|
387 | repo_name=pr_target_repo.repo_name, | |||
|
388 | pull_request_id=pull_request_obj.pull_request_id, ) | |||
|
389 | ||||
385 | # set some variables for email notification |
|
390 | # set some variables for email notification | |
386 | pr_target_repo_url = h.route_url( |
|
391 | pr_target_repo_url = h.route_url( | |
387 | 'repo_summary', repo_name=pr_target_repo.repo_name) |
|
392 | 'repo_summary', repo_name=pr_target_repo.repo_name) | |
388 |
|
393 | |||
389 | pr_source_repo_url = h.route_url( |
|
394 | pr_source_repo_url = h.route_url( | |
390 | 'repo_summary', repo_name=pr_source_repo.repo_name) |
|
395 | 'repo_summary', repo_name=pr_source_repo.repo_name) | |
391 |
|
396 | |||
392 | # pull request specifics |
|
397 | # pull request specifics | |
393 | kwargs.update({ |
|
398 | kwargs.update({ | |
394 | 'pull_request': pull_request_obj, |
|
399 | 'pull_request': pull_request_obj, | |
395 | 'pr_id': pull_request_obj.pull_request_id, |
|
400 | 'pr_id': pull_request_obj.pull_request_id, | |
396 | 'pr_target_repo': pr_target_repo, |

401 | 'pull_request_url': pr_url, | |
397 | 'pr_target_repo_url': pr_target_repo_url, |

402 | 'pull_request_target_repo': pr_target_repo, | |
398 | 'pr_source_repo': pr_source_repo, |

403 | 'pull_request_target_repo_url': pr_target_repo_url, | |
399 | 'pr_source_repo_url': pr_source_repo_url, |

404 | 'pull_request_source_repo': pr_source_repo, | |

405 | 'pull_request_source_repo_url': pr_source_repo_url, | |
400 | 'pr_comment_url': pr_comment_url, |
|
406 | 'pr_comment_url': pr_comment_url, | |
401 | 'pr_closing': closing_pr, |
|
407 | 'pr_closing': closing_pr, | |
402 | }) |
|
408 | }) | |
403 | if send_email: |
|
409 | if send_email: | |
404 | # pre-generate the subject for notification itself |
|
410 | # pre-generate the subject for notification itself | |
405 | (subject, |
|
411 | (subject, | |
406 | _h, _e, # we don't care about those |
|
412 | _h, _e, # we don't care about those | |
407 | body_plaintext) = EmailNotificationModel().render_email( |
|
413 | body_plaintext) = EmailNotificationModel().render_email( | |
408 | notification_type, **kwargs) |
|
414 | notification_type, **kwargs) | |
409 |
|
415 | |||
410 | mention_recipients = set( |
|
416 | mention_recipients = set( | |
411 | self._extract_mentions(text)).difference(recipients) |
|
417 | self._extract_mentions(text)).difference(recipients) | |
412 |
|
418 | |||
413 | # create notification objects, and emails |
|
419 | # create notification objects, and emails | |
414 | NotificationModel().create( |
|
420 | NotificationModel().create( | |
415 | created_by=user, |
|
421 | created_by=user, | |
416 | notification_subject=subject, |
|
422 | notification_subject=subject, | |
417 | notification_body=body_plaintext, |
|
423 | notification_body=body_plaintext, | |
418 | notification_type=notification_type, |
|
424 | notification_type=notification_type, | |
419 | recipients=recipients, |
|
425 | recipients=recipients, | |
420 | mention_recipients=mention_recipients, |
|
426 | mention_recipients=mention_recipients, | |
421 | email_kwargs=kwargs, |
|
427 | email_kwargs=kwargs, | |
422 | ) |
|
428 | ) | |
423 |
|
429 | |||
424 | Session().flush() |
|
430 | Session().flush() | |
425 | if comment.pull_request: |
|
431 | if comment.pull_request: | |
426 | action = 'repo.pull_request.comment.create' |
|
432 | action = 'repo.pull_request.comment.create' | |
427 | else: |
|
433 | else: | |
428 | action = 'repo.commit.comment.create' |
|
434 | action = 'repo.commit.comment.create' | |
429 |
|
435 | |||
430 | comment_data = comment.get_api_data() |
|
436 | comment_data = comment.get_api_data() | |
431 | self._log_audit_action( |
|
437 | self._log_audit_action( | |
432 | action, {'data': comment_data}, auth_user, comment) |
|
438 | action, {'data': comment_data}, auth_user, comment) | |
433 |
|
439 | |||
434 | msg_url = '' |
|
440 | msg_url = '' | |
435 | channel = None |
|
441 | channel = None | |
436 | if commit_obj: |
|
442 | if commit_obj: | |
437 | msg_url = commit_comment_url |
|
443 | msg_url = commit_comment_url | |
438 | repo_name = repo.repo_name |
|
444 | repo_name = repo.repo_name | |
439 | channel = u'/repo${}$/commit/{}'.format( |
|
445 | channel = u'/repo${}$/commit/{}'.format( | |
440 | repo_name, |
|
446 | repo_name, | |
441 | commit_obj.raw_id |
|
447 | commit_obj.raw_id | |
442 | ) |
|
448 | ) | |
443 | elif pull_request_obj: |
|
449 | elif pull_request_obj: | |
444 | msg_url = pr_comment_url |
|
450 | msg_url = pr_comment_url | |
445 | repo_name = pr_target_repo.repo_name |
|
451 | repo_name = pr_target_repo.repo_name | |
446 | channel = u'/repo${}$/pr/{}'.format( |
|
452 | channel = u'/repo${}$/pr/{}'.format( | |
447 | repo_name, |
|
453 | repo_name, | |
448 | pull_request_id |
|
454 | pull_request_id | |
449 | ) |
|
455 | ) | |
450 |
|
456 | |||
451 | message = '<strong>{}</strong> {} - ' \ |
|
457 | message = '<strong>{}</strong> {} - ' \ | |
452 | '<a onclick="window.location=\'{}\';' \ |
|
458 | '<a onclick="window.location=\'{}\';' \ | |
453 | 'window.location.reload()">' \ |
|
459 | 'window.location.reload()">' \ | |
454 | '<strong>{}</strong></a>' |
|
460 | '<strong>{}</strong></a>' | |
455 | message = message.format( |
|
461 | message = message.format( | |
456 | user.username, _('made a comment'), msg_url, |
|
462 | user.username, _('made a comment'), msg_url, | |
457 | _('Show it now')) |
|
463 | _('Show it now')) | |
458 |
|
464 | |||
459 | channelstream.post_message( |
|
465 | channelstream.post_message( | |
460 | channel, message, user.username, |
|
466 | channel, message, user.username, | |
461 | registry=get_current_registry()) |
|
467 | registry=get_current_registry()) | |
462 |
|
468 | |||
463 | return comment |
|
469 | return comment | |
464 |
|
470 | |||
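A minimal usage sketch of the create() call defined above, for a single commit comment; the repository name, username and commit hash are hypothetical, and email delivery is skipped:

    from rhodecode.model.comment import CommentsModel

    comment = CommentsModel().create(
        text='Looks good, but please add a regression test.',
        repo='hypothetical-repo',      # resolved via self._get_repo()
        user='admin',                  # resolved via self._get_user()
        commit_id='abcdef0123456789',  # hypothetical commit hash
        comment_type='note',
        send_email=False)

As the code above shows, create() returns None when text is empty, and raises when neither commit_id nor a pull request is given.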
465 | def delete(self, comment, auth_user): |
|
471 | def delete(self, comment, auth_user): | |
466 | """ |
|
472 | """ | |
467 | Deletes given comment |
|
473 | Deletes given comment | |
468 | """ |
|
474 | """ | |
469 | comment = self.__get_commit_comment(comment) |
|
475 | comment = self.__get_commit_comment(comment) | |
470 | old_data = comment.get_api_data() |
|
476 | old_data = comment.get_api_data() | |
471 | Session().delete(comment) |
|
477 | Session().delete(comment) | |
472 |
|
478 | |||
473 | if comment.pull_request: |
|
479 | if comment.pull_request: | |
474 | action = 'repo.pull_request.comment.delete' |
|
480 | action = 'repo.pull_request.comment.delete' | |
475 | else: |
|
481 | else: | |
476 | action = 'repo.commit.comment.delete' |
|
482 | action = 'repo.commit.comment.delete' | |
477 |
|
483 | |||
478 | self._log_audit_action( |
|
484 | self._log_audit_action( | |
479 | action, {'old_data': old_data}, auth_user, comment) |
|
485 | action, {'old_data': old_data}, auth_user, comment) | |
480 |
|
486 | |||
481 | return comment |
|
487 | return comment | |
482 |
|
488 | |||
483 | def get_all_comments(self, repo_id, revision=None, pull_request=None): |
|
489 | def get_all_comments(self, repo_id, revision=None, pull_request=None): | |
484 | q = ChangesetComment.query()\ |
|
490 | q = ChangesetComment.query()\ | |
485 | .filter(ChangesetComment.repo_id == repo_id) |
|
491 | .filter(ChangesetComment.repo_id == repo_id) | |
486 | if revision: |
|
492 | if revision: | |
487 | q = q.filter(ChangesetComment.revision == revision) |
|
493 | q = q.filter(ChangesetComment.revision == revision) | |
488 | elif pull_request: |
|
494 | elif pull_request: | |
489 | pull_request = self.__get_pull_request(pull_request) |
|
495 | pull_request = self.__get_pull_request(pull_request) | |
490 | q = q.filter(ChangesetComment.pull_request == pull_request) |
|
496 | q = q.filter(ChangesetComment.pull_request == pull_request) | |
491 | else: |
|
497 | else: | |
492 | raise Exception('Please specify commit or pull_request') |
|
498 | raise Exception('Please specify commit or pull_request') | |
493 | q = q.order_by(ChangesetComment.created_on) |
|
499 | q = q.order_by(ChangesetComment.created_on) | |
494 | return q.all() |
|
500 | return q.all() | |
495 |
|
501 | |||
496 | def get_url(self, comment, request=None, permalink=False): |
|
502 | def get_url(self, comment, request=None, permalink=False): | |
497 | if not request: |
|
503 | if not request: | |
498 | request = get_current_request() |
|
504 | request = get_current_request() | |
499 |
|
505 | |||
500 | comment = self.__get_commit_comment(comment) |
|
506 | comment = self.__get_commit_comment(comment) | |
501 | if comment.pull_request: |
|
507 | if comment.pull_request: | |
502 | pull_request = comment.pull_request |
|
508 | pull_request = comment.pull_request | |
503 | if permalink: |
|
509 | if permalink: | |
504 | return request.route_url( |
|
510 | return request.route_url( | |
505 | 'pull_requests_global', |
|
511 | 'pull_requests_global', | |
506 | pull_request_id=pull_request.pull_request_id, |
|
512 | pull_request_id=pull_request.pull_request_id, | |
507 | _anchor='comment-%s' % comment.comment_id) |
|
513 | _anchor='comment-%s' % comment.comment_id) | |
508 | else: |
|
514 | else: | |
509 | return request.route_url( |
|
515 | return request.route_url( | |
510 | 'pullrequest_show', |
|
516 | 'pullrequest_show', | |
511 | repo_name=safe_str(pull_request.target_repo.repo_name), |
|
517 | repo_name=safe_str(pull_request.target_repo.repo_name), | |
512 | pull_request_id=pull_request.pull_request_id, |
|
518 | pull_request_id=pull_request.pull_request_id, | |
513 | _anchor='comment-%s' % comment.comment_id) |
|
519 | _anchor='comment-%s' % comment.comment_id) | |
514 |
|
520 | |||
515 | else: |
|
521 | else: | |
516 | repo = comment.repo |
|
522 | repo = comment.repo | |
517 | commit_id = comment.revision |
|
523 | commit_id = comment.revision | |
518 |
|
524 | |||
519 | if permalink: |
|
525 | if permalink: | |
520 | return request.route_url( |
|
526 | return request.route_url( | |
521 | 'repo_commit', repo_name=safe_str(repo.repo_id), |
|
527 | 'repo_commit', repo_name=safe_str(repo.repo_id), | |
522 | commit_id=commit_id, |
|
528 | commit_id=commit_id, | |
523 | _anchor='comment-%s' % comment.comment_id) |
|
529 | _anchor='comment-%s' % comment.comment_id) | |
524 |
|
530 | |||
525 | else: |
|
531 | else: | |
526 | return request.route_url( |
|
532 | return request.route_url( | |
527 | 'repo_commit', repo_name=safe_str(repo.repo_name), |
|
533 | 'repo_commit', repo_name=safe_str(repo.repo_name), | |
528 | commit_id=commit_id, |
|
534 | commit_id=commit_id, | |
529 | _anchor='comment-%s' % comment.comment_id) |
|
535 | _anchor='comment-%s' % comment.comment_id) | |
530 |
|
536 | |||
531 | def get_comments(self, repo_id, revision=None, pull_request=None): |
|
537 | def get_comments(self, repo_id, revision=None, pull_request=None): | |
532 | """ |
|
538 | """ | |
533 | Gets main comments based on revision or pull_request_id |
|
539 | Gets main comments based on revision or pull_request_id | |
534 |
|
540 | |||
535 | :param repo_id: |
|
541 | :param repo_id: | |
536 | :param revision: |
|
542 | :param revision: | |
537 | :param pull_request: |
|
543 | :param pull_request: | |
538 | """ |
|
544 | """ | |
539 |
|
545 | |||
540 | q = ChangesetComment.query()\ |
|
546 | q = ChangesetComment.query()\ | |
541 | .filter(ChangesetComment.repo_id == repo_id)\ |
|
547 | .filter(ChangesetComment.repo_id == repo_id)\ | |
542 | .filter(ChangesetComment.line_no == None)\ |
|
548 | .filter(ChangesetComment.line_no == None)\ | |
543 | .filter(ChangesetComment.f_path == None) |
|
549 | .filter(ChangesetComment.f_path == None) | |
544 | if revision: |
|
550 | if revision: | |
545 | q = q.filter(ChangesetComment.revision == revision) |
|
551 | q = q.filter(ChangesetComment.revision == revision) | |
546 | elif pull_request: |
|
552 | elif pull_request: | |
547 | pull_request = self.__get_pull_request(pull_request) |
|
553 | pull_request = self.__get_pull_request(pull_request) | |
548 | q = q.filter(ChangesetComment.pull_request == pull_request) |
|
554 | q = q.filter(ChangesetComment.pull_request == pull_request) | |
549 | else: |
|
555 | else: | |
550 | raise Exception('Please specify commit or pull_request') |
|
556 | raise Exception('Please specify commit or pull_request') | |
551 | q = q.order_by(ChangesetComment.created_on) |
|
557 | q = q.order_by(ChangesetComment.created_on) | |
552 | return q.all() |
|
558 | return q.all() | |
553 |
|
559 | |||
554 | def get_inline_comments(self, repo_id, revision=None, pull_request=None): |
|
560 | def get_inline_comments(self, repo_id, revision=None, pull_request=None): | |
555 | q = self._get_inline_comments_query(repo_id, revision, pull_request) |
|
561 | q = self._get_inline_comments_query(repo_id, revision, pull_request) | |
556 | return self._group_comments_by_path_and_line_number(q) |
|
562 | return self._group_comments_by_path_and_line_number(q) | |
557 |
|
563 | |||
558 | def get_inline_comments_count(self, inline_comments, skip_outdated=True, |
|
564 | def get_inline_comments_count(self, inline_comments, skip_outdated=True, | |
559 | version=None): |
|
565 | version=None): | |
560 | inline_cnt = 0 |
|
566 | inline_cnt = 0 | |
561 | for fname, per_line_comments in inline_comments.iteritems(): |
|
567 | for fname, per_line_comments in inline_comments.iteritems(): | |
562 | for lno, comments in per_line_comments.iteritems(): |
|
568 | for lno, comments in per_line_comments.iteritems(): | |
563 | for comm in comments: |
|
569 | for comm in comments: | |
564 | if not comm.outdated_at_version(version) and skip_outdated: |
|
570 | if not comm.outdated_at_version(version) and skip_outdated: | |
565 | inline_cnt += 1 |
|
571 | inline_cnt += 1 | |
566 |
|
572 | |||
567 | return inline_cnt |
|
573 | return inline_cnt | |
568 |
|
574 | |||
569 | def get_outdated_comments(self, repo_id, pull_request): |
|
575 | def get_outdated_comments(self, repo_id, pull_request): | |
570 | # TODO: johbo: Remove `repo_id`, it is not needed to find the comments |
|
576 | # TODO: johbo: Remove `repo_id`, it is not needed to find the comments | |
571 | # of a pull request. |
|
577 | # of a pull request. | |
572 | q = self._all_inline_comments_of_pull_request(pull_request) |
|
578 | q = self._all_inline_comments_of_pull_request(pull_request) | |
573 | q = q.filter( |
|
579 | q = q.filter( | |
574 | ChangesetComment.display_state == |
|
580 | ChangesetComment.display_state == | |
575 | ChangesetComment.COMMENT_OUTDATED |
|
581 | ChangesetComment.COMMENT_OUTDATED | |
576 | ).order_by(ChangesetComment.comment_id.asc()) |
|
582 | ).order_by(ChangesetComment.comment_id.asc()) | |
577 |
|
583 | |||
578 | return self._group_comments_by_path_and_line_number(q) |
|
584 | return self._group_comments_by_path_and_line_number(q) | |
579 |
|
585 | |||
580 | def _get_inline_comments_query(self, repo_id, revision, pull_request): |
|
586 | def _get_inline_comments_query(self, repo_id, revision, pull_request): | |
581 | # TODO: johbo: Split this into two methods: One for PR and one for |
|
587 | # TODO: johbo: Split this into two methods: One for PR and one for | |
582 | # commit. |
|
588 | # commit. | |
583 | if revision: |
|
589 | if revision: | |
584 | q = Session().query(ChangesetComment).filter( |
|
590 | q = Session().query(ChangesetComment).filter( | |
585 | ChangesetComment.repo_id == repo_id, |
|
591 | ChangesetComment.repo_id == repo_id, | |
586 | ChangesetComment.line_no != null(), |
|
592 | ChangesetComment.line_no != null(), | |
587 | ChangesetComment.f_path != null(), |
|
593 | ChangesetComment.f_path != null(), | |
588 | ChangesetComment.revision == revision) |
|
594 | ChangesetComment.revision == revision) | |
589 |
|
595 | |||
590 | elif pull_request: |
|
596 | elif pull_request: | |
591 | pull_request = self.__get_pull_request(pull_request) |
|
597 | pull_request = self.__get_pull_request(pull_request) | |
592 | if not CommentsModel.use_outdated_comments(pull_request): |
|
598 | if not CommentsModel.use_outdated_comments(pull_request): | |
593 | q = self._visible_inline_comments_of_pull_request(pull_request) |
|
599 | q = self._visible_inline_comments_of_pull_request(pull_request) | |
594 | else: |
|
600 | else: | |
595 | q = self._all_inline_comments_of_pull_request(pull_request) |
|
601 | q = self._all_inline_comments_of_pull_request(pull_request) | |
596 |
|
602 | |||
597 | else: |
|
603 | else: | |
598 | raise Exception('Please specify commit or pull_request_id') |
|
604 | raise Exception('Please specify commit or pull_request_id') | |
599 | q = q.order_by(ChangesetComment.comment_id.asc()) |
|
605 | q = q.order_by(ChangesetComment.comment_id.asc()) | |
600 | return q |
|
606 | return q | |
601 |
|
607 | |||
602 | def _group_comments_by_path_and_line_number(self, q): |
|
608 | def _group_comments_by_path_and_line_number(self, q): | |
603 | comments = q.all() |
|
609 | comments = q.all() | |
604 | paths = collections.defaultdict(lambda: collections.defaultdict(list)) |
|
610 | paths = collections.defaultdict(lambda: collections.defaultdict(list)) | |
605 | for co in comments: |
|
611 | for co in comments: | |
606 | paths[co.f_path][co.line_no].append(co) |
|
612 | paths[co.f_path][co.line_no].append(co) | |
607 | return paths |
|
613 | return paths | |
608 |
|
614 | |||
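The grouping helper above returns a nested dict keyed by file path and then by the encoded comment line number; a sketch of the resulting shape with hypothetical paths (each leaf is a list of ChangesetComment objects):

    inline = CommentsModel().get_inline_comments(repo_id, revision=commit_id)
    # inline == {'setup.py': {u'n12': [<ChangesetComment>, ...],
    #                         u'o7':  [<ChangesetComment>]},
    #            'docs/index.rst': {u'n3': [<ChangesetComment>]}}
    total = sum(len(comments)
                for per_line in inline.values()
                for comments in per_line.values())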
609 | @classmethod |
|
615 | @classmethod | |
610 | def needed_extra_diff_context(cls): |
|
616 | def needed_extra_diff_context(cls): | |
611 | return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER) |
|
617 | return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER) | |
612 |
|
618 | |||
613 | def outdate_comments(self, pull_request, old_diff_data, new_diff_data): |
|
619 | def outdate_comments(self, pull_request, old_diff_data, new_diff_data): | |
614 | if not CommentsModel.use_outdated_comments(pull_request): |
|
620 | if not CommentsModel.use_outdated_comments(pull_request): | |
615 | return |
|
621 | return | |
616 |
|
622 | |||
617 | comments = self._visible_inline_comments_of_pull_request(pull_request) |
|
623 | comments = self._visible_inline_comments_of_pull_request(pull_request) | |
618 | comments_to_outdate = comments.all() |
|
624 | comments_to_outdate = comments.all() | |
619 |
|
625 | |||
620 | for comment in comments_to_outdate: |
|
626 | for comment in comments_to_outdate: | |
621 | self._outdate_one_comment(comment, old_diff_data, new_diff_data) |
|
627 | self._outdate_one_comment(comment, old_diff_data, new_diff_data) | |
622 |
|
628 | |||
623 | def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc): |
|
629 | def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc): | |
624 | diff_line = _parse_comment_line_number(comment.line_no) |
|
630 | diff_line = _parse_comment_line_number(comment.line_no) | |
625 |
|
631 | |||
626 | try: |
|
632 | try: | |
627 | old_context = old_diff_proc.get_context_of_line( |
|
633 | old_context = old_diff_proc.get_context_of_line( | |
628 | path=comment.f_path, diff_line=diff_line) |
|
634 | path=comment.f_path, diff_line=diff_line) | |
629 | new_context = new_diff_proc.get_context_of_line( |
|
635 | new_context = new_diff_proc.get_context_of_line( | |
630 | path=comment.f_path, diff_line=diff_line) |
|
636 | path=comment.f_path, diff_line=diff_line) | |
631 | except (diffs.LineNotInDiffException, |
|
637 | except (diffs.LineNotInDiffException, | |
632 | diffs.FileNotInDiffException): |
|
638 | diffs.FileNotInDiffException): | |
633 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
639 | comment.display_state = ChangesetComment.COMMENT_OUTDATED | |
634 | return |
|
640 | return | |
635 |
|
641 | |||
636 | if old_context == new_context: |
|
642 | if old_context == new_context: | |
637 | return |
|
643 | return | |
638 |
|
644 | |||
639 | if self._should_relocate_diff_line(diff_line): |
|
645 | if self._should_relocate_diff_line(diff_line): | |
640 | new_diff_lines = new_diff_proc.find_context( |
|
646 | new_diff_lines = new_diff_proc.find_context( | |
641 | path=comment.f_path, context=old_context, |
|
647 | path=comment.f_path, context=old_context, | |
642 | offset=self.DIFF_CONTEXT_BEFORE) |
|
648 | offset=self.DIFF_CONTEXT_BEFORE) | |
643 | if not new_diff_lines: |
|
649 | if not new_diff_lines: | |
644 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
650 | comment.display_state = ChangesetComment.COMMENT_OUTDATED | |
645 | else: |
|
651 | else: | |
646 | new_diff_line = self._choose_closest_diff_line( |
|
652 | new_diff_line = self._choose_closest_diff_line( | |
647 | diff_line, new_diff_lines) |
|
653 | diff_line, new_diff_lines) | |
648 | comment.line_no = _diff_to_comment_line_number(new_diff_line) |
|
654 | comment.line_no = _diff_to_comment_line_number(new_diff_line) | |
649 | else: |
|
655 | else: | |
650 | comment.display_state = ChangesetComment.COMMENT_OUTDATED |
|
656 | comment.display_state = ChangesetComment.COMMENT_OUTDATED | |
651 |
|
657 | |||
652 | def _should_relocate_diff_line(self, diff_line): |
|
658 | def _should_relocate_diff_line(self, diff_line): | |
653 | """ |
|
659 | """ | |
654 | Checks if relocation shall be tried for the given `diff_line`. |
|
660 | Checks if relocation shall be tried for the given `diff_line`. | |
655 |
|
661 | |||
656 | If a comment points at one of the first lines, an update may have added |

662 | If a comment points at one of the first lines, an update may have added | |
657 | another line on top of it. In that case we would still find the old |

663 | another line on top of it. In that case we would still find the old | |
658 | context and move the comment around, which |

664 | context and move the comment around, which | |
659 | would be wrong. |

665 | would be wrong. | |
660 | """ |
|
666 | """ | |
661 | should_relocate = ( |
|
667 | should_relocate = ( | |
662 | (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or |
|
668 | (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or | |
663 | (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE)) |
|
669 | (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE)) | |
664 | return should_relocate |
|
670 | return should_relocate | |
665 |
|
671 | |||
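A small illustration of the relocation guard above, with an assumed DIFF_CONTEXT_BEFORE of 3 (the real constant is defined on the class, outside this hunk):

    # a comment pinned to line 2 of the new file: within the first lines, so
    # _should_relocate_diff_line() returns False and, once its context has
    # changed, the comment is marked outdated instead of being moved
    near_top = diffs.DiffLineNumber(old=None, new=2)

    # a comment further down the file: relocation via find_context() is attempted
    deeper = diffs.DiffLineNumber(old=None, new=40)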
666 | def _choose_closest_diff_line(self, diff_line, new_diff_lines): |
|
672 | def _choose_closest_diff_line(self, diff_line, new_diff_lines): | |
667 | candidate = new_diff_lines[0] |
|
673 | candidate = new_diff_lines[0] | |
668 | best_delta = _diff_line_delta(diff_line, candidate) |
|
674 | best_delta = _diff_line_delta(diff_line, candidate) | |
669 | for new_diff_line in new_diff_lines[1:]: |
|
675 | for new_diff_line in new_diff_lines[1:]: | |
670 | delta = _diff_line_delta(diff_line, new_diff_line) |
|
676 | delta = _diff_line_delta(diff_line, new_diff_line) | |
671 | if delta < best_delta: |
|
677 | if delta < best_delta: | |
672 | candidate = new_diff_line |
|
678 | candidate = new_diff_line | |
673 | best_delta = delta |
|
679 | best_delta = delta | |
674 | return candidate |
|
680 | return candidate | |
675 |
|
681 | |||
676 | def _visible_inline_comments_of_pull_request(self, pull_request): |
|
682 | def _visible_inline_comments_of_pull_request(self, pull_request): | |
677 | comments = self._all_inline_comments_of_pull_request(pull_request) |
|
683 | comments = self._all_inline_comments_of_pull_request(pull_request) | |
678 | comments = comments.filter( |
|
684 | comments = comments.filter( | |
679 | coalesce(ChangesetComment.display_state, '') != |
|
685 | coalesce(ChangesetComment.display_state, '') != | |
680 | ChangesetComment.COMMENT_OUTDATED) |
|
686 | ChangesetComment.COMMENT_OUTDATED) | |
681 | return comments |
|
687 | return comments | |
682 |
|
688 | |||
683 | def _all_inline_comments_of_pull_request(self, pull_request): |
|
689 | def _all_inline_comments_of_pull_request(self, pull_request): | |
684 | comments = Session().query(ChangesetComment)\ |
|
690 | comments = Session().query(ChangesetComment)\ | |
685 | .filter(ChangesetComment.line_no != None)\ |
|
691 | .filter(ChangesetComment.line_no != None)\ | |
686 | .filter(ChangesetComment.f_path != None)\ |
|
692 | .filter(ChangesetComment.f_path != None)\ | |
687 | .filter(ChangesetComment.pull_request == pull_request) |
|
693 | .filter(ChangesetComment.pull_request == pull_request) | |
688 | return comments |
|
694 | return comments | |
689 |
|
695 | |||
690 | def _all_general_comments_of_pull_request(self, pull_request): |
|
696 | def _all_general_comments_of_pull_request(self, pull_request): | |
691 | comments = Session().query(ChangesetComment)\ |
|
697 | comments = Session().query(ChangesetComment)\ | |
692 | .filter(ChangesetComment.line_no == None)\ |
|
698 | .filter(ChangesetComment.line_no == None)\ | |
693 | .filter(ChangesetComment.f_path == None)\ |
|
699 | .filter(ChangesetComment.f_path == None)\ | |
694 | .filter(ChangesetComment.pull_request == pull_request) |
|
700 | .filter(ChangesetComment.pull_request == pull_request) | |
695 | return comments |
|
701 | return comments | |
696 |
|
702 | |||
697 | @staticmethod |
|
703 | @staticmethod | |
698 | def use_outdated_comments(pull_request): |
|
704 | def use_outdated_comments(pull_request): | |
699 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
705 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) | |
700 | settings = settings_model.get_general_settings() |
|
706 | settings = settings_model.get_general_settings() | |
701 | return settings.get('rhodecode_use_outdated_comments', False) |
|
707 | return settings.get('rhodecode_use_outdated_comments', False) | |
702 |
|
708 | |||
703 |
|
709 | |||
704 | def _parse_comment_line_number(line_no): |
|
710 | def _parse_comment_line_number(line_no): | |
705 | """ |
|
711 | """ | |
706 | Parses line numbers of the form "(o|n)\d+" and returns them in a tuple. |
|
712 | Parses line numbers of the form "(o|n)\d+" and returns them in a tuple. | |
707 | """ |
|
713 | """ | |
708 | old_line = None |
|
714 | old_line = None | |
709 | new_line = None |
|
715 | new_line = None | |
710 | if line_no.startswith('o'): |
|
716 | if line_no.startswith('o'): | |
711 | old_line = int(line_no[1:]) |
|
717 | old_line = int(line_no[1:]) | |
712 | elif line_no.startswith('n'): |
|
718 | elif line_no.startswith('n'): | |
713 | new_line = int(line_no[1:]) |
|
719 | new_line = int(line_no[1:]) | |
714 | else: |
|
720 | else: | |
715 | raise ValueError("Comment lines have to start with either 'o' or 'n'.") |
|
721 | raise ValueError("Comment lines have to start with either 'o' or 'n'.") | |
716 | return diffs.DiffLineNumber(old_line, new_line) |
|
722 | return diffs.DiffLineNumber(old_line, new_line) | |
717 |
|
723 | |||
718 |
|
724 | |||
719 | def _diff_to_comment_line_number(diff_line): |
|
725 | def _diff_to_comment_line_number(diff_line): | |
720 | if diff_line.new is not None: |
|
726 | if diff_line.new is not None: | |
721 | return u'n{}'.format(diff_line.new) |
|
727 | return u'n{}'.format(diff_line.new) | |
722 | elif diff_line.old is not None: |
|
728 | elif diff_line.old is not None: | |
723 | return u'o{}'.format(diff_line.old) |
|
729 | return u'o{}'.format(diff_line.old) | |
724 | return u'' |
|
730 | return u'' | |
725 |
|
731 | |||
726 |
|
732 | |||
727 | def _diff_line_delta(a, b): |
|
733 | def _diff_line_delta(a, b): | |
728 | if None not in (a.new, b.new): |
|
734 | if None not in (a.new, b.new): | |
729 | return abs(a.new - b.new) |
|
735 | return abs(a.new - b.new) | |
730 | elif None not in (a.old, b.old): |
|
736 | elif None not in (a.old, b.old): | |
731 | return abs(a.old - b.old) |
|
737 | return abs(a.old - b.old) | |
732 | else: |
|
738 | else: | |
733 | raise ValueError( |
|
739 | raise ValueError( | |
734 | "Cannot compute delta between {} and {}".format(a, b)) |
|
740 | "Cannot compute delta between {} and {}".format(a, b)) |
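The module-level helpers above encode an inline comment position as a short string: 'o' plus the old-file line number, or 'n' plus the new-file line number. A round-trip sketch of that encoding (values are illustrative):

    from rhodecode.lib import diffs

    line = _parse_comment_line_number('n12')   # -> DiffLineNumber(old=None, new=12)
    assert line.old is None and line.new == 12

    assert _diff_to_comment_line_number(line) == u'n12'

    # distance between two positions on the same (new) side of the diff
    assert _diff_line_delta(diffs.DiffLineNumber(None, 12),
                            diffs.DiffLineNumber(None, 15)) == 3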
@@ -1,1744 +1,1744 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2012-2019 RhodeCode GmbH |
|
3 | # Copyright (C) 2012-2019 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | """ |
|
22 | """ | |
23 | pull request model for RhodeCode |
|
23 | pull request model for RhodeCode | |
24 | """ |
|
24 | """ | |
25 |
|
25 | |||
26 |
|
26 | |||
27 | import json |
|
27 | import json | |
28 | import logging |
|
28 | import logging | |
29 | import datetime |
|
29 | import datetime | |
30 | import urllib |
|
30 | import urllib | |
31 | import collections |
|
31 | import collections | |
32 |
|
32 | |||
33 | from pyramid import compat |
|
33 | from pyramid import compat | |
34 | from pyramid.threadlocal import get_current_request |
|
34 | from pyramid.threadlocal import get_current_request | |
35 |
|
35 | |||
36 | from rhodecode import events |
|
36 | from rhodecode import events | |
37 | from rhodecode.translation import lazy_ugettext |
|
37 | from rhodecode.translation import lazy_ugettext | |
38 | from rhodecode.lib import helpers as h, hooks_utils, diffs |
|
38 | from rhodecode.lib import helpers as h, hooks_utils, diffs | |
39 | from rhodecode.lib import audit_logger |
|
39 | from rhodecode.lib import audit_logger | |
40 | from rhodecode.lib.compat import OrderedDict |
|
40 | from rhodecode.lib.compat import OrderedDict | |
41 | from rhodecode.lib.hooks_daemon import prepare_callback_daemon |
|
41 | from rhodecode.lib.hooks_daemon import prepare_callback_daemon | |
42 | from rhodecode.lib.markup_renderer import ( |
|
42 | from rhodecode.lib.markup_renderer import ( | |
43 | DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer) |
|
43 | DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer) | |
44 | from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe |
|
44 | from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe | |
45 | from rhodecode.lib.vcs.backends.base import ( |
|
45 | from rhodecode.lib.vcs.backends.base import ( | |
46 | Reference, MergeResponse, MergeFailureReason, UpdateFailureReason) |
|
46 | Reference, MergeResponse, MergeFailureReason, UpdateFailureReason) | |
47 | from rhodecode.lib.vcs.conf import settings as vcs_settings |
|
47 | from rhodecode.lib.vcs.conf import settings as vcs_settings | |
48 | from rhodecode.lib.vcs.exceptions import ( |
|
48 | from rhodecode.lib.vcs.exceptions import ( | |
49 | CommitDoesNotExistError, EmptyRepositoryError) |
|
49 | CommitDoesNotExistError, EmptyRepositoryError) | |
50 | from rhodecode.model import BaseModel |
|
50 | from rhodecode.model import BaseModel | |
51 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
51 | from rhodecode.model.changeset_status import ChangesetStatusModel | |
52 | from rhodecode.model.comment import CommentsModel |
|
52 | from rhodecode.model.comment import CommentsModel | |
53 | from rhodecode.model.db import ( |
|
53 | from rhodecode.model.db import ( | |
54 | or_, PullRequest, PullRequestReviewers, ChangesetStatus, |
|
54 | or_, PullRequest, PullRequestReviewers, ChangesetStatus, | |
55 | PullRequestVersion, ChangesetComment, Repository, RepoReviewRule) |
|
55 | PullRequestVersion, ChangesetComment, Repository, RepoReviewRule) | |
56 | from rhodecode.model.meta import Session |
|
56 | from rhodecode.model.meta import Session | |
57 | from rhodecode.model.notification import NotificationModel, \ |
|
57 | from rhodecode.model.notification import NotificationModel, \ | |
58 | EmailNotificationModel |
|
58 | EmailNotificationModel | |
59 | from rhodecode.model.scm import ScmModel |
|
59 | from rhodecode.model.scm import ScmModel | |
60 | from rhodecode.model.settings import VcsSettingsModel |
|
60 | from rhodecode.model.settings import VcsSettingsModel | |
61 |
|
61 | |||
62 |
|
62 | |||
63 | log = logging.getLogger(__name__) |
|
63 | log = logging.getLogger(__name__) | |
64 |
|
64 | |||
65 |
|
65 | |||
66 | # Data structure to hold the response data when updating commits during a pull |
|
66 | # Data structure to hold the response data when updating commits during a pull | |
67 | # request update. |
|
67 | # request update. | |
68 | UpdateResponse = collections.namedtuple('UpdateResponse', [ |
|
68 | UpdateResponse = collections.namedtuple('UpdateResponse', [ | |
69 | 'executed', 'reason', 'new', 'old', 'changes', |
|
69 | 'executed', 'reason', 'new', 'old', 'changes', | |
70 | 'source_changed', 'target_changed']) |
|
70 | 'source_changed', 'target_changed']) | |
71 |
|
71 | |||
72 |
|
72 | |||
73 | class PullRequestModel(BaseModel): |
|
73 | class PullRequestModel(BaseModel): | |
74 |
|
74 | |||
75 | cls = PullRequest |
|
75 | cls = PullRequest | |
76 |
|
76 | |||
77 | DIFF_CONTEXT = diffs.DEFAULT_CONTEXT |
|
77 | DIFF_CONTEXT = diffs.DEFAULT_CONTEXT | |
78 |
|
78 | |||
79 | UPDATE_STATUS_MESSAGES = { |
|
79 | UPDATE_STATUS_MESSAGES = { | |
80 | UpdateFailureReason.NONE: lazy_ugettext( |
|
80 | UpdateFailureReason.NONE: lazy_ugettext( | |
81 | 'Pull request update successful.'), |
|
81 | 'Pull request update successful.'), | |
82 | UpdateFailureReason.UNKNOWN: lazy_ugettext( |
|
82 | UpdateFailureReason.UNKNOWN: lazy_ugettext( | |
83 | 'Pull request update failed because of an unknown error.'), |
|
83 | 'Pull request update failed because of an unknown error.'), | |
84 | UpdateFailureReason.NO_CHANGE: lazy_ugettext( |
|
84 | UpdateFailureReason.NO_CHANGE: lazy_ugettext( | |
85 | 'No update needed because the source and target have not changed.'), |
|
85 | 'No update needed because the source and target have not changed.'), | |
86 | UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext( |
|
86 | UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext( | |
87 | 'Pull request cannot be updated because the reference type is ' |
|
87 | 'Pull request cannot be updated because the reference type is ' | |
88 | 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'), |
|
88 | 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'), | |
89 | UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext( |
|
89 | UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext( | |
90 | 'This pull request cannot be updated because the target ' |
|
90 | 'This pull request cannot be updated because the target ' | |
91 | 'reference is missing.'), |
|
91 | 'reference is missing.'), | |
92 | UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext( |
|
92 | UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext( | |
93 | 'This pull request cannot be updated because the source ' |
|
93 | 'This pull request cannot be updated because the source ' | |
94 | 'reference is missing.'), |
|
94 | 'reference is missing.'), | |
95 | } |
|
95 | } | |
96 | REF_TYPES = ['bookmark', 'book', 'tag', 'branch'] |
|
96 | REF_TYPES = ['bookmark', 'book', 'tag', 'branch'] | |
97 | UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch'] |
|
97 | UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch'] | |
98 |
|
98 | |||
99 | def __get_pull_request(self, pull_request): |
|
99 | def __get_pull_request(self, pull_request): | |
100 | return self._get_instance(( |
|
100 | return self._get_instance(( | |
101 | PullRequest, PullRequestVersion), pull_request) |
|
101 | PullRequest, PullRequestVersion), pull_request) | |
102 |
|
102 | |||
103 | def _check_perms(self, perms, pull_request, user, api=False): |
|
103 | def _check_perms(self, perms, pull_request, user, api=False): | |
104 | if not api: |
|
104 | if not api: | |
105 | return h.HasRepoPermissionAny(*perms)( |
|
105 | return h.HasRepoPermissionAny(*perms)( | |
106 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
106 | user=user, repo_name=pull_request.target_repo.repo_name) | |
107 | else: |
|
107 | else: | |
108 | return h.HasRepoPermissionAnyApi(*perms)( |
|
108 | return h.HasRepoPermissionAnyApi(*perms)( | |
109 | user=user, repo_name=pull_request.target_repo.repo_name) |
|
109 | user=user, repo_name=pull_request.target_repo.repo_name) | |
110 |
|
110 | |||
111 | def check_user_read(self, pull_request, user, api=False): |
|
111 | def check_user_read(self, pull_request, user, api=False): | |
112 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
112 | _perms = ('repository.admin', 'repository.write', 'repository.read',) | |
113 | return self._check_perms(_perms, pull_request, user, api) |
|
113 | return self._check_perms(_perms, pull_request, user, api) | |
114 |
|
114 | |||
115 | def check_user_merge(self, pull_request, user, api=False): |
|
115 | def check_user_merge(self, pull_request, user, api=False): | |
116 | _perms = ('repository.admin', 'repository.write', 'hg.admin',) |
|
116 | _perms = ('repository.admin', 'repository.write', 'hg.admin',) | |
117 | return self._check_perms(_perms, pull_request, user, api) |
|
117 | return self._check_perms(_perms, pull_request, user, api) | |
118 |
|
118 | |||
119 | def check_user_update(self, pull_request, user, api=False): |
|
119 | def check_user_update(self, pull_request, user, api=False): | |
120 | owner = user.user_id == pull_request.user_id |
|
120 | owner = user.user_id == pull_request.user_id | |
121 | return self.check_user_merge(pull_request, user, api) or owner |
|
121 | return self.check_user_merge(pull_request, user, api) or owner | |
122 |
|
122 | |||
123 | def check_user_delete(self, pull_request, user): |
|
123 | def check_user_delete(self, pull_request, user): | |
124 | owner = user.user_id == pull_request.user_id |
|
124 | owner = user.user_id == pull_request.user_id | |
125 | _perms = ('repository.admin',) |
|
125 | _perms = ('repository.admin',) | |
126 | return self._check_perms(_perms, pull_request, user) or owner |
|
126 | return self._check_perms(_perms, pull_request, user) or owner | |
127 |
|
127 | |||
128 | def check_user_change_status(self, pull_request, user, api=False): |
|
128 | def check_user_change_status(self, pull_request, user, api=False): | |
129 | reviewer = user.user_id in [x.user_id for x in |
|
129 | reviewer = user.user_id in [x.user_id for x in | |
130 | pull_request.reviewers] |
|
130 | pull_request.reviewers] | |
131 | return self.check_user_update(pull_request, user, api) or reviewer |
|
131 | return self.check_user_update(pull_request, user, api) or reviewer | |
132 |
|
132 | |||
133 | def check_user_comment(self, pull_request, user): |
|
133 | def check_user_comment(self, pull_request, user): | |
134 | owner = user.user_id == pull_request.user_id |
|
134 | owner = user.user_id == pull_request.user_id | |
135 | return self.check_user_read(pull_request, user) or owner |
|
135 | return self.check_user_read(pull_request, user) or owner | |
136 |
|
136 | |||
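A short sketch of how these permission helpers are intended to be used; the pull_request and auth_user objects are hypothetical, and api=True switches the check to the API-style permission classes shown above:

    model = PullRequestModel()

    if model.check_user_merge(pull_request, auth_user, api=True):
        pass  # caller holds repository.admin/write (or hg.admin) on the target repo

    # the author may always update or delete their own pull request,
    # reviewers may additionally change the review status
    can_vote = model.check_user_change_status(pull_request, auth_user)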
137 | def get(self, pull_request): |
|
137 | def get(self, pull_request): | |
138 | return self.__get_pull_request(pull_request) |
|
138 | return self.__get_pull_request(pull_request) | |
139 |
|
139 | |||
140 | def _prepare_get_all_query(self, repo_name, source=False, statuses=None, |
|
140 | def _prepare_get_all_query(self, repo_name, source=False, statuses=None, | |
141 | opened_by=None, order_by=None, |
|
141 | opened_by=None, order_by=None, | |
142 | order_dir='desc', only_created=False): |
|
142 | order_dir='desc', only_created=False): | |
143 | repo = None |
|
143 | repo = None | |
144 | if repo_name: |
|
144 | if repo_name: | |
145 | repo = self._get_repo(repo_name) |
|
145 | repo = self._get_repo(repo_name) | |
146 |
|
146 | |||
147 | q = PullRequest.query() |
|
147 | q = PullRequest.query() | |
148 |
|
148 | |||
149 | # source or target |
|
149 | # source or target | |
150 | if repo and source: |
|
150 | if repo and source: | |
151 | q = q.filter(PullRequest.source_repo == repo) |
|
151 | q = q.filter(PullRequest.source_repo == repo) | |
152 | elif repo: |
|
152 | elif repo: | |
153 | q = q.filter(PullRequest.target_repo == repo) |
|
153 | q = q.filter(PullRequest.target_repo == repo) | |
154 |
|
154 | |||
155 | # closed, opened |
|
155 | # closed, opened | |
156 | if statuses: |
|
156 | if statuses: | |
157 | q = q.filter(PullRequest.status.in_(statuses)) |
|
157 | q = q.filter(PullRequest.status.in_(statuses)) | |
158 |
|
158 | |||
159 | # opened by filter |
|
159 | # opened by filter | |
160 | if opened_by: |
|
160 | if opened_by: | |
161 | q = q.filter(PullRequest.user_id.in_(opened_by)) |
|
161 | q = q.filter(PullRequest.user_id.in_(opened_by)) | |
162 |
|
162 | |||
163 | # only get those that are in "created" state |
|
163 | # only get those that are in "created" state | |
164 | if only_created: |
|
164 | if only_created: | |
165 | q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED) |
|
165 | q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED) | |
166 |
|
166 | |||
167 | if order_by: |
|
167 | if order_by: | |
168 | order_map = { |
|
168 | order_map = { | |
169 | 'name_raw': PullRequest.pull_request_id, |
|
169 | 'name_raw': PullRequest.pull_request_id, | |
170 | 'id': PullRequest.pull_request_id, |
|
170 | 'id': PullRequest.pull_request_id, | |
171 | 'title': PullRequest.title, |
|
171 | 'title': PullRequest.title, | |
172 | 'updated_on_raw': PullRequest.updated_on, |
|
172 | 'updated_on_raw': PullRequest.updated_on, | |
173 | 'target_repo': PullRequest.target_repo_id |
|
173 | 'target_repo': PullRequest.target_repo_id | |
174 | } |
|
174 | } | |
175 | if order_dir == 'asc': |
|
175 | if order_dir == 'asc': | |
176 | q = q.order_by(order_map[order_by].asc()) |
|
176 | q = q.order_by(order_map[order_by].asc()) | |
177 | else: |
|
177 | else: | |
178 | q = q.order_by(order_map[order_by].desc()) |
|
178 | q = q.order_by(order_map[order_by].desc()) | |
179 |
|
179 | |||
180 | return q |
|
180 | return q | |
181 |
|
181 | |||
182 | def count_all(self, repo_name, source=False, statuses=None, |
|
182 | def count_all(self, repo_name, source=False, statuses=None, | |
183 | opened_by=None): |
|
183 | opened_by=None): | |
184 | """ |
|
184 | """ | |
185 | Count the number of pull requests for a specific repository. |
|
185 | Count the number of pull requests for a specific repository. | |
186 |
|
186 | |||
187 | :param repo_name: target or source repo |
|
187 | :param repo_name: target or source repo | |
188 | :param source: boolean flag to specify if repo_name refers to source |
|
188 | :param source: boolean flag to specify if repo_name refers to source | |
189 | :param statuses: list of pull request statuses |
|
189 | :param statuses: list of pull request statuses | |
190 | :param opened_by: author user of the pull request |
|
190 | :param opened_by: author user of the pull request | |
191 | :returns: int number of pull requests |
|
191 | :returns: int number of pull requests | |
192 | """ |
|
192 | """ | |
193 | q = self._prepare_get_all_query( |
|
193 | q = self._prepare_get_all_query( | |
194 | repo_name, source=source, statuses=statuses, opened_by=opened_by) |
|
194 | repo_name, source=source, statuses=statuses, opened_by=opened_by) | |
195 |
|
195 | |||
196 | return q.count() |
|
196 | return q.count() | |
197 |
|
197 | |||
198 | def get_all(self, repo_name, source=False, statuses=None, opened_by=None, |
|
198 | def get_all(self, repo_name, source=False, statuses=None, opened_by=None, | |
199 | offset=0, length=None, order_by=None, order_dir='desc'): |
|
199 | offset=0, length=None, order_by=None, order_dir='desc'): | |
200 | """ |
|
200 | """ | |
201 | Get all pull requests for a specific repository. |
|
201 | Get all pull requests for a specific repository. | |
202 |
|
202 | |||
203 | :param repo_name: target or source repo |
|
203 | :param repo_name: target or source repo | |
204 | :param source: boolean flag to specify if repo_name refers to source |
|
204 | :param source: boolean flag to specify if repo_name refers to source | |
205 | :param statuses: list of pull request statuses |
|
205 | :param statuses: list of pull request statuses | |
206 | :param opened_by: author user of the pull request |
|
206 | :param opened_by: author user of the pull request | |
207 | :param offset: pagination offset |
|
207 | :param offset: pagination offset | |
208 | :param length: length of returned list |
|
208 | :param length: length of returned list | |
209 | :param order_by: order of the returned list |
|
209 | :param order_by: order of the returned list | |
210 | :param order_dir: 'asc' or 'desc' ordering direction |
|
210 | :param order_dir: 'asc' or 'desc' ordering direction | |
211 | :returns: list of pull requests |
|
211 | :returns: list of pull requests | |
212 | """ |
|
212 | """ | |
213 | q = self._prepare_get_all_query( |
|
213 | q = self._prepare_get_all_query( | |
214 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
214 | repo_name, source=source, statuses=statuses, opened_by=opened_by, | |
215 | order_by=order_by, order_dir=order_dir) |
|
215 | order_by=order_by, order_dir=order_dir) | |
216 |
|
216 | |||
217 | if length: |
|
217 | if length: | |
218 | pull_requests = q.limit(length).offset(offset).all() |
|
218 | pull_requests = q.limit(length).offset(offset).all() | |
219 | else: |
|
219 | else: | |
220 | pull_requests = q.all() |
|
220 | pull_requests = q.all() | |
221 |
|
221 | |||
222 | return pull_requests |
|
222 | return pull_requests | |
223 |
|
223 | |||
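# Illustrative usage (hedged sketch): paging through pull requests that target a
# repository, using only the parameters documented above. The repo name and page
# size are placeholders; 'updated_on_raw' is one of the keys accepted by
# _prepare_get_all_query's order_map.

model = PullRequestModel()
total = model.count_all('some-group/some-repo')
page = model.get_all(
    'some-group/some-repo', offset=0, length=20,
    order_by='updated_on_raw', order_dir='desc')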
224 | def count_awaiting_review(self, repo_name, source=False, statuses=None, |
|
224 | def count_awaiting_review(self, repo_name, source=False, statuses=None, | |
225 | opened_by=None): |
|
225 | opened_by=None): | |
226 | """ |
|
226 | """ | |
227 | Count the number of pull requests for a specific repository that are |
|
227 | Count the number of pull requests for a specific repository that are | |
228 | awaiting review. |
|
228 | awaiting review. | |
229 |
|
229 | |||
230 | :param repo_name: target or source repo |
|
230 | :param repo_name: target or source repo | |
231 | :param source: boolean flag to specify if repo_name refers to source |
|
231 | :param source: boolean flag to specify if repo_name refers to source | |
232 | :param statuses: list of pull request statuses |
|
232 | :param statuses: list of pull request statuses | |
233 | :param opened_by: author user of the pull request |
|
233 | :param opened_by: author user of the pull request | |
234 | :returns: int number of pull requests |
|
234 | :returns: int number of pull requests | |
235 | """ |
|
235 | """ | |
236 | pull_requests = self.get_awaiting_review( |
|
236 | pull_requests = self.get_awaiting_review( | |
237 | repo_name, source=source, statuses=statuses, opened_by=opened_by) |
|
237 | repo_name, source=source, statuses=statuses, opened_by=opened_by) | |
238 |
|
238 | |||
239 | return len(pull_requests) |
|
239 | return len(pull_requests) | |
240 |
|
240 | |||
241 | def get_awaiting_review(self, repo_name, source=False, statuses=None, |
|
241 | def get_awaiting_review(self, repo_name, source=False, statuses=None, | |
242 | opened_by=None, offset=0, length=None, |
|
242 | opened_by=None, offset=0, length=None, | |
243 | order_by=None, order_dir='desc'): |
|
243 | order_by=None, order_dir='desc'): | |
244 | """ |
|
244 | """ | |
245 | Get all pull requests for a specific repository that are awaiting |
|
245 | Get all pull requests for a specific repository that are awaiting | |
246 | review. |
|
246 | review. | |
247 |
|
247 | |||
248 | :param repo_name: target or source repo |
|
248 | :param repo_name: target or source repo | |
249 | :param source: boolean flag to specify if repo_name refers to source |
|
249 | :param source: boolean flag to specify if repo_name refers to source | |
250 | :param statuses: list of pull request statuses |
|
250 | :param statuses: list of pull request statuses | |
251 | :param opened_by: author user of the pull request |
|
251 | :param opened_by: author user of the pull request | |
252 | :param offset: pagination offset |
|
252 | :param offset: pagination offset | |
253 | :param length: length of returned list |
|
253 | :param length: length of returned list | |
254 | :param order_by: order of the returned list |
|
254 | :param order_by: order of the returned list | |
255 | :param order_dir: 'asc' or 'desc' ordering direction |
|
255 | :param order_dir: 'asc' or 'desc' ordering direction | |
256 | :returns: list of pull requests |
|
256 | :returns: list of pull requests | |
257 | """ |
|
257 | """ | |
258 | pull_requests = self.get_all( |
|
258 | pull_requests = self.get_all( | |
259 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
259 | repo_name, source=source, statuses=statuses, opened_by=opened_by, | |
260 | order_by=order_by, order_dir=order_dir) |
|
260 | order_by=order_by, order_dir=order_dir) | |
261 |
|
261 | |||
262 | _filtered_pull_requests = [] |
|
262 | _filtered_pull_requests = [] | |
263 | for pr in pull_requests: |
|
263 | for pr in pull_requests: | |
264 | status = pr.calculated_review_status() |
|
264 | status = pr.calculated_review_status() | |
265 | if status in [ChangesetStatus.STATUS_NOT_REVIEWED, |
|
265 | if status in [ChangesetStatus.STATUS_NOT_REVIEWED, | |
266 | ChangesetStatus.STATUS_UNDER_REVIEW]: |
|
266 | ChangesetStatus.STATUS_UNDER_REVIEW]: | |
267 | _filtered_pull_requests.append(pr) |
|
267 | _filtered_pull_requests.append(pr) | |
268 | if length: |
|
268 | if length: | |
269 | return _filtered_pull_requests[offset:offset+length] |
|
269 | return _filtered_pull_requests[offset:offset+length] | |
270 | else: |
|
270 | else: | |
271 | return _filtered_pull_requests |
|
271 | return _filtered_pull_requests | |
272 |
|
272 | |||
273 | def count_awaiting_my_review(self, repo_name, source=False, statuses=None, |
|
273 | def count_awaiting_my_review(self, repo_name, source=False, statuses=None, | |
274 | opened_by=None, user_id=None): |
|
274 | opened_by=None, user_id=None): | |
275 | """ |
|
275 | """ | |
276 | Count the number of pull requests for a specific repository that are |
|
276 | Count the number of pull requests for a specific repository that are | |
277 | awaiting review from a specific user. |
|
277 | awaiting review from a specific user. | |
278 |
|
278 | |||
279 | :param repo_name: target or source repo |
|
279 | :param repo_name: target or source repo | |
280 | :param source: boolean flag to specify if repo_name refers to source |
|
280 | :param source: boolean flag to specify if repo_name refers to source | |
281 | :param statuses: list of pull request statuses |
|
281 | :param statuses: list of pull request statuses | |
282 | :param opened_by: author user of the pull request |
|
282 | :param opened_by: author user of the pull request | |
283 | :param user_id: reviewer user of the pull request |
|
283 | :param user_id: reviewer user of the pull request | |
284 | :returns: int number of pull requests |
|
284 | :returns: int number of pull requests | |
285 | """ |
|
285 | """ | |
286 | pull_requests = self.get_awaiting_my_review( |
|
286 | pull_requests = self.get_awaiting_my_review( | |
287 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
287 | repo_name, source=source, statuses=statuses, opened_by=opened_by, | |
288 | user_id=user_id) |
|
288 | user_id=user_id) | |
289 |
|
289 | |||
290 | return len(pull_requests) |
|
290 | return len(pull_requests) | |
291 |
|
291 | |||
292 | def get_awaiting_my_review(self, repo_name, source=False, statuses=None, |
|
292 | def get_awaiting_my_review(self, repo_name, source=False, statuses=None, | |
293 | opened_by=None, user_id=None, offset=0, |
|
293 | opened_by=None, user_id=None, offset=0, | |
294 | length=None, order_by=None, order_dir='desc'): |
|
294 | length=None, order_by=None, order_dir='desc'): | |
295 | """ |
|
295 | """ | |
296 | Get all pull requests for a specific repository that are awaiting |
|
296 | Get all pull requests for a specific repository that are awaiting | |
297 | review from a specific user. |
|
297 | review from a specific user. | |
298 |
|
298 | |||
299 | :param repo_name: target or source repo |
|
299 | :param repo_name: target or source repo | |
300 | :param source: boolean flag to specify if repo_name refers to source |
|
300 | :param source: boolean flag to specify if repo_name refers to source | |
301 | :param statuses: list of pull request statuses |
|
301 | :param statuses: list of pull request statuses | |
302 | :param opened_by: author user of the pull request |
|
302 | :param opened_by: author user of the pull request | |
303 | :param user_id: reviewer user of the pull request |
|
303 | :param user_id: reviewer user of the pull request | |
304 | :param offset: pagination offset |
|
304 | :param offset: pagination offset | |
305 | :param length: length of returned list |
|
305 | :param length: length of returned list | |
306 | :param order_by: order of the returned list |
|
306 | :param order_by: order of the returned list | |
307 | :param order_dir: 'asc' or 'desc' ordering direction |
|
307 | :param order_dir: 'asc' or 'desc' ordering direction | |
308 | :returns: list of pull requests |
|
308 | :returns: list of pull requests | |
309 | """ |
|
309 | """ | |
310 | pull_requests = self.get_all( |
|
310 | pull_requests = self.get_all( | |
311 | repo_name, source=source, statuses=statuses, opened_by=opened_by, |
|
311 | repo_name, source=source, statuses=statuses, opened_by=opened_by, | |
312 | order_by=order_by, order_dir=order_dir) |
|
312 | order_by=order_by, order_dir=order_dir) | |
313 |
|
313 | |||
314 | _my = PullRequestModel().get_not_reviewed(user_id) |
|
314 | _my = PullRequestModel().get_not_reviewed(user_id) | |
315 | my_participation = [] |
|
315 | my_participation = [] | |
316 | for pr in pull_requests: |
|
316 | for pr in pull_requests: | |
317 | if pr in _my: |
|
317 | if pr in _my: | |
318 | my_participation.append(pr) |
|
318 | my_participation.append(pr) | |
319 | _filtered_pull_requests = my_participation |
|
319 | _filtered_pull_requests = my_participation | |
320 | if length: |
|
320 | if length: | |
321 | return _filtered_pull_requests[offset:offset+length] |
|
321 | return _filtered_pull_requests[offset:offset+length] | |
322 | else: |
|
322 | else: | |
323 | return _filtered_pull_requests |
|
323 | return _filtered_pull_requests | |
324 |
|
324 | |||
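# Illustrative usage (hedged sketch; `user_id` and the repo name are placeholders):
# listing pull requests that still wait for a given reviewer's vote, mirroring the
# count/get pair above.

model = PullRequestModel()
todo_count = model.count_awaiting_my_review('some-group/some-repo', user_id=user_id)
todo_prs = model.get_awaiting_my_review(
    'some-group/some-repo', user_id=user_id, offset=0, length=10)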
325 | def get_not_reviewed(self, user_id): |
|
325 | def get_not_reviewed(self, user_id): | |
326 | return [ |
|
326 | return [ | |
327 | x.pull_request for x in PullRequestReviewers.query().filter( |
|
327 | x.pull_request for x in PullRequestReviewers.query().filter( | |
328 | PullRequestReviewers.user_id == user_id).all() |
|
328 | PullRequestReviewers.user_id == user_id).all() | |
329 | ] |
|
329 | ] | |
330 |
|
330 | |||
331 | def _prepare_participating_query(self, user_id=None, statuses=None, |
|
331 | def _prepare_participating_query(self, user_id=None, statuses=None, | |
332 | order_by=None, order_dir='desc'): |
|
332 | order_by=None, order_dir='desc'): | |
333 | q = PullRequest.query() |
|
333 | q = PullRequest.query() | |
334 | if user_id: |
|
334 | if user_id: | |
335 | reviewers_subquery = Session().query( |
|
335 | reviewers_subquery = Session().query( | |
336 | PullRequestReviewers.pull_request_id).filter( |
|
336 | PullRequestReviewers.pull_request_id).filter( | |
337 | PullRequestReviewers.user_id == user_id).subquery() |
|
337 | PullRequestReviewers.user_id == user_id).subquery() | |
338 | user_filter = or_( |
|
338 | user_filter = or_( | |
339 | PullRequest.user_id == user_id, |
|
339 | PullRequest.user_id == user_id, | |
340 | PullRequest.pull_request_id.in_(reviewers_subquery) |
|
340 | PullRequest.pull_request_id.in_(reviewers_subquery) | |
341 | ) |
|
341 | ) | |
342 | q = PullRequest.query().filter(user_filter) |
|
342 | q = PullRequest.query().filter(user_filter) | |
343 |
|
343 | |||
344 | # closed, opened |
|
344 | # closed, opened | |
345 | if statuses: |
|
345 | if statuses: | |
346 | q = q.filter(PullRequest.status.in_(statuses)) |
|
346 | q = q.filter(PullRequest.status.in_(statuses)) | |
347 |
|
347 | |||
348 | if order_by: |
|
348 | if order_by: | |
349 | order_map = { |
|
349 | order_map = { | |
350 | 'name_raw': PullRequest.pull_request_id, |
|
350 | 'name_raw': PullRequest.pull_request_id, | |
351 | 'title': PullRequest.title, |
|
351 | 'title': PullRequest.title, | |
352 | 'updated_on_raw': PullRequest.updated_on, |
|
352 | 'updated_on_raw': PullRequest.updated_on, | |
353 | 'target_repo': PullRequest.target_repo_id |
|
353 | 'target_repo': PullRequest.target_repo_id | |
354 | } |
|
354 | } | |
355 | if order_dir == 'asc': |
|
355 | if order_dir == 'asc': | |
356 | q = q.order_by(order_map[order_by].asc()) |
|
356 | q = q.order_by(order_map[order_by].asc()) | |
357 | else: |
|
357 | else: | |
358 | q = q.order_by(order_map[order_by].desc()) |
|
358 | q = q.order_by(order_map[order_by].desc()) | |
359 |
|
359 | |||
360 | return q |
|
360 | return q | |
361 |
|
361 | |||
362 | def count_im_participating_in(self, user_id=None, statuses=None): |
|
362 | def count_im_participating_in(self, user_id=None, statuses=None): | |
363 | q = self._prepare_participating_query(user_id, statuses=statuses) |
|
363 | q = self._prepare_participating_query(user_id, statuses=statuses) | |
364 | return q.count() |
|
364 | return q.count() | |
365 |
|
365 | |||
366 | def get_im_participating_in( |
|
366 | def get_im_participating_in( | |
367 | self, user_id=None, statuses=None, offset=0, |
|
367 | self, user_id=None, statuses=None, offset=0, | |
368 | length=None, order_by=None, order_dir='desc'): |
|
368 | length=None, order_by=None, order_dir='desc'): | |
369 | """ |
|
369 | """ | |
370 | Get all pull requests that I'm participating in, or that I have opened |
|
370 | Get all pull requests that I'm participating in, or that I have opened | |
371 | """ |
|
371 | """ | |
372 |
|
372 | |||
373 | q = self._prepare_participating_query( |
|
373 | q = self._prepare_participating_query( | |
374 | user_id, statuses=statuses, order_by=order_by, |
|
374 | user_id, statuses=statuses, order_by=order_by, | |
375 | order_dir=order_dir) |
|
375 | order_dir=order_dir) | |
376 |
|
376 | |||
377 | if length: |
|
377 | if length: | |
378 | pull_requests = q.limit(length).offset(offset).all() |
|
378 | pull_requests = q.limit(length).offset(offset).all() | |
379 | else: |
|
379 | else: | |
380 | pull_requests = q.all() |
|
380 | pull_requests = q.all() | |
381 |
|
381 | |||
382 | return pull_requests |
|
382 | return pull_requests | |
383 |
|
383 | |||
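# Illustrative usage (hedged sketch; `user_id` is a placeholder): the
# "participating in" helpers above combine pull requests the user authored with
# those where the user is listed as a reviewer, e.g. for a dashboard view:

model = PullRequestModel()
involved_count = model.count_im_participating_in(user_id=user_id)
involved_prs = model.get_im_participating_in(
    user_id=user_id, offset=0, length=20,
    order_by='updated_on_raw', order_dir='desc')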
384 | def get_versions(self, pull_request): |
|
384 | def get_versions(self, pull_request): | |
385 | """ |
|
385 | """ | |
386 | returns versions of the pull request, sorted by version ID ascending |
|
386 | returns versions of the pull request, sorted by version ID ascending | |
387 | """ |
|
387 | """ | |
388 | return PullRequestVersion.query()\ |
|
388 | return PullRequestVersion.query()\ | |
389 | .filter(PullRequestVersion.pull_request == pull_request)\ |
|
389 | .filter(PullRequestVersion.pull_request == pull_request)\ | |
390 | .order_by(PullRequestVersion.pull_request_version_id.asc())\ |
|
390 | .order_by(PullRequestVersion.pull_request_version_id.asc())\ | |
391 | .all() |
|
391 | .all() | |
392 |
|
392 | |||
393 | def get_pr_version(self, pull_request_id, version=None): |
|
393 | def get_pr_version(self, pull_request_id, version=None): | |
394 | at_version = None |
|
394 | at_version = None | |
395 |
|
395 | |||
396 | if version and version == 'latest': |
|
396 | if version and version == 'latest': | |
397 | pull_request_ver = PullRequest.get(pull_request_id) |
|
397 | pull_request_ver = PullRequest.get(pull_request_id) | |
398 | pull_request_obj = pull_request_ver |
|
398 | pull_request_obj = pull_request_ver | |
399 | _org_pull_request_obj = pull_request_obj |
|
399 | _org_pull_request_obj = pull_request_obj | |
400 | at_version = 'latest' |
|
400 | at_version = 'latest' | |
401 | elif version: |
|
401 | elif version: | |
402 | pull_request_ver = PullRequestVersion.get_or_404(version) |
|
402 | pull_request_ver = PullRequestVersion.get_or_404(version) | |
403 | pull_request_obj = pull_request_ver |
|
403 | pull_request_obj = pull_request_ver | |
404 | _org_pull_request_obj = pull_request_ver.pull_request |
|
404 | _org_pull_request_obj = pull_request_ver.pull_request | |
405 | at_version = pull_request_ver.pull_request_version_id |
|
405 | at_version = pull_request_ver.pull_request_version_id | |
406 | else: |
|
406 | else: | |
407 | _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404( |
|
407 | _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404( | |
408 | pull_request_id) |
|
408 | pull_request_id) | |
409 |
|
409 | |||
410 | pull_request_display_obj = PullRequest.get_pr_display_object( |
|
410 | pull_request_display_obj = PullRequest.get_pr_display_object( | |
411 | pull_request_obj, _org_pull_request_obj) |
|
411 | pull_request_obj, _org_pull_request_obj) | |
412 |
|
412 | |||
413 | return _org_pull_request_obj, pull_request_obj, \ |
|
413 | return _org_pull_request_obj, pull_request_obj, \ | |
414 | pull_request_display_obj, at_version |
|
414 | pull_request_display_obj, at_version | |
415 |
|
415 | |||
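# Illustrative usage (hedged sketch; `pull_request_id` is a placeholder):
# get_pr_version always returns the four-tuple built above, regardless of whether
# a concrete version, 'latest', or no version was requested.

org_pr, pr_at_version, display_pr, at_version = \
    PullRequestModel().get_pr_version(pull_request_id, version=None)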
416 | def create(self, created_by, source_repo, source_ref, target_repo, |
|
416 | def create(self, created_by, source_repo, source_ref, target_repo, | |
417 | target_ref, revisions, reviewers, title, description=None, |
|
417 | target_ref, revisions, reviewers, title, description=None, | |
418 | description_renderer=None, |
|
418 | description_renderer=None, | |
419 | reviewer_data=None, translator=None, auth_user=None): |
|
419 | reviewer_data=None, translator=None, auth_user=None): | |
420 | translator = translator or get_current_request().translate |
|
420 | translator = translator or get_current_request().translate | |
421 |
|
421 | |||
422 | created_by_user = self._get_user(created_by) |
|
422 | created_by_user = self._get_user(created_by) | |
423 | auth_user = auth_user or created_by_user.AuthUser() |
|
423 | auth_user = auth_user or created_by_user.AuthUser() | |
424 | source_repo = self._get_repo(source_repo) |
|
424 | source_repo = self._get_repo(source_repo) | |
425 | target_repo = self._get_repo(target_repo) |
|
425 | target_repo = self._get_repo(target_repo) | |
426 |
|
426 | |||
427 | pull_request = PullRequest() |
|
427 | pull_request = PullRequest() | |
428 | pull_request.source_repo = source_repo |
|
428 | pull_request.source_repo = source_repo | |
429 | pull_request.source_ref = source_ref |
|
429 | pull_request.source_ref = source_ref | |
430 | pull_request.target_repo = target_repo |
|
430 | pull_request.target_repo = target_repo | |
431 | pull_request.target_ref = target_ref |
|
431 | pull_request.target_ref = target_ref | |
432 | pull_request.revisions = revisions |
|
432 | pull_request.revisions = revisions | |
433 | pull_request.title = title |
|
433 | pull_request.title = title | |
434 | pull_request.description = description |
|
434 | pull_request.description = description | |
435 | pull_request.description_renderer = description_renderer |
|
435 | pull_request.description_renderer = description_renderer | |
436 | pull_request.author = created_by_user |
|
436 | pull_request.author = created_by_user | |
437 | pull_request.reviewer_data = reviewer_data |
|
437 | pull_request.reviewer_data = reviewer_data | |
438 | pull_request.pull_request_state = pull_request.STATE_CREATING |
|
438 | pull_request.pull_request_state = pull_request.STATE_CREATING | |
439 | Session().add(pull_request) |
|
439 | Session().add(pull_request) | |
440 | Session().flush() |
|
440 | Session().flush() | |
441 |
|
441 | |||
442 | reviewer_ids = set() |
|
442 | reviewer_ids = set() | |
443 | # members / reviewers |
|
443 | # members / reviewers | |
444 | for reviewer_object in reviewers: |
|
444 | for reviewer_object in reviewers: | |
445 | user_id, reasons, mandatory, rules = reviewer_object |
|
445 | user_id, reasons, mandatory, rules = reviewer_object | |
446 | user = self._get_user(user_id) |
|
446 | user = self._get_user(user_id) | |
447 |
|
447 | |||
448 | # skip duplicates |
|
448 | # skip duplicates | |
449 | if user.user_id in reviewer_ids: |
|
449 | if user.user_id in reviewer_ids: | |
450 | continue |
|
450 | continue | |
451 |
|
451 | |||
452 | reviewer_ids.add(user.user_id) |
|
452 | reviewer_ids.add(user.user_id) | |
453 |
|
453 | |||
454 | reviewer = PullRequestReviewers() |
|
454 | reviewer = PullRequestReviewers() | |
455 | reviewer.user = user |
|
455 | reviewer.user = user | |
456 | reviewer.pull_request = pull_request |
|
456 | reviewer.pull_request = pull_request | |
457 | reviewer.reasons = reasons |
|
457 | reviewer.reasons = reasons | |
458 | reviewer.mandatory = mandatory |
|
458 | reviewer.mandatory = mandatory | |
459 |
|
459 | |||
460 | # NOTE(marcink): pick only first rule for now |
|
460 | # NOTE(marcink): pick only first rule for now | |
461 | rule_id = list(rules)[0] if rules else None |
|
461 | rule_id = list(rules)[0] if rules else None | |
462 | rule = RepoReviewRule.get(rule_id) if rule_id else None |
|
462 | rule = RepoReviewRule.get(rule_id) if rule_id else None | |
463 | if rule: |
|
463 | if rule: | |
464 | review_group = rule.user_group_vote_rule(user_id) |
|
464 | review_group = rule.user_group_vote_rule(user_id) | |
465 | # we check if this particular reviewer is a member of a voting group |
|
465 | # we check if this particular reviewer is a member of a voting group | |
466 | if review_group: |
|
466 | if review_group: | |
467 | # NOTE(marcink): |
|
467 | # NOTE(marcink): | |
468 | # can be that user is member of more but we pick the first same, |
|
468 | # can be that user is member of more but we pick the first same, | |
469 | # same as default reviewers algo |
|
469 | # same as default reviewers algo | |
470 | review_group = review_group[0] |
|
470 | review_group = review_group[0] | |
471 |
|
471 | |||
472 | rule_data = { |
|
472 | rule_data = { | |
473 | 'rule_name': |
|
473 | 'rule_name': | |
474 | rule.review_rule_name, |
|
474 | rule.review_rule_name, | |
475 | 'rule_user_group_entry_id': |
|
475 | 'rule_user_group_entry_id': | |
476 | review_group.repo_review_rule_users_group_id, |
|
476 | review_group.repo_review_rule_users_group_id, | |
477 | 'rule_user_group_name': |
|
477 | 'rule_user_group_name': | |
478 | review_group.users_group.users_group_name, |
|
478 | review_group.users_group.users_group_name, | |
479 | 'rule_user_group_members': |
|
479 | 'rule_user_group_members': | |
480 | [x.user.username for x in review_group.users_group.members], |
|
480 | [x.user.username for x in review_group.users_group.members], | |
481 | 'rule_user_group_members_id': |
|
481 | 'rule_user_group_members_id': | |
482 | [x.user.user_id for x in review_group.users_group.members], |
|
482 | [x.user.user_id for x in review_group.users_group.members], | |
483 | } |
|
483 | } | |
484 | # e.g. {'vote_rule': -1, 'mandatory': True} |
|
484 | # e.g. {'vote_rule': -1, 'mandatory': True} | |
485 | rule_data.update(review_group.rule_data()) |
|
485 | rule_data.update(review_group.rule_data()) | |
486 |
|
486 | |||
487 | reviewer.rule_data = rule_data |
|
487 | reviewer.rule_data = rule_data | |
488 |
|
488 | |||
489 | Session().add(reviewer) |
|
489 | Session().add(reviewer) | |
490 | Session().flush() |
|
490 | Session().flush() | |
491 |
|
491 | |||
492 | # Set approval status to "Under Review" for all commits which are |
|
492 | # Set approval status to "Under Review" for all commits which are | |
493 | # part of this pull request. |
|
493 | # part of this pull request. | |
494 | ChangesetStatusModel().set_status( |
|
494 | ChangesetStatusModel().set_status( | |
495 | repo=target_repo, |
|
495 | repo=target_repo, | |
496 | status=ChangesetStatus.STATUS_UNDER_REVIEW, |
|
496 | status=ChangesetStatus.STATUS_UNDER_REVIEW, | |
497 | user=created_by_user, |
|
497 | user=created_by_user, | |
498 | pull_request=pull_request |
|
498 | pull_request=pull_request | |
499 | ) |
|
499 | ) | |
500 | # we commit early at this point. This has to do with the fact |
|
500 | # we commit early at this point. This has to do with the fact | |
501 | # that the queries above take some row-level locks, and because of that |
|
501 | # that the queries above take some row-level locks, and because of that | |
502 | # we need to commit and finish the transaction before the validate call below, |
|
502 | # we need to commit and finish the transaction before the validate call below, | |
503 | # which for large repos could take long, resulting in long row locks |
|
503 | # which for large repos could take long, resulting in long row locks | |
504 | Session().commit() |
|
504 | Session().commit() | |
505 |
|
505 | |||
506 | # prepare workspace, and run initial merge simulation. Set state during that |
|
506 | # prepare workspace, and run initial merge simulation. Set state during that | |
507 | # operation |
|
507 | # operation | |
508 | pull_request = PullRequest.get(pull_request.pull_request_id) |
|
508 | pull_request = PullRequest.get(pull_request.pull_request_id) | |
509 |
|
509 | |||
510 | # set state to merging for the merge simulation, and once finished set it to |
|
510 | # set state to merging for the merge simulation, and once finished set it to | |
511 | # created, to mark that the simulation worked fine |
|
511 | # created, to mark that the simulation worked fine | |
512 | with pull_request.set_state(PullRequest.STATE_MERGING, |
|
512 | with pull_request.set_state(PullRequest.STATE_MERGING, | |
513 | final_state=PullRequest.STATE_CREATED) as state_obj: |
|
513 | final_state=PullRequest.STATE_CREATED) as state_obj: | |
514 | MergeCheck.validate( |
|
514 | MergeCheck.validate( | |
515 | pull_request, auth_user=auth_user, translator=translator) |
|
515 | pull_request, auth_user=auth_user, translator=translator) | |
516 |
|
516 | |||
517 | self.notify_reviewers(pull_request, reviewer_ids) |
|
517 | self.notify_reviewers(pull_request, reviewer_ids) | |
518 | self.trigger_pull_request_hook( |
|
518 | self.trigger_pull_request_hook( | |
519 | pull_request, created_by_user, 'create') |
|
519 | pull_request, created_by_user, 'create') | |
520 |
|
520 | |||
521 | creation_data = pull_request.get_api_data(with_merge_state=False) |
|
521 | creation_data = pull_request.get_api_data(with_merge_state=False) | |
522 | self._log_audit_action( |
|
522 | self._log_audit_action( | |
523 | 'repo.pull_request.create', {'data': creation_data}, |
|
523 | 'repo.pull_request.create', {'data': creation_data}, | |
524 | auth_user, pull_request) |
|
524 | auth_user, pull_request) | |
525 |
|
525 | |||
526 | return pull_request |
|
526 | return pull_request | |
527 |
|
527 | |||
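# Illustrative usage (hedged sketch; all concrete values are placeholders, and an
# active request/translator plus an initialized session are assumed). Each
# `reviewers` entry is unpacked above as (user_id, reasons, mandatory, rules), and
# refs use the 'type:name:commit_id' form seen elsewhere in this module:

reviewers = [(reviewer_user_id, ['added as reviewer'], False, [])]
pull_request = PullRequestModel().create(
    created_by=author_user_id,
    source_repo='fork/some-repo',
    source_ref='branch:feature-x:' + source_commit_id,
    target_repo='origin/some-repo',
    target_ref='branch:master:' + target_commit_id,
    revisions=[source_commit_id],
    reviewers=reviewers,
    title='Add feature X',
    description='Short description of the change')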
528 | def trigger_pull_request_hook(self, pull_request, user, action, data=None): |
|
528 | def trigger_pull_request_hook(self, pull_request, user, action, data=None): | |
529 | pull_request = self.__get_pull_request(pull_request) |
|
529 | pull_request = self.__get_pull_request(pull_request) | |
530 | target_scm = pull_request.target_repo.scm_instance() |
|
530 | target_scm = pull_request.target_repo.scm_instance() | |
531 | if action == 'create': |
|
531 | if action == 'create': | |
532 | trigger_hook = hooks_utils.trigger_log_create_pull_request_hook |
|
532 | trigger_hook = hooks_utils.trigger_log_create_pull_request_hook | |
533 | elif action == 'merge': |
|
533 | elif action == 'merge': | |
534 | trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook |
|
534 | trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook | |
535 | elif action == 'close': |
|
535 | elif action == 'close': | |
536 | trigger_hook = hooks_utils.trigger_log_close_pull_request_hook |
|
536 | trigger_hook = hooks_utils.trigger_log_close_pull_request_hook | |
537 | elif action == 'review_status_change': |
|
537 | elif action == 'review_status_change': | |
538 | trigger_hook = hooks_utils.trigger_log_review_pull_request_hook |
|
538 | trigger_hook = hooks_utils.trigger_log_review_pull_request_hook | |
539 | elif action == 'update': |
|
539 | elif action == 'update': | |
540 | trigger_hook = hooks_utils.trigger_log_update_pull_request_hook |
|
540 | trigger_hook = hooks_utils.trigger_log_update_pull_request_hook | |
541 | elif action == 'comment': |
|
541 | elif action == 'comment': | |
542 | # dummy hook ! for comment. We want this function to handle all cases |
|
542 | # dummy hook ! for comment. We want this function to handle all cases | |
543 | def trigger_hook(*args, **kwargs): |
|
543 | def trigger_hook(*args, **kwargs): | |
544 | pass |
|
544 | pass | |
545 | comment = data['comment'] |
|
545 | comment = data['comment'] | |
546 | events.trigger(events.PullRequestCommentEvent(pull_request, comment)) |
|
546 | events.trigger(events.PullRequestCommentEvent(pull_request, comment)) | |
547 | else: |
|
547 | else: | |
548 | return |
|
548 | return | |
549 |
|
549 | |||
550 | trigger_hook( |
|
550 | trigger_hook( | |
551 | username=user.username, |
|
551 | username=user.username, | |
552 | repo_name=pull_request.target_repo.repo_name, |
|
552 | repo_name=pull_request.target_repo.repo_name, | |
553 | repo_alias=target_scm.alias, |
|
553 | repo_alias=target_scm.alias, | |
554 | pull_request=pull_request, |
|
554 | pull_request=pull_request, | |
555 | data=data) |
|
555 | data=data) | |
556 |
|
556 | |||
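# Illustrative usage (hedged sketch): 'comment' is the only action above that reads
# `data`; it expects the comment object under the 'comment' key:

PullRequestModel().trigger_pull_request_hook(
    pull_request, user, 'comment', data={'comment': comment})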
557 | def _get_commit_ids(self, pull_request): |
|
557 | def _get_commit_ids(self, pull_request): | |
558 | """ |
|
558 | """ | |
559 | Return the commit ids of the merged pull request. |
|
559 | Return the commit ids of the merged pull request. | |
560 |
|
560 | |||
561 | This method does not yet deal correctly with the lack of autoupdates, |
|
561 | This method does not yet deal correctly with the lack of autoupdates, | |
562 | nor with implicit target updates. |
|
562 | nor with implicit target updates. | |
563 | For example: if a commit in the source repo is already in the target, it |
|
563 | For example: if a commit in the source repo is already in the target, it | |
564 | will be reported anyway. |
|
564 | will be reported anyway. | |
565 | """ |
|
565 | """ | |
566 | merge_rev = pull_request.merge_rev |
|
566 | merge_rev = pull_request.merge_rev | |
567 | if merge_rev is None: |
|
567 | if merge_rev is None: | |
568 | raise ValueError('This pull request was not merged yet') |
|
568 | raise ValueError('This pull request was not merged yet') | |
569 |
|
569 | |||
570 | commit_ids = list(pull_request.revisions) |
|
570 | commit_ids = list(pull_request.revisions) | |
571 | if merge_rev not in commit_ids: |
|
571 | if merge_rev not in commit_ids: | |
572 | commit_ids.append(merge_rev) |
|
572 | commit_ids.append(merge_rev) | |
573 |
|
573 | |||
574 | return commit_ids |
|
574 | return commit_ids | |
575 |
|
575 | |||
576 | def merge_repo(self, pull_request, user, extras): |
|
576 | def merge_repo(self, pull_request, user, extras): | |
577 | log.debug("Merging pull request %s", pull_request.pull_request_id) |
|
577 | log.debug("Merging pull request %s", pull_request.pull_request_id) | |
578 | extras['user_agent'] = 'internal-merge' |
|
578 | extras['user_agent'] = 'internal-merge' | |
579 | merge_state = self._merge_pull_request(pull_request, user, extras) |
|
579 | merge_state = self._merge_pull_request(pull_request, user, extras) | |
580 | if merge_state.executed: |
|
580 | if merge_state.executed: | |
581 | log.debug("Merge was successful, updating the pull request comments.") |
|
581 | log.debug("Merge was successful, updating the pull request comments.") | |
582 | self._comment_and_close_pr(pull_request, user, merge_state) |
|
582 | self._comment_and_close_pr(pull_request, user, merge_state) | |
583 |
|
583 | |||
584 | self._log_audit_action( |
|
584 | self._log_audit_action( | |
585 | 'repo.pull_request.merge', |
|
585 | 'repo.pull_request.merge', | |
586 | {'merge_state': merge_state.__dict__}, |
|
586 | {'merge_state': merge_state.__dict__}, | |
587 | user, pull_request) |
|
587 | user, pull_request) | |
588 |
|
588 | |||
589 | else: |
|
589 | else: | |
590 | log.warn("Merge failed, not updating the pull request.") |
|
590 | log.warn("Merge failed, not updating the pull request.") | |
591 | return merge_state |
|
591 | return merge_state | |
592 |
|
592 | |||
593 | def _merge_pull_request(self, pull_request, user, extras, merge_msg=None): |
|
593 | def _merge_pull_request(self, pull_request, user, extras, merge_msg=None): | |
594 | target_vcs = pull_request.target_repo.scm_instance() |
|
594 | target_vcs = pull_request.target_repo.scm_instance() | |
595 | source_vcs = pull_request.source_repo.scm_instance() |
|
595 | source_vcs = pull_request.source_repo.scm_instance() | |
596 |
|
596 | |||
597 | message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format( |
|
597 | message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format( | |
598 | pr_id=pull_request.pull_request_id, |
|
598 | pr_id=pull_request.pull_request_id, | |
599 | pr_title=pull_request.title, |
|
599 | pr_title=pull_request.title, | |
600 | source_repo=source_vcs.name, |
|
600 | source_repo=source_vcs.name, | |
601 | source_ref_name=pull_request.source_ref_parts.name, |
|
601 | source_ref_name=pull_request.source_ref_parts.name, | |
602 | target_repo=target_vcs.name, |
|
602 | target_repo=target_vcs.name, | |
603 | target_ref_name=pull_request.target_ref_parts.name, |
|
603 | target_ref_name=pull_request.target_ref_parts.name, | |
604 | ) |
|
604 | ) | |
605 |
|
605 | |||
606 | workspace_id = self._workspace_id(pull_request) |
|
606 | workspace_id = self._workspace_id(pull_request) | |
607 | repo_id = pull_request.target_repo.repo_id |
|
607 | repo_id = pull_request.target_repo.repo_id | |
608 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
608 | use_rebase = self._use_rebase_for_merging(pull_request) | |
609 | close_branch = self._close_branch_before_merging(pull_request) |
|
609 | close_branch = self._close_branch_before_merging(pull_request) | |
610 |
|
610 | |||
611 | target_ref = self._refresh_reference( |
|
611 | target_ref = self._refresh_reference( | |
612 | pull_request.target_ref_parts, target_vcs) |
|
612 | pull_request.target_ref_parts, target_vcs) | |
613 |
|
613 | |||
614 | callback_daemon, extras = prepare_callback_daemon( |
|
614 | callback_daemon, extras = prepare_callback_daemon( | |
615 | extras, protocol=vcs_settings.HOOKS_PROTOCOL, |
|
615 | extras, protocol=vcs_settings.HOOKS_PROTOCOL, | |
616 | host=vcs_settings.HOOKS_HOST, |
|
616 | host=vcs_settings.HOOKS_HOST, | |
617 | use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS) |
|
617 | use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS) | |
618 |
|
618 | |||
619 | with callback_daemon: |
|
619 | with callback_daemon: | |
620 | # TODO: johbo: Implement a clean way to run a config_override |
|
620 | # TODO: johbo: Implement a clean way to run a config_override | |
621 | # for a single call. |
|
621 | # for a single call. | |
622 | target_vcs.config.set( |
|
622 | target_vcs.config.set( | |
623 | 'rhodecode', 'RC_SCM_DATA', json.dumps(extras)) |
|
623 | 'rhodecode', 'RC_SCM_DATA', json.dumps(extras)) | |
624 |
|
624 | |||
625 | user_name = user.short_contact |
|
625 | user_name = user.short_contact | |
626 | merge_state = target_vcs.merge( |
|
626 | merge_state = target_vcs.merge( | |
627 | repo_id, workspace_id, target_ref, source_vcs, |
|
627 | repo_id, workspace_id, target_ref, source_vcs, | |
628 | pull_request.source_ref_parts, |
|
628 | pull_request.source_ref_parts, | |
629 | user_name=user_name, user_email=user.email, |
|
629 | user_name=user_name, user_email=user.email, | |
630 | message=message, use_rebase=use_rebase, |
|
630 | message=message, use_rebase=use_rebase, | |
631 | close_branch=close_branch) |
|
631 | close_branch=close_branch) | |
632 | return merge_state |
|
632 | return merge_state | |
633 |
|
633 | |||
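# Illustrative rendering (hedged sketch): the merge message template is expanded
# with the keyword arguments passed above; with an assumed, simplified template it
# would come out roughly like this:

example_tmpl = u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}\n\n{pr_title}'
example_msg = example_tmpl.format(
    pr_id=7, pr_title=u'Fix typo in docs',
    source_repo=u'fork/some-repo', source_ref_name=u'fix-docs',
    target_repo=u'origin/some-repo', target_ref_name=u'master')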
634 | def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None): |
|
634 | def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None): | |
635 | pull_request.merge_rev = merge_state.merge_ref.commit_id |
|
635 | pull_request.merge_rev = merge_state.merge_ref.commit_id | |
636 | pull_request.updated_on = datetime.datetime.now() |
|
636 | pull_request.updated_on = datetime.datetime.now() | |
637 | close_msg = close_msg or 'Pull request merged and closed' |
|
637 | close_msg = close_msg or 'Pull request merged and closed' | |
638 |
|
638 | |||
639 | CommentsModel().create( |
|
639 | CommentsModel().create( | |
640 | text=safe_unicode(close_msg), |
|
640 | text=safe_unicode(close_msg), | |
641 | repo=pull_request.target_repo.repo_id, |
|
641 | repo=pull_request.target_repo.repo_id, | |
642 | user=user.user_id, |
|
642 | user=user.user_id, | |
643 | pull_request=pull_request.pull_request_id, |
|
643 | pull_request=pull_request.pull_request_id, | |
644 | f_path=None, |
|
644 | f_path=None, | |
645 | line_no=None, |
|
645 | line_no=None, | |
646 | closing_pr=True |
|
646 | closing_pr=True | |
647 | ) |
|
647 | ) | |
648 |
|
648 | |||
649 | Session().add(pull_request) |
|
649 | Session().add(pull_request) | |
650 | Session().flush() |
|
650 | Session().flush() | |
651 | # TODO: paris: replace invalidation with less radical solution |
|
651 | # TODO: paris: replace invalidation with less radical solution | |
652 | ScmModel().mark_for_invalidation( |
|
652 | ScmModel().mark_for_invalidation( | |
653 | pull_request.target_repo.repo_name) |
|
653 | pull_request.target_repo.repo_name) | |
654 | self.trigger_pull_request_hook(pull_request, user, 'merge') |
|
654 | self.trigger_pull_request_hook(pull_request, user, 'merge') | |
655 |
|
655 | |||
656 | def has_valid_update_type(self, pull_request): |
|
656 | def has_valid_update_type(self, pull_request): | |
657 | source_ref_type = pull_request.source_ref_parts.type |
|
657 | source_ref_type = pull_request.source_ref_parts.type | |
658 | return source_ref_type in self.REF_TYPES |
|
658 | return source_ref_type in self.REF_TYPES | |
659 |
|
659 | |||
660 | def update_commits(self, pull_request): |
|
660 | def update_commits(self, pull_request): | |
661 | """ |
|
661 | """ | |
662 | Get the updated list of commits for the pull request |
|
662 | Get the updated list of commits for the pull request | |
663 | and return the new pull request version and the list |
|
663 | and return the new pull request version and the list | |
664 | of commits processed by this update action |
|
664 | of commits processed by this update action | |
665 | """ |
|
665 | """ | |
666 | pull_request = self.__get_pull_request(pull_request) |
|
666 | pull_request = self.__get_pull_request(pull_request) | |
667 | source_ref_type = pull_request.source_ref_parts.type |
|
667 | source_ref_type = pull_request.source_ref_parts.type | |
668 | source_ref_name = pull_request.source_ref_parts.name |
|
668 | source_ref_name = pull_request.source_ref_parts.name | |
669 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
669 | source_ref_id = pull_request.source_ref_parts.commit_id | |
670 |
|
670 | |||
671 | target_ref_type = pull_request.target_ref_parts.type |
|
671 | target_ref_type = pull_request.target_ref_parts.type | |
672 | target_ref_name = pull_request.target_ref_parts.name |
|
672 | target_ref_name = pull_request.target_ref_parts.name | |
673 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
673 | target_ref_id = pull_request.target_ref_parts.commit_id | |
674 |
|
674 | |||
675 | if not self.has_valid_update_type(pull_request): |
|
675 | if not self.has_valid_update_type(pull_request): | |
676 | log.debug("Skipping update of pull request %s due to ref type: %s", |
|
676 | log.debug("Skipping update of pull request %s due to ref type: %s", | |
677 | pull_request, source_ref_type) |
|
677 | pull_request, source_ref_type) | |
678 | return UpdateResponse( |
|
678 | return UpdateResponse( | |
679 | executed=False, |
|
679 | executed=False, | |
680 | reason=UpdateFailureReason.WRONG_REF_TYPE, |
|
680 | reason=UpdateFailureReason.WRONG_REF_TYPE, | |
681 | old=pull_request, new=None, changes=None, |
|
681 | old=pull_request, new=None, changes=None, | |
682 | source_changed=False, target_changed=False) |
|
682 | source_changed=False, target_changed=False) | |
683 |
|
683 | |||
684 | # source repo |
|
684 | # source repo | |
685 | source_repo = pull_request.source_repo.scm_instance() |
|
685 | source_repo = pull_request.source_repo.scm_instance() | |
686 |
|
686 | |||
687 | try: |
|
687 | try: | |
688 | source_commit = source_repo.get_commit(commit_id=source_ref_name) |
|
688 | source_commit = source_repo.get_commit(commit_id=source_ref_name) | |
689 | except CommitDoesNotExistError: |
|
689 | except CommitDoesNotExistError: | |
690 | return UpdateResponse( |
|
690 | return UpdateResponse( | |
691 | executed=False, |
|
691 | executed=False, | |
692 | reason=UpdateFailureReason.MISSING_SOURCE_REF, |
|
692 | reason=UpdateFailureReason.MISSING_SOURCE_REF, | |
693 | old=pull_request, new=None, changes=None, |
|
693 | old=pull_request, new=None, changes=None, | |
694 | source_changed=False, target_changed=False) |
|
694 | source_changed=False, target_changed=False) | |
695 |
|
695 | |||
696 | source_changed = source_ref_id != source_commit.raw_id |
|
696 | source_changed = source_ref_id != source_commit.raw_id | |
697 |
|
697 | |||
698 | # target repo |
|
698 | # target repo | |
699 | target_repo = pull_request.target_repo.scm_instance() |
|
699 | target_repo = pull_request.target_repo.scm_instance() | |
700 |
|
700 | |||
701 | try: |
|
701 | try: | |
702 | target_commit = target_repo.get_commit(commit_id=target_ref_name) |
|
702 | target_commit = target_repo.get_commit(commit_id=target_ref_name) | |
703 | except CommitDoesNotExistError: |
|
703 | except CommitDoesNotExistError: | |
704 | return UpdateResponse( |
|
704 | return UpdateResponse( | |
705 | executed=False, |
|
705 | executed=False, | |
706 | reason=UpdateFailureReason.MISSING_TARGET_REF, |
|
706 | reason=UpdateFailureReason.MISSING_TARGET_REF, | |
707 | old=pull_request, new=None, changes=None, |
|
707 | old=pull_request, new=None, changes=None, | |
708 | source_changed=False, target_changed=False) |
|
708 | source_changed=False, target_changed=False) | |
709 | target_changed = target_ref_id != target_commit.raw_id |
|
709 | target_changed = target_ref_id != target_commit.raw_id | |
710 |
|
710 | |||
711 | if not (source_changed or target_changed): |
|
711 | if not (source_changed or target_changed): | |
712 | log.debug("Nothing changed in pull request %s", pull_request) |
|
712 | log.debug("Nothing changed in pull request %s", pull_request) | |
713 | return UpdateResponse( |
|
713 | return UpdateResponse( | |
714 | executed=False, |
|
714 | executed=False, | |
715 | reason=UpdateFailureReason.NO_CHANGE, |
|
715 | reason=UpdateFailureReason.NO_CHANGE, | |
716 | old=pull_request, new=None, changes=None, |
|
716 | old=pull_request, new=None, changes=None, | |
717 | source_changed=source_changed, target_changed=target_changed) |
|
717 | source_changed=source_changed, target_changed=target_changed) | |
718 |
|
718 | |||
719 | change_in_found = 'target repo' if target_changed else 'source repo' |
|
719 | change_in_found = 'target repo' if target_changed else 'source repo' | |
720 | log.debug('Updating pull request because of change in %s detected', |
|
720 | log.debug('Updating pull request because of change in %s detected', | |
721 | change_in_found) |
|
721 | change_in_found) | |
722 |
|
722 | |||
723 | # Finally, an update is needed; in case of a source change |
|
723 | # Finally, an update is needed; in case of a source change | |
724 | # we create a new version, otherwise just an update |
|
724 | # we create a new version, otherwise just an update | |
725 | if source_changed: |
|
725 | if source_changed: | |
726 | pull_request_version = self._create_version_from_snapshot(pull_request) |
|
726 | pull_request_version = self._create_version_from_snapshot(pull_request) | |
727 | self._link_comments_to_version(pull_request_version) |
|
727 | self._link_comments_to_version(pull_request_version) | |
728 | else: |
|
728 | else: | |
729 | try: |
|
729 | try: | |
730 | ver = pull_request.versions[-1] |
|
730 | ver = pull_request.versions[-1] | |
731 | except IndexError: |
|
731 | except IndexError: | |
732 | ver = None |
|
732 | ver = None | |
733 |
|
733 | |||
734 | pull_request.pull_request_version_id = \ |
|
734 | pull_request.pull_request_version_id = \ | |
735 | ver.pull_request_version_id if ver else None |
|
735 | ver.pull_request_version_id if ver else None | |
736 | pull_request_version = pull_request |
|
736 | pull_request_version = pull_request | |
737 |
|
737 | |||
738 | try: |
|
738 | try: | |
739 | if target_ref_type in self.REF_TYPES: |
|
739 | if target_ref_type in self.REF_TYPES: | |
740 | target_commit = target_repo.get_commit(target_ref_name) |
|
740 | target_commit = target_repo.get_commit(target_ref_name) | |
741 | else: |
|
741 | else: | |
742 | target_commit = target_repo.get_commit(target_ref_id) |
|
742 | target_commit = target_repo.get_commit(target_ref_id) | |
743 | except CommitDoesNotExistError: |
|
743 | except CommitDoesNotExistError: | |
744 | return UpdateResponse( |
|
744 | return UpdateResponse( | |
745 | executed=False, |
|
745 | executed=False, | |
746 | reason=UpdateFailureReason.MISSING_TARGET_REF, |
|
746 | reason=UpdateFailureReason.MISSING_TARGET_REF, | |
747 | old=pull_request, new=None, changes=None, |
|
747 | old=pull_request, new=None, changes=None, | |
748 | source_changed=source_changed, target_changed=target_changed) |
|
748 | source_changed=source_changed, target_changed=target_changed) | |
749 |
|
749 | |||
750 | # re-compute commit ids |
|
750 | # re-compute commit ids | |
751 | old_commit_ids = pull_request.revisions |
|
751 | old_commit_ids = pull_request.revisions | |
752 | pre_load = ["author", "date", "message", "branch"] |
|
752 | pre_load = ["author", "date", "message", "branch"] | |
753 | commit_ranges = target_repo.compare( |
|
753 | commit_ranges = target_repo.compare( | |
754 | target_commit.raw_id, source_commit.raw_id, source_repo, merge=True, |
|
754 | target_commit.raw_id, source_commit.raw_id, source_repo, merge=True, | |
755 | pre_load=pre_load) |
|
755 | pre_load=pre_load) | |
756 |
|
756 | |||
757 | ancestor = source_repo.get_common_ancestor( |
|
757 | ancestor = source_repo.get_common_ancestor( | |
758 | source_commit.raw_id, target_commit.raw_id, target_repo) |
|
758 | source_commit.raw_id, target_commit.raw_id, target_repo) | |
759 |
|
759 | |||
760 | pull_request.source_ref = '%s:%s:%s' % ( |
|
760 | pull_request.source_ref = '%s:%s:%s' % ( | |
761 | source_ref_type, source_ref_name, source_commit.raw_id) |
|
761 | source_ref_type, source_ref_name, source_commit.raw_id) | |
762 | pull_request.target_ref = '%s:%s:%s' % ( |
|
762 | pull_request.target_ref = '%s:%s:%s' % ( | |
763 | target_ref_type, target_ref_name, ancestor) |
|
763 | target_ref_type, target_ref_name, ancestor) | |
764 |
|
764 | |||
765 | pull_request.revisions = [ |
|
765 | pull_request.revisions = [ | |
766 | commit.raw_id for commit in reversed(commit_ranges)] |
|
766 | commit.raw_id for commit in reversed(commit_ranges)] | |
767 | pull_request.updated_on = datetime.datetime.now() |
|
767 | pull_request.updated_on = datetime.datetime.now() | |
768 | Session().add(pull_request) |
|
768 | Session().add(pull_request) | |
769 | new_commit_ids = pull_request.revisions |
|
769 | new_commit_ids = pull_request.revisions | |
770 |
|
770 | |||
771 | old_diff_data, new_diff_data = self._generate_update_diffs( |
|
771 | old_diff_data, new_diff_data = self._generate_update_diffs( | |
772 | pull_request, pull_request_version) |
|
772 | pull_request, pull_request_version) | |
773 |
|
773 | |||
774 | # calculate commit and file changes |
|
774 | # calculate commit and file changes | |
775 | changes = self._calculate_commit_id_changes( |
|
775 | changes = self._calculate_commit_id_changes( | |
776 | old_commit_ids, new_commit_ids) |
|
776 | old_commit_ids, new_commit_ids) | |
777 | file_changes = self._calculate_file_changes( |
|
777 | file_changes = self._calculate_file_changes( | |
778 | old_diff_data, new_diff_data) |
|
778 | old_diff_data, new_diff_data) | |
779 |
|
779 | |||
780 | # set comments as outdated if DIFFS changed |
|
780 | # set comments as outdated if DIFFS changed | |
781 | CommentsModel().outdate_comments( |
|
781 | CommentsModel().outdate_comments( | |
782 | pull_request, old_diff_data=old_diff_data, |
|
782 | pull_request, old_diff_data=old_diff_data, | |
783 | new_diff_data=new_diff_data) |
|
783 | new_diff_data=new_diff_data) | |
784 |
|
784 | |||
785 | commit_changes = (changes.added or changes.removed) |
|
785 | commit_changes = (changes.added or changes.removed) | |
786 | file_node_changes = ( |
|
786 | file_node_changes = ( | |
787 | file_changes.added or file_changes.modified or file_changes.removed) |
|
787 | file_changes.added or file_changes.modified or file_changes.removed) | |
788 | pr_has_changes = commit_changes or file_node_changes |
|
788 | pr_has_changes = commit_changes or file_node_changes | |
789 |
|
789 | |||
790 | # Add an automatic comment to the pull request, in case |
|
790 | # Add an automatic comment to the pull request, in case | |
791 | # anything has changed |
|
791 | # anything has changed | |
792 | if pr_has_changes: |
|
792 | if pr_has_changes: | |
793 | update_comment = CommentsModel().create( |
|
793 | update_comment = CommentsModel().create( | |
794 | text=self._render_update_message(changes, file_changes), |
|
794 | text=self._render_update_message(changes, file_changes), | |
795 | repo=pull_request.target_repo, |
|
795 | repo=pull_request.target_repo, | |
796 | user=pull_request.author, |
|
796 | user=pull_request.author, | |
797 | pull_request=pull_request, |
|
797 | pull_request=pull_request, | |
798 | send_email=False, renderer=DEFAULT_COMMENTS_RENDERER) |
|
798 | send_email=False, renderer=DEFAULT_COMMENTS_RENDERER) | |
799 |
|
799 | |||
800 | # Update status to "Under Review" for added commits |
|
800 | # Update status to "Under Review" for added commits | |
801 | for commit_id in changes.added: |
|
801 | for commit_id in changes.added: | |
802 | ChangesetStatusModel().set_status( |
|
802 | ChangesetStatusModel().set_status( | |
803 | repo=pull_request.source_repo, |
|
803 | repo=pull_request.source_repo, | |
804 | status=ChangesetStatus.STATUS_UNDER_REVIEW, |
|
804 | status=ChangesetStatus.STATUS_UNDER_REVIEW, | |
805 | comment=update_comment, |
|
805 | comment=update_comment, | |
806 | user=pull_request.author, |
|
806 | user=pull_request.author, | |
807 | pull_request=pull_request, |
|
807 | pull_request=pull_request, | |
808 | revision=commit_id) |
|
808 | revision=commit_id) | |
809 |
|
809 | |||
810 | log.debug( |
|
810 | log.debug( | |
811 | 'Updated pull request %s, added_ids: %s, common_ids: %s, ' |
|
811 | 'Updated pull request %s, added_ids: %s, common_ids: %s, ' | |
812 | 'removed_ids: %s', pull_request.pull_request_id, |
|
812 | 'removed_ids: %s', pull_request.pull_request_id, | |
813 | changes.added, changes.common, changes.removed) |
|
813 | changes.added, changes.common, changes.removed) | |
814 | log.debug( |
|
814 | log.debug( | |
815 | 'Updated pull request with the following file changes: %s', |
|
815 | 'Updated pull request with the following file changes: %s', | |
816 | file_changes) |
|
816 | file_changes) | |
817 |
|
817 | |||
818 | log.info( |
|
818 | log.info( | |
819 | "Updated pull request %s from commit %s to commit %s, " |
|
819 | "Updated pull request %s from commit %s to commit %s, " | |
820 | "stored new version %s of this pull request.", |
|
820 | "stored new version %s of this pull request.", | |
821 | pull_request.pull_request_id, source_ref_id, |
|
821 | pull_request.pull_request_id, source_ref_id, | |
822 | pull_request.source_ref_parts.commit_id, |
|
822 | pull_request.source_ref_parts.commit_id, | |
823 | pull_request_version.pull_request_version_id) |
|
823 | pull_request_version.pull_request_version_id) | |
824 | Session().commit() |
|
824 | Session().commit() | |
825 | self.trigger_pull_request_hook(pull_request, pull_request.author, 'update') |
|
825 | self.trigger_pull_request_hook(pull_request, pull_request.author, 'update') | |
826 |
|
826 | |||
827 | return UpdateResponse( |
|
827 | return UpdateResponse( | |
828 | executed=True, reason=UpdateFailureReason.NONE, |
|
828 | executed=True, reason=UpdateFailureReason.NONE, | |
829 | old=pull_request, new=pull_request_version, changes=changes, |
|
829 | old=pull_request, new=pull_request_version, changes=changes, | |
830 | source_changed=source_changed, target_changed=target_changed) |
|
830 | source_changed=source_changed, target_changed=target_changed) | |
831 |
|
831 | |||
832 | def _create_version_from_snapshot(self, pull_request): |
|
832 | def _create_version_from_snapshot(self, pull_request): | |
833 | version = PullRequestVersion() |
|
833 | version = PullRequestVersion() | |
834 | version.title = pull_request.title |
|
834 | version.title = pull_request.title | |
835 | version.description = pull_request.description |
|
835 | version.description = pull_request.description | |
836 | version.status = pull_request.status |
|
836 | version.status = pull_request.status | |
837 | version.pull_request_state = pull_request.pull_request_state |
|
837 | version.pull_request_state = pull_request.pull_request_state | |
838 | version.created_on = datetime.datetime.now() |
|
838 | version.created_on = datetime.datetime.now() | |
839 | version.updated_on = pull_request.updated_on |
|
839 | version.updated_on = pull_request.updated_on | |
840 | version.user_id = pull_request.user_id |
|
840 | version.user_id = pull_request.user_id | |
841 | version.source_repo = pull_request.source_repo |
|
841 | version.source_repo = pull_request.source_repo | |
842 | version.source_ref = pull_request.source_ref |
|
842 | version.source_ref = pull_request.source_ref | |
843 | version.target_repo = pull_request.target_repo |
|
843 | version.target_repo = pull_request.target_repo | |
844 | version.target_ref = pull_request.target_ref |
|
844 | version.target_ref = pull_request.target_ref | |
845 |
|
845 | |||
846 | version._last_merge_source_rev = pull_request._last_merge_source_rev |
|
846 | version._last_merge_source_rev = pull_request._last_merge_source_rev | |
847 | version._last_merge_target_rev = pull_request._last_merge_target_rev |
|
847 | version._last_merge_target_rev = pull_request._last_merge_target_rev | |
848 | version.last_merge_status = pull_request.last_merge_status |
|
848 | version.last_merge_status = pull_request.last_merge_status | |
849 | version.shadow_merge_ref = pull_request.shadow_merge_ref |
|
849 | version.shadow_merge_ref = pull_request.shadow_merge_ref | |
850 | version.merge_rev = pull_request.merge_rev |
|
850 | version.merge_rev = pull_request.merge_rev | |
851 | version.reviewer_data = pull_request.reviewer_data |
|
851 | version.reviewer_data = pull_request.reviewer_data | |
852 |
|
852 | |||
853 | version.revisions = pull_request.revisions |
|
853 | version.revisions = pull_request.revisions | |
854 | version.pull_request = pull_request |
|
854 | version.pull_request = pull_request | |
855 | Session().add(version) |
|
855 | Session().add(version) | |
856 | Session().flush() |
|
856 | Session().flush() | |
857 |
|
857 | |||
858 | return version |
|
858 | return version | |
859 |
|
859 | |||
860 | def _generate_update_diffs(self, pull_request, pull_request_version): |
|
860 | def _generate_update_diffs(self, pull_request, pull_request_version): | |
861 |
|
861 | |||
862 | diff_context = ( |
|
862 | diff_context = ( | |
863 | self.DIFF_CONTEXT + |
|
863 | self.DIFF_CONTEXT + | |
864 | CommentsModel.needed_extra_diff_context()) |
|
864 | CommentsModel.needed_extra_diff_context()) | |
865 | hide_whitespace_changes = False |
|
865 | hide_whitespace_changes = False | |
866 | source_repo = pull_request_version.source_repo |
|
866 | source_repo = pull_request_version.source_repo | |
867 | source_ref_id = pull_request_version.source_ref_parts.commit_id |
|
867 | source_ref_id = pull_request_version.source_ref_parts.commit_id | |
868 | target_ref_id = pull_request_version.target_ref_parts.commit_id |
|
868 | target_ref_id = pull_request_version.target_ref_parts.commit_id | |
869 | old_diff = self._get_diff_from_pr_or_version( |
|
869 | old_diff = self._get_diff_from_pr_or_version( | |
870 | source_repo, source_ref_id, target_ref_id, |
|
870 | source_repo, source_ref_id, target_ref_id, | |
871 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) |
|
871 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) | |
872 |
|
872 | |||
873 | source_repo = pull_request.source_repo |
|
873 | source_repo = pull_request.source_repo | |
874 | source_ref_id = pull_request.source_ref_parts.commit_id |
|
874 | source_ref_id = pull_request.source_ref_parts.commit_id | |
875 | target_ref_id = pull_request.target_ref_parts.commit_id |
|
875 | target_ref_id = pull_request.target_ref_parts.commit_id | |
876 |
|
876 | |||
877 | new_diff = self._get_diff_from_pr_or_version( |
|
877 | new_diff = self._get_diff_from_pr_or_version( | |
878 | source_repo, source_ref_id, target_ref_id, |
|
878 | source_repo, source_ref_id, target_ref_id, | |
879 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) |
|
879 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) | |
880 |
|
880 | |||
881 | old_diff_data = diffs.DiffProcessor(old_diff) |
|
881 | old_diff_data = diffs.DiffProcessor(old_diff) | |
882 | old_diff_data.prepare() |
|
882 | old_diff_data.prepare() | |
883 | new_diff_data = diffs.DiffProcessor(new_diff) |
|
883 | new_diff_data = diffs.DiffProcessor(new_diff) | |
884 | new_diff_data.prepare() |
|
884 | new_diff_data.prepare() | |
885 |
|
885 | |||
886 | return old_diff_data, new_diff_data |
|
886 | return old_diff_data, new_diff_data | |
887 |
|
887 | |||
888 | def _link_comments_to_version(self, pull_request_version): |
|
888 | def _link_comments_to_version(self, pull_request_version): | |
889 | """ |
|
889 | """ | |
890 | Link all unlinked comments of this pull request to the given version. |
|
890 | Link all unlinked comments of this pull request to the given version. | |
891 |
|
891 | |||
892 | :param pull_request_version: The `PullRequestVersion` to which |
|
892 | :param pull_request_version: The `PullRequestVersion` to which | |
893 | the comments shall be linked. |
|
893 | the comments shall be linked. | |
894 |
|
894 | |||
895 | """ |
|
895 | """ | |
896 | pull_request = pull_request_version.pull_request |
|
896 | pull_request = pull_request_version.pull_request | |
897 | comments = ChangesetComment.query()\ |
|
897 | comments = ChangesetComment.query()\ | |
898 | .filter( |
|
898 | .filter( | |
899 | # TODO: johbo: Should we query for the repo at all here? |
|
899 | # TODO: johbo: Should we query for the repo at all here? | |
900 | # Pending decision on how comments of PRs are to be related |
|
900 | # Pending decision on how comments of PRs are to be related | |
901 | # to either the source repo, the target repo or no repo at all. |
|
901 | # to either the source repo, the target repo or no repo at all. | |
902 | ChangesetComment.repo_id == pull_request.target_repo.repo_id, |
|
902 | ChangesetComment.repo_id == pull_request.target_repo.repo_id, | |
903 | ChangesetComment.pull_request == pull_request, |
|
903 | ChangesetComment.pull_request == pull_request, | |
904 | ChangesetComment.pull_request_version == None)\ |
|
904 | ChangesetComment.pull_request_version == None)\ | |
905 | .order_by(ChangesetComment.comment_id.asc()) |
|
905 | .order_by(ChangesetComment.comment_id.asc()) | |
906 |
|
906 | |||
907 | # TODO: johbo: Find out why this breaks if it is done in a bulk |
|
907 | # TODO: johbo: Find out why this breaks if it is done in a bulk | |
908 | # operation. |
|
908 | # operation. | |
909 | for comment in comments: |
|
909 | for comment in comments: | |
910 | comment.pull_request_version_id = ( |
|
910 | comment.pull_request_version_id = ( | |
911 | pull_request_version.pull_request_version_id) |
|
911 | pull_request_version.pull_request_version_id) | |
912 | Session().add(comment) |
|
912 | Session().add(comment) | |
913 |
|
913 | |||
914 | def _calculate_commit_id_changes(self, old_ids, new_ids): |
|
914 | def _calculate_commit_id_changes(self, old_ids, new_ids): | |
915 | added = [x for x in new_ids if x not in old_ids] |
|
915 | added = [x for x in new_ids if x not in old_ids] | |
916 | common = [x for x in new_ids if x in old_ids] |
|
916 | common = [x for x in new_ids if x in old_ids] | |
917 | removed = [x for x in old_ids if x not in new_ids] |
|
917 | removed = [x for x in old_ids if x not in new_ids] | |
918 | total = new_ids |
|
918 | total = new_ids | |
919 | return ChangeTuple(added, common, removed, total) |
|
919 | return ChangeTuple(added, common, removed, total) | |
920 |
|
920 | |||
921 | def _calculate_file_changes(self, old_diff_data, new_diff_data): |
|
921 | def _calculate_file_changes(self, old_diff_data, new_diff_data): | |
922 |
|
922 | |||
923 | old_files = OrderedDict() |
|
923 | old_files = OrderedDict() | |
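# note: old_files maps each filename from the old diff to an md5 of its raw diff; the hashes are compared against the new diff below to detect modified files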
924 | for diff_data in old_diff_data.parsed_diff: |
|
924 | for diff_data in old_diff_data.parsed_diff: | |
925 | old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff']) |
|
925 | old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff']) | |
926 |
|
926 | |||
927 | added_files = [] |
|
927 | added_files = [] | |
928 | modified_files = [] |
|
928 | modified_files = [] | |
929 | removed_files = [] |
|
929 | removed_files = [] | |
930 | for diff_data in new_diff_data.parsed_diff: |
|
930 | for diff_data in new_diff_data.parsed_diff: | |
931 | new_filename = diff_data['filename'] |
|
931 | new_filename = diff_data['filename'] | |
932 | new_hash = md5_safe(diff_data['raw_diff']) |
|
932 | new_hash = md5_safe(diff_data['raw_diff']) | |
933 |
|
933 | |||
934 | old_hash = old_files.get(new_filename) |
|
934 | old_hash = old_files.get(new_filename) | |
935 | if not old_hash: |
|
935 | if not old_hash: | |
936 | # file is not present in old diff, means it's added |
|
936 | # file is not present in old diff, means it's added | |
937 | added_files.append(new_filename) |
|
937 | added_files.append(new_filename) | |
938 | else: |
|
938 | else: | |
939 | if new_hash != old_hash: |
|
939 | if new_hash != old_hash: | |
940 | modified_files.append(new_filename) |
|
940 | modified_files.append(new_filename) | |
941 | # now remove a file from old, since we have seen it already |
|
941 | # now remove a file from old, since we have seen it already | |
942 | del old_files[new_filename] |
|
942 | del old_files[new_filename] | |
943 |
|
943 | |||
944 | # removed files are those present in the old diff but not in the new one; |
|
944 | # removed files are those present in the old diff but not in the new one; | |
945 | # since files seen in the new diff are removed from old_files above, any |
|
945 | # since files seen in the new diff are removed from old_files above, any | |
946 | # left-overs are the removed files |
|
946 | # left-overs are the removed files | |
947 | removed_files.extend(old_files.keys()) |
|
947 | removed_files.extend(old_files.keys()) | |
948 |
|
948 | |||
949 | return FileChangeTuple(added_files, modified_files, removed_files) |
|
949 | return FileChangeTuple(added_files, modified_files, removed_files) | |
950 |
|
950 | |||
951 | def _render_update_message(self, changes, file_changes): |
|
951 | def _render_update_message(self, changes, file_changes): | |
952 | """ |
|
952 | """ | |
953 | render the message using DEFAULT_COMMENTS_RENDERER (RST renderer), |
|
953 | render the message using DEFAULT_COMMENTS_RENDERER (RST renderer), | |
954 | so it always looks the same regardless of which default |
|
954 | so it always looks the same regardless of which default | |
955 | renderer the system is using. |
|
955 | renderer the system is using. | |
956 |
|
956 | |||
957 | :param changes: changes named tuple |
|
957 | :param changes: changes named tuple | |
958 | :param file_changes: file changes named tuple |
|
958 | :param file_changes: file changes named tuple | |
959 |
|
959 | |||
960 | """ |
|
960 | """ | |
961 | new_status = ChangesetStatus.get_status_lbl( |
|
961 | new_status = ChangesetStatus.get_status_lbl( | |
962 | ChangesetStatus.STATUS_UNDER_REVIEW) |
|
962 | ChangesetStatus.STATUS_UNDER_REVIEW) | |
963 |
|
963 | |||
964 | changed_files = ( |
|
964 | changed_files = ( | |
965 | file_changes.added + file_changes.modified + file_changes.removed) |
|
965 | file_changes.added + file_changes.modified + file_changes.removed) | |
966 |
|
966 | |||
967 | params = { |
|
967 | params = { | |
968 | 'under_review_label': new_status, |
|
968 | 'under_review_label': new_status, | |
969 | 'added_commits': changes.added, |
|
969 | 'added_commits': changes.added, | |
970 | 'removed_commits': changes.removed, |
|
970 | 'removed_commits': changes.removed, | |
971 | 'changed_files': changed_files, |
|
971 | 'changed_files': changed_files, | |
972 | 'added_files': file_changes.added, |
|
972 | 'added_files': file_changes.added, | |
973 | 'modified_files': file_changes.modified, |
|
973 | 'modified_files': file_changes.modified, | |
974 | 'removed_files': file_changes.removed, |
|
974 | 'removed_files': file_changes.removed, | |
975 | } |
|
975 | } | |
976 | renderer = RstTemplateRenderer() |
|
976 | renderer = RstTemplateRenderer() | |
977 | return renderer.render('pull_request_update.mako', **params) |
|
977 | return renderer.render('pull_request_update.mako', **params) | |
978 |
|
978 | |||
979 | def edit(self, pull_request, title, description, description_renderer, user): |
|
979 | def edit(self, pull_request, title, description, description_renderer, user): | |
980 | pull_request = self.__get_pull_request(pull_request) |
|
980 | pull_request = self.__get_pull_request(pull_request) | |
981 | old_data = pull_request.get_api_data(with_merge_state=False) |
|
981 | old_data = pull_request.get_api_data(with_merge_state=False) | |
982 | if pull_request.is_closed(): |
|
982 | if pull_request.is_closed(): | |
983 | raise ValueError('This pull request is closed') |
|
983 | raise ValueError('This pull request is closed') | |
984 | if title: |
|
984 | if title: | |
985 | pull_request.title = title |
|
985 | pull_request.title = title | |
986 | pull_request.description = description |
|
986 | pull_request.description = description | |
987 | pull_request.updated_on = datetime.datetime.now() |
|
987 | pull_request.updated_on = datetime.datetime.now() | |
988 | pull_request.description_renderer = description_renderer |
|
988 | pull_request.description_renderer = description_renderer | |
989 | Session().add(pull_request) |
|
989 | Session().add(pull_request) | |
990 | self._log_audit_action( |
|
990 | self._log_audit_action( | |
991 | 'repo.pull_request.edit', {'old_data': old_data}, |
|
991 | 'repo.pull_request.edit', {'old_data': old_data}, | |
992 | user, pull_request) |
|
992 | user, pull_request) | |
993 |
|
993 | |||
994 | def update_reviewers(self, pull_request, reviewer_data, user): |
|
994 | def update_reviewers(self, pull_request, reviewer_data, user): | |
995 | """ |
|
995 | """ | |
996 | Update the reviewers in the pull request |
|
996 | Update the reviewers in the pull request | |
997 |
|
997 | |||
998 | :param pull_request: the pr to update |
|
998 | :param pull_request: the pr to update | |
999 | :param reviewer_data: list of tuples |
|
999 | :param reviewer_data: list of tuples | |
1000 | [(user, ['reason1', 'reason2'], mandatory_flag, [rules])] |
|
1000 | [(user, ['reason1', 'reason2'], mandatory_flag, [rules])] | |
1001 | """ |
|
1001 | """ | |
1002 | pull_request = self.__get_pull_request(pull_request) |
|
1002 | pull_request = self.__get_pull_request(pull_request) | |
1003 | if pull_request.is_closed(): |
|
1003 | if pull_request.is_closed(): | |
1004 | raise ValueError('This pull request is closed') |
|
1004 | raise ValueError('This pull request is closed') | |
1005 |
|
1005 | |||
1006 | reviewers = {} |
|
1006 | reviewers = {} | |
1007 | for user_id, reasons, mandatory, rules in reviewer_data: |
|
1007 | for user_id, reasons, mandatory, rules in reviewer_data: | |
1008 | if isinstance(user_id, (int, compat.string_types)): |
|
1008 | if isinstance(user_id, (int, compat.string_types)): | |
1009 | user_id = self._get_user(user_id).user_id |
|
1009 | user_id = self._get_user(user_id).user_id | |
1010 | reviewers[user_id] = { |
|
1010 | reviewers[user_id] = { | |
1011 | 'reasons': reasons, 'mandatory': mandatory} |
|
1011 | 'reasons': reasons, 'mandatory': mandatory} | |
1012 |
|
1012 | |||
1013 | reviewers_ids = set(reviewers.keys()) |
|
1013 | reviewers_ids = set(reviewers.keys()) | |
1014 | current_reviewers = PullRequestReviewers.query()\ |
|
1014 | current_reviewers = PullRequestReviewers.query()\ | |
1015 | .filter(PullRequestReviewers.pull_request == |
|
1015 | .filter(PullRequestReviewers.pull_request == | |
1016 | pull_request).all() |
|
1016 | pull_request).all() | |
1017 | current_reviewers_ids = set([x.user.user_id for x in current_reviewers]) |
|
1017 | current_reviewers_ids = set([x.user.user_id for x in current_reviewers]) | |
1018 |
|
1018 | |||
1019 | ids_to_add = reviewers_ids.difference(current_reviewers_ids) |
|
1019 | ids_to_add = reviewers_ids.difference(current_reviewers_ids) | |
1020 | ids_to_remove = current_reviewers_ids.difference(reviewers_ids) |
|
1020 | ids_to_remove = current_reviewers_ids.difference(reviewers_ids) | |
1021 |
|
1021 | |||
1022 | log.debug("Adding %s reviewers", ids_to_add) |
|
1022 | log.debug("Adding %s reviewers", ids_to_add) | |
1023 | log.debug("Removing %s reviewers", ids_to_remove) |
|
1023 | log.debug("Removing %s reviewers", ids_to_remove) | |
1024 | changed = False |
|
1024 | changed = False | |
1025 | added_audit_reviewers = [] |
|
1025 | added_audit_reviewers = [] | |
1026 | removed_audit_reviewers = [] |
|
1026 | removed_audit_reviewers = [] | |
1027 |
|
1027 | |||
1028 | for uid in ids_to_add: |
|
1028 | for uid in ids_to_add: | |
1029 | changed = True |
|
1029 | changed = True | |
1030 | _usr = self._get_user(uid) |
|
1030 | _usr = self._get_user(uid) | |
1031 | reviewer = PullRequestReviewers() |
|
1031 | reviewer = PullRequestReviewers() | |
1032 | reviewer.user = _usr |
|
1032 | reviewer.user = _usr | |
1033 | reviewer.pull_request = pull_request |
|
1033 | reviewer.pull_request = pull_request | |
1034 | reviewer.reasons = reviewers[uid]['reasons'] |
|
1034 | reviewer.reasons = reviewers[uid]['reasons'] | |
1035 | # NOTE(marcink): mandatory shouldn't be changed now |
|
1035 | # NOTE(marcink): mandatory shouldn't be changed now | |
1036 | # reviewer.mandatory = reviewers[uid]['reasons'] |
|
1036 | # reviewer.mandatory = reviewers[uid]['reasons'] | |
1037 | Session().add(reviewer) |
|
1037 | Session().add(reviewer) | |
1038 | added_audit_reviewers.append(reviewer.get_dict()) |
|
1038 | added_audit_reviewers.append(reviewer.get_dict()) | |
1039 |
|
1039 | |||
1040 | for uid in ids_to_remove: |
|
1040 | for uid in ids_to_remove: | |
1041 | changed = True |
|
1041 | changed = True | |
1042 | # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case |
|
1042 | # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case | |
1043 | # that prevents and fixes cases where the same reviewer was added twice. |
|
1043 | # that prevents and fixes cases where the same reviewer was added twice. | |
1044 | # this CAN happen due to the lack of DB checks |
|
1044 | # this CAN happen due to the lack of DB checks | |
1045 | reviewers = PullRequestReviewers.query()\ |
|
1045 | reviewers = PullRequestReviewers.query()\ | |
1046 | .filter(PullRequestReviewers.user_id == uid, |
|
1046 | .filter(PullRequestReviewers.user_id == uid, | |
1047 | PullRequestReviewers.pull_request == pull_request)\ |
|
1047 | PullRequestReviewers.pull_request == pull_request)\ | |
1048 | .all() |
|
1048 | .all() | |
1049 |
|
1049 | |||
1050 | for obj in reviewers: |
|
1050 | for obj in reviewers: | |
1051 | removed_audit_reviewers.append(obj.get_dict()) |
|
1051 | removed_audit_reviewers.append(obj.get_dict()) | |
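# record the removed reviewer's data so the 'repo.pull_request.reviewer.delete' audit entry below has the old data to log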
1052 | Session().delete(obj) |
|
1052 | Session().delete(obj) | |
1053 |
|
1053 | |||
1054 | if changed: |
|
1054 | if changed: | |
1055 | Session().expire_all() |
|
1055 | Session().expire_all() | |
1056 | pull_request.updated_on = datetime.datetime.now() |
|
1056 | pull_request.updated_on = datetime.datetime.now() | |
1057 | Session().add(pull_request) |
|
1057 | Session().add(pull_request) | |
1058 |
|
1058 | |||
1059 | # finally store audit logs |
|
1059 | # finally store audit logs | |
1060 | for user_data in added_audit_reviewers: |
|
1060 | for user_data in added_audit_reviewers: | |
1061 | self._log_audit_action( |
|
1061 | self._log_audit_action( | |
1062 | 'repo.pull_request.reviewer.add', {'data': user_data}, |
|
1062 | 'repo.pull_request.reviewer.add', {'data': user_data}, | |
1063 | user, pull_request) |
|
1063 | user, pull_request) | |
1064 | for user_data in removed_audit_reviewers: |
|
1064 | for user_data in removed_audit_reviewers: | |
1065 | self._log_audit_action( |
|
1065 | self._log_audit_action( | |
1066 | 'repo.pull_request.reviewer.delete', {'old_data': user_data}, |
|
1066 | 'repo.pull_request.reviewer.delete', {'old_data': user_data}, | |
1067 | user, pull_request) |
|
1067 | user, pull_request) | |
1068 |
|
1068 | |||
1069 | self.notify_reviewers(pull_request, ids_to_add) |
|
1069 | self.notify_reviewers(pull_request, ids_to_add) | |
1070 | return ids_to_add, ids_to_remove |
|
1070 | return ids_to_add, ids_to_remove | |
1071 |
|
1071 | |||
1072 | def get_url(self, pull_request, request=None, permalink=False): |
|
1072 | def get_url(self, pull_request, request=None, permalink=False): | |
1073 | if not request: |
|
1073 | if not request: | |
1074 | request = get_current_request() |
|
1074 | request = get_current_request() | |
1075 |
|
1075 | |||
1076 | if permalink: |
|
1076 | if permalink: | |
1077 | return request.route_url( |
|
1077 | return request.route_url( | |
1078 | 'pull_requests_global', |
|
1078 | 'pull_requests_global', | |
1079 | pull_request_id=pull_request.pull_request_id,) |
|
1079 | pull_request_id=pull_request.pull_request_id,) | |
1080 | else: |
|
1080 | else: | |
1081 | return request.route_url('pullrequest_show', |
|
1081 | return request.route_url('pullrequest_show', | |
1082 | repo_name=safe_str(pull_request.target_repo.repo_name), |
|
1082 | repo_name=safe_str(pull_request.target_repo.repo_name), | |
1083 | pull_request_id=pull_request.pull_request_id,) |
|
1083 | pull_request_id=pull_request.pull_request_id,) | |
1084 |
|
1084 | |||
1085 | def get_shadow_clone_url(self, pull_request, request=None): |
|
1085 | def get_shadow_clone_url(self, pull_request, request=None): | |
1086 | """ |
|
1086 | """ | |
1087 | Returns a qualified URL pointing to the shadow repository. If this pull |
|
1087 | Returns a qualified URL pointing to the shadow repository. If this pull | |
1088 | request is closed there is no shadow repository and ``None`` will be |
|
1088 | request is closed there is no shadow repository and ``None`` will be | |
1089 | returned. |
|
1089 | returned. | |
1090 | """ |
|
1090 | """ | |
1091 | if pull_request.is_closed(): |
|
1091 | if pull_request.is_closed(): | |
1092 | return None |
|
1092 | return None | |
1093 | else: |
|
1093 | else: | |
1094 | pr_url = urllib.unquote(self.get_url(pull_request, request=request)) |
|
1094 | pr_url = urllib.unquote(self.get_url(pull_request, request=request)) | |
1095 | return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url)) |
|
1095 | return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url)) | |
1096 |
|
1096 | |||
1097 | def notify_reviewers(self, pull_request, reviewers_ids): |
|
1097 | def notify_reviewers(self, pull_request, reviewers_ids): | |
1098 | # notification to reviewers |
|
1098 | # notification to reviewers | |
1099 | if not reviewers_ids: |
|
1099 | if not reviewers_ids: | |
1100 | return |
|
1100 | return | |
1101 |
|
1101 | |||
1102 | log.debug('Notify following reviewers about pull-request %s', reviewers_ids) |
|
1102 | log.debug('Notify following reviewers about pull-request %s', reviewers_ids) | |
1103 |
|
1103 | |||
1104 | pull_request_obj = pull_request |
|
1104 | pull_request_obj = pull_request | |
1105 | # get the current participants of this pull request |
|
1105 | # get the current participants of this pull request | |
1106 | recipients = reviewers_ids |
|
1106 | recipients = reviewers_ids | |
1107 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST |
|
1107 | notification_type = EmailNotificationModel.TYPE_PULL_REQUEST | |
1108 |
|
1108 | |||
1109 | pr_source_repo = pull_request_obj.source_repo |
|
1109 | pr_source_repo = pull_request_obj.source_repo | |
1110 | pr_target_repo = pull_request_obj.target_repo |
|
1110 | pr_target_repo = pull_request_obj.target_repo | |
1111 |
|
1111 | |||
1112 | pr_url = h.route_url('pullrequest_show', |
|
1112 | pr_url = h.route_url('pullrequest_show', | |
1113 | repo_name=pr_target_repo.repo_name, |
|
1113 | repo_name=pr_target_repo.repo_name, | |
1114 | pull_request_id=pull_request_obj.pull_request_id,) |
|
1114 | pull_request_id=pull_request_obj.pull_request_id,) | |
1115 |
|
1115 | |||
1116 | # set some variables for email notification |
|
1116 | # set some variables for email notification | |
1117 | pr_target_repo_url = h.route_url( |
|
1117 | pr_target_repo_url = h.route_url( | |
1118 | 'repo_summary', repo_name=pr_target_repo.repo_name) |
|
1118 | 'repo_summary', repo_name=pr_target_repo.repo_name) | |
1119 |
|
1119 | |||
1120 | pr_source_repo_url = h.route_url( |
|
1120 | pr_source_repo_url = h.route_url( | |
1121 | 'repo_summary', repo_name=pr_source_repo.repo_name) |
|
1121 | 'repo_summary', repo_name=pr_source_repo.repo_name) | |
1122 |
|
1122 | |||
1123 | # pull request specifics |
|
1123 | # pull request specifics | |
1124 | pull_request_commits = [ |
|
1124 | pull_request_commits = [ | |
1125 | (x.raw_id, x.message) |
|
1125 | (x.raw_id, x.message) | |
1126 | for x in map(pr_source_repo.get_commit, pull_request.revisions)] |
|
1126 | for x in map(pr_source_repo.get_commit, pull_request.revisions)] | |
1127 |
|
1127 | |||
1128 | kwargs = { |
|
1128 | kwargs = { | |
1129 | 'user': pull_request.author, |
|
1129 | 'user': pull_request.author, | |
1130 | 'pull_request': pull_request_obj, |
|
1130 | 'pull_request': pull_request_obj, | |
1131 | 'pull_request_commits': pull_request_commits, |
|
1131 | 'pull_request_commits': pull_request_commits, | |
1132 |
|
1132 | |||
1133 | 'pull_request_target_repo': pr_target_repo, |
|
1133 | 'pull_request_target_repo': pr_target_repo, | |
1134 | 'pull_request_target_repo_url': pr_target_repo_url, |
|
1134 | 'pull_request_target_repo_url': pr_target_repo_url, | |
1135 |
|
1135 | |||
1136 | 'pull_request_source_repo': pr_source_repo, |
|
1136 | 'pull_request_source_repo': pr_source_repo, | |
1137 | 'pull_request_source_repo_url': pr_source_repo_url, |
|
1137 | 'pull_request_source_repo_url': pr_source_repo_url, | |
1138 |
|
1138 | |||
1139 | 'pull_request_url': pr_url, |
|
1139 | 'pull_request_url': pr_url, | |
1140 | } |
|
1140 | } | |
1141 |
|
1141 | |||
1142 | # pre-generate the subject for notification itself |
|
1142 | # pre-generate the subject for notification itself | |
1143 | (subject, |
|
1143 | (subject, | |
1144 | _h, _e, # we don't care about those |
|
1144 | _h, _e, # we don't care about those | |
1145 | body_plaintext) = EmailNotificationModel().render_email( |
|
1145 | body_plaintext) = EmailNotificationModel().render_email( | |
1146 | notification_type, **kwargs) |
|
1146 | notification_type, **kwargs) | |
1147 |
|
1147 | |||
1148 | # create notification objects, and emails |
|
1148 | # create notification objects, and emails | |
1149 | NotificationModel().create( |
|
1149 | NotificationModel().create( | |
1150 | created_by=pull_request.author, |
|
1150 | created_by=pull_request.author, | |
1151 | notification_subject=subject, |
|
1151 | notification_subject=subject, | |
1152 | notification_body=body_plaintext, |
|
1152 | notification_body=body_plaintext, | |
1153 | notification_type=notification_type, |
|
1153 | notification_type=notification_type, | |
1154 | recipients=recipients, |
|
1154 | recipients=recipients, | |
1155 | email_kwargs=kwargs, |
|
1155 | email_kwargs=kwargs, | |
1156 | ) |
|
1156 | ) | |
1157 |
|
1157 | |||
1158 | def delete(self, pull_request, user): |
|
1158 | def delete(self, pull_request, user): | |
1159 | pull_request = self.__get_pull_request(pull_request) |
|
1159 | pull_request = self.__get_pull_request(pull_request) | |
1160 | old_data = pull_request.get_api_data(with_merge_state=False) |
|
1160 | old_data = pull_request.get_api_data(with_merge_state=False) | |
1161 | self._cleanup_merge_workspace(pull_request) |
|
1161 | self._cleanup_merge_workspace(pull_request) | |
1162 | self._log_audit_action( |
|
1162 | self._log_audit_action( | |
1163 | 'repo.pull_request.delete', {'old_data': old_data}, |
|
1163 | 'repo.pull_request.delete', {'old_data': old_data}, | |
1164 | user, pull_request) |
|
1164 | user, pull_request) | |
1165 | Session().delete(pull_request) |
|
1165 | Session().delete(pull_request) | |
1166 |
|
1166 | |||
1167 | def close_pull_request(self, pull_request, user): |
|
1167 | def close_pull_request(self, pull_request, user): | |
1168 | pull_request = self.__get_pull_request(pull_request) |
|
1168 | pull_request = self.__get_pull_request(pull_request) | |
1169 | self._cleanup_merge_workspace(pull_request) |
|
1169 | self._cleanup_merge_workspace(pull_request) | |
1170 | pull_request.status = PullRequest.STATUS_CLOSED |
|
1170 | pull_request.status = PullRequest.STATUS_CLOSED | |
1171 | pull_request.updated_on = datetime.datetime.now() |
|
1171 | pull_request.updated_on = datetime.datetime.now() | |
1172 | Session().add(pull_request) |
|
1172 | Session().add(pull_request) | |
1173 | self.trigger_pull_request_hook( |
|
1173 | self.trigger_pull_request_hook( | |
1174 | pull_request, pull_request.author, 'close') |
|
1174 | pull_request, pull_request.author, 'close') | |
1175 |
|
1175 | |||
1176 | pr_data = pull_request.get_api_data(with_merge_state=False) |
|
1176 | pr_data = pull_request.get_api_data(with_merge_state=False) | |
1177 | self._log_audit_action( |
|
1177 | self._log_audit_action( | |
1178 | 'repo.pull_request.close', {'data': pr_data}, user, pull_request) |
|
1178 | 'repo.pull_request.close', {'data': pr_data}, user, pull_request) | |
1179 |
|
1179 | |||
1180 | def close_pull_request_with_comment( |
|
1180 | def close_pull_request_with_comment( | |
1181 | self, pull_request, user, repo, message=None, auth_user=None): |
|
1181 | self, pull_request, user, repo, message=None, auth_user=None): | |
1182 |
|
1182 | |||
1183 | pull_request_review_status = pull_request.calculated_review_status() |
|
1183 | pull_request_review_status = pull_request.calculated_review_status() | |
1184 |
|
1184 | |||
1185 | if pull_request_review_status == ChangesetStatus.STATUS_APPROVED: |
|
1185 | if pull_request_review_status == ChangesetStatus.STATUS_APPROVED: | |
1186 | # approved only if we have voting consent |
|
1186 | # approved only if we have voting consent | |
1187 | status = ChangesetStatus.STATUS_APPROVED |
|
1187 | status = ChangesetStatus.STATUS_APPROVED | |
1188 | else: |
|
1188 | else: | |
1189 | status = ChangesetStatus.STATUS_REJECTED |
|
1189 | status = ChangesetStatus.STATUS_REJECTED | |
1190 | status_lbl = ChangesetStatus.get_status_lbl(status) |
|
1190 | status_lbl = ChangesetStatus.get_status_lbl(status) | |
1191 |
|
1191 | |||
1192 | default_message = ( |
|
1192 | default_message = ( | |
1193 | 'Closing with status change {transition_icon} {status}.' |
|
1193 | 'Closing with status change {transition_icon} {status}.' | |
1194 | ).format(transition_icon='>', status=status_lbl) |
|
1194 | ).format(transition_icon='>', status=status_lbl) | |
1195 | text = message or default_message |
|
1195 | text = message or default_message | |
1196 |
|
1196 | |||
1197 | # create a comment, and link it to new status |
|
1197 | # create a comment, and link it to new status | |
1198 | comment = CommentsModel().create( |
|
1198 | comment = CommentsModel().create( | |
1199 | text=text, |
|
1199 | text=text, | |
1200 | repo=repo.repo_id, |
|
1200 | repo=repo.repo_id, | |
1201 | user=user.user_id, |
|
1201 | user=user.user_id, | |
1202 | pull_request=pull_request.pull_request_id, |
|
1202 | pull_request=pull_request.pull_request_id, | |
1203 | status_change=status_lbl, |
|
1203 | status_change=status_lbl, | |
1204 | status_change_type=status, |
|
1204 | status_change_type=status, | |
1205 | closing_pr=True, |
|
1205 | closing_pr=True, | |
1206 | auth_user=auth_user, |
|
1206 | auth_user=auth_user, | |
1207 | ) |
|
1207 | ) | |
1208 |
|
1208 | |||
1209 | # calculate old status before we change it |
|
1209 | # calculate old status before we change it | |
1210 | old_calculated_status = pull_request.calculated_review_status() |
|
1210 | old_calculated_status = pull_request.calculated_review_status() | |
1211 | ChangesetStatusModel().set_status( |
|
1211 | ChangesetStatusModel().set_status( | |
1212 | repo.repo_id, |
|
1212 | repo.repo_id, | |
1213 | status, |
|
1213 | status, | |
1214 | user.user_id, |
|
1214 | user.user_id, | |
1215 | comment=comment, |
|
1215 | comment=comment, | |
1216 | pull_request=pull_request.pull_request_id |
|
1216 | pull_request=pull_request.pull_request_id | |
1217 | ) |
|
1217 | ) | |
1218 |
|
1218 | |||
1219 | Session().flush() |
|
1219 | Session().flush() | |
1220 | events.trigger(events.PullRequestCommentEvent(pull_request, comment)) |
|
1220 | events.trigger(events.PullRequestCommentEvent(pull_request, comment)) | |
1221 | # we now calculate the status of pull request again, and based on that |
|
1221 | # we now calculate the status of pull request again, and based on that | |
1222 | # calculation trigger a status change. This might happen in cases |
|
1222 | # calculation trigger a status change. This might happen in cases | |
1223 | # where a non-reviewer admin closes a PR, which means their vote doesn't |
|
1223 | # where a non-reviewer admin closes a PR, which means their vote doesn't | |
1224 | # change the status, while if they are a reviewer it might change it. |
|
1224 | # change the status, while if they are a reviewer it might change it. | |
1225 | calculated_status = pull_request.calculated_review_status() |
|
1225 | calculated_status = pull_request.calculated_review_status() | |
1226 | if old_calculated_status != calculated_status: |
|
1226 | if old_calculated_status != calculated_status: | |
1227 | self.trigger_pull_request_hook( |
|
1227 | self.trigger_pull_request_hook( | |
1228 | pull_request, user, 'review_status_change', |
|
1228 | pull_request, user, 'review_status_change', | |
1229 | data={'status': calculated_status}) |
|
1229 | data={'status': calculated_status}) | |
1230 |
|
1230 | |||
1231 | # finally close the PR |
|
1231 | # finally close the PR | |
1232 | PullRequestModel().close_pull_request( |
|
1232 | PullRequestModel().close_pull_request( | |
1233 | pull_request.pull_request_id, user) |
|
1233 | pull_request.pull_request_id, user) | |
1234 |
|
1234 | |||
1235 | return comment, status |
|
1235 | return comment, status | |
1236 |
|
1236 | |||
1237 | def merge_status(self, pull_request, translator=None, |
|
1237 | def merge_status(self, pull_request, translator=None, | |
1238 | force_shadow_repo_refresh=False): |
|
1238 | force_shadow_repo_refresh=False): | |
1239 | _ = translator or get_current_request().translate |
|
1239 | _ = translator or get_current_request().translate | |
1240 |
|
1240 | |||
1241 | if not self._is_merge_enabled(pull_request): |
|
1241 | if not self._is_merge_enabled(pull_request): | |
1242 | return False, _('Server-side pull request merging is disabled.') |
|
1242 | return False, _('Server-side pull request merging is disabled.') | |
1243 | if pull_request.is_closed(): |
|
1243 | if pull_request.is_closed(): | |
1244 | return False, _('This pull request is closed.') |
|
1244 | return False, _('This pull request is closed.') | |
1245 | merge_possible, msg = self._check_repo_requirements( |
|
1245 | merge_possible, msg = self._check_repo_requirements( | |
1246 | target=pull_request.target_repo, source=pull_request.source_repo, |
|
1246 | target=pull_request.target_repo, source=pull_request.source_repo, | |
1247 | translator=_) |
|
1247 | translator=_) | |
1248 | if not merge_possible: |
|
1248 | if not merge_possible: | |
1249 | return merge_possible, msg |
|
1249 | return merge_possible, msg | |
1250 |
|
1250 | |||
1251 | try: |
|
1251 | try: | |
1252 | resp = self._try_merge( |
|
1252 | resp = self._try_merge( | |
1253 | pull_request, |
|
1253 | pull_request, | |
1254 | force_shadow_repo_refresh=force_shadow_repo_refresh) |
|
1254 | force_shadow_repo_refresh=force_shadow_repo_refresh) | |
1255 | log.debug("Merge response: %s", resp) |
|
1255 | log.debug("Merge response: %s", resp) | |
1256 | status = resp.possible, resp.merge_status_message |
|
1256 | status = resp.possible, resp.merge_status_message | |
1257 | except NotImplementedError: |
|
1257 | except NotImplementedError: | |
1258 | status = False, _('Pull request merging is not supported.') |
|
1258 | status = False, _('Pull request merging is not supported.') | |
1259 |
|
1259 | |||
1260 | return status |
|
1260 | return status | |
1261 |
|
1261 | |||
1262 | def _check_repo_requirements(self, target, source, translator): |
|
1262 | def _check_repo_requirements(self, target, source, translator): | |
1263 | """ |
|
1263 | """ | |
1264 | Check if `target` and `source` have compatible requirements. |
|
1264 | Check if `target` and `source` have compatible requirements. | |
1265 |
|
1265 | |||
1266 | Currently this is just checking for largefiles. |
|
1266 | Currently this is just checking for largefiles. | |
1267 | """ |
|
1267 | """ | |
1268 | _ = translator |
|
1268 | _ = translator | |
1269 | target_has_largefiles = self._has_largefiles(target) |
|
1269 | target_has_largefiles = self._has_largefiles(target) | |
1270 | source_has_largefiles = self._has_largefiles(source) |
|
1270 | source_has_largefiles = self._has_largefiles(source) | |
1271 | merge_possible = True |
|
1271 | merge_possible = True | |
1272 | message = u'' |
|
1272 | message = u'' | |
1273 |
|
1273 | |||
1274 | if target_has_largefiles != source_has_largefiles: |
|
1274 | if target_has_largefiles != source_has_largefiles: | |
1275 | merge_possible = False |
|
1275 | merge_possible = False | |
1276 | if source_has_largefiles: |
|
1276 | if source_has_largefiles: | |
1277 | message = _( |
|
1277 | message = _( | |
1278 | 'Target repository large files support is disabled.') |
|
1278 | 'Target repository large files support is disabled.') | |
1279 | else: |
|
1279 | else: | |
1280 | message = _( |
|
1280 | message = _( | |
1281 | 'Source repository large files support is disabled.') |
|
1281 | 'Source repository large files support is disabled.') | |
1282 |
|
1282 | |||
1283 | return merge_possible, message |
|
1283 | return merge_possible, message | |
1284 |
|
1284 | |||
1285 | def _has_largefiles(self, repo): |
|
1285 | def _has_largefiles(self, repo): | |
1286 | largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings( |
|
1286 | largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings( | |
1287 | 'extensions', 'largefiles') |
|
1287 | 'extensions', 'largefiles') | |
1288 | return largefiles_ui and largefiles_ui[0].active |
|
1288 | return largefiles_ui and largefiles_ui[0].active | |
1289 |
|
1289 | |||
1290 | def _try_merge(self, pull_request, force_shadow_repo_refresh=False): |
|
1290 | def _try_merge(self, pull_request, force_shadow_repo_refresh=False): | |
1291 | """ |
|
1291 | """ | |
1292 | Try to merge the pull request and return the merge status. |
|
1292 | Try to merge the pull request and return the merge status. | |
1293 | """ |
|
1293 | """ | |
1294 | log.debug( |
|
1294 | log.debug( | |
1295 | "Trying out if the pull request %s can be merged. Force_refresh=%s", |
|
1295 | "Trying out if the pull request %s can be merged. Force_refresh=%s", | |
1296 | pull_request.pull_request_id, force_shadow_repo_refresh) |
|
1296 | pull_request.pull_request_id, force_shadow_repo_refresh) | |
1297 | target_vcs = pull_request.target_repo.scm_instance() |
|
1297 | target_vcs = pull_request.target_repo.scm_instance() | |
1298 | # Refresh the target reference. |
|
1298 | # Refresh the target reference. | |
1299 | try: |
|
1299 | try: | |
1300 | target_ref = self._refresh_reference( |
|
1300 | target_ref = self._refresh_reference( | |
1301 | pull_request.target_ref_parts, target_vcs) |
|
1301 | pull_request.target_ref_parts, target_vcs) | |
1302 | except CommitDoesNotExistError: |
|
1302 | except CommitDoesNotExistError: | |
1303 | merge_state = MergeResponse( |
|
1303 | merge_state = MergeResponse( | |
1304 | False, False, None, MergeFailureReason.MISSING_TARGET_REF, |
|
1304 | False, False, None, MergeFailureReason.MISSING_TARGET_REF, | |
1305 | metadata={'target_ref': pull_request.target_ref_parts}) |
|
1305 | metadata={'target_ref': pull_request.target_ref_parts}) | |
1306 | return merge_state |
|
1306 | return merge_state | |
1307 |
|
1307 | |||
1308 | target_locked = pull_request.target_repo.locked |
|
1308 | target_locked = pull_request.target_repo.locked | |
1309 | if target_locked and target_locked[0]: |
|
1309 | if target_locked and target_locked[0]: | |
1310 | locked_by = 'user:{}'.format(target_locked[0]) |
|
1310 | locked_by = 'user:{}'.format(target_locked[0]) | |
1311 | log.debug("The target repository is locked by %s.", locked_by) |
|
1311 | log.debug("The target repository is locked by %s.", locked_by) | |
1312 | merge_state = MergeResponse( |
|
1312 | merge_state = MergeResponse( | |
1313 | False, False, None, MergeFailureReason.TARGET_IS_LOCKED, |
|
1313 | False, False, None, MergeFailureReason.TARGET_IS_LOCKED, | |
1314 | metadata={'locked_by': locked_by}) |
|
1314 | metadata={'locked_by': locked_by}) | |
1315 | elif force_shadow_repo_refresh or self._needs_merge_state_refresh( |
|
1315 | elif force_shadow_repo_refresh or self._needs_merge_state_refresh( | |
1316 | pull_request, target_ref): |
|
1316 | pull_request, target_ref): | |
1317 | log.debug("Refreshing the merge status of the repository.") |
|
1317 | log.debug("Refreshing the merge status of the repository.") | |
1318 | merge_state = self._refresh_merge_state( |
|
1318 | merge_state = self._refresh_merge_state( | |
1319 | pull_request, target_vcs, target_ref) |
|
1319 | pull_request, target_vcs, target_ref) | |
1320 | else: |
|
1320 | else: | |
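# neither locked nor in need of a refresh: reuse the merge status cached on the pull request from the previous check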
1321 | possible = pull_request.last_merge_status == MergeFailureReason.NONE |
|
1321 | possible = pull_request.last_merge_status == MergeFailureReason.NONE | |
1322 | metadata = { |
|
1322 | metadata = { | |
1323 | 'target_ref': pull_request.target_ref_parts, |
|
1323 | 'target_ref': pull_request.target_ref_parts, | |
1324 | 'source_ref': pull_request.source_ref_parts, |
|
1324 | 'source_ref': pull_request.source_ref_parts, | |
1325 | } |
|
1325 | } | |
1326 | if not possible and target_ref.type == 'branch': |
|
1326 | if not possible and target_ref.type == 'branch': | |
1327 | # NOTE(marcink): case for mercurial multiple heads on branch |
|
1327 | # NOTE(marcink): case for mercurial multiple heads on branch | |
1328 | heads = target_vcs._heads(target_ref.name) |
|
1328 | heads = target_vcs._heads(target_ref.name) | |
1329 | if len(heads) != 1: |
|
1329 | if len(heads) != 1: | |
1330 | heads = ',\n'.join(target_vcs._heads(target_ref.name)) |
|
1330 | heads = ',\n'.join(target_vcs._heads(target_ref.name)) | |
1331 | metadata.update({ |
|
1331 | metadata.update({ | |
1332 | 'heads': heads |
|
1332 | 'heads': heads | |
1333 | }) |
|
1333 | }) | |
1334 | merge_state = MergeResponse( |
|
1334 | merge_state = MergeResponse( | |
1335 | possible, False, None, pull_request.last_merge_status, metadata=metadata) |
|
1335 | possible, False, None, pull_request.last_merge_status, metadata=metadata) | |
1336 |
|
1336 | |||
1337 | return merge_state |
|
1337 | return merge_state | |
1338 |
|
1338 | |||
1339 | def _refresh_reference(self, reference, vcs_repository): |
|
1339 | def _refresh_reference(self, reference, vcs_repository): | |
1340 | if reference.type in self.UPDATABLE_REF_TYPES: |
|
1340 | if reference.type in self.UPDATABLE_REF_TYPES: | |
1341 | name_or_id = reference.name |
|
1341 | name_or_id = reference.name | |
1342 | else: |
|
1342 | else: | |
1343 | name_or_id = reference.commit_id |
|
1343 | name_or_id = reference.commit_id | |
1344 |
|
1344 | |||
1345 | refreshed_commit = vcs_repository.get_commit(name_or_id) |
|
1345 | refreshed_commit = vcs_repository.get_commit(name_or_id) | |
1346 | refreshed_reference = Reference( |
|
1346 | refreshed_reference = Reference( | |
1347 | reference.type, reference.name, refreshed_commit.raw_id) |
|
1347 | reference.type, reference.name, refreshed_commit.raw_id) | |
1348 | return refreshed_reference |
|
1348 | return refreshed_reference | |
1349 |
|
1349 | |||
1350 | def _needs_merge_state_refresh(self, pull_request, target_reference): |
|
1350 | def _needs_merge_state_refresh(self, pull_request, target_reference): | |
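# a refresh is needed unless the first stored revision still matches the last merged source revision and the target commit still matches the last merged target revision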
1351 | return not ( |
|
1351 | return not ( | |
1352 | pull_request.revisions and |
|
1352 | pull_request.revisions and | |
1353 | pull_request.revisions[0] == pull_request._last_merge_source_rev and |
|
1353 | pull_request.revisions[0] == pull_request._last_merge_source_rev and | |
1354 | target_reference.commit_id == pull_request._last_merge_target_rev) |
|
1354 | target_reference.commit_id == pull_request._last_merge_target_rev) | |
1355 |
|
1355 | |||
1356 | def _refresh_merge_state(self, pull_request, target_vcs, target_reference): |
|
1356 | def _refresh_merge_state(self, pull_request, target_vcs, target_reference): | |
1357 | workspace_id = self._workspace_id(pull_request) |
|
1357 | workspace_id = self._workspace_id(pull_request) | |
1358 | source_vcs = pull_request.source_repo.scm_instance() |
|
1358 | source_vcs = pull_request.source_repo.scm_instance() | |
1359 | repo_id = pull_request.target_repo.repo_id |
|
1359 | repo_id = pull_request.target_repo.repo_id | |
1360 | use_rebase = self._use_rebase_for_merging(pull_request) |
|
1360 | use_rebase = self._use_rebase_for_merging(pull_request) | |
1361 | close_branch = self._close_branch_before_merging(pull_request) |
|
1361 | close_branch = self._close_branch_before_merging(pull_request) | |
1362 | merge_state = target_vcs.merge( |
|
1362 | merge_state = target_vcs.merge( | |
1363 | repo_id, workspace_id, |
|
1363 | repo_id, workspace_id, | |
1364 | target_reference, source_vcs, pull_request.source_ref_parts, |
|
1364 | target_reference, source_vcs, pull_request.source_ref_parts, | |
1365 | dry_run=True, use_rebase=use_rebase, |
|
1365 | dry_run=True, use_rebase=use_rebase, | |
1366 | close_branch=close_branch) |
|
1366 | close_branch=close_branch) | |
1367 |
|
1367 | |||
1368 | # Do not store the response if there was an unknown error. |
|
1368 | # Do not store the response if there was an unknown error. | |
1369 | if merge_state.failure_reason != MergeFailureReason.UNKNOWN: |
|
1369 | if merge_state.failure_reason != MergeFailureReason.UNKNOWN: | |
1370 | pull_request._last_merge_source_rev = \ |
|
1370 | pull_request._last_merge_source_rev = \ | |
1371 | pull_request.source_ref_parts.commit_id |
|
1371 | pull_request.source_ref_parts.commit_id | |
1372 | pull_request._last_merge_target_rev = target_reference.commit_id |
|
1372 | pull_request._last_merge_target_rev = target_reference.commit_id | |
1373 | pull_request.last_merge_status = merge_state.failure_reason |
|
1373 | pull_request.last_merge_status = merge_state.failure_reason | |
1374 | pull_request.shadow_merge_ref = merge_state.merge_ref |
|
1374 | pull_request.shadow_merge_ref = merge_state.merge_ref | |
1375 | Session().add(pull_request) |
|
1375 | Session().add(pull_request) | |
1376 | Session().commit() |
|
1376 | Session().commit() | |
1377 |
|
1377 | |||
1378 | return merge_state |
|
1378 | return merge_state | |
1379 |
|
1379 | |||
1380 | def _workspace_id(self, pull_request): |
|
1380 | def _workspace_id(self, pull_request): | |
1381 | workspace_id = 'pr-%s' % pull_request.pull_request_id |
|
1381 | workspace_id = 'pr-%s' % pull_request.pull_request_id | |
1382 | return workspace_id |
|
1382 | return workspace_id | |
1383 |
|
1383 | |||
1384 | def generate_repo_data(self, repo, commit_id=None, branch=None, |
|
1384 | def generate_repo_data(self, repo, commit_id=None, branch=None, | |
1385 | bookmark=None, translator=None): |
|
1385 | bookmark=None, translator=None): | |
1386 | from rhodecode.model.repo import RepoModel |
|
1386 | from rhodecode.model.repo import RepoModel | |
1387 |
|
1387 | |||
1388 | all_refs, selected_ref = \ |
|
1388 | all_refs, selected_ref = \ | |
1389 | self._get_repo_pullrequest_sources( |
|
1389 | self._get_repo_pullrequest_sources( | |
1390 | repo.scm_instance(), commit_id=commit_id, |
|
1390 | repo.scm_instance(), commit_id=commit_id, | |
1391 | branch=branch, bookmark=bookmark, translator=translator) |
|
1391 | branch=branch, bookmark=bookmark, translator=translator) | |
1392 |
|
1392 | |||
1393 | refs_select2 = [] |
|
1393 | refs_select2 = [] | |
1394 | for element in all_refs: |
|
1394 | for element in all_refs: | |
1395 | children = [{'id': x[0], 'text': x[1]} for x in element[0]] |
|
1395 | children = [{'id': x[0], 'text': x[1]} for x in element[0]] | |
1396 | refs_select2.append({'text': element[1], 'children': children}) |
|
1396 | refs_select2.append({'text': element[1], 'children': children}) | |
1397 |
|
1397 | |||
1398 | return { |
|
1398 | return { | |
1399 | 'user': { |
|
1399 | 'user': { | |
1400 | 'user_id': repo.user.user_id, |
|
1400 | 'user_id': repo.user.user_id, | |
1401 | 'username': repo.user.username, |
|
1401 | 'username': repo.user.username, | |
1402 | 'firstname': repo.user.first_name, |
|
1402 | 'firstname': repo.user.first_name, | |
1403 | 'lastname': repo.user.last_name, |
|
1403 | 'lastname': repo.user.last_name, | |
1404 | 'gravatar_link': h.gravatar_url(repo.user.email, 14), |
|
1404 | 'gravatar_link': h.gravatar_url(repo.user.email, 14), | |
1405 | }, |
|
1405 | }, | |
1406 | 'name': repo.repo_name, |
|
1406 | 'name': repo.repo_name, | |
1407 | 'link': RepoModel().get_url(repo), |
|
1407 | 'link': RepoModel().get_url(repo), | |
1408 | 'description': h.chop_at_smart(repo.description_safe, '\n'), |
|
1408 | 'description': h.chop_at_smart(repo.description_safe, '\n'), | |
1409 | 'refs': { |
|
1409 | 'refs': { | |
1410 | 'all_refs': all_refs, |
|
1410 | 'all_refs': all_refs, | |
1411 | 'selected_ref': selected_ref, |
|
1411 | 'selected_ref': selected_ref, | |
1412 | 'select2_refs': refs_select2 |
|
1412 | 'select2_refs': refs_select2 | |
1413 | } |
|
1413 | } | |
1414 | } |
|
1414 | } | |
1415 |
|
1415 | |||
1416 | def generate_pullrequest_title(self, source, source_ref, target): |
|
1416 | def generate_pullrequest_title(self, source, source_ref, target): | |
1417 | return u'{source}#{at_ref} to {target}'.format( |
|
1417 | return u'{source}#{at_ref} to {target}'.format( | |
1418 | source=source, |
|
1418 | source=source, | |
1419 | at_ref=source_ref, |
|
1419 | at_ref=source_ref, | |
1420 | target=target, |
|
1420 | target=target, | |
1421 | ) |
|
1421 | ) | |
1422 |
|
1422 | |||
1423 | def _cleanup_merge_workspace(self, pull_request): |
|
1423 | def _cleanup_merge_workspace(self, pull_request): | |
1424 | # Merging related cleanup |
|
1424 | # Merging related cleanup | |
1425 | repo_id = pull_request.target_repo.repo_id |
|
1425 | repo_id = pull_request.target_repo.repo_id | |
1426 | target_scm = pull_request.target_repo.scm_instance() |
|
1426 | target_scm = pull_request.target_repo.scm_instance() | |
1427 | workspace_id = self._workspace_id(pull_request) |
|
1427 | workspace_id = self._workspace_id(pull_request) | |
1428 |
|
1428 | |||
1429 | try: |
|
1429 | try: | |
1430 | target_scm.cleanup_merge_workspace(repo_id, workspace_id) |
|
1430 | target_scm.cleanup_merge_workspace(repo_id, workspace_id) | |
1431 | except NotImplementedError: |
|
1431 | except NotImplementedError: | |
1432 | pass |
|
1432 | pass | |
1433 |
|
1433 | |||
1434 | def _get_repo_pullrequest_sources( |
|
1434 | def _get_repo_pullrequest_sources( | |
1435 | self, repo, commit_id=None, branch=None, bookmark=None, |
|
1435 | self, repo, commit_id=None, branch=None, bookmark=None, | |
1436 | translator=None): |
|
1436 | translator=None): | |
1437 | """ |
|
1437 | """ | |
1438 | Return a structure with repo's interesting commits, suitable for |
|
1438 | Return a structure with repo's interesting commits, suitable for | |
1439 | the selectors in pullrequest controller |
|
1439 | the selectors in pullrequest controller | |
1440 |
|
1440 | |||
1441 | :param commit_id: a commit that must be in the list somehow |
|
1441 | :param commit_id: a commit that must be in the list somehow | |
1442 | and selected by default |
|
1442 | and selected by default | |
1443 | :param branch: a branch that must be in the list and selected |
|
1443 | :param branch: a branch that must be in the list and selected | |
1444 | by default - even if closed |
|
1444 | by default - even if closed | |
1445 | :param bookmark: a bookmark that must be in the list and selected |
|
1445 | :param bookmark: a bookmark that must be in the list and selected | |
1446 | """ |
|
1446 | """ | |
1447 | _ = translator or get_current_request().translate |
|
1447 | _ = translator or get_current_request().translate | |
1448 |
|
1448 | |||
1449 | commit_id = safe_str(commit_id) if commit_id else None |
|
1449 | commit_id = safe_str(commit_id) if commit_id else None | |
1450 | branch = safe_unicode(branch) if branch else None |
|
1450 | branch = safe_unicode(branch) if branch else None | |
1451 | bookmark = safe_unicode(bookmark) if bookmark else None |
|
1451 | bookmark = safe_unicode(bookmark) if bookmark else None | |
1452 |
|
1452 | |||
1453 | selected = None |
|
1453 | selected = None | |
1454 |
|
1454 | |||
1455 | # order matters: first source that has commit_id in it will be selected |
|
1455 | # order matters: first source that has commit_id in it will be selected | |
1456 | sources = [] |
|
1456 | sources = [] | |
1457 | sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark)) |
|
1457 | sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark)) | |
1458 | sources.append(('branch', repo.branches.items(), _('Branches'), branch)) |
|
1458 | sources.append(('branch', repo.branches.items(), _('Branches'), branch)) | |
1459 |
|
1459 | |||
1460 | if commit_id: |
|
1460 | if commit_id: | |
1461 | ref_commit = (h.short_id(commit_id), commit_id) |
|
1461 | ref_commit = (h.short_id(commit_id), commit_id) | |
1462 | sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id)) |
|
1462 | sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id)) | |
1463 |
|
1463 | |||
1464 | sources.append( |
|
1464 | sources.append( | |
1465 | ('branch', repo.branches_closed.items(), _('Closed Branches'), branch), |
|
1465 | ('branch', repo.branches_closed.items(), _('Closed Branches'), branch), | |
1466 | ) |
|
1466 | ) | |
1467 |
|
1467 | |||
1468 | groups = [] |
|
1468 | groups = [] | |
1469 |
|
1469 | |||
1470 | for group_key, ref_list, group_name, match in sources: |
|
1470 | for group_key, ref_list, group_name, match in sources: | |
1471 | group_refs = [] |
|
1471 | group_refs = [] | |
1472 | for ref_name, ref_id in ref_list: |
|
1472 | for ref_name, ref_id in ref_list: | |
1473 | ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id) |
|
1473 | ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id) | |
1474 | group_refs.append((ref_key, ref_name)) |
|
1474 | group_refs.append((ref_key, ref_name)) | |
1475 |
|
1475 | |||
1476 | if not selected: |
|
1476 | if not selected: | |
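# select this ref if the requested commit_id or this group's requested name (branch/bookmark) matches either the ref id or the ref name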
1477 | if set([commit_id, match]) & set([ref_id, ref_name]): |
|
1477 | if set([commit_id, match]) & set([ref_id, ref_name]): | |
1478 | selected = ref_key |
|
1478 | selected = ref_key | |
1479 |
|
1479 | |||
1480 | if group_refs: |
|
1480 | if group_refs: | |
1481 | groups.append((group_refs, group_name)) |
|
1481 | groups.append((group_refs, group_name)) | |
1482 |
|
1482 | |||
1483 | if not selected: |
|
1483 | if not selected: | |
1484 | ref = commit_id or branch or bookmark |
|
1484 | ref = commit_id or branch or bookmark | |
1485 | if ref: |
|
1485 | if ref: | |
1486 | raise CommitDoesNotExistError( |
|
1486 | raise CommitDoesNotExistError( | |
1487 | u'No commit refs could be found matching: {}'.format(ref)) |
|
1487 | u'No commit refs could be found matching: {}'.format(ref)) | |
1488 | elif repo.DEFAULT_BRANCH_NAME in repo.branches: |
|
1488 | elif repo.DEFAULT_BRANCH_NAME in repo.branches: | |
1489 | selected = u'branch:{}:{}'.format( |
|
1489 | selected = u'branch:{}:{}'.format( | |
1490 | safe_unicode(repo.DEFAULT_BRANCH_NAME), |
|
1490 | safe_unicode(repo.DEFAULT_BRANCH_NAME), | |
1491 | safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME]) |
|
1491 | safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME]) | |
1492 | ) |
|
1492 | ) | |
1493 | elif repo.commit_ids: |
|
1493 | elif repo.commit_ids: | |
1494 | # make the user select in this case |
|
1494 | # make the user select in this case | |
1495 | selected = None |
|
1495 | selected = None | |
1496 | else: |
|
1496 | else: | |
1497 | raise EmptyRepositoryError() |
|
1497 | raise EmptyRepositoryError() | |
1498 | return groups, selected |
|
1498 | return groups, selected | |
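The selector helper above returns (groups, selected): each group is a list of (ref_key, ref_name) tuples plus a translated label, and selected is the composite type:name:commit_id key of the matched ref. A minimal sketch of how such a structure could be flattened into select-widget options; the helper below is illustrative only and not part of the model:

    def build_ref_options(groups, selected):
        # flatten (group_refs, group_name) pairs into option dicts,
        # marking whichever ref_key the model picked as selected
        options = []
        for group_refs, group_name in groups:
            for ref_key, ref_name in group_refs:
                options.append({
                    'group': group_name,             # e.g. 'Branches'
                    'value': ref_key,                # e.g. u'branch:default:<sha>'
                    'label': ref_name,
                    'selected': ref_key == selected,
                })
        return options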
1499 |
|
1499 | |||
1500 | def get_diff(self, source_repo, source_ref_id, target_ref_id, |
|
1500 | def get_diff(self, source_repo, source_ref_id, target_ref_id, | |
1501 | hide_whitespace_changes, diff_context): |
|
1501 | hide_whitespace_changes, diff_context): | |
1502 |
|
1502 | |||
1503 | return self._get_diff_from_pr_or_version( |
|
1503 | return self._get_diff_from_pr_or_version( | |
1504 | source_repo, source_ref_id, target_ref_id, |
|
1504 | source_repo, source_ref_id, target_ref_id, | |
1505 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) |
|
1505 | hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context) | |
1506 |
|
1506 | |||
1507 | def _get_diff_from_pr_or_version( |
|
1507 | def _get_diff_from_pr_or_version( | |
1508 | self, source_repo, source_ref_id, target_ref_id, |
|
1508 | self, source_repo, source_ref_id, target_ref_id, | |
1509 | hide_whitespace_changes, diff_context): |
|
1509 | hide_whitespace_changes, diff_context): | |
1510 |
|
1510 | |||
1511 | target_commit = source_repo.get_commit( |
|
1511 | target_commit = source_repo.get_commit( | |
1512 | commit_id=safe_str(target_ref_id)) |
|
1512 | commit_id=safe_str(target_ref_id)) | |
1513 | source_commit = source_repo.get_commit( |
|
1513 | source_commit = source_repo.get_commit( | |
1514 | commit_id=safe_str(source_ref_id)) |
|
1514 | commit_id=safe_str(source_ref_id)) | |
1515 | if isinstance(source_repo, Repository): |
|
1515 | if isinstance(source_repo, Repository): | |
1516 | vcs_repo = source_repo.scm_instance() |
|
1516 | vcs_repo = source_repo.scm_instance() | |
1517 | else: |
|
1517 | else: | |
1518 | vcs_repo = source_repo |
|
1518 | vcs_repo = source_repo | |
1519 |
|
1519 | |||
1520 | # TODO: johbo: In the context of an update, we cannot reach |
|
1520 | # TODO: johbo: In the context of an update, we cannot reach | |
1521 | # the old commit anymore with our normal mechanisms. It needs |
|
1521 | # the old commit anymore with our normal mechanisms. It needs | |
1522 | # some sort of special support in the vcs layer to avoid this |
|
1522 | # some sort of special support in the vcs layer to avoid this | |
1523 | # workaround. |
|
1523 | # workaround. | |
1524 | if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and |
|
1524 | if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and | |
1525 | vcs_repo.alias == 'git'): |
|
1525 | vcs_repo.alias == 'git'): | |
1526 | source_commit.raw_id = safe_str(source_ref_id) |
|
1526 | source_commit.raw_id = safe_str(source_ref_id) | |
1527 |
|
1527 | |||
1528 | log.debug('calculating diff between ' |
|
1528 | log.debug('calculating diff between ' | |
1529 | 'source_ref:%s and target_ref:%s for repo `%s`', |
|
1529 | 'source_ref:%s and target_ref:%s for repo `%s`', | |
1530 | target_ref_id, source_ref_id, |
|
1530 | target_ref_id, source_ref_id, | |
1531 | safe_unicode(vcs_repo.path)) |
|
1531 | safe_unicode(vcs_repo.path)) | |
1532 |
|
1532 | |||
1533 | vcs_diff = vcs_repo.get_diff( |
|
1533 | vcs_diff = vcs_repo.get_diff( | |
1534 | commit1=target_commit, commit2=source_commit, |
|
1534 | commit1=target_commit, commit2=source_commit, | |
1535 | ignore_whitespace=hide_whitespace_changes, context=diff_context) |
|
1535 | ignore_whitespace=hide_whitespace_changes, context=diff_context) | |
1536 | return vcs_diff |
|
1536 | return vcs_diff | |
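A hedged example of driving the get_diff wrapper above; the model instance, repository object and both commit ids are placeholders assumed to exist in the calling context:

    # illustrative only -- `model`, `source_repo` and both SHAs are assumed
    vcs_diff = model.get_diff(
        source_repo=source_repo,
        source_ref_id='aaaaaaaaaaaa',    # commit id of the source ref (placeholder)
        target_ref_id='bbbbbbbbbbbb',    # commit id of the target ref (placeholder)
        hide_whitespace_changes=True,    # forwarded as ignore_whitespace to the vcs layer
        diff_context=3)                  # lines of context around each hunk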
1537 |
|
1537 | |||
1538 | def _is_merge_enabled(self, pull_request): |
|
1538 | def _is_merge_enabled(self, pull_request): | |
1539 | return self._get_general_setting( |
|
1539 | return self._get_general_setting( | |
1540 | pull_request, 'rhodecode_pr_merge_enabled') |
|
1540 | pull_request, 'rhodecode_pr_merge_enabled') | |
1541 |
|
1541 | |||
1542 | def _use_rebase_for_merging(self, pull_request): |
|
1542 | def _use_rebase_for_merging(self, pull_request): | |
1543 | repo_type = pull_request.target_repo.repo_type |
|
1543 | repo_type = pull_request.target_repo.repo_type | |
1544 | if repo_type == 'hg': |
|
1544 | if repo_type == 'hg': | |
1545 | return self._get_general_setting( |
|
1545 | return self._get_general_setting( | |
1546 | pull_request, 'rhodecode_hg_use_rebase_for_merging') |
|
1546 | pull_request, 'rhodecode_hg_use_rebase_for_merging') | |
1547 | elif repo_type == 'git': |
|
1547 | elif repo_type == 'git': | |
1548 | return self._get_general_setting( |
|
1548 | return self._get_general_setting( | |
1549 | pull_request, 'rhodecode_git_use_rebase_for_merging') |
|
1549 | pull_request, 'rhodecode_git_use_rebase_for_merging') | |
1550 |
|
1550 | |||
1551 | return False |
|
1551 | return False | |
1552 |
|
1552 | |||
1553 | def _close_branch_before_merging(self, pull_request): |
|
1553 | def _close_branch_before_merging(self, pull_request): | |
1554 | repo_type = pull_request.target_repo.repo_type |
|
1554 | repo_type = pull_request.target_repo.repo_type | |
1555 | if repo_type == 'hg': |
|
1555 | if repo_type == 'hg': | |
1556 | return self._get_general_setting( |
|
1556 | return self._get_general_setting( | |
1557 | pull_request, 'rhodecode_hg_close_branch_before_merging') |
|
1557 | pull_request, 'rhodecode_hg_close_branch_before_merging') | |
1558 | elif repo_type == 'git': |
|
1558 | elif repo_type == 'git': | |
1559 | return self._get_general_setting( |
|
1559 | return self._get_general_setting( | |
1560 | pull_request, 'rhodecode_git_close_branch_before_merging') |
|
1560 | pull_request, 'rhodecode_git_close_branch_before_merging') | |
1561 |
|
1561 | |||
1562 | return False |
|
1562 | return False | |
1563 |
|
1563 | |||
1564 | def _get_general_setting(self, pull_request, settings_key, default=False): |
|
1564 | def _get_general_setting(self, pull_request, settings_key, default=False): | |
1565 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) |
|
1565 | settings_model = VcsSettingsModel(repo=pull_request.target_repo) | |
1566 | settings = settings_model.get_general_settings() |
|
1566 | settings = settings_model.get_general_settings() | |
1567 | return settings.get(settings_key, default) |
|
1567 | return settings.get(settings_key, default) | |
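The merge-behaviour helpers above reduce to a general-settings lookup; for the repo-type-specific ones the key simply embeds the repository type. The same pattern in isolation (the key-building helper is hypothetical; the key names mirror the ones used above):

    def merge_setting_key(repo_type, action):
        # ('hg', 'use_rebase_for_merging') -> 'rhodecode_hg_use_rebase_for_merging'
        return 'rhodecode_{}_{}'.format(repo_type, action)

    # hypothetical usage, assuming `model` and `pull_request` exist:
    # key = merge_setting_key(pull_request.target_repo.repo_type,
    #                         'close_branch_before_merging')
    # enabled = model._get_general_setting(pull_request, key)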
1568 |
|
1568 | |||
1569 | def _log_audit_action(self, action, action_data, user, pull_request): |
|
1569 | def _log_audit_action(self, action, action_data, user, pull_request): | |
1570 | audit_logger.store( |
|
1570 | audit_logger.store( | |
1571 | action=action, |
|
1571 | action=action, | |
1572 | action_data=action_data, |
|
1572 | action_data=action_data, | |
1573 | user=user, |
|
1573 | user=user, | |
1574 | repo=pull_request.target_repo) |
|
1574 | repo=pull_request.target_repo) | |
1575 |
|
1575 | |||
1576 | def get_reviewer_functions(self): |
|
1576 | def get_reviewer_functions(self): | |
1577 | """ |
|
1577 | """ | |
1578 | Fetches functions for validation and fetching default reviewers. |
|
1578 | Fetches functions for validation and fetching default reviewers. | |
1579 | If available, we use the EE package; otherwise we fall back to the |

1579 | If available, we use the EE package; otherwise we fall back to the | |
1580 | CE package functions. |

1580 | CE package functions. | |
1581 | """ |
|
1581 | """ | |
1582 | try: |
|
1582 | try: | |
1583 | from rc_reviewers.utils import get_default_reviewers_data |
|
1583 | from rc_reviewers.utils import get_default_reviewers_data | |
1584 | from rc_reviewers.utils import validate_default_reviewers |
|
1584 | from rc_reviewers.utils import validate_default_reviewers | |
1585 | except ImportError: |
|
1585 | except ImportError: | |
1586 | from rhodecode.apps.repository.utils import get_default_reviewers_data |
|
1586 | from rhodecode.apps.repository.utils import get_default_reviewers_data | |
1587 | from rhodecode.apps.repository.utils import validate_default_reviewers |
|
1587 | from rhodecode.apps.repository.utils import validate_default_reviewers | |
1588 |
|
1588 | |||
1589 | return get_default_reviewers_data, validate_default_reviewers |
|
1589 | return get_default_reviewers_data, validate_default_reviewers | |
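get_reviewer_functions relies on an optional-import fallback: try the EE module first and fall back to the CE implementations on ImportError. The same idea in isolation (the wrapper function is only a sketch; the module and function names are the ones imported above):

    import importlib

    def import_with_fallback(preferred, fallback, names):
        # try the preferred (EE) module, fall back to the CE module
        # when the optional package is not installed
        try:
            module = importlib.import_module(preferred)
        except ImportError:
            module = importlib.import_module(fallback)
        return tuple(getattr(module, name) for name in names)

    # sketch of the equivalent call:
    # funcs = import_with_fallback(
    #     'rc_reviewers.utils', 'rhodecode.apps.repository.utils',
    #     ['get_default_reviewers_data', 'validate_default_reviewers'])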
1590 |
|
1590 | |||
1591 |
|
1591 | |||
1592 | class MergeCheck(object): |
|
1592 | class MergeCheck(object): | |
1593 | """ |
|
1593 | """ | |
1594 | Performs merge checks and returns a check object which stores information |

1594 | Performs merge checks and returns a check object which stores information | |
1595 | about merge errors and merge conditions. |

1595 | about merge errors and merge conditions. | |
1596 | """ |
|
1596 | """ | |
1597 | TODO_CHECK = 'todo' |
|
1597 | TODO_CHECK = 'todo' | |
1598 | PERM_CHECK = 'perm' |
|
1598 | PERM_CHECK = 'perm' | |
1599 | REVIEW_CHECK = 'review' |
|
1599 | REVIEW_CHECK = 'review' | |
1600 | MERGE_CHECK = 'merge' |
|
1600 | MERGE_CHECK = 'merge' | |
1601 |
|
1601 | |||
1602 | def __init__(self): |
|
1602 | def __init__(self): | |
1603 | self.review_status = None |
|
1603 | self.review_status = None | |
1604 | self.merge_possible = None |
|
1604 | self.merge_possible = None | |
1605 | self.merge_msg = '' |
|
1605 | self.merge_msg = '' | |
1606 | self.failed = None |
|
1606 | self.failed = None | |
1607 | self.errors = [] |
|
1607 | self.errors = [] | |
1608 | self.error_details = OrderedDict() |
|
1608 | self.error_details = OrderedDict() | |
1609 |
|
1609 | |||
1610 | def push_error(self, error_type, message, error_key, details): |
|
1610 | def push_error(self, error_type, message, error_key, details): | |
1611 | self.failed = True |
|
1611 | self.failed = True | |
1612 | self.errors.append([error_type, message]) |
|
1612 | self.errors.append([error_type, message]) | |
1613 | self.error_details[error_key] = dict( |
|
1613 | self.error_details[error_key] = dict( | |
1614 | details=details, |
|
1614 | details=details, | |
1615 | error_type=error_type, |
|
1615 | error_type=error_type, | |
1616 | message=message |
|
1616 | message=message | |
1617 | ) |
|
1617 | ) | |
1618 |
|
1618 | |||
1619 | @classmethod |
|
1619 | @classmethod | |
1620 | def validate(cls, pull_request, auth_user, translator, fail_early=False, |
|
1620 | def validate(cls, pull_request, auth_user, translator, fail_early=False, | |
1621 | force_shadow_repo_refresh=False): |
|
1621 | force_shadow_repo_refresh=False): | |
1622 | _ = translator |
|
1622 | _ = translator | |
1623 | merge_check = cls() |
|
1623 | merge_check = cls() | |
1624 |
|
1624 | |||
1625 | # permissions to merge |
|
1625 | # permissions to merge | |
1626 | user_allowed_to_merge = PullRequestModel().check_user_merge( |
|
1626 | user_allowed_to_merge = PullRequestModel().check_user_merge( | |
1627 | pull_request, auth_user) |
|
1627 | pull_request, auth_user) | |
1628 | if not user_allowed_to_merge: |
|
1628 | if not user_allowed_to_merge: | |
1629 | log.debug("MergeCheck: cannot merge, user is not allowed to merge.") |

1629 | log.debug("MergeCheck: cannot merge, user is not allowed to merge.") | |
1630 |
|
1630 | |||
1631 | msg = _('User `{}` not allowed to perform merge.').format(auth_user.username) |
|
1631 | msg = _('User `{}` not allowed to perform merge.').format(auth_user.username) | |
1632 | merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username) |
|
1632 | merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username) | |
1633 | if fail_early: |
|
1633 | if fail_early: | |
1634 | return merge_check |
|
1634 | return merge_check | |
1635 |
|
1635 | |||
1636 | # permission to merge into the target branch |
|
1636 | # permission to merge into the target branch | |
1637 | target_commit_id = pull_request.target_ref_parts.commit_id |
|
1637 | target_commit_id = pull_request.target_ref_parts.commit_id | |
1638 | if pull_request.target_ref_parts.type == 'branch': |
|
1638 | if pull_request.target_ref_parts.type == 'branch': | |
1639 | branch_name = pull_request.target_ref_parts.name |
|
1639 | branch_name = pull_request.target_ref_parts.name | |
1640 | else: |
|
1640 | else: | |
1641 | # for Mercurial we can always figure out the branch from the commit, |

1641 | # for Mercurial we can always figure out the branch from the commit, | |
1642 | # even in the case of a bookmark |

1642 | # even in the case of a bookmark | |
1643 | target_commit = pull_request.target_repo.get_commit(target_commit_id) |
|
1643 | target_commit = pull_request.target_repo.get_commit(target_commit_id) | |
1644 | branch_name = target_commit.branch |
|
1644 | branch_name = target_commit.branch | |
1645 |
|
1645 | |||
1646 | rule, branch_perm = auth_user.get_rule_and_branch_permission( |
|
1646 | rule, branch_perm = auth_user.get_rule_and_branch_permission( | |
1647 | pull_request.target_repo.repo_name, branch_name) |
|
1647 | pull_request.target_repo.repo_name, branch_name) | |
1648 | if branch_perm and branch_perm == 'branch.none': |
|
1648 | if branch_perm and branch_perm == 'branch.none': | |
1649 | msg = _('Target branch `{}` changes rejected by rule {}.').format( |
|
1649 | msg = _('Target branch `{}` changes rejected by rule {}.').format( | |
1650 | branch_name, rule) |
|
1650 | branch_name, rule) | |
1651 | merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username) |
|
1651 | merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username) | |
1652 | if fail_early: |
|
1652 | if fail_early: | |
1653 | return merge_check |
|
1653 | return merge_check | |
1654 |
|
1654 | |||
1655 | # review status, must always be present |

1655 | # review status, must always be present | |
1656 | review_status = pull_request.calculated_review_status() |
|
1656 | review_status = pull_request.calculated_review_status() | |
1657 | merge_check.review_status = review_status |
|
1657 | merge_check.review_status = review_status | |
1658 |
|
1658 | |||
1659 | status_approved = review_status == ChangesetStatus.STATUS_APPROVED |
|
1659 | status_approved = review_status == ChangesetStatus.STATUS_APPROVED | |
1660 | if not status_approved: |
|
1660 | if not status_approved: | |
1661 | log.debug("MergeCheck: cannot merge, approval is pending.") |
|
1661 | log.debug("MergeCheck: cannot merge, approval is pending.") | |
1662 |
|
1662 | |||
1663 | msg = _('Pull request reviewer approval is pending.') |
|
1663 | msg = _('Pull request reviewer approval is pending.') | |
1664 |
|
1664 | |||
1665 | merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status) |
|
1665 | merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status) | |
1666 |
|
1666 | |||
1667 | if fail_early: |
|
1667 | if fail_early: | |
1668 | return merge_check |
|
1668 | return merge_check | |
1669 |
|
1669 | |||
1670 | # leftover TODOs |

1670 | # leftover TODOs | |
1671 | todos = CommentsModel().get_pull_request_unresolved_todos(pull_request) |
|
1671 | todos = CommentsModel().get_pull_request_unresolved_todos(pull_request) | |
1672 | if todos: |
|
1672 | if todos: | |
1673 | log.debug("MergeCheck: cannot merge, {} " |
|
1673 | log.debug("MergeCheck: cannot merge, {} " | |
1674 | "unresolved TODOs left.".format(len(todos))) |
|
1674 | "unresolved TODOs left.".format(len(todos))) | |
1675 |
|
1675 | |||
1676 | if len(todos) == 1: |
|
1676 | if len(todos) == 1: | |
1677 | msg = _('Cannot merge, {} TODO still not resolved.').format( |
|
1677 | msg = _('Cannot merge, {} TODO still not resolved.').format( | |
1678 | len(todos)) |
|
1678 | len(todos)) | |
1679 | else: |
|
1679 | else: | |
1680 | msg = _('Cannot merge, {} TODOs still not resolved.').format( |
|
1680 | msg = _('Cannot merge, {} TODOs still not resolved.').format( | |
1681 | len(todos)) |
|
1681 | len(todos)) | |
1682 |
|
1682 | |||
1683 | merge_check.push_error('warning', msg, cls.TODO_CHECK, todos) |
|
1683 | merge_check.push_error('warning', msg, cls.TODO_CHECK, todos) | |
1684 |
|
1684 | |||
1685 | if fail_early: |
|
1685 | if fail_early: | |
1686 | return merge_check |
|
1686 | return merge_check | |
1687 |
|
1687 | |||
1688 | # merge possible, here is the filesystem simulation + shadow repo |
|
1688 | # merge possible, here is the filesystem simulation + shadow repo | |
1689 | merge_status, msg = PullRequestModel().merge_status( |
|
1689 | merge_status, msg = PullRequestModel().merge_status( | |
1690 | pull_request, translator=translator, |
|
1690 | pull_request, translator=translator, | |
1691 | force_shadow_repo_refresh=force_shadow_repo_refresh) |
|
1691 | force_shadow_repo_refresh=force_shadow_repo_refresh) | |
1692 | merge_check.merge_possible = merge_status |
|
1692 | merge_check.merge_possible = merge_status | |
1693 | merge_check.merge_msg = msg |
|
1693 | merge_check.merge_msg = msg | |
1694 | if not merge_status: |
|
1694 | if not merge_status: | |
1695 | log.debug("MergeCheck: cannot merge, pull request merge not possible.") |
|
1695 | log.debug("MergeCheck: cannot merge, pull request merge not possible.") | |
1696 | merge_check.push_error('warning', msg, cls.MERGE_CHECK, None) |
|
1696 | merge_check.push_error('warning', msg, cls.MERGE_CHECK, None) | |
1697 |
|
1697 | |||
1698 | if fail_early: |
|
1698 | if fail_early: | |
1699 | return merge_check |
|
1699 | return merge_check | |
1700 |
|
1700 | |||
1701 | log.debug('MergeCheck: is failed: %s', merge_check.failed) |
|
1701 | log.debug('MergeCheck: is failed: %s', merge_check.failed) | |
1702 | return merge_check |
|
1702 | return merge_check | |
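MergeCheck.validate runs the permission, branch-rule, review-status, TODO and merge-simulation checks in that order and, with fail_early=True, returns as soon as one of them fails. A hedged usage sketch; the pull request, user and translator objects are assumed to come from the request context:

    # illustrative only -- `pull_request`, `auth_user` and `request` are assumed
    check = MergeCheck.validate(
        pull_request, auth_user=auth_user,
        translator=request.translate,
        fail_early=True)                 # stop at the first failing check

    if check.failed:
        for error_type, message in check.errors:
            log.debug('merge blocked (%s): %s', error_type, message)
    else:
        log.debug('merge possible: %s', check.merge_possible)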
1703 |
|
1703 | |||
1704 | @classmethod |
|
1704 | @classmethod | |
1705 | def get_merge_conditions(cls, pull_request, translator): |
|
1705 | def get_merge_conditions(cls, pull_request, translator): | |
1706 | _ = translator |
|
1706 | _ = translator | |
1707 | merge_details = {} |
|
1707 | merge_details = {} | |
1708 |
|
1708 | |||
1709 | model = PullRequestModel() |
|
1709 | model = PullRequestModel() | |
1710 | use_rebase = model._use_rebase_for_merging(pull_request) |
|
1710 | use_rebase = model._use_rebase_for_merging(pull_request) | |
1711 |
|
1711 | |||
1712 | if use_rebase: |
|
1712 | if use_rebase: | |
1713 | merge_details['merge_strategy'] = dict( |
|
1713 | merge_details['merge_strategy'] = dict( | |
1714 | details={}, |
|
1714 | details={}, | |
1715 | message=_('Merge strategy: rebase') |
|
1715 | message=_('Merge strategy: rebase') | |
1716 | ) |
|
1716 | ) | |
1717 | else: |
|
1717 | else: | |
1718 | merge_details['merge_strategy'] = dict( |
|
1718 | merge_details['merge_strategy'] = dict( | |
1719 | details={}, |
|
1719 | details={}, | |
1720 | message=_('Merge strategy: explicit merge commit') |
|
1720 | message=_('Merge strategy: explicit merge commit') | |
1721 | ) |
|
1721 | ) | |
1722 |
|
1722 | |||
1723 | close_branch = model._close_branch_before_merging(pull_request) |
|
1723 | close_branch = model._close_branch_before_merging(pull_request) | |
1724 | if close_branch: |
|
1724 | if close_branch: | |
1725 | repo_type = pull_request.target_repo.repo_type |
|
1725 | repo_type = pull_request.target_repo.repo_type | |
1726 | close_msg = '' |
|
1726 | close_msg = '' | |
1727 | if repo_type == 'hg': |
|
1727 | if repo_type == 'hg': | |
1728 | close_msg = _('Source branch will be closed after merge.') |
|
1728 | close_msg = _('Source branch will be closed after merge.') | |
1729 | elif repo_type == 'git': |
|
1729 | elif repo_type == 'git': | |
1730 | close_msg = _('Source branch will be deleted after merge.') |
|
1730 | close_msg = _('Source branch will be deleted after merge.') | |
1731 |
|
1731 | |||
1732 | merge_details['close_branch'] = dict( |
|
1732 | merge_details['close_branch'] = dict( | |
1733 | details={}, |
|
1733 | details={}, | |
1734 | message=close_msg |
|
1734 | message=close_msg | |
1735 | ) |
|
1735 | ) | |
1736 |
|
1736 | |||
1737 | return merge_details |
|
1737 | return merge_details | |
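For orientation, get_merge_conditions returns a plain dict keyed by condition name, each entry carrying a details dict (empty here) and a translated message. For an hg pull request configured to rebase and close the source branch, the result built above would look roughly like:

    merge_details = {
        'merge_strategy': {
            'details': {},
            'message': u'Merge strategy: rebase',
        },
        'close_branch': {
            'details': {},
            'message': u'Source branch will be closed after merge.',
        },
    }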
1738 |
|
1738 | |||
1739 |
|
1739 | |||
1740 | ChangeTuple = collections.namedtuple( |
|
1740 | ChangeTuple = collections.namedtuple( | |
1741 | 'ChangeTuple', ['added', 'common', 'removed', 'total']) |
|
1741 | 'ChangeTuple', ['added', 'common', 'removed', 'total']) | |
1742 |
|
1742 | |||
1743 | FileChangeTuple = collections.namedtuple( |
|
1743 | FileChangeTuple = collections.namedtuple( | |
1744 | 'FileChangeTuple', ['added', 'modified', 'removed']) |
|
1744 | 'FileChangeTuple', ['added', 'modified', 'removed']) |
@@ -1,1005 +1,1007 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2019 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | users model for RhodeCode |
|
22 | users model for RhodeCode | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import logging |
|
25 | import logging | |
26 | import traceback |
|
26 | import traceback | |
27 | import datetime |
|
27 | import datetime | |
28 | import ipaddress |
|
28 | import ipaddress | |
29 |
|
29 | |||
30 | from pyramid.threadlocal import get_current_request |
|
30 | from pyramid.threadlocal import get_current_request | |
31 | from sqlalchemy.exc import DatabaseError |
|
31 | from sqlalchemy.exc import DatabaseError | |
32 |
|
32 | |||
33 | from rhodecode import events |
|
33 | from rhodecode import events | |
34 | from rhodecode.lib.user_log_filter import user_log_filter |
|
34 | from rhodecode.lib.user_log_filter import user_log_filter | |
35 | from rhodecode.lib.utils2 import ( |
|
35 | from rhodecode.lib.utils2 import ( | |
36 | safe_unicode, get_current_rhodecode_user, action_logger_generic, |
|
36 | safe_unicode, get_current_rhodecode_user, action_logger_generic, | |
37 | AttributeDict, str2bool) |
|
37 | AttributeDict, str2bool) | |
38 | from rhodecode.lib.exceptions import ( |
|
38 | from rhodecode.lib.exceptions import ( | |
39 | DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException, |
|
39 | DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException, | |
40 | UserOwnsUserGroupsException, NotAllowedToCreateUserError, UserOwnsArtifactsException) |
|
40 | UserOwnsUserGroupsException, NotAllowedToCreateUserError, UserOwnsArtifactsException) | |
41 | from rhodecode.lib.caching_query import FromCache |
|
41 | from rhodecode.lib.caching_query import FromCache | |
42 | from rhodecode.model import BaseModel |
|
42 | from rhodecode.model import BaseModel | |
43 | from rhodecode.model.auth_token import AuthTokenModel |
|
43 | from rhodecode.model.auth_token import AuthTokenModel | |
44 | from rhodecode.model.db import ( |
|
44 | from rhodecode.model.db import ( | |
45 | _hash_key, true, false, or_, joinedload, User, UserToPerm, |
|
45 | _hash_key, true, false, or_, joinedload, User, UserToPerm, | |
46 | UserEmailMap, UserIpMap, UserLog) |
|
46 | UserEmailMap, UserIpMap, UserLog) | |
47 | from rhodecode.model.meta import Session |
|
47 | from rhodecode.model.meta import Session | |
48 | from rhodecode.model.repo_group import RepoGroupModel |
|
48 | from rhodecode.model.repo_group import RepoGroupModel | |
49 |
|
49 | |||
50 |
|
50 | |||
51 | log = logging.getLogger(__name__) |
|
51 | log = logging.getLogger(__name__) | |
52 |
|
52 | |||
53 |
|
53 | |||
54 | class UserModel(BaseModel): |
|
54 | class UserModel(BaseModel): | |
55 | cls = User |
|
55 | cls = User | |
56 |
|
56 | |||
57 | def get(self, user_id, cache=False): |
|
57 | def get(self, user_id, cache=False): | |
58 | user = self.sa.query(User) |
|
58 | user = self.sa.query(User) | |
59 | if cache: |
|
59 | if cache: | |
60 | user = user.options( |
|
60 | user = user.options( | |
61 | FromCache("sql_cache_short", "get_user_%s" % user_id)) |
|
61 | FromCache("sql_cache_short", "get_user_%s" % user_id)) | |
62 | return user.get(user_id) |
|
62 | return user.get(user_id) | |
63 |
|
63 | |||
64 | def get_user(self, user): |
|
64 | def get_user(self, user): | |
65 | return self._get_user(user) |
|
65 | return self._get_user(user) | |
66 |
|
66 | |||
67 | def _serialize_user(self, user): |
|
67 | def _serialize_user(self, user): | |
68 | import rhodecode.lib.helpers as h |
|
68 | import rhodecode.lib.helpers as h | |
69 |
|
69 | |||
70 | return { |
|
70 | return { | |
71 | 'id': user.user_id, |
|
71 | 'id': user.user_id, | |
72 | 'first_name': user.first_name, |
|
72 | 'first_name': user.first_name, | |
73 | 'last_name': user.last_name, |
|
73 | 'last_name': user.last_name, | |
74 | 'username': user.username, |
|
74 | 'username': user.username, | |
75 | 'email': user.email, |
|
75 | 'email': user.email, | |
76 | 'icon_link': h.gravatar_url(user.email, 30), |
|
76 | 'icon_link': h.gravatar_url(user.email, 30), | |
77 | 'profile_link': h.link_to_user(user), |
|
77 | 'profile_link': h.link_to_user(user), | |
78 | 'value_display': h.escape(h.person(user)), |
|
78 | 'value_display': h.escape(h.person(user)), | |
79 | 'value': user.username, |
|
79 | 'value': user.username, | |
80 | 'value_type': 'user', |
|
80 | 'value_type': 'user', | |
81 | 'active': user.active, |
|
81 | 'active': user.active, | |
82 | } |
|
82 | } | |
83 |
|
83 | |||
84 | def get_users(self, name_contains=None, limit=20, only_active=True): |
|
84 | def get_users(self, name_contains=None, limit=20, only_active=True): | |
85 |
|
85 | |||
86 | query = self.sa.query(User) |
|
86 | query = self.sa.query(User) | |
87 | if only_active: |
|
87 | if only_active: | |
88 | query = query.filter(User.active == true()) |
|
88 | query = query.filter(User.active == true()) | |
89 |
|
89 | |||
90 | if name_contains: |
|
90 | if name_contains: | |
91 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) |
|
91 | ilike_expression = u'%{}%'.format(safe_unicode(name_contains)) | |
92 | query = query.filter( |
|
92 | query = query.filter( | |
93 | or_( |
|
93 | or_( | |
94 | User.name.ilike(ilike_expression), |
|
94 | User.name.ilike(ilike_expression), | |
95 | User.lastname.ilike(ilike_expression), |
|
95 | User.lastname.ilike(ilike_expression), | |
96 | User.username.ilike(ilike_expression) |
|
96 | User.username.ilike(ilike_expression) | |
97 | ) |
|
97 | ) | |
98 | ) |
|
98 | ) | |
99 | query = query.limit(limit) |
|
99 | query = query.limit(limit) | |
100 | users = query.all() |
|
100 | users = query.all() | |
101 |
|
101 | |||
102 | _users = [ |
|
102 | _users = [ | |
103 | self._serialize_user(user) for user in users |
|
103 | self._serialize_user(user) for user in users | |
104 | ] |
|
104 | ] | |
105 | return _users |
|
105 | return _users | |
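get_users performs a case-insensitive ilike search over first name, last name and username and returns the serialized dicts produced by _serialize_user, ready for autocomplete widgets. A hedged usage example; the model instance and search term are placeholders:

    # illustrative only
    users = UserModel().get_users(name_contains='adm', limit=10, only_active=True)
    for user in users:
        # keys come from _serialize_user() above
        print(user['username'], user['email'], user['icon_link'])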
106 |
|
106 | |||
107 | def get_by_username(self, username, cache=False, case_insensitive=False): |
|
107 | def get_by_username(self, username, cache=False, case_insensitive=False): | |
108 |
|
108 | |||
109 | if case_insensitive: |
|
109 | if case_insensitive: | |
110 | user = self.sa.query(User).filter(User.username.ilike(username)) |
|
110 | user = self.sa.query(User).filter(User.username.ilike(username)) | |
111 | else: |
|
111 | else: | |
112 | user = self.sa.query(User)\ |
|
112 | user = self.sa.query(User)\ | |
113 | .filter(User.username == username) |
|
113 | .filter(User.username == username) | |
114 | if cache: |
|
114 | if cache: | |
115 | name_key = _hash_key(username) |
|
115 | name_key = _hash_key(username) | |
116 | user = user.options( |
|
116 | user = user.options( | |
117 | FromCache("sql_cache_short", "get_user_%s" % name_key)) |
|
117 | FromCache("sql_cache_short", "get_user_%s" % name_key)) | |
118 | return user.scalar() |
|
118 | return user.scalar() | |
119 |
|
119 | |||
120 | def get_by_email(self, email, cache=False, case_insensitive=False): |
|
120 | def get_by_email(self, email, cache=False, case_insensitive=False): | |
121 | return User.get_by_email(email, case_insensitive, cache) |
|
121 | return User.get_by_email(email, case_insensitive, cache) | |
122 |
|
122 | |||
123 | def get_by_auth_token(self, auth_token, cache=False): |
|
123 | def get_by_auth_token(self, auth_token, cache=False): | |
124 | return User.get_by_auth_token(auth_token, cache) |
|
124 | return User.get_by_auth_token(auth_token, cache) | |
125 |
|
125 | |||
126 | def get_active_user_count(self, cache=False): |
|
126 | def get_active_user_count(self, cache=False): | |
127 | qry = User.query().filter( |
|
127 | qry = User.query().filter( | |
128 | User.active == true()).filter( |
|
128 | User.active == true()).filter( | |
129 | User.username != User.DEFAULT_USER) |
|
129 | User.username != User.DEFAULT_USER) | |
130 | if cache: |
|
130 | if cache: | |
131 | qry = qry.options( |
|
131 | qry = qry.options( | |
132 | FromCache("sql_cache_short", "get_active_users")) |
|
132 | FromCache("sql_cache_short", "get_active_users")) | |
133 | return qry.count() |
|
133 | return qry.count() | |
134 |
|
134 | |||
135 | def create(self, form_data, cur_user=None): |
|
135 | def create(self, form_data, cur_user=None): | |
136 | if not cur_user: |
|
136 | if not cur_user: | |
137 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) |
|
137 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) | |
138 |
|
138 | |||
139 | user_data = { |
|
139 | user_data = { | |
140 | 'username': form_data['username'], |
|
140 | 'username': form_data['username'], | |
141 | 'password': form_data['password'], |
|
141 | 'password': form_data['password'], | |
142 | 'email': form_data['email'], |
|
142 | 'email': form_data['email'], | |
143 | 'firstname': form_data['firstname'], |
|
143 | 'firstname': form_data['firstname'], | |
144 | 'lastname': form_data['lastname'], |
|
144 | 'lastname': form_data['lastname'], | |
145 | 'active': form_data['active'], |
|
145 | 'active': form_data['active'], | |
146 | 'extern_type': form_data['extern_type'], |
|
146 | 'extern_type': form_data['extern_type'], | |
147 | 'extern_name': form_data['extern_name'], |
|
147 | 'extern_name': form_data['extern_name'], | |
148 | 'admin': False, |
|
148 | 'admin': False, | |
149 | 'cur_user': cur_user |
|
149 | 'cur_user': cur_user | |
150 | } |
|
150 | } | |
151 |
|
151 | |||
152 | if 'create_repo_group' in form_data: |
|
152 | if 'create_repo_group' in form_data: | |
153 | user_data['create_repo_group'] = str2bool( |
|
153 | user_data['create_repo_group'] = str2bool( | |
154 | form_data.get('create_repo_group')) |
|
154 | form_data.get('create_repo_group')) | |
155 |
|
155 | |||
156 | try: |
|
156 | try: | |
157 | if form_data.get('password_change'): |
|
157 | if form_data.get('password_change'): | |
158 | user_data['force_password_change'] = True |
|
158 | user_data['force_password_change'] = True | |
159 | return UserModel().create_or_update(**user_data) |
|
159 | return UserModel().create_or_update(**user_data) | |
160 | except Exception: |
|
160 | except Exception: | |
161 | log.error(traceback.format_exc()) |
|
161 | log.error(traceback.format_exc()) | |
162 | raise |
|
162 | raise | |
163 |
|
163 | |||
164 | def update_user(self, user, skip_attrs=None, **kwargs): |
|
164 | def update_user(self, user, skip_attrs=None, **kwargs): | |
165 | from rhodecode.lib.auth import get_crypt_password |
|
165 | from rhodecode.lib.auth import get_crypt_password | |
166 |
|
166 | |||
167 | user = self._get_user(user) |
|
167 | user = self._get_user(user) | |
168 | if user.username == User.DEFAULT_USER: |
|
168 | if user.username == User.DEFAULT_USER: | |
169 | raise DefaultUserException( |
|
169 | raise DefaultUserException( | |
170 | "You can't edit this user (`%(username)s`) since it's " |
|
170 | "You can't edit this user (`%(username)s`) since it's " | |
171 | "crucial for the entire application" % { |

171 | "crucial for the entire application" % { | |
172 | 'username': user.username}) |
|
172 | 'username': user.username}) | |
173 |
|
173 | |||
174 | # first store only defaults |
|
174 | # first store only defaults | |
175 | user_attrs = { |
|
175 | user_attrs = { | |
176 | 'updating_user_id': user.user_id, |
|
176 | 'updating_user_id': user.user_id, | |
177 | 'username': user.username, |
|
177 | 'username': user.username, | |
178 | 'password': user.password, |
|
178 | 'password': user.password, | |
179 | 'email': user.email, |
|
179 | 'email': user.email, | |
180 | 'firstname': user.name, |
|
180 | 'firstname': user.name, | |
181 | 'lastname': user.lastname, |
|
181 | 'lastname': user.lastname, | |
182 | 'description': user.description, |
|
182 | 'description': user.description, | |
183 | 'active': user.active, |
|
183 | 'active': user.active, | |
184 | 'admin': user.admin, |
|
184 | 'admin': user.admin, | |
185 | 'extern_name': user.extern_name, |
|
185 | 'extern_name': user.extern_name, | |
186 | 'extern_type': user.extern_type, |
|
186 | 'extern_type': user.extern_type, | |
187 | 'language': user.user_data.get('language') |
|
187 | 'language': user.user_data.get('language') | |
188 | } |
|
188 | } | |
189 |
|
189 | |||
190 | # in case there's new_password, which comes from the form, use it to |

190 | # in case there's new_password, which comes from the form, use it to | |
191 | # store the password |

191 | # store the password | |
192 | if kwargs.get('new_password'): |
|
192 | if kwargs.get('new_password'): | |
193 | kwargs['password'] = kwargs['new_password'] |
|
193 | kwargs['password'] = kwargs['new_password'] | |
194 |
|
194 | |||
195 | # cleanups, my_account password change form |
|
195 | # cleanups, my_account password change form | |
196 | kwargs.pop('current_password', None) |
|
196 | kwargs.pop('current_password', None) | |
197 | kwargs.pop('new_password', None) |
|
197 | kwargs.pop('new_password', None) | |
198 |
|
198 | |||
199 | # cleanups, user edit password change form |
|
199 | # cleanups, user edit password change form | |
200 | kwargs.pop('password_confirmation', None) |
|
200 | kwargs.pop('password_confirmation', None) | |
201 | kwargs.pop('password_change', None) |
|
201 | kwargs.pop('password_change', None) | |
202 |
|
202 | |||
203 | # create repo group on user creation |
|
203 | # create repo group on user creation | |
204 | kwargs.pop('create_repo_group', None) |
|
204 | kwargs.pop('create_repo_group', None) | |
205 |
|
205 | |||
206 | # legacy forms send name, which is the firstname |
|
206 | # legacy forms send name, which is the firstname | |
207 | firstname = kwargs.pop('name', None) |
|
207 | firstname = kwargs.pop('name', None) | |
208 | if firstname: |
|
208 | if firstname: | |
209 | kwargs['firstname'] = firstname |
|
209 | kwargs['firstname'] = firstname | |
210 |
|
210 | |||
211 | for k, v in kwargs.items(): |
|
211 | for k, v in kwargs.items(): | |
212 | # skip if we don't want to update this |
|
212 | # skip if we don't want to update this | |
213 | if skip_attrs and k in skip_attrs: |
|
213 | if skip_attrs and k in skip_attrs: | |
214 | continue |
|
214 | continue | |
215 |
|
215 | |||
216 | user_attrs[k] = v |
|
216 | user_attrs[k] = v | |
217 |
|
217 | |||
218 | try: |
|
218 | try: | |
219 | return self.create_or_update(**user_attrs) |
|
219 | return self.create_or_update(**user_attrs) | |
220 | except Exception: |
|
220 | except Exception: | |
221 | log.error(traceback.format_exc()) |
|
221 | log.error(traceback.format_exc()) | |
222 | raise |
|
222 | raise | |
223 |
|
223 | |||
224 | def create_or_update( |
|
224 | def create_or_update( | |
225 | self, username, password, email, firstname='', lastname='', |
|
225 | self, username, password, email, firstname='', lastname='', | |
226 | active=True, admin=False, extern_type=None, extern_name=None, |
|
226 | active=True, admin=False, extern_type=None, extern_name=None, | |
227 | cur_user=None, plugin=None, force_password_change=False, |
|
227 | cur_user=None, plugin=None, force_password_change=False, | |
228 | allow_to_create_user=True, create_repo_group=None, |
|
228 | allow_to_create_user=True, create_repo_group=None, | |
229 | updating_user_id=None, language=None, description='', |
|
229 | updating_user_id=None, language=None, description='', | |
230 | strict_creation_check=True): |
|
230 | strict_creation_check=True): | |
231 | """ |
|
231 | """ | |
232 | Creates a new instance if not found, or updates the current one |

232 | Creates a new instance if not found, or updates the current one | |
233 |
|
233 | |||
234 | :param username: |
|
234 | :param username: | |
235 | :param password: |
|
235 | :param password: | |
236 | :param email: |
|
236 | :param email: | |
237 | :param firstname: |
|
237 | :param firstname: | |
238 | :param lastname: |
|
238 | :param lastname: | |
239 | :param active: |
|
239 | :param active: | |
240 | :param admin: |
|
240 | :param admin: | |
241 | :param extern_type: |
|
241 | :param extern_type: | |
242 | :param extern_name: |
|
242 | :param extern_name: | |
243 | :param cur_user: |
|
243 | :param cur_user: | |
244 | :param plugin: optional plugin this method was called from |
|
244 | :param plugin: optional plugin this method was called from | |
245 | :param force_password_change: toggles new or existing user flag |
|
245 | :param force_password_change: toggles new or existing user flag | |
246 | for password change |
|
246 | for password change | |
247 | :param allow_to_create_user: Defines if the method can actually create |
|
247 | :param allow_to_create_user: Defines if the method can actually create | |
248 | new users |
|
248 | new users | |
249 | :param create_repo_group: Defines if the method should also |
|
249 | :param create_repo_group: Defines if the method should also | |
250 | create a repo group with the user's name, owned by that user |

250 | create a repo group with the user's name, owned by that user | |
251 | :param updating_user_id: if set, this is the user we want to |

251 | :param updating_user_id: if set, this is the user we want to | |
252 | update; this allows editing the username. |

252 | update; this allows editing the username. | |
253 | :param language: the user's interface language. |

253 | :param language: the user's interface language. | |
254 | :param description: user description |
|
254 | :param description: user description | |
255 | :param strict_creation_check: checks whether creation is allowed, e.g. license-wise. |

255 | :param strict_creation_check: checks whether creation is allowed, e.g. license-wise. | |
256 |
|
256 | |||
257 | :returns: new User object with injected `is_new_user` attribute. |
|
257 | :returns: new User object with injected `is_new_user` attribute. | |
258 | """ |
|
258 | """ | |
259 |
|
259 | |||
260 | if not cur_user: |
|
260 | if not cur_user: | |
261 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) |
|
261 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) | |
262 |
|
262 | |||
263 | from rhodecode.lib.auth import ( |
|
263 | from rhodecode.lib.auth import ( | |
264 | get_crypt_password, check_password, generate_auth_token) |
|
264 | get_crypt_password, check_password, generate_auth_token) | |
265 | from rhodecode.lib.hooks_base import ( |
|
265 | from rhodecode.lib.hooks_base import ( | |
266 | log_create_user, check_allowed_create_user) |
|
266 | log_create_user, check_allowed_create_user) | |
267 |
|
267 | |||
268 | def _password_change(new_user, password): |
|
268 | def _password_change(new_user, password): | |
269 | old_password = new_user.password or '' |
|
269 | old_password = new_user.password or '' | |
270 | # empty password |
|
270 | # empty password | |
271 | if not old_password: |
|
271 | if not old_password: | |
272 | return False |
|
272 | return False | |
273 |
|
273 | |||
274 | # password check is only needed for RhodeCode internal auth calls |
|
274 | # password check is only needed for RhodeCode internal auth calls | |
275 | # in case it's a plugin we don't care |
|
275 | # in case it's a plugin we don't care | |
276 | if not plugin: |
|
276 | if not plugin: | |
277 |
|
277 | |||
278 | # first check if we were given the crypted password back; if it |

278 | # first check if we were given the crypted password back; if it | |
279 | # matches, it's not a password change |

279 | # matches, it's not a password change | |
280 | if new_user.password == password: |
|
280 | if new_user.password == password: | |
281 | return False |
|
281 | return False | |
282 |
|
282 | |||
283 | password_match = check_password(password, old_password) |
|
283 | password_match = check_password(password, old_password) | |
284 | if not password_match: |
|
284 | if not password_match: | |
285 | return True |
|
285 | return True | |
286 |
|
286 | |||
287 | return False |
|
287 | return False | |
288 |
|
288 | |||
289 | # read settings on default personal repo group creation |
|
289 | # read settings on default personal repo group creation | |
290 | if create_repo_group is None: |
|
290 | if create_repo_group is None: | |
291 | default_create_repo_group = RepoGroupModel()\ |
|
291 | default_create_repo_group = RepoGroupModel()\ | |
292 | .get_default_create_personal_repo_group() |
|
292 | .get_default_create_personal_repo_group() | |
293 | create_repo_group = default_create_repo_group |
|
293 | create_repo_group = default_create_repo_group | |
294 |
|
294 | |||
295 | user_data = { |
|
295 | user_data = { | |
296 | 'username': username, |
|
296 | 'username': username, | |
297 | 'password': password, |
|
297 | 'password': password, | |
298 | 'email': email, |
|
298 | 'email': email, | |
299 | 'firstname': firstname, |
|
299 | 'firstname': firstname, | |
300 | 'lastname': lastname, |
|
300 | 'lastname': lastname, | |
301 | 'active': active, |
|
301 | 'active': active, | |
302 | 'admin': admin |
|
302 | 'admin': admin | |
303 | } |
|
303 | } | |
304 |
|
304 | |||
305 | if updating_user_id: |
|
305 | if updating_user_id: | |
306 | log.debug('Checking for existing account in RhodeCode ' |
|
306 | log.debug('Checking for existing account in RhodeCode ' | |
307 | 'database with user_id `%s` ', updating_user_id) |
|
307 | 'database with user_id `%s` ', updating_user_id) | |
308 | user = User.get(updating_user_id) |
|
308 | user = User.get(updating_user_id) | |
309 | else: |
|
309 | else: | |
310 | log.debug('Checking for existing account in RhodeCode ' |
|
310 | log.debug('Checking for existing account in RhodeCode ' | |
311 | 'database with username `%s` ', username) |
|
311 | 'database with username `%s` ', username) | |
312 | user = User.get_by_username(username, case_insensitive=True) |
|
312 | user = User.get_by_username(username, case_insensitive=True) | |
313 |
|
313 | |||
314 | if user is None: |
|
314 | if user is None: | |
315 | # we check the internal flag to see if this method is actually allowed to |

315 | # we check the internal flag to see if this method is actually allowed to | |
316 | # create a new user |

316 | # create a new user | |
317 | if not allow_to_create_user: |
|
317 | if not allow_to_create_user: | |
318 | msg = ('Method wants to create a new user, but it is not ' |

318 | msg = ('Method wants to create a new user, but it is not ' | |
319 | 'allowed to do so') |
|
319 | 'allowed to do so') | |
320 | log.warning(msg) |
|
320 | log.warning(msg) | |
321 | raise NotAllowedToCreateUserError(msg) |
|
321 | raise NotAllowedToCreateUserError(msg) | |
322 |
|
322 | |||
323 | log.debug('Creating new user %s', username) |
|
323 | log.debug('Creating new user %s', username) | |
324 |
|
324 | |||
325 | # only if we create a user that is active |

325 | # only if we create a user that is active | |
326 | new_active_user = active |
|
326 | new_active_user = active | |
327 | if new_active_user and strict_creation_check: |
|
327 | if new_active_user and strict_creation_check: | |
328 | # raises UserCreationError if it's not allowed for any reason to |
|
328 | # raises UserCreationError if it's not allowed for any reason to | |
329 | # create a new active user; this also executes pre-create hooks |

329 | # create a new active user; this also executes pre-create hooks | |
330 | check_allowed_create_user(user_data, cur_user, strict_check=True) |
|
330 | check_allowed_create_user(user_data, cur_user, strict_check=True) | |
331 | events.trigger(events.UserPreCreate(user_data)) |
|
331 | events.trigger(events.UserPreCreate(user_data)) | |
332 | new_user = User() |
|
332 | new_user = User() | |
333 | edit = False |
|
333 | edit = False | |
334 | else: |
|
334 | else: | |
335 | log.debug('updating user `%s`', username) |
|
335 | log.debug('updating user `%s`', username) | |
336 | events.trigger(events.UserPreUpdate(user, user_data)) |
|
336 | events.trigger(events.UserPreUpdate(user, user_data)) | |
337 | new_user = user |
|
337 | new_user = user | |
338 | edit = True |
|
338 | edit = True | |
339 |
|
339 | |||
340 | # we're not allowed to edit default user |
|
340 | # we're not allowed to edit default user | |
341 | if user.username == User.DEFAULT_USER: |
|
341 | if user.username == User.DEFAULT_USER: | |
342 | raise DefaultUserException( |
|
342 | raise DefaultUserException( | |
343 | "You can't edit this user (`%(username)s`) since it's " |
|
343 | "You can't edit this user (`%(username)s`) since it's " | |
344 | "crucial for the entire application" |

344 | "crucial for the entire application" | |
345 | % {'username': user.username}) |
|
345 | % {'username': user.username}) | |
346 |
|
346 | |||
347 | # inject special attribute that will tell us if User is new or old |
|
347 | # inject special attribute that will tell us if User is new or old | |
348 | new_user.is_new_user = not edit |
|
348 | new_user.is_new_user = not edit | |
349 | # for users that didn't specify an auth type, we use the RhodeCode built-in |

349 | # for users that didn't specify an auth type, we use the RhodeCode built-in | |
350 | from rhodecode.authentication.plugins import auth_rhodecode |
|
350 | from rhodecode.authentication.plugins import auth_rhodecode | |
351 | extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.uid |
|
351 | extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.uid | |
352 | extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.uid |
|
352 | extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.uid | |
353 |
|
353 | |||
354 | try: |
|
354 | try: | |
355 | new_user.username = username |
|
355 | new_user.username = username | |
356 | new_user.admin = admin |
|
356 | new_user.admin = admin | |
357 | new_user.email = email |
|
357 | new_user.email = email | |
358 | new_user.active = active |
|
358 | new_user.active = active | |
359 | new_user.extern_name = safe_unicode(extern_name) |
|
359 | new_user.extern_name = safe_unicode(extern_name) | |
360 | new_user.extern_type = safe_unicode(extern_type) |
|
360 | new_user.extern_type = safe_unicode(extern_type) | |
361 | new_user.name = firstname |
|
361 | new_user.name = firstname | |
362 | new_user.lastname = lastname |
|
362 | new_user.lastname = lastname | |
363 | new_user.description = description |
|
363 | new_user.description = description | |
364 |
|
364 | |||
365 | # set password only if creating a user or the password is changed |

365 | # set password only if creating a user or the password is changed | |
366 | if not edit or _password_change(new_user, password): |
|
366 | if not edit or _password_change(new_user, password): | |
367 | reason = 'new password' if edit else 'new user' |
|
367 | reason = 'new password' if edit else 'new user' | |
368 | log.debug('Updating password reason=>%s', reason) |
|
368 | log.debug('Updating password reason=>%s', reason) | |
369 | new_user.password = get_crypt_password(password) if password else None |
|
369 | new_user.password = get_crypt_password(password) if password else None | |
370 |
|
370 | |||
371 | if force_password_change: |
|
371 | if force_password_change: | |
372 | new_user.update_userdata(force_password_change=True) |
|
372 | new_user.update_userdata(force_password_change=True) | |
373 | if language: |
|
373 | if language: | |
374 | new_user.update_userdata(language=language) |
|
374 | new_user.update_userdata(language=language) | |
375 | new_user.update_userdata(notification_status=True) |
|
375 | new_user.update_userdata(notification_status=True) | |
376 |
|
376 | |||
377 | self.sa.add(new_user) |
|
377 | self.sa.add(new_user) | |
378 |
|
378 | |||
379 | if not edit and create_repo_group: |
|
379 | if not edit and create_repo_group: | |
380 | RepoGroupModel().create_personal_repo_group( |
|
380 | RepoGroupModel().create_personal_repo_group( | |
381 | new_user, commit_early=False) |
|
381 | new_user, commit_early=False) | |
382 |
|
382 | |||
383 | if not edit: |
|
383 | if not edit: | |
384 | # add the RSS token |
|
384 | # add the RSS token | |
385 | self.add_auth_token( |
|
385 | self.add_auth_token( | |
386 | user=username, lifetime_minutes=-1, |
|
386 | user=username, lifetime_minutes=-1, | |
387 | role=self.auth_token_role.ROLE_FEED, |
|
387 | role=self.auth_token_role.ROLE_FEED, | |
388 | description=u'Generated feed token') |
|
388 | description=u'Generated feed token') | |
389 |
|
389 | |||
390 | kwargs = new_user.get_dict() |
|
390 | kwargs = new_user.get_dict() | |
391 | # backward compat, require api_keys present |
|
391 | # backward compat, require api_keys present | |
392 | kwargs['api_keys'] = kwargs['auth_tokens'] |
|
392 | kwargs['api_keys'] = kwargs['auth_tokens'] | |
393 | log_create_user(created_by=cur_user, **kwargs) |
|
393 | log_create_user(created_by=cur_user, **kwargs) | |
394 | events.trigger(events.UserPostCreate(user_data)) |
|
394 | events.trigger(events.UserPostCreate(user_data)) | |
395 | return new_user |
|
395 | return new_user | |
396 | except (DatabaseError,): |
|
396 | except (DatabaseError,): | |
397 | log.error(traceback.format_exc()) |
|
397 | log.error(traceback.format_exc()) | |
398 | raise |
|
398 | raise | |
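A hedged example of calling create_or_update for a brand-new account; every value below is a placeholder and cur_user would normally come from the session:

    # illustrative only -- all values are placeholders
    user = UserModel().create_or_update(
        username='jdoe',
        password='secret',               # hashed via get_crypt_password() internally
        email='jdoe@example.com',
        firstname='John', lastname='Doe',
        active=True, admin=False,
        cur_user='admin')                # who performs the action, used by hooks/audit

    # `is_new_user` is the attribute injected above: True unless an
    # existing account was updated
    assert user.is_new_user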
399 |
|
399 | |||
400 | def create_registration(self, form_data, |
|
400 | def create_registration(self, form_data, | |
401 | extern_name='rhodecode', extern_type='rhodecode'): |
|
401 | extern_name='rhodecode', extern_type='rhodecode'): | |
402 | from rhodecode.model.notification import NotificationModel |
|
402 | from rhodecode.model.notification import NotificationModel | |
403 | from rhodecode.model.notification import EmailNotificationModel |
|
403 | from rhodecode.model.notification import EmailNotificationModel | |
404 |
|
404 | |||
405 | try: |
|
405 | try: | |
406 | form_data['admin'] = False |
|
406 | form_data['admin'] = False | |
407 | form_data['extern_name'] = extern_name |
|
407 | form_data['extern_name'] = extern_name | |
408 | form_data['extern_type'] = extern_type |
|
408 | form_data['extern_type'] = extern_type | |
409 | new_user = self.create(form_data) |
|
409 | new_user = self.create(form_data) | |
410 |
|
410 | |||
411 | self.sa.add(new_user) |
|
411 | self.sa.add(new_user) | |
412 | self.sa.flush() |
|
412 | self.sa.flush() | |
413 |
|
413 | |||
414 | user_data = new_user.get_dict() |
|
414 | user_data = new_user.get_dict() | |
415 | kwargs = { |
|
415 | kwargs = { | |
416 | # use SQLALCHEMY safe dump of user data |
|
416 | # use SQLALCHEMY safe dump of user data | |
417 | 'user': AttributeDict(user_data), |
|
417 | 'user': AttributeDict(user_data), | |
418 | 'date': datetime.datetime.now() |
|
418 | 'date': datetime.datetime.now() | |
419 | } |
|
419 | } | |
420 | notification_type = EmailNotificationModel.TYPE_REGISTRATION |
|
420 | notification_type = EmailNotificationModel.TYPE_REGISTRATION | |
421 | # pre-generate the subject for notification itself |
|
421 | # pre-generate the subject for notification itself | |
422 | (subject, |
|
422 | (subject, | |
423 | _h, _e, # we don't care about those |
|
423 | _h, _e, # we don't care about those | |
424 | body_plaintext) = EmailNotificationModel().render_email( |
|
424 | body_plaintext) = EmailNotificationModel().render_email( | |
425 | notification_type, **kwargs) |
|
425 | notification_type, **kwargs) | |
426 |
|
426 | |||
427 | # create notification objects, and emails |
|
427 | # create notification objects, and emails | |
428 | NotificationModel().create( |
|
428 | NotificationModel().create( | |
429 | created_by=new_user, |
|
429 | created_by=new_user, | |
430 | notification_subject=subject, |
|
430 | notification_subject=subject, | |
431 | notification_body=body_plaintext, |
|
431 | notification_body=body_plaintext, | |
432 | notification_type=notification_type, |
|
432 | notification_type=notification_type, | |
433 | recipients=None, # all admins |
|
433 | recipients=None, # all admins | |
434 | email_kwargs=kwargs, |
|
434 | email_kwargs=kwargs, | |
435 | ) |
|
435 | ) | |
436 |
|
436 | |||
437 | return new_user |
|
437 | return new_user | |
438 | except Exception: |
|
438 | except Exception: | |
439 | log.error(traceback.format_exc()) |
|
439 | log.error(traceback.format_exc()) | |
440 | raise |
|
440 | raise | |
441 |
|
441 | |||
442 | def _handle_user_repos(self, username, repositories, handle_mode=None): |
|
442 | def _handle_user_repos(self, username, repositories, handle_mode=None): | |
443 | _superadmin = self.cls.get_first_super_admin() |
|
443 | _superadmin = self.cls.get_first_super_admin() | |
444 | left_overs = True |
|
444 | left_overs = True | |
445 |
|
445 | |||
446 | from rhodecode.model.repo import RepoModel |
|
446 | from rhodecode.model.repo import RepoModel | |
447 |
|
447 | |||
448 | if handle_mode == 'detach': |
|
448 | if handle_mode == 'detach': | |
449 | for obj in repositories: |
|
449 | for obj in repositories: | |
450 | obj.user = _superadmin |
|
450 | obj.user = _superadmin | |
451 | # set description so we know why the super admin now owns |

451 | # set description so we know why the super admin now owns | |
452 | # additional repositories that were orphaned! |

452 | # additional repositories that were orphaned! | |
453 | obj.description += ' \n::detached repository from deleted user: %s' % (username,) |
|
453 | obj.description += ' \n::detached repository from deleted user: %s' % (username,) | |
454 | self.sa.add(obj) |
|
454 | self.sa.add(obj) | |
455 | left_overs = False |
|
455 | left_overs = False | |
456 | elif handle_mode == 'delete': |
|
456 | elif handle_mode == 'delete': | |
457 | for obj in repositories: |
|
457 | for obj in repositories: | |
458 | RepoModel().delete(obj, forks='detach') |
|
458 | RepoModel().delete(obj, forks='detach') | |
459 | left_overs = False |
|
459 | left_overs = False | |
460 |
|
460 | |||
461 | # if nothing was done, we still have leftovers |

461 | # if nothing was done, we still have leftovers | |
462 | return left_overs |
|
462 | return left_overs | |
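_handle_user_repos either re-assigns the user's repositories to the first super admin ('detach') or deletes them ('delete'); any other handle_mode leaves them untouched and the True return value signals leftovers. Sketch of the calling pattern (hypothetical; the surrounding user-delete flow is not shown):

    # illustrative only -- `model`, `username` and `repos` are assumed
    left_overs = model._handle_user_repos(username, repos, handle_mode='detach')
    if left_overs:
        # nothing was detached or deleted; the delete flow would normally
        # refuse to remove the account (UserOwnsReposException)
        log.warning('user %s still owns repositories', username)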
463 |
|
463 | |||
464 | def _handle_user_repo_groups(self, username, repository_groups, |
|
464 | def _handle_user_repo_groups(self, username, repository_groups, | |
465 | handle_mode=None): |
|
465 | handle_mode=None): | |
466 | _superadmin = self.cls.get_first_super_admin() |
|
466 | _superadmin = self.cls.get_first_super_admin() | |
467 | left_overs = True |
|
467 | left_overs = True | |
468 |
|
468 | |||
469 | from rhodecode.model.repo_group import RepoGroupModel |
|
469 | from rhodecode.model.repo_group import RepoGroupModel | |
470 |
|
470 | |||
471 | if handle_mode == 'detach': |
|
471 | if handle_mode == 'detach': | |
472 | for r in repository_groups: |
|
472 | for r in repository_groups: | |
473 | r.user = _superadmin |
|
473 | r.user = _superadmin | |
474 | # set a description so we know why the super admin now owns |
|
474 | # set a description so we know why the super admin now owns | |
475 | # additional repository groups that were orphaned |
|
475 | # additional repository groups that were orphaned | |
476 | r.group_description += ' \n::detached repository group from deleted user: %s' % (username,) |
|
476 | r.group_description += ' \n::detached repository group from deleted user: %s' % (username,) | |
477 | r.personal = False |
|
477 | r.personal = False | |
478 | self.sa.add(r) |
|
478 | self.sa.add(r) | |
479 | left_overs = False |
|
479 | left_overs = False | |
480 | elif handle_mode == 'delete': |
|
480 | elif handle_mode == 'delete': | |
481 | for r in repository_groups: |
|
481 | for r in repository_groups: | |
482 | RepoGroupModel().delete(r) |
|
482 | RepoGroupModel().delete(r) | |
483 | left_overs = False |
|
483 | left_overs = False | |
484 |
|
484 | |||
485 | # if nothing was done we still have leftovers |
|
485 | # if nothing was done we still have leftovers | |
486 | return left_overs |
|
486 | return left_overs | |
487 |
|
487 | |||
488 | def _handle_user_user_groups(self, username, user_groups, handle_mode=None): |
|
488 | def _handle_user_user_groups(self, username, user_groups, handle_mode=None): | |
489 | _superadmin = self.cls.get_first_super_admin() |
|
489 | _superadmin = self.cls.get_first_super_admin() | |
490 | left_overs = True |
|
490 | left_overs = True | |
491 |
|
491 | |||
492 | from rhodecode.model.user_group import UserGroupModel |
|
492 | from rhodecode.model.user_group import UserGroupModel | |
493 |
|
493 | |||
494 | if handle_mode == 'detach': |
|
494 | if handle_mode == 'detach': | |
495 | for r in user_groups: |
|
495 | for r in user_groups: | |
496 | for user_user_group_to_perm in r.user_user_group_to_perm: |
|
496 | for user_user_group_to_perm in r.user_user_group_to_perm: | |
497 | if user_user_group_to_perm.user.username == username: |
|
497 | if user_user_group_to_perm.user.username == username: | |
498 | user_user_group_to_perm.user = _superadmin |
|
498 | user_user_group_to_perm.user = _superadmin | |
499 | r.user = _superadmin |
|
499 | r.user = _superadmin | |
500 | # set a description so we know why the super admin now owns |
|
500 | # set a description so we know why the super admin now owns | |
501 | # additional user groups that were orphaned |
|
501 | # additional user groups that were orphaned | |
502 | r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,) |
|
502 | r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,) | |
503 | self.sa.add(r) |
|
503 | self.sa.add(r) | |
504 | left_overs = False |
|
504 | left_overs = False | |
505 | elif handle_mode == 'delete': |
|
505 | elif handle_mode == 'delete': | |
506 | for r in user_groups: |
|
506 | for r in user_groups: | |
507 | UserGroupModel().delete(r) |
|
507 | UserGroupModel().delete(r) | |
508 | left_overs = False |
|
508 | left_overs = False | |
509 |
|
509 | |||
510 | # if nothing was done we still have leftovers |
|
510 | # if nothing was done we still have leftovers | |
511 | return left_overs |
|
511 | return left_overs | |
512 |
|
512 | |||
513 | def _handle_user_artifacts(self, username, artifacts, handle_mode=None): |
|
513 | def _handle_user_artifacts(self, username, artifacts, handle_mode=None): | |
514 | _superadmin = self.cls.get_first_super_admin() |
|
514 | _superadmin = self.cls.get_first_super_admin() | |
515 | left_overs = True |
|
515 | left_overs = True | |
516 |
|
516 | |||
517 | if handle_mode == 'detach': |
|
517 | if handle_mode == 'detach': | |
518 | for a in artifacts: |
|
518 | for a in artifacts: | |
519 | a.upload_user = _superadmin |
|
519 | a.upload_user = _superadmin | |
520 | # set a description so we know why the super admin now owns |
|
520 | # set a description so we know why the super admin now owns | |
521 | # additional artifacts that were orphaned |
|
521 | # additional artifacts that were orphaned | |
522 | a.file_description += ' \n::detached artifact from deleted user: %s' % (username,) |
|
522 | a.file_description += ' \n::detached artifact from deleted user: %s' % (username,) | |
523 | self.sa.add(a) |
|
523 | self.sa.add(a) | |
524 | left_overs = False |
|
524 | left_overs = False | |
525 | elif handle_mode == 'delete': |
|
525 | elif handle_mode == 'delete': | |
526 | from rhodecode.apps.file_store import utils as store_utils |
|
526 | from rhodecode.apps.file_store import utils as store_utils | |
527 | storage = store_utils.get_file_storage(self.request.registry.settings) |
|
527 | storage = store_utils.get_file_storage(self.request.registry.settings) | |
528 | for a in artifacts: |
|
528 | for a in artifacts: | |
529 | file_uid = a.file_uid |
|
529 | file_uid = a.file_uid | |
530 | storage.delete(file_uid) |
|
530 | storage.delete(file_uid) | |
531 | self.sa.delete(a) |
|
531 | self.sa.delete(a) | |
532 |
|
532 | |||
533 | left_overs = False |
|
533 | left_overs = False | |
534 |
|
534 | |||
535 | # if nothing was done we still have leftovers |
|
535 | # if nothing was done we still have leftovers | |
536 | return left_overs |
|
536 | return left_overs | |
537 |
|
537 | |||
538 | def delete(self, user, cur_user=None, handle_repos=None, |
|
538 | def delete(self, user, cur_user=None, handle_repos=None, | |
539 | handle_repo_groups=None, handle_user_groups=None, handle_artifacts=None): |
|
539 | handle_repo_groups=None, handle_user_groups=None, handle_artifacts=None): | |
540 | from rhodecode.lib.hooks_base import log_delete_user |
|
540 | from rhodecode.lib.hooks_base import log_delete_user | |
541 |
|
541 | |||
542 | if not cur_user: |
|
542 | if not cur_user: | |
543 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) |
|
543 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) | |
544 | user = self._get_user(user) |
|
544 | user = self._get_user(user) | |
545 |
|
545 | |||
546 | try: |
|
546 | try: | |
547 | if user.username == User.DEFAULT_USER: |
|
547 | if user.username == User.DEFAULT_USER: | |
548 | raise DefaultUserException( |
|
548 | raise DefaultUserException( | |
549 | u"You can't remove this user since it's" |
|
549 | u"You can't remove this user since it's" | |
550 | u" crucial for the entire application") |
|
550 | u" crucial for the entire application") | |
551 |
|
551 | |||
552 | left_overs = self._handle_user_repos( |
|
552 | left_overs = self._handle_user_repos( | |
553 | user.username, user.repositories, handle_repos) |
|
553 | user.username, user.repositories, handle_repos) | |
554 | if left_overs and user.repositories: |
|
554 | if left_overs and user.repositories: | |
555 | repos = [x.repo_name for x in user.repositories] |
|
555 | repos = [x.repo_name for x in user.repositories] | |
556 | raise UserOwnsReposException( |
|
556 | raise UserOwnsReposException( | |
557 | u'user "%(username)s" still owns %(len_repos)s repositories and cannot be ' |
|
557 | u'user "%(username)s" still owns %(len_repos)s repositories and cannot be ' | |
558 | u'removed. Switch owners or remove those repositories:%(list_repos)s' |
|
558 | u'removed. Switch owners or remove those repositories:%(list_repos)s' | |
559 | % {'username': user.username, 'len_repos': len(repos), |
|
559 | % {'username': user.username, 'len_repos': len(repos), | |
560 | 'list_repos': ', '.join(repos)}) |
|
560 | 'list_repos': ', '.join(repos)}) | |
561 |
|
561 | |||
562 | left_overs = self._handle_user_repo_groups( |
|
562 | left_overs = self._handle_user_repo_groups( | |
563 | user.username, user.repository_groups, handle_repo_groups) |
|
563 | user.username, user.repository_groups, handle_repo_groups) | |
564 | if left_overs and user.repository_groups: |
|
564 | if left_overs and user.repository_groups: | |
565 | repo_groups = [x.group_name for x in user.repository_groups] |
|
565 | repo_groups = [x.group_name for x in user.repository_groups] | |
566 | raise UserOwnsRepoGroupsException( |
|
566 | raise UserOwnsRepoGroupsException( | |
567 | u'user "%(username)s" still owns %(len_repo_groups)s repository groups and cannot be ' |
|
567 | u'user "%(username)s" still owns %(len_repo_groups)s repository groups and cannot be ' | |
568 | u'removed. Switch owners or remove those repository groups:%(list_repo_groups)s' |
|
568 | u'removed. Switch owners or remove those repository groups:%(list_repo_groups)s' | |
569 | % {'username': user.username, 'len_repo_groups': len(repo_groups), |
|
569 | % {'username': user.username, 'len_repo_groups': len(repo_groups), | |
570 | 'list_repo_groups': ', '.join(repo_groups)}) |
|
570 | 'list_repo_groups': ', '.join(repo_groups)}) | |
571 |
|
571 | |||
572 | left_overs = self._handle_user_user_groups( |
|
572 | left_overs = self._handle_user_user_groups( | |
573 | user.username, user.user_groups, handle_user_groups) |
|
573 | user.username, user.user_groups, handle_user_groups) | |
574 | if left_overs and user.user_groups: |
|
574 | if left_overs and user.user_groups: | |
575 | user_groups = [x.users_group_name for x in user.user_groups] |
|
575 | user_groups = [x.users_group_name for x in user.user_groups] | |
576 | raise UserOwnsUserGroupsException( |
|
576 | raise UserOwnsUserGroupsException( | |
577 | u'user "%s" still owns %s user groups and cannot be ' |
|
577 | u'user "%s" still owns %s user groups and cannot be ' | |
578 | u'removed. Switch owners or remove those user groups:%s' |
|
578 | u'removed. Switch owners or remove those user groups:%s' | |
579 | % (user.username, len(user_groups), ', '.join(user_groups))) |
|
579 | % (user.username, len(user_groups), ', '.join(user_groups))) | |
580 |
|
580 | |||
581 | left_overs = self._handle_user_artifacts( |
|
581 | left_overs = self._handle_user_artifacts( | |
582 | user.username, user.artifacts, handle_artifacts) |
|
582 | user.username, user.artifacts, handle_artifacts) | |
583 | if left_overs and user.artifacts: |
|
583 | if left_overs and user.artifacts: | |
584 | artifacts = [x.file_uid for x in user.artifacts] |
|
584 | artifacts = [x.file_uid for x in user.artifacts] | |
585 | raise UserOwnsArtifactsException( |
|
585 | raise UserOwnsArtifactsException( | |
586 | u'user "%s" still owns %s artifacts and cannot be ' |
|
586 | u'user "%s" still owns %s artifacts and cannot be ' | |
587 | u'removed. Switch owners or remove those artifacts:%s' |
|
587 | u'removed. Switch owners or remove those artifacts:%s' | |
588 | % (user.username, len(artifacts), ', '.join(artifacts))) |
|
588 | % (user.username, len(artifacts), ', '.join(artifacts))) | |
589 |
|
589 | |||
590 | user_data = user.get_dict() # fetch user data before expire |
|
590 | user_data = user.get_dict() # fetch user data before expire | |
591 |
|
591 | |||
592 | # we might change the user data with detach/delete, make sure |
|
592 | # we might change the user data with detach/delete, make sure | |
593 | # the object is marked as expired before actually deleting ! |
|
593 | # the object is marked as expired before actually deleting ! | |
594 | self.sa.expire(user) |
|
594 | self.sa.expire(user) | |
595 | self.sa.delete(user) |
|
595 | self.sa.delete(user) | |
596 |
|
596 | |||
597 | log_delete_user(deleted_by=cur_user, **user_data) |
|
597 | log_delete_user(deleted_by=cur_user, **user_data) | |
598 | except Exception: |
|
598 | except Exception: | |
599 | log.error(traceback.format_exc()) |
|
599 | log.error(traceback.format_exc()) | |
600 | raise |
|
600 | raise | |
601 |
|
601 | |||
602 | def reset_password_link(self, data, pwd_reset_url): |
|
602 | def reset_password_link(self, data, pwd_reset_url): | |
603 | from rhodecode.lib.celerylib import tasks, run_task |
|
603 | from rhodecode.lib.celerylib import tasks, run_task | |
604 | from rhodecode.model.notification import EmailNotificationModel |
|
604 | from rhodecode.model.notification import EmailNotificationModel | |
605 | user_email = data['email'] |
|
605 | user_email = data['email'] | |
606 | try: |
|
606 | try: | |
607 | user = User.get_by_email(user_email) |
|
607 | user = User.get_by_email(user_email) | |
608 | if user: |
|
608 | if user: | |
609 | log.debug('password reset user found %s', user) |
|
609 | log.debug('password reset user found %s', user) | |
610 |
|
610 | |||
611 | email_kwargs = { |
|
611 | email_kwargs = { | |
612 | 'password_reset_url': pwd_reset_url, |
|
612 | 'password_reset_url': pwd_reset_url, | |
613 | 'user': user, |
|
613 | 'user': user, | |
614 | 'email': user_email, |
|
614 | 'email': user_email, | |
615 | 'date': datetime.datetime.now() |
|
615 | 'date': datetime.datetime.now(), | |
|
616 | 'first_admin_email': User.get_first_super_admin().email | |||
616 | } |
|
617 | } | |
617 |
|
618 | |||
618 | (subject, headers, email_body, |
|
619 | (subject, headers, email_body, | |
619 | email_body_plaintext) = EmailNotificationModel().render_email( |
|
620 | email_body_plaintext) = EmailNotificationModel().render_email( | |
620 | EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs) |
|
621 | EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs) | |
621 |
|
622 | |||
622 | recipients = [user_email] |
|
623 | recipients = [user_email] | |
623 |
|
624 | |||
624 | action_logger_generic( |
|
625 | action_logger_generic( | |
625 | 'sending password reset email to user: {}'.format( |
|
626 | 'sending password reset email to user: {}'.format( | |
626 | user), namespace='security.password_reset') |
|
627 | user), namespace='security.password_reset') | |
627 |
|
628 | |||
628 | run_task(tasks.send_email, recipients, subject, |
|
629 | run_task(tasks.send_email, recipients, subject, | |
629 | email_body_plaintext, email_body) |
|
630 | email_body_plaintext, email_body) | |
630 |
|
631 | |||
631 | else: |
|
632 | else: | |
632 | log.debug("password reset email %s not found", user_email) |
|
633 | log.debug("password reset email %s not found", user_email) | |
633 | except Exception: |
|
634 | except Exception: | |
634 | log.error(traceback.format_exc()) |
|
635 | log.error(traceback.format_exc()) | |
635 | return False |
|
636 | return False | |
636 |
|
637 | |||
637 | return True |
|
638 | return True | |
638 |
|
639 | |||
639 | def reset_password(self, data): |
|
640 | def reset_password(self, data): | |
640 | from rhodecode.lib.celerylib import tasks, run_task |
|
641 | from rhodecode.lib.celerylib import tasks, run_task | |
641 | from rhodecode.model.notification import EmailNotificationModel |
|
642 | from rhodecode.model.notification import EmailNotificationModel | |
642 | from rhodecode.lib import auth |
|
643 | from rhodecode.lib import auth | |
643 | user_email = data['email'] |
|
644 | user_email = data['email'] | |
644 | pre_db = True |
|
645 | pre_db = True | |
645 | try: |
|
646 | try: | |
646 | user = User.get_by_email(user_email) |
|
647 | user = User.get_by_email(user_email) | |
647 | new_passwd = auth.PasswordGenerator().gen_password( |
|
648 | new_passwd = auth.PasswordGenerator().gen_password( | |
648 | 12, auth.PasswordGenerator.ALPHABETS_BIG_SMALL) |
|
649 | 12, auth.PasswordGenerator.ALPHABETS_BIG_SMALL) | |
649 | if user: |
|
650 | if user: | |
650 | user.password = auth.get_crypt_password(new_passwd) |
|
651 | user.password = auth.get_crypt_password(new_passwd) | |
651 | # also force this user to reset their password |
|
652 | # also force this user to reset their password | |
652 | user.update_userdata(force_password_change=True) |
|
653 | user.update_userdata(force_password_change=True) | |
653 |
|
654 | |||
654 | Session().add(user) |
|
655 | Session().add(user) | |
655 |
|
656 | |||
656 | # now delete the token in question |
|
657 | # now delete the token in question | |
657 | UserApiKeys = AuthTokenModel.cls |
|
658 | UserApiKeys = AuthTokenModel.cls | |
658 | UserApiKeys().query().filter( |
|
659 | UserApiKeys().query().filter( | |
659 | UserApiKeys.api_key == data['token']).delete() |
|
660 | UserApiKeys.api_key == data['token']).delete() | |
660 |
|
661 | |||
661 | Session().commit() |
|
662 | Session().commit() | |
662 | log.info('successfully reset password for `%s`', user_email) |
|
663 | log.info('successfully reset password for `%s`', user_email) | |
663 |
|
664 | |||
664 | if new_passwd is None: |
|
665 | if new_passwd is None: | |
665 | raise Exception('unable to generate new password') |
|
666 | raise Exception('unable to generate new password') | |
666 |
|
667 | |||
667 | pre_db = False |
|
668 | pre_db = False | |
668 |
|
669 | |||
669 | email_kwargs = { |
|
670 | email_kwargs = { | |
670 | 'new_password': new_passwd, |
|
671 | 'new_password': new_passwd, | |
671 | 'user': user, |
|
672 | 'user': user, | |
672 | 'email': user_email, |
|
673 | 'email': user_email, | |
673 | 'date': datetime.datetime.now() |
|
674 | 'date': datetime.datetime.now(), | |
|
675 | 'first_admin_email': User.get_first_super_admin().email | |||
674 | } |
|
676 | } | |
675 |
|
677 | |||
676 | (subject, headers, email_body, |
|
678 | (subject, headers, email_body, | |
677 | email_body_plaintext) = EmailNotificationModel().render_email( |
|
679 | email_body_plaintext) = EmailNotificationModel().render_email( | |
678 | EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION, |
|
680 | EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION, | |
679 | **email_kwargs) |
|
681 | **email_kwargs) | |
680 |
|
682 | |||
681 | recipients = [user_email] |
|
683 | recipients = [user_email] | |
682 |
|
684 | |||
683 | action_logger_generic( |
|
685 | action_logger_generic( | |
684 | 'sent new password to user: {} with email: {}'.format( |
|
686 | 'sent new password to user: {} with email: {}'.format( | |
685 | user, user_email), namespace='security.password_reset') |
|
687 | user, user_email), namespace='security.password_reset') | |
686 |
|
688 | |||
687 | run_task(tasks.send_email, recipients, subject, |
|
689 | run_task(tasks.send_email, recipients, subject, | |
688 | email_body_plaintext, email_body) |
|
690 | email_body_plaintext, email_body) | |
689 |
|
691 | |||
690 | except Exception: |
|
692 | except Exception: | |
691 | log.error('Failed to update user password') |
|
693 | log.error('Failed to update user password') | |
692 | log.error(traceback.format_exc()) |
|
694 | log.error(traceback.format_exc()) | |
693 | if pre_db: |
|
695 | if pre_db: | |
694 | # we roll back only if the local db changes fail. Once it goes into |
|
696 | # we roll back only if the local db changes fail. Once it goes into | |
695 | # run_task, we're past the rollback point and this wouldn't work |
|
697 | # run_task, we're past the rollback point and this wouldn't work | |
696 | Session().rollback() |
|
698 | Session().rollback() | |
697 |
|
699 | |||
698 | return True |
|
700 | return True | |
699 |
|
701 | |||
700 | def fill_data(self, auth_user, user_id=None, api_key=None, username=None): |
|
702 | def fill_data(self, auth_user, user_id=None, api_key=None, username=None): | |
701 | """ |
|
703 | """ | |
702 | Fetches auth_user by user_id, or api_key if present. |
|
704 | Fetches auth_user by user_id, or api_key if present. | |
703 | Fills auth_user attributes with those taken from the database. |
|
705 | Fills auth_user attributes with those taken from the database. | |
704 | Additionally sets is_authenticated to False if the lookup fails |
|
706 | Additionally sets is_authenticated to False if the lookup fails | |
705 | or the user is not present in the database |
|
707 | or the user is not present in the database | |
706 |
|
708 | |||
707 | :param auth_user: instance of user to set attributes |
|
709 | :param auth_user: instance of user to set attributes | |
708 | :param user_id: user id to fetch by |
|
710 | :param user_id: user id to fetch by | |
709 | :param api_key: api key to fetch by |
|
711 | :param api_key: api key to fetch by | |
710 | :param username: username to fetch by |
|
712 | :param username: username to fetch by | |
711 | """ |
|
713 | """ | |
712 | def token_obfuscate(token): |
|
714 | def token_obfuscate(token): | |
713 | if token: |
|
715 | if token: | |
714 | return token[:4] + "****" |
|
716 | return token[:4] + "****" | |
715 |
|
717 | |||
716 | if user_id is None and api_key is None and username is None: |
|
718 | if user_id is None and api_key is None and username is None: | |
717 | raise Exception('You need to pass user_id, api_key or username') |
|
719 | raise Exception('You need to pass user_id, api_key or username') | |
718 |
|
720 | |||
719 | log.debug( |
|
721 | log.debug( | |
720 | 'AuthUser: fill data execution based on: ' |
|
722 | 'AuthUser: fill data execution based on: ' | |
721 | 'user_id:%s api_key:%s username:%s', user_id, api_key, username) |
|
723 | 'user_id:%s api_key:%s username:%s', user_id, api_key, username) | |
722 | try: |
|
724 | try: | |
723 | dbuser = None |
|
725 | dbuser = None | |
724 | if user_id: |
|
726 | if user_id: | |
725 | dbuser = self.get(user_id) |
|
727 | dbuser = self.get(user_id) | |
726 | elif api_key: |
|
728 | elif api_key: | |
727 | dbuser = self.get_by_auth_token(api_key) |
|
729 | dbuser = self.get_by_auth_token(api_key) | |
728 | elif username: |
|
730 | elif username: | |
729 | dbuser = self.get_by_username(username) |
|
731 | dbuser = self.get_by_username(username) | |
730 |
|
732 | |||
731 | if not dbuser: |
|
733 | if not dbuser: | |
732 | log.warning( |
|
734 | log.warning( | |
733 | 'Unable to lookup user by id:%s api_key:%s username:%s', |
|
735 | 'Unable to lookup user by id:%s api_key:%s username:%s', | |
734 | user_id, token_obfuscate(api_key), username) |
|
736 | user_id, token_obfuscate(api_key), username) | |
735 | return False |
|
737 | return False | |
736 | if not dbuser.active: |
|
738 | if not dbuser.active: | |
737 | log.debug('User `%s:%s` is inactive, skipping fill data', |
|
739 | log.debug('User `%s:%s` is inactive, skipping fill data', | |
738 | username, user_id) |
|
740 | username, user_id) | |
739 | return False |
|
741 | return False | |
740 |
|
742 | |||
741 | log.debug('AuthUser: filling found user:%s data', dbuser) |
|
743 | log.debug('AuthUser: filling found user:%s data', dbuser) | |
742 |
|
744 | |||
743 | attrs = { |
|
745 | attrs = { | |
744 | 'user_id': dbuser.user_id, |
|
746 | 'user_id': dbuser.user_id, | |
745 | 'username': dbuser.username, |
|
747 | 'username': dbuser.username, | |
746 | 'name': dbuser.name, |
|
748 | 'name': dbuser.name, | |
747 | 'first_name': dbuser.first_name, |
|
749 | 'first_name': dbuser.first_name, | |
748 | 'firstname': dbuser.firstname, |
|
750 | 'firstname': dbuser.firstname, | |
749 | 'last_name': dbuser.last_name, |
|
751 | 'last_name': dbuser.last_name, | |
750 | 'lastname': dbuser.lastname, |
|
752 | 'lastname': dbuser.lastname, | |
751 | 'admin': dbuser.admin, |
|
753 | 'admin': dbuser.admin, | |
752 | 'active': dbuser.active, |
|
754 | 'active': dbuser.active, | |
753 |
|
755 | |||
754 | 'email': dbuser.email, |
|
756 | 'email': dbuser.email, | |
755 | 'emails': dbuser.emails_cached(), |
|
757 | 'emails': dbuser.emails_cached(), | |
756 | 'short_contact': dbuser.short_contact, |
|
758 | 'short_contact': dbuser.short_contact, | |
757 | 'full_contact': dbuser.full_contact, |
|
759 | 'full_contact': dbuser.full_contact, | |
758 | 'full_name': dbuser.full_name, |
|
760 | 'full_name': dbuser.full_name, | |
759 | 'full_name_or_username': dbuser.full_name_or_username, |
|
761 | 'full_name_or_username': dbuser.full_name_or_username, | |
760 |
|
762 | |||
761 | '_api_key': dbuser._api_key, |
|
763 | '_api_key': dbuser._api_key, | |
762 | '_user_data': dbuser._user_data, |
|
764 | '_user_data': dbuser._user_data, | |
763 |
|
765 | |||
764 | 'created_on': dbuser.created_on, |
|
766 | 'created_on': dbuser.created_on, | |
765 | 'extern_name': dbuser.extern_name, |
|
767 | 'extern_name': dbuser.extern_name, | |
766 | 'extern_type': dbuser.extern_type, |
|
768 | 'extern_type': dbuser.extern_type, | |
767 |
|
769 | |||
768 | 'inherit_default_permissions': dbuser.inherit_default_permissions, |
|
770 | 'inherit_default_permissions': dbuser.inherit_default_permissions, | |
769 |
|
771 | |||
770 | 'language': dbuser.language, |
|
772 | 'language': dbuser.language, | |
771 | 'last_activity': dbuser.last_activity, |
|
773 | 'last_activity': dbuser.last_activity, | |
772 | 'last_login': dbuser.last_login, |
|
774 | 'last_login': dbuser.last_login, | |
773 | 'password': dbuser.password, |
|
775 | 'password': dbuser.password, | |
774 | } |
|
776 | } | |
775 | auth_user.__dict__.update(attrs) |
|
777 | auth_user.__dict__.update(attrs) | |
776 | except Exception: |
|
778 | except Exception: | |
777 | log.error(traceback.format_exc()) |
|
779 | log.error(traceback.format_exc()) | |
778 | auth_user.is_authenticated = False |
|
780 | auth_user.is_authenticated = False | |
779 | return False |
|
781 | return False | |
780 |
|
782 | |||
781 | return True |
|
783 | return True | |
782 |
|
784 | |||
783 | def has_perm(self, user, perm): |
|
785 | def has_perm(self, user, perm): | |
784 | perm = self._get_perm(perm) |
|
786 | perm = self._get_perm(perm) | |
785 | user = self._get_user(user) |
|
787 | user = self._get_user(user) | |
786 |
|
788 | |||
787 | return UserToPerm.query().filter(UserToPerm.user == user)\ |
|
789 | return UserToPerm.query().filter(UserToPerm.user == user)\ | |
788 | .filter(UserToPerm.permission == perm).scalar() is not None |
|
790 | .filter(UserToPerm.permission == perm).scalar() is not None | |
789 |
|
791 | |||
790 | def grant_perm(self, user, perm): |
|
792 | def grant_perm(self, user, perm): | |
791 | """ |
|
793 | """ | |
792 | Grant user global permissions |
|
794 | Grant user global permissions | |
793 |
|
795 | |||
794 | :param user: |
|
796 | :param user: | |
795 | :param perm: |
|
797 | :param perm: | |
796 | """ |
|
798 | """ | |
797 | user = self._get_user(user) |
|
799 | user = self._get_user(user) | |
798 | perm = self._get_perm(perm) |
|
800 | perm = self._get_perm(perm) | |
799 | # if this permission is already granted skip it |
|
801 | # if this permission is already granted skip it | |
800 | _perm = UserToPerm.query()\ |
|
802 | _perm = UserToPerm.query()\ | |
801 | .filter(UserToPerm.user == user)\ |
|
803 | .filter(UserToPerm.user == user)\ | |
802 | .filter(UserToPerm.permission == perm)\ |
|
804 | .filter(UserToPerm.permission == perm)\ | |
803 | .scalar() |
|
805 | .scalar() | |
804 | if _perm: |
|
806 | if _perm: | |
805 | return |
|
807 | return | |
806 | new = UserToPerm() |
|
808 | new = UserToPerm() | |
807 | new.user = user |
|
809 | new.user = user | |
808 | new.permission = perm |
|
810 | new.permission = perm | |
809 | self.sa.add(new) |
|
811 | self.sa.add(new) | |
810 | return new |
|
812 | return new | |
811 |
|
813 | |||
812 | def revoke_perm(self, user, perm): |
|
814 | def revoke_perm(self, user, perm): | |
813 | """ |
|
815 | """ | |
814 | Revoke users global permissions |
|
816 | Revoke users global permissions | |
815 |
|
817 | |||
816 | :param user: |
|
818 | :param user: | |
817 | :param perm: |
|
819 | :param perm: | |
818 | """ |
|
820 | """ | |
819 | user = self._get_user(user) |
|
821 | user = self._get_user(user) | |
820 | perm = self._get_perm(perm) |
|
822 | perm = self._get_perm(perm) | |
821 |
|
823 | |||
822 | obj = UserToPerm.query()\ |
|
824 | obj = UserToPerm.query()\ | |
823 | .filter(UserToPerm.user == user)\ |
|
825 | .filter(UserToPerm.user == user)\ | |
824 | .filter(UserToPerm.permission == perm)\ |
|
826 | .filter(UserToPerm.permission == perm)\ | |
825 | .scalar() |
|
827 | .scalar() | |
826 | if obj: |
|
828 | if obj: | |
827 | self.sa.delete(obj) |
|
829 | self.sa.delete(obj) | |
828 |
|
830 | |||
829 | def add_extra_email(self, user, email): |
|
831 | def add_extra_email(self, user, email): | |
830 | """ |
|
832 | """ | |
831 | Adds email address to UserEmailMap |
|
833 | Adds email address to UserEmailMap | |
832 |
|
834 | |||
833 | :param user: |
|
835 | :param user: | |
834 | :param email: |
|
836 | :param email: | |
835 | """ |
|
837 | """ | |
836 |
|
838 | |||
837 | user = self._get_user(user) |
|
839 | user = self._get_user(user) | |
838 |
|
840 | |||
839 | obj = UserEmailMap() |
|
841 | obj = UserEmailMap() | |
840 | obj.user = user |
|
842 | obj.user = user | |
841 | obj.email = email |
|
843 | obj.email = email | |
842 | self.sa.add(obj) |
|
844 | self.sa.add(obj) | |
843 | return obj |
|
845 | return obj | |
844 |
|
846 | |||
845 | def delete_extra_email(self, user, email_id): |
|
847 | def delete_extra_email(self, user, email_id): | |
846 | """ |
|
848 | """ | |
847 | Removes email address from UserEmailMap |
|
849 | Removes email address from UserEmailMap | |
848 |
|
850 | |||
849 | :param user: |
|
851 | :param user: | |
850 | :param email_id: |
|
852 | :param email_id: | |
851 | """ |
|
853 | """ | |
852 | user = self._get_user(user) |
|
854 | user = self._get_user(user) | |
853 | obj = UserEmailMap.query().get(email_id) |
|
855 | obj = UserEmailMap.query().get(email_id) | |
854 | if obj and obj.user_id == user.user_id: |
|
856 | if obj and obj.user_id == user.user_id: | |
855 | self.sa.delete(obj) |
|
857 | self.sa.delete(obj) | |
856 |
|
858 | |||
857 | def parse_ip_range(self, ip_range): |
|
859 | def parse_ip_range(self, ip_range): | |
858 | ip_list = [] |
|
860 | ip_list = [] | |
859 |
|
861 | |||
860 | def make_unique(value): |
|
862 | def make_unique(value): | |
861 | seen = [] |
|
863 | seen = [] | |
862 | return [c for c in value if not (c in seen or seen.append(c))] |
|
864 | return [c for c in value if not (c in seen or seen.append(c))] | |
863 |
|
865 | |||
864 | # first split by commas |
|
866 | # first split by commas | |
865 | for ip_range in ip_range.split(','): |
|
867 | for ip_range in ip_range.split(','): | |
866 | if not ip_range: |
|
868 | if not ip_range: | |
867 | continue |
|
869 | continue | |
868 | ip_range = ip_range.strip() |
|
870 | ip_range = ip_range.strip() | |
869 | if '-' in ip_range: |
|
871 | if '-' in ip_range: | |
870 | start_ip, end_ip = ip_range.split('-', 1) |
|
872 | start_ip, end_ip = ip_range.split('-', 1) | |
871 | start_ip = ipaddress.ip_address(safe_unicode(start_ip.strip())) |
|
873 | start_ip = ipaddress.ip_address(safe_unicode(start_ip.strip())) | |
872 | end_ip = ipaddress.ip_address(safe_unicode(end_ip.strip())) |
|
874 | end_ip = ipaddress.ip_address(safe_unicode(end_ip.strip())) | |
873 | parsed_ip_range = [] |
|
875 | parsed_ip_range = [] | |
874 |
|
876 | |||
875 | for index in xrange(int(start_ip), int(end_ip) + 1): |
|
877 | for index in xrange(int(start_ip), int(end_ip) + 1): | |
876 | new_ip = ipaddress.ip_address(index) |
|
878 | new_ip = ipaddress.ip_address(index) | |
877 | parsed_ip_range.append(str(new_ip)) |
|
879 | parsed_ip_range.append(str(new_ip)) | |
878 | ip_list.extend(parsed_ip_range) |
|
880 | ip_list.extend(parsed_ip_range) | |
879 | else: |
|
881 | else: | |
880 | ip_list.append(ip_range) |
|
882 | ip_list.append(ip_range) | |
881 |
|
883 | |||
882 | return make_unique(ip_list) |
|
884 | return make_unique(ip_list) | |
883 |
|
885 | |||
884 | def add_extra_ip(self, user, ip, description=None): |
|
886 | def add_extra_ip(self, user, ip, description=None): | |
885 | """ |
|
887 | """ | |
886 | Adds ip address to UserIpMap |
|
888 | Adds ip address to UserIpMap | |
887 |
|
889 | |||
888 | :param user: |
|
890 | :param user: | |
889 | :param ip: |
|
891 | :param ip: | |
890 | """ |
|
892 | """ | |
891 |
|
893 | |||
892 | user = self._get_user(user) |
|
894 | user = self._get_user(user) | |
893 | obj = UserIpMap() |
|
895 | obj = UserIpMap() | |
894 | obj.user = user |
|
896 | obj.user = user | |
895 | obj.ip_addr = ip |
|
897 | obj.ip_addr = ip | |
896 | obj.description = description |
|
898 | obj.description = description | |
897 | self.sa.add(obj) |
|
899 | self.sa.add(obj) | |
898 | return obj |
|
900 | return obj | |
899 |
|
901 | |||
900 | auth_token_role = AuthTokenModel.cls |
|
902 | auth_token_role = AuthTokenModel.cls | |
901 |
|
903 | |||
902 | def add_auth_token(self, user, lifetime_minutes, role, description=u'', |
|
904 | def add_auth_token(self, user, lifetime_minutes, role, description=u'', | |
903 | scope_callback=None): |
|
905 | scope_callback=None): | |
904 | """ |
|
906 | """ | |
905 | Add AuthToken for user. |
|
907 | Add AuthToken for user. | |
906 |
|
908 | |||
907 | :param user: username/user_id |
|
909 | :param user: username/user_id | |
908 | :param lifetime_minutes: in minutes the lifetime for token, -1 equals no limit |
|
910 | :param lifetime_minutes: in minutes the lifetime for token, -1 equals no limit | |
909 | :param role: one of AuthTokenModel.cls.ROLE_* |
|
911 | :param role: one of AuthTokenModel.cls.ROLE_* | |
910 | :param description: optional string description |
|
912 | :param description: optional string description | |
911 | """ |
|
913 | """ | |
912 |
|
914 | |||
913 | token = AuthTokenModel().create( |
|
915 | token = AuthTokenModel().create( | |
914 | user, description, lifetime_minutes, role) |
|
916 | user, description, lifetime_minutes, role) | |
915 | if scope_callback and callable(scope_callback): |
|
917 | if scope_callback and callable(scope_callback): | |
916 | # call the callback if we provide, used to attach scope for EE edition |
|
918 | # call the callback if we provide, used to attach scope for EE edition | |
917 | scope_callback(token) |
|
919 | scope_callback(token) | |
918 | return token |
|
920 | return token | |
919 |
|
921 | |||
920 | def delete_extra_ip(self, user, ip_id): |
|
922 | def delete_extra_ip(self, user, ip_id): | |
921 | """ |
|
923 | """ | |
922 | Removes ip address from UserIpMap |
|
924 | Removes ip address from UserIpMap | |
923 |
|
925 | |||
924 | :param user: |
|
926 | :param user: | |
925 | :param ip_id: |
|
927 | :param ip_id: | |
926 | """ |
|
928 | """ | |
927 | user = self._get_user(user) |
|
929 | user = self._get_user(user) | |
928 | obj = UserIpMap.query().get(ip_id) |
|
930 | obj = UserIpMap.query().get(ip_id) | |
929 | if obj and obj.user_id == user.user_id: |
|
931 | if obj and obj.user_id == user.user_id: | |
930 | self.sa.delete(obj) |
|
932 | self.sa.delete(obj) | |
931 |
|
933 | |||
932 | def get_accounts_in_creation_order(self, current_user=None): |
|
934 | def get_accounts_in_creation_order(self, current_user=None): | |
933 | """ |
|
935 | """ | |
934 | Get accounts in order of creation, for deactivation under license limits |
|
936 | Get accounts in order of creation, for deactivation under license limits | |
935 |
|
937 | |||
936 | pick the currently logged in user, and append it to the list in position 0 |
|
938 | pick the currently logged in user, and append it to the list in position 0 | |
937 | pick all super-admins in order of creation date and add them to the list |
|
939 | pick all super-admins in order of creation date and add them to the list | |
938 | pick all other accounts in order of creation and add them to the list. |
|
940 | pick all other accounts in order of creation and add them to the list. | |
939 |
|
941 | |||
940 | Based on that list, the last accounts can be disabled, as they were |
|
942 | Based on that list, the last accounts can be disabled, as they were | |
941 | created most recently and the list excludes the super admins as well |
|
943 | created most recently and the list excludes the super admins as well | |
942 | as the current user. |
|
944 | as the current user. | |
943 |
|
945 | |||
944 | :param current_user: optionally current user running this operation |
|
946 | :param current_user: optionally current user running this operation | |
945 | """ |
|
947 | """ | |
946 |
|
948 | |||
947 | if not current_user: |
|
949 | if not current_user: | |
948 | current_user = get_current_rhodecode_user() |
|
950 | current_user = get_current_rhodecode_user() | |
949 | active_super_admins = [ |
|
951 | active_super_admins = [ | |
950 | x.user_id for x in User.query() |
|
952 | x.user_id for x in User.query() | |
951 | .filter(User.user_id != current_user.user_id) |
|
953 | .filter(User.user_id != current_user.user_id) | |
952 | .filter(User.active == true()) |
|
954 | .filter(User.active == true()) | |
953 | .filter(User.admin == true()) |
|
955 | .filter(User.admin == true()) | |
954 | .order_by(User.created_on.asc())] |
|
956 | .order_by(User.created_on.asc())] | |
955 |
|
957 | |||
956 | active_regular_users = [ |
|
958 | active_regular_users = [ | |
957 | x.user_id for x in User.query() |
|
959 | x.user_id for x in User.query() | |
958 | .filter(User.user_id != current_user.user_id) |
|
960 | .filter(User.user_id != current_user.user_id) | |
959 | .filter(User.active == true()) |
|
961 | .filter(User.active == true()) | |
960 | .filter(User.admin == false()) |
|
962 | .filter(User.admin == false()) | |
961 | .order_by(User.created_on.asc())] |
|
963 | .order_by(User.created_on.asc())] | |
962 |
|
964 | |||
963 | list_of_accounts = [current_user.user_id] |
|
965 | list_of_accounts = [current_user.user_id] | |
964 | list_of_accounts += active_super_admins |
|
966 | list_of_accounts += active_super_admins | |
965 | list_of_accounts += active_regular_users |
|
967 | list_of_accounts += active_regular_users | |
966 |
|
968 | |||
967 | return list_of_accounts |
|
969 | return list_of_accounts | |
968 |
|
970 | |||
969 | def deactivate_last_users(self, expected_users, current_user=None): |
|
971 | def deactivate_last_users(self, expected_users, current_user=None): | |
970 | """ |
|
972 | """ | |
971 | Deactivate accounts that are over the license limits. |
|
973 | Deactivate accounts that are over the license limits. | |
972 | Algorithm of which accounts to disabled is based on the formula: |
|
974 | Algorithm of which accounts to disabled is based on the formula: | |
973 |
|
975 | |||
974 | Get current user, then super admins in creation order, then regular |
|
976 | Get current user, then super admins in creation order, then regular | |
975 | active users in creation order. |
|
977 | active users in creation order. | |
976 |
|
978 | |||
977 | Using that list we mark all accounts from the end of it as inactive. |
|
979 | Using that list we mark all accounts from the end of it as inactive. | |
978 | This way we block only the most recently created accounts. |
|
980 | This way we block only the most recently created accounts. | |
979 |
|
981 | |||
980 | :param expected_users: number of expected users; we deactivate |
|
982 | :param expected_users: number of expected users; we deactivate | |
981 | the accounts at the end of the list beyond that number |
|
983 | the accounts at the end of the list beyond that number | |
982 | """ |
|
984 | """ | |
983 |
|
985 | |||
984 | list_of_accounts = self.get_accounts_in_creation_order( |
|
986 | list_of_accounts = self.get_accounts_in_creation_order( | |
985 | current_user=current_user) |
|
987 | current_user=current_user) | |
986 |
|
988 | |||
987 | for acc_id in list_of_accounts[expected_users + 1:]: |
|
989 | for acc_id in list_of_accounts[expected_users + 1:]: | |
988 | user = User.get(acc_id) |
|
990 | user = User.get(acc_id) | |
989 | log.info('Deactivating account %s for license unlock', user) |
|
991 | log.info('Deactivating account %s for license unlock', user) | |
990 | user.active = False |
|
992 | user.active = False | |
991 | Session().add(user) |
|
993 | Session().add(user) | |
992 | Session().commit() |
|
994 | Session().commit() | |
993 |
|
995 | |||
994 | return |
|
996 | return | |
995 |
|
997 | |||
996 | def get_user_log(self, user, filter_term): |
|
998 | def get_user_log(self, user, filter_term): | |
997 | user_log = UserLog.query()\ |
|
999 | user_log = UserLog.query()\ | |
998 | .filter(or_(UserLog.user_id == user.user_id, |
|
1000 | .filter(or_(UserLog.user_id == user.user_id, | |
999 | UserLog.username == user.username))\ |
|
1001 | UserLog.username == user.username))\ | |
1000 | .options(joinedload(UserLog.user))\ |
|
1002 | .options(joinedload(UserLog.user))\ | |
1001 | .options(joinedload(UserLog.repository))\ |
|
1003 | .options(joinedload(UserLog.repository))\ | |
1002 | .order_by(UserLog.action_date.desc()) |
|
1004 | .order_by(UserLog.action_date.desc()) | |
1003 |
|
1005 | |||
1004 | user_log = user_log_filter(user_log, filter_term) |
|
1006 | user_log = user_log_filter(user_log, filter_term) | |
1005 | return user_log |
|
1007 | return user_log |
@@ -1,385 +1,387 b'' | |||||
1 |
|
1 | |||
2 | /****************************************************************************** |
|
2 | /****************************************************************************** | |
3 | * * |
|
3 | * * | |
4 | * DO NOT CHANGE THIS FILE MANUALLY * |
|
4 | * DO NOT CHANGE THIS FILE MANUALLY * | |
5 | * * |
|
5 | * * | |
6 | * * |
|
6 | * * | |
7 | * This file is automatically generated when the app starts up with * |
|
7 | * This file is automatically generated when the app starts up with * | |
8 | * generate_js_files = true * |
|
8 | * generate_js_files = true * | |
9 | * * |
|
9 | * * | |
10 | * To add a route here pass jsroute=True to the route definition in the app * |
|
10 | * To add a route here pass jsroute=True to the route definition in the app * | |
11 | * * |
|
11 | * * | |
12 | ******************************************************************************/ |
|
12 | ******************************************************************************/ | |
13 | function registerRCRoutes() { |
|
13 | function registerRCRoutes() { | |
14 | // routes registration |
|
14 | // routes registration | |
15 | pyroutes.register('favicon', '/favicon.ico', []); |
|
15 | pyroutes.register('favicon', '/favicon.ico', []); | |
16 | pyroutes.register('robots', '/robots.txt', []); |
|
16 | pyroutes.register('robots', '/robots.txt', []); | |
17 | pyroutes.register('auth_home', '/_admin/auth*traverse', []); |
|
17 | pyroutes.register('auth_home', '/_admin/auth*traverse', []); | |
18 | pyroutes.register('global_integrations_new', '/_admin/integrations/new', []); |
|
18 | pyroutes.register('global_integrations_new', '/_admin/integrations/new', []); | |
19 | pyroutes.register('global_integrations_home', '/_admin/integrations', []); |
|
19 | pyroutes.register('global_integrations_home', '/_admin/integrations', []); | |
20 | pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']); |
|
20 | pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']); | |
21 | pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']); |
|
21 | pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']); | |
22 | pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']); |
|
22 | pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']); | |
23 | pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/_settings/integrations', ['repo_group_name']); |
|
23 | pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/_settings/integrations', ['repo_group_name']); | |
24 | pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/_settings/integrations/new', ['repo_group_name']); |
|
24 | pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/_settings/integrations/new', ['repo_group_name']); | |
25 | pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/_settings/integrations/%(integration)s', ['repo_group_name', 'integration']); |
|
25 | pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/_settings/integrations/%(integration)s', ['repo_group_name', 'integration']); | |
26 | pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/_settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']); |
|
26 | pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/_settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']); | |
27 | pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/_settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']); |
|
27 | pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/_settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']); | |
28 | pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']); |
|
28 | pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']); | |
29 | pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']); |
|
29 | pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']); | |
30 | pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']); |
|
30 | pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']); | |
31 | pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']); |
|
31 | pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']); | |
32 | pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']); |
|
32 | pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']); | |
33 | pyroutes.register('hovercard_user', '/_hovercard/user/%(user_id)s', ['user_id']); |
|
33 | pyroutes.register('hovercard_user', '/_hovercard/user/%(user_id)s', ['user_id']); | |
34 | pyroutes.register('hovercard_user_group', '/_hovercard/user_group/%(user_group_id)s', ['user_group_id']); |
|
34 | pyroutes.register('hovercard_user_group', '/_hovercard/user_group/%(user_group_id)s', ['user_group_id']); | |
35 | pyroutes.register('hovercard_repo_commit', '/_hovercard/commit/%(repo_name)s/%(commit_id)s', ['repo_name', 'commit_id']); |
|
35 | pyroutes.register('hovercard_repo_commit', '/_hovercard/commit/%(repo_name)s/%(commit_id)s', ['repo_name', 'commit_id']); | |
36 | pyroutes.register('ops_ping', '/_admin/ops/ping', []); |
|
36 | pyroutes.register('ops_ping', '/_admin/ops/ping', []); | |
37 | pyroutes.register('ops_error_test', '/_admin/ops/error', []); |
|
37 | pyroutes.register('ops_error_test', '/_admin/ops/error', []); | |
38 | pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []); |
|
38 | pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []); | |
39 | pyroutes.register('ops_ping_legacy', '/_admin/ping', []); |
|
39 | pyroutes.register('ops_ping_legacy', '/_admin/ping', []); | |
40 | pyroutes.register('ops_error_test_legacy', '/_admin/error_test', []); |
|
40 | pyroutes.register('ops_error_test_legacy', '/_admin/error_test', []); | |
41 | pyroutes.register('admin_home', '/_admin', []); |
|
41 | pyroutes.register('admin_home', '/_admin', []); | |
42 | pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []); |
|
42 | pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []); | |
43 | pyroutes.register('admin_audit_log_entry', '/_admin/audit_logs/%(audit_log_id)s', ['audit_log_id']); |
|
43 | pyroutes.register('admin_audit_log_entry', '/_admin/audit_logs/%(audit_log_id)s', ['audit_log_id']); | |
44 | pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']); |
|
44 | pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']); | |
45 | pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']); |
|
45 | pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']); | |
46 | pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']); |
|
46 | pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']); | |
47 | pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []); |
|
47 | pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []); | |
48 | pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []); |
|
48 | pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []); | |
49 | pyroutes.register('admin_settings_system', '/_admin/settings/system', []); |
|
49 | pyroutes.register('admin_settings_system', '/_admin/settings/system', []); | |
50 | pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []); |
|
50 | pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []); | |
51 | pyroutes.register('admin_settings_exception_tracker', '/_admin/settings/exceptions', []); |
|
51 | pyroutes.register('admin_settings_exception_tracker', '/_admin/settings/exceptions', []); | |
52 | pyroutes.register('admin_settings_exception_tracker_delete_all', '/_admin/settings/exceptions/delete', []); |
|
52 | pyroutes.register('admin_settings_exception_tracker_delete_all', '/_admin/settings/exceptions/delete', []); | |
53 | pyroutes.register('admin_settings_exception_tracker_show', '/_admin/settings/exceptions/%(exception_id)s', ['exception_id']); |
|
53 | pyroutes.register('admin_settings_exception_tracker_show', '/_admin/settings/exceptions/%(exception_id)s', ['exception_id']); | |
54 | pyroutes.register('admin_settings_exception_tracker_delete', '/_admin/settings/exceptions/%(exception_id)s/delete', ['exception_id']); |
|
54 | pyroutes.register('admin_settings_exception_tracker_delete', '/_admin/settings/exceptions/%(exception_id)s/delete', ['exception_id']); | |
55 | pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []); |
|
55 | pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []); | |
56 | pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []); |
|
56 | pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []); | |
57 | pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []); |
|
57 | pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []); | |
58 | pyroutes.register('admin_settings_process_management_data', '/_admin/settings/process_management/data', []); |
|
58 | pyroutes.register('admin_settings_process_management_data', '/_admin/settings/process_management/data', []); | |
59 | pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []); |
|
59 | pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []); | |
60 | pyroutes.register('admin_settings_process_management_master_signal', '/_admin/settings/process_management/master_signal', []); |
|
60 | pyroutes.register('admin_settings_process_management_master_signal', '/_admin/settings/process_management/master_signal', []); | |
61 | pyroutes.register('admin_defaults_repositories', '/_admin/defaults/repositories', []); |
|
61 | pyroutes.register('admin_defaults_repositories', '/_admin/defaults/repositories', []); | |
62 | pyroutes.register('admin_defaults_repositories_update', '/_admin/defaults/repositories/update', []); |
|
62 | pyroutes.register('admin_defaults_repositories_update', '/_admin/defaults/repositories/update', []); | |
63 | pyroutes.register('admin_settings', '/_admin/settings', []); |
|
63 | pyroutes.register('admin_settings', '/_admin/settings', []); | |
64 | pyroutes.register('admin_settings_update', '/_admin/settings/update', []); |
|
64 | pyroutes.register('admin_settings_update', '/_admin/settings/update', []); | |
65 | pyroutes.register('admin_settings_global', '/_admin/settings/global', []); |
|
65 | pyroutes.register('admin_settings_global', '/_admin/settings/global', []); | |
66 | pyroutes.register('admin_settings_global_update', '/_admin/settings/global/update', []); |
|
66 | pyroutes.register('admin_settings_global_update', '/_admin/settings/global/update', []); | |
67 | pyroutes.register('admin_settings_vcs', '/_admin/settings/vcs', []); |
|
67 | pyroutes.register('admin_settings_vcs', '/_admin/settings/vcs', []); | |
68 | pyroutes.register('admin_settings_vcs_update', '/_admin/settings/vcs/update', []); |
|
68 | pyroutes.register('admin_settings_vcs_update', '/_admin/settings/vcs/update', []); | |
69 | pyroutes.register('admin_settings_vcs_svn_pattern_delete', '/_admin/settings/vcs/svn_pattern_delete', []); |
|
70 | pyroutes.register('admin_settings_mapping', '/_admin/settings/mapping', []);
71 | pyroutes.register('admin_settings_mapping_update', '/_admin/settings/mapping/update', []);
72 | pyroutes.register('admin_settings_visual', '/_admin/settings/visual', []);
73 | pyroutes.register('admin_settings_visual_update', '/_admin/settings/visual/update', []);
74 | pyroutes.register('admin_settings_issuetracker', '/_admin/settings/issue-tracker', []);
75 | pyroutes.register('admin_settings_issuetracker_update', '/_admin/settings/issue-tracker/update', []);
76 | pyroutes.register('admin_settings_issuetracker_test', '/_admin/settings/issue-tracker/test', []);
77 | pyroutes.register('admin_settings_issuetracker_delete', '/_admin/settings/issue-tracker/delete', []);
78 | pyroutes.register('admin_settings_email', '/_admin/settings/email', []);
79 | pyroutes.register('admin_settings_email_update', '/_admin/settings/email/update', []);
80 | pyroutes.register('admin_settings_hooks', '/_admin/settings/hooks', []);
81 | pyroutes.register('admin_settings_hooks_update', '/_admin/settings/hooks/update', []);
82 | pyroutes.register('admin_settings_hooks_delete', '/_admin/settings/hooks/delete', []);
83 | pyroutes.register('admin_settings_search', '/_admin/settings/search', []);
84 | pyroutes.register('admin_settings_labs', '/_admin/settings/labs', []);
85 | pyroutes.register('admin_settings_labs_update', '/_admin/settings/labs/update', []);
86 | pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []);
87 | pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []);
88 | pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []);
89 | pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []);
90 | pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []);
91 | pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []);
92 | pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []);
93 | pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []);
94 | pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []);
95 | pyroutes.register('admin_permissions_ssh_keys', '/_admin/permissions/ssh_keys', []);
96 | pyroutes.register('admin_permissions_ssh_keys_data', '/_admin/permissions/ssh_keys/data', []);
97 | pyroutes.register('admin_permissions_ssh_keys_update', '/_admin/permissions/ssh_keys/update', []);
98 | pyroutes.register('users', '/_admin/users', []);
99 | pyroutes.register('users_data', '/_admin/users_data', []);
100 | pyroutes.register('users_create', '/_admin/users/create', []);
101 | pyroutes.register('users_new', '/_admin/users/new', []);
102 | pyroutes.register('user_edit', '/_admin/users/%(user_id)s/edit', ['user_id']);
103 | pyroutes.register('user_edit_advanced', '/_admin/users/%(user_id)s/edit/advanced', ['user_id']);
104 | pyroutes.register('user_edit_global_perms', '/_admin/users/%(user_id)s/edit/global_permissions', ['user_id']);
105 | pyroutes.register('user_edit_global_perms_update', '/_admin/users/%(user_id)s/edit/global_permissions/update', ['user_id']);
106 | pyroutes.register('user_update', '/_admin/users/%(user_id)s/update', ['user_id']);
107 | pyroutes.register('user_delete', '/_admin/users/%(user_id)s/delete', ['user_id']);
108 | pyroutes.register('user_enable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_enable', ['user_id']);
109 | pyroutes.register('user_disable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_disable', ['user_id']);
110 | pyroutes.register('user_create_personal_repo_group', '/_admin/users/%(user_id)s/create_repo_group', ['user_id']);
111 | pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']);
112 | pyroutes.register('edit_user_ssh_keys', '/_admin/users/%(user_id)s/edit/ssh_keys', ['user_id']);
113 | pyroutes.register('edit_user_ssh_keys_generate_keypair', '/_admin/users/%(user_id)s/edit/ssh_keys/generate', ['user_id']);
114 | pyroutes.register('edit_user_ssh_keys_add', '/_admin/users/%(user_id)s/edit/ssh_keys/new', ['user_id']);
115 | pyroutes.register('edit_user_ssh_keys_delete', '/_admin/users/%(user_id)s/edit/ssh_keys/delete', ['user_id']);
116 | pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']);
117 | pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']);
118 | pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']);
119 | pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']);
120 | pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']);
121 | pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']);
122 | pyroutes.register('edit_user_perms_summary', '/_admin/users/%(user_id)s/edit/permissions_summary', ['user_id']);
123 | pyroutes.register('edit_user_perms_summary_json', '/_admin/users/%(user_id)s/edit/permissions_summary/json', ['user_id']);
124 | pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']);
125 | pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']);
126 | pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']);
127 | pyroutes.register('edit_user_audit_logs_download', '/_admin/users/%(user_id)s/edit/audit/download', ['user_id']);
128 | pyroutes.register('edit_user_caches', '/_admin/users/%(user_id)s/edit/caches', ['user_id']);
129 | pyroutes.register('edit_user_caches_update', '/_admin/users/%(user_id)s/edit/caches/update', ['user_id']);
130 | pyroutes.register('user_groups', '/_admin/user_groups', []);
131 | pyroutes.register('user_groups_data', '/_admin/user_groups_data', []);
132 | pyroutes.register('user_groups_new', '/_admin/user_groups/new', []);
133 | pyroutes.register('user_groups_create', '/_admin/user_groups/create', []);
134 | pyroutes.register('repos', '/_admin/repos', []);
135 | pyroutes.register('repo_new', '/_admin/repos/new', []);
136 | pyroutes.register('repo_create', '/_admin/repos/create', []);
137 | pyroutes.register('repo_groups', '/_admin/repo_groups', []);
138 | pyroutes.register('repo_groups_data', '/_admin/repo_groups_data', []);
139 | pyroutes.register('repo_group_new', '/_admin/repo_group/new', []);
140 | pyroutes.register('repo_group_create', '/_admin/repo_group/create', []);
141 | pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []);
142 | pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []);
143 | pyroutes.register('channelstream_proxy', '/_channelstream', []);
144 | pyroutes.register('upload_file', '/_file_store/upload', []);
145 | pyroutes.register('download_file', '/_file_store/download/%(fid)s', ['fid']);
146 | pyroutes.register('download_file_by_token', '/_file_store/token-download/%(_auth_token)s/%(fid)s', ['_auth_token', 'fid']);
147 | pyroutes.register('logout', '/_admin/logout', []);
148 | pyroutes.register('reset_password', '/_admin/password_reset', []);
149 | pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []);
150 | pyroutes.register('home', '/', []);
151 | pyroutes.register('user_autocomplete_data', '/_users', []);
152 | pyroutes.register('user_group_autocomplete_data', '/_user_groups', []);
153 | pyroutes.register('repo_list_data', '/_repos', []);
154 | pyroutes.register('repo_group_list_data', '/_repo_groups', []);
155 | pyroutes.register('goto_switcher_data', '/_goto_data', []);
156 | pyroutes.register('markup_preview', '/_markup_preview', []);
157 | pyroutes.register('file_preview', '/_file_preview', []);
158 | pyroutes.register('store_user_session_value', '/_store_session_attr', []);
159 | pyroutes.register('journal', '/_admin/journal', []);
160 | pyroutes.register('journal_rss', '/_admin/journal/rss', []);
161 | pyroutes.register('journal_atom', '/_admin/journal/atom', []);
162 | pyroutes.register('journal_public', '/_admin/public_journal', []);
163 | pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []);
164 | pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []);
165 | pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []);
166 | pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []);
167 | pyroutes.register('toggle_following', '/_admin/toggle_following', []);
168 | pyroutes.register('repo_creating', '/%(repo_name)s/repo_creating', ['repo_name']);
169 | pyroutes.register('repo_creating_check', '/%(repo_name)s/repo_creating_check', ['repo_name']);
170 | pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']);
171 | pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']);
172 | pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']);
173 | pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']);
174 | pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']);
175 | pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']);
176 | pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']);
177 | pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']);
178 | pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']);
179 | pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']);
180 | pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']);
181 | pyroutes.register('repo_commit_comment_attachment_upload', '/%(repo_name)s/changeset/%(commit_id)s/comment/attachment_upload', ['repo_name', 'commit_id']);
182 | pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']);
183 | pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']);
184 | pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']);
185 | pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']);
186 | pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']);
187 | pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
188 | pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']);
189 | pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']);
190 | pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
191 | pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
192 | pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
193 | pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
194 | pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']);
195 | pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
196 | pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
197 | pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
198 | pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
199 | pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
200 | pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
201 | pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
202 | pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
203 | pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
204 | pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
205 | pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
206 | pyroutes.register('repo_files_upload_file', '/%(repo_name)s/upload_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
207 | pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
208 | pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']);
209 | pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']);
210 | pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']);
211 | pyroutes.register('repo_commits', '/%(repo_name)s/commits', ['repo_name']);
212 | pyroutes.register('repo_commits_file', '/%(repo_name)s/commits/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
213 | pyroutes.register('repo_commits_elements', '/%(repo_name)s/commits_elements', ['repo_name']);
214 | pyroutes.register('repo_commits_elements_file', '/%(repo_name)s/commits_elements/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
215 | pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']);
216 | pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
217 | pyroutes.register('repo_compare_select', '/%(repo_name)s/compare', ['repo_name']);
218 | pyroutes.register('repo_compare', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']);
219 | pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']);
220 | pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']);
221 | pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']);
222 | pyroutes.register('repo_fork_new', '/%(repo_name)s/fork', ['repo_name']);
223 | pyroutes.register('repo_fork_create', '/%(repo_name)s/fork/create', ['repo_name']);
224 | pyroutes.register('repo_forks_show_all', '/%(repo_name)s/forks', ['repo_name']);
225 | pyroutes.register('repo_forks_data', '/%(repo_name)s/forks/data', ['repo_name']);
226 | pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
227 | pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']);
228 | pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']);
229 | pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']);
230 | pyroutes.register('pullrequest_repo_targets', '/%(repo_name)s/pull-request/repo-targets', ['repo_name']);
231 | pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']);
232 | pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']);
233 | pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']);
234 | pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']);
235 | pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']);
236 | pyroutes.register('pullrequest_comment_create', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment', ['repo_name', 'pull_request_id']);
237 | pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/delete', ['repo_name', 'pull_request_id', 'comment_id']);
238 | pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']);
239 | pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']);
240 | pyroutes.register('edit_repo_advanced_archive', '/%(repo_name)s/settings/advanced/archive', ['repo_name']);
241 | pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']);
242 | pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']);
243 | pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']);
244 | pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']);
245 | pyroutes.register('edit_repo_advanced_hooks', '/%(repo_name)s/settings/advanced/hooks', ['repo_name']);
246 | pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']);
247 | pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']);
248 | pyroutes.register('edit_repo_perms_set_private', '/%(repo_name)s/settings/permissions/set_private', ['repo_name']);
249 | pyroutes.register('edit_repo_maintenance', '/%(repo_name)s/settings/maintenance', ['repo_name']);
250 | pyroutes.register('edit_repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']);
251 | pyroutes.register('edit_repo_fields', '/%(repo_name)s/settings/fields', ['repo_name']);
252 | pyroutes.register('edit_repo_fields_create', '/%(repo_name)s/settings/fields/create', ['repo_name']);
253 | pyroutes.register('edit_repo_fields_delete', '/%(repo_name)s/settings/fields/%(field_id)s/delete', ['repo_name', 'field_id']);
254 | pyroutes.register('repo_edit_toggle_locking', '/%(repo_name)s/settings/toggle_locking', ['repo_name']);
255 | pyroutes.register('edit_repo_remote', '/%(repo_name)s/settings/remote', ['repo_name']);
256 | pyroutes.register('edit_repo_remote_pull', '/%(repo_name)s/settings/remote/pull', ['repo_name']);
257 | pyroutes.register('edit_repo_statistics', '/%(repo_name)s/settings/statistics', ['repo_name']);
258 | pyroutes.register('edit_repo_statistics_reset', '/%(repo_name)s/settings/statistics/update', ['repo_name']);
259 | pyroutes.register('edit_repo_issuetracker', '/%(repo_name)s/settings/issue_trackers', ['repo_name']);
260 | pyroutes.register('edit_repo_issuetracker_test', '/%(repo_name)s/settings/issue_trackers/test', ['repo_name']);
261 | pyroutes.register('edit_repo_issuetracker_delete', '/%(repo_name)s/settings/issue_trackers/delete', ['repo_name']);
262 | pyroutes.register('edit_repo_issuetracker_update', '/%(repo_name)s/settings/issue_trackers/update', ['repo_name']);
263 | pyroutes.register('edit_repo_vcs', '/%(repo_name)s/settings/vcs', ['repo_name']);
264 | pyroutes.register('edit_repo_vcs_update', '/%(repo_name)s/settings/vcs/update', ['repo_name']);
265 | pyroutes.register('edit_repo_vcs_svn_pattern_delete', '/%(repo_name)s/settings/vcs/svn_pattern/delete', ['repo_name']);
266 | pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']);
267 | pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']);
268 | pyroutes.register('edit_repo_strip', '/%(repo_name)s/settings/strip', ['repo_name']);
269 | pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']);
270 | pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']);
271 | pyroutes.register('edit_repo_audit_logs', '/%(repo_name)s/settings/audit_logs', ['repo_name']);
272 | pyroutes.register('rss_feed_home', '/%(repo_name)s/feed-rss', ['repo_name']);
273 | pyroutes.register('atom_feed_home', '/%(repo_name)s/feed-atom', ['repo_name']);
274 | pyroutes.register('rss_feed_home_old', '/%(repo_name)s/feed/rss', ['repo_name']);
275 | pyroutes.register('atom_feed_home_old', '/%(repo_name)s/feed/atom', ['repo_name']);
276 | pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']);
277 | pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']);
278 | pyroutes.register('edit_repo_group', '/%(repo_group_name)s/_edit', ['repo_group_name']);
279 | pyroutes.register('edit_repo_group_advanced', '/%(repo_group_name)s/_settings/advanced', ['repo_group_name']);
280 | pyroutes.register('edit_repo_group_advanced_delete', '/%(repo_group_name)s/_settings/advanced/delete', ['repo_group_name']);
281 | pyroutes.register('edit_repo_group_perms', '/%(repo_group_name)s/_settings/permissions', ['repo_group_name']);
282 | pyroutes.register('edit_repo_group_perms_update', '/%(repo_group_name)s/_settings/permissions/update', ['repo_group_name']);
283 | pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']);
284 | pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']);
285 | pyroutes.register('user_group_members_data', '/_admin/user_groups/%(user_group_id)s/members', ['user_group_id']);
286 | pyroutes.register('edit_user_group_perms_summary', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary', ['user_group_id']);
287 | pyroutes.register('edit_user_group_perms_summary_json', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary/json', ['user_group_id']);
288 | pyroutes.register('edit_user_group', '/_admin/user_groups/%(user_group_id)s/edit', ['user_group_id']);
289 | pyroutes.register('user_groups_update', '/_admin/user_groups/%(user_group_id)s/update', ['user_group_id']);
290 | pyroutes.register('edit_user_group_global_perms', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions', ['user_group_id']);
291 | pyroutes.register('edit_user_group_global_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions/update', ['user_group_id']);
292 | pyroutes.register('edit_user_group_perms', '/_admin/user_groups/%(user_group_id)s/edit/permissions', ['user_group_id']);
293 | pyroutes.register('edit_user_group_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/permissions/update', ['user_group_id']);
294 | pyroutes.register('edit_user_group_advanced', '/_admin/user_groups/%(user_group_id)s/edit/advanced', ['user_group_id']);
295 | pyroutes.register('edit_user_group_advanced_sync', '/_admin/user_groups/%(user_group_id)s/edit/advanced/sync', ['user_group_id']);
296 | pyroutes.register('user_groups_delete', '/_admin/user_groups/%(user_group_id)s/delete', ['user_group_id']);
297 | pyroutes.register('search', '/_admin/search', []);
298 | pyroutes.register('search_repo', '/%(repo_name)s/_search', ['repo_name']);
299 | pyroutes.register('search_repo_alt', '/%(repo_name)s/search', ['repo_name']);
300 | pyroutes.register('search_repo_group', '/%(repo_group_name)s/_search', ['repo_group_name']);
301 | pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']);
302 | pyroutes.register('user_group_profile', '/_profile_user_group/%(user_group_name)s', ['user_group_name']);
303 | pyroutes.register('my_account_profile', '/_admin/my_account/profile', []);
304 | pyroutes.register('my_account_edit', '/_admin/my_account/edit', []);
305 | pyroutes.register('my_account_update', '/_admin/my_account/update', []);
306 | pyroutes.register('my_account_password', '/_admin/my_account/password', []);
307 | pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []);
308 | pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []);
309 | pyroutes.register('my_account_ssh_keys', '/_admin/my_account/ssh_keys', []);
310 | pyroutes.register('my_account_ssh_keys_generate', '/_admin/my_account/ssh_keys/generate', []);
311 | pyroutes.register('my_account_ssh_keys_add', '/_admin/my_account/ssh_keys/new', []);
312 | pyroutes.register('my_account_ssh_keys_delete', '/_admin/my_account/ssh_keys/delete', []);
313 | pyroutes.register('my_account_user_group_membership', '/_admin/my_account/user_group_membership', []);
314 | pyroutes.register('my_account_emails', '/_admin/my_account/emails', []);
315 | pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []);
316 | pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []);
317 | pyroutes.register('my_account_repos', '/_admin/my_account/repos', []);
318 | pyroutes.register('my_account_watched', '/_admin/my_account/watched', []);
319 | pyroutes.register('my_account_bookmarks', '/_admin/my_account/bookmarks', []);
320 | pyroutes.register('my_account_bookmarks_update', '/_admin/my_account/bookmarks/update', []);
321 | pyroutes.register('my_account_goto_bookmark', '/_admin/my_account/bookmark/%(bookmark_id)s', ['bookmark_id']);
322 | pyroutes.register('my_account_perms', '/_admin/my_account/perms', []);
323 | pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []);
324 | pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []);
325 | pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []);
326 | pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []);
327 | pyroutes.register('notifications_show_all', '/_admin/notifications', []);
328 | pyroutes.register('notifications_mark_all_read', '/_admin/notifications/mark_all_read', []);
329 | pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']);
330 | pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']);
331 | pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']);
332 | pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []);
333 | pyroutes.register('gists_show', '/_admin/gists', []);
334 | pyroutes.register('gists_new', '/_admin/gists/new', []);
335 | pyroutes.register('gists_create', '/_admin/gists/create', []);
336 | pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']);
337 | pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']);
338 | pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']);
339 | pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']);
340 | pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']);
341 | pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/%(revision)s', ['gist_id', 'revision']);
342 | pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']);
343 | pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']);
344 | pyroutes.register('debug_style_home', '/_admin/debug_style', []);
345 | pyroutes.register('debug_style_email', '/_admin/debug_style/email/%(email_id)s', ['email_id']);
346 | pyroutes.register('debug_style_email_plain_rendered', '/_admin/debug_style/email-rendered/%(email_id)s', ['email_id']);
347 | pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']);
348 | pyroutes.register('apiv2', '/_admin/api', []);
349 | pyroutes.register('admin_settings_license', '/_admin/settings/license', []);
350 | pyroutes.register('admin_settings_license_unlock', '/_admin/settings/license_unlock', []);
351 | pyroutes.register('login', '/_admin/login', []);
352 | pyroutes.register('register', '/_admin/register', []);
353 | pyroutes.register('repo_reviewers_review_rule_new', '/%(repo_name)s/settings/review/rules/new', ['repo_name']);
354 | pyroutes.register('repo_reviewers_review_rule_edit', '/%(repo_name)s/settings/review/rules/%(rule_id)s', ['repo_name', 'rule_id']);
355 | pyroutes.register('repo_reviewers_review_rule_delete', '/%(repo_name)s/settings/review/rules/%(rule_id)s/delete', ['repo_name', 'rule_id']);
356 | pyroutes.register('plugin_admin_chat', '/_admin/plugin_admin_chat/%(action)s', ['action']);
357 | pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']);
358 | pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']);
359 | pyroutes.register('admin_settings_scheduler_show_tasks', '/_admin/settings/scheduler/_tasks', []);
360 | pyroutes.register('admin_settings_scheduler_show_all', '/_admin/settings/scheduler', []);
361 | pyroutes.register('admin_settings_scheduler_new', '/_admin/settings/scheduler/new', []);
362 | pyroutes.register('admin_settings_scheduler_create', '/_admin/settings/scheduler/create', []);
363 | pyroutes.register('admin_settings_scheduler_edit', '/_admin/settings/scheduler/%(schedule_id)s', ['schedule_id']);
364 | pyroutes.register('admin_settings_scheduler_update', '/_admin/settings/scheduler/%(schedule_id)s/update', ['schedule_id']);
365 | pyroutes.register('admin_settings_scheduler_delete', '/_admin/settings/scheduler/%(schedule_id)s/delete', ['schedule_id']);
366 | pyroutes.register('admin_settings_scheduler_execute', '/_admin/settings/scheduler/%(schedule_id)s/execute', ['schedule_id']);
367 | pyroutes.register('admin_settings_automation', '/_admin/settings/automation', []);
368 | pyroutes.register('admin_settings_automation_update', '/_admin/settings/automation/%(entry_id)s/update', ['entry_id']);
369 | pyroutes.register('admin_permissions_branch', '/_admin/permissions/branch', []);
368 | pyroutes.register('admin_permissions_branch_update', '/_admin/permissions/branch/update', []); |
|
370 | pyroutes.register('admin_permissions_branch_update', '/_admin/permissions/branch/update', []); | |
369 | pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []); |
|
371 | pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []); | |
370 | pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []); |
|
372 | pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []); | |
371 | pyroutes.register('my_account_external_identity', '/_admin/my_account/external-identity', []); |
|
373 | pyroutes.register('my_account_external_identity', '/_admin/my_account/external-identity', []); | |
372 | pyroutes.register('my_account_external_identity_delete', '/_admin/my_account/external-identity/delete', []); |
|
374 | pyroutes.register('my_account_external_identity_delete', '/_admin/my_account/external-identity/delete', []); | |
373 | pyroutes.register('repo_artifacts_list', '/%(repo_name)s/artifacts', ['repo_name']); |
|
375 | pyroutes.register('repo_artifacts_list', '/%(repo_name)s/artifacts', ['repo_name']); | |
374 | pyroutes.register('repo_artifacts_data', '/%(repo_name)s/artifacts_data', ['repo_name']); |
|
376 | pyroutes.register('repo_artifacts_data', '/%(repo_name)s/artifacts_data', ['repo_name']); | |
375 | pyroutes.register('repo_artifacts_new', '/%(repo_name)s/artifacts/new', ['repo_name']); |
|
377 | pyroutes.register('repo_artifacts_new', '/%(repo_name)s/artifacts/new', ['repo_name']); | |
376 | pyroutes.register('repo_artifacts_get', '/%(repo_name)s/artifacts/download/%(uid)s', ['repo_name', 'uid']); |
|
378 | pyroutes.register('repo_artifacts_get', '/%(repo_name)s/artifacts/download/%(uid)s', ['repo_name', 'uid']); | |
377 | pyroutes.register('repo_artifacts_store', '/%(repo_name)s/artifacts/store', ['repo_name']); |
|
379 | pyroutes.register('repo_artifacts_store', '/%(repo_name)s/artifacts/store', ['repo_name']); | |
378 | pyroutes.register('repo_artifacts_info', '/%(repo_name)s/artifacts/info/%(uid)s', ['repo_name', 'uid']); |
|
380 | pyroutes.register('repo_artifacts_info', '/%(repo_name)s/artifacts/info/%(uid)s', ['repo_name', 'uid']); | |
379 | pyroutes.register('repo_artifacts_delete', '/%(repo_name)s/artifacts/delete/%(uid)s', ['repo_name', 'uid']); |
|
381 | pyroutes.register('repo_artifacts_delete', '/%(repo_name)s/artifacts/delete/%(uid)s', ['repo_name', 'uid']); | |
380 | pyroutes.register('repo_automation', '/%(repo_name)s/settings/automation', ['repo_name']); |
|
382 | pyroutes.register('repo_automation', '/%(repo_name)s/settings/automation', ['repo_name']); | |
381 | pyroutes.register('repo_automation_update', '/%(repo_name)s/settings/automation/%(entry_id)s/update', ['repo_name', 'entry_id']); |
|
383 | pyroutes.register('repo_automation_update', '/%(repo_name)s/settings/automation/%(entry_id)s/update', ['repo_name', 'entry_id']); | |
382 | pyroutes.register('edit_repo_remote_push', '/%(repo_name)s/settings/remote/push', ['repo_name']); |
|
384 | pyroutes.register('edit_repo_remote_push', '/%(repo_name)s/settings/remote/push', ['repo_name']); | |
383 | pyroutes.register('edit_repo_perms_branch', '/%(repo_name)s/settings/branch_permissions', ['repo_name']); |
|
385 | pyroutes.register('edit_repo_perms_branch', '/%(repo_name)s/settings/branch_permissions', ['repo_name']); | |
384 | pyroutes.register('edit_repo_perms_branch_delete', '/%(repo_name)s/settings/branch_permissions/%(rule_id)s/delete', ['repo_name', 'rule_id']); |
|
386 | pyroutes.register('edit_repo_perms_branch_delete', '/%(repo_name)s/settings/branch_permissions/%(rule_id)s/delete', ['repo_name', 'rule_id']); | |
385 | } |
|
387 | } |
@@ -1,79 +1,80 b'' | |||||
1 | ## -*- coding: utf-8 -*- |
|
1 | ## -*- coding: utf-8 -*- | |
2 | <%inherit file="/base/base.mako"/> |
|
2 | <%inherit file="/base/base.mako"/> | |
3 |
|
3 | |||
4 | <%def name="title()"> |
|
4 | <%def name="title()"> | |
5 | ${_('Debug Style')} |
|
5 | ${_('Debug Style')} | |
6 | %if c.rhodecode_name: |
|
6 | %if c.rhodecode_name: | |
7 | · ${h.branding(c.rhodecode_name)} |
|
7 | · ${h.branding(c.rhodecode_name)} | |
8 | %endif |
|
8 | %endif | |
9 | </%def> |
|
9 | </%def> | |
10 |
|
10 | |||
11 | <%def name="breadcrumbs_links()"> |
|
11 | <%def name="breadcrumbs_links()"> | |
12 | ${_('Style')} |
|
12 | ${_('Style')} | |
13 | </%def> |
|
13 | </%def> | |
14 |
|
14 | |||
15 | <%def name="menu_bar_nav()"> |
|
15 | <%def name="menu_bar_nav()"> | |
16 | ${self.menu_items(active='debug_style')} |
|
16 | ${self.menu_items(active='debug_style')} | |
17 | </%def> |
|
17 | </%def> | |
18 |
|
18 | |||
19 |
|
19 | |||
20 | <%def name="main()"> |
|
20 | <%def name="main()"> | |
21 | <div id="style-page"> |
|
21 | <div id="style-page"> | |
22 | ${self.real_main()} |
|
22 | ${self.real_main()} | |
23 | </div> |
|
23 | </div> | |
24 | </%def> |
|
24 | </%def> | |
25 |
|
25 | |||
26 | <%def name="real_main()"> |
|
26 | <%def name="real_main()"> | |
27 | <div class="box"> |
|
27 | <div class="box"> | |
28 | <div class="title"> |
|
28 | <div class="title"> | |
29 | ${self.breadcrumbs()} |
|
29 | ${self.breadcrumbs()} | |
30 | </div> |
|
30 | </div> | |
31 |
|
31 | |||
32 | <div class='sidebar-col-wrapper'> |
|
32 | <div class='sidebar-col-wrapper'> | |
33 | ##main |
|
33 | ##main | |
34 | ${self.sidebar()} |
|
34 | ${self.sidebar()} | |
35 |
|
35 | |||
36 | <div class="main-content"> |
|
36 | <div class="main-content"> | |
37 | <h2>Examples of styled elements</h2> |
|
37 | <h2>Examples of styled elements</h2> | |
38 | <p>Based on the examples from Bootstrap, with form elements based |
|
38 | <p>Based on the examples from Bootstrap, with form elements based | 
39 | on our current markup.</p> |
|
39 | on our current markup.</p> | |
40 | <p> |
|
40 | <p> | |
41 | The objective of this section is to have a comprehensive style guide which outlines |
|
41 | The objective of this section is to have a comprehensive style guide which outlines | 
42 | any and all elements used throughout the application, as a reference for |
|
42 | any and all elements used throughout the application, as a reference for | 
43 | both existing developers and as a training tool for future hires. |
|
43 | both existing developers and as a training tool for future hires. | |
44 | </p> |
|
44 | </p> | |
45 | </div> |
|
45 | </div> | |
46 | </div> |
|
46 | </div> | |
47 | </div> |
|
47 | </div> | |
48 | </%def> |
|
48 | </%def> | |
49 |
|
49 | |||
50 |
|
50 | |||
51 | <%def name="sidebar()"> |
|
51 | <%def name="sidebar()"> | |
52 | <div class="sidebar"> |
|
52 | <div class="sidebar"> | |
53 | <ul class="nav nav-pills nav-stacked"> |
|
53 | <ul class="nav nav-pills nav-stacked"> | |
54 | <li class="${'active' if c.active=='index' else ''}"><a href="${h.route_path('debug_style_home')}">${_('Index')}</a></li> |
|
54 | <li class="${'active' if c.active=='index' else ''}"><a href="${h.route_path('debug_style_home')}">${_('Index')}</a></li> | |
|
55 | <li class="${'active' if c.active=='emails' else ''}"><a href="${h.route_path('debug_style_template', t_path='emails.html')}">${_('Emails')}</a></li> | |||
55 | <li class="${'active' if c.active=='typography' else ''}"><a href="${h.route_path('debug_style_template', t_path='typography.html')}">${_('Typography')}</a></li> |
|
56 | <li class="${'active' if c.active=='typography' else ''}"><a href="${h.route_path('debug_style_template', t_path='typography.html')}">${_('Typography')}</a></li> | |
56 | <li class="${'active' if c.active=='forms' else ''}"><a href="${h.route_path('debug_style_template', t_path='forms.html')}">${_('Forms')}</a></li> |
|
57 | <li class="${'active' if c.active=='forms' else ''}"><a href="${h.route_path('debug_style_template', t_path='forms.html')}">${_('Forms')}</a></li> | |
57 | <li class="${'active' if c.active=='buttons' else ''}"><a href="${h.route_path('debug_style_template', t_path='buttons.html')}">${_('Buttons')}</a></li> |
|
58 | <li class="${'active' if c.active=='buttons' else ''}"><a href="${h.route_path('debug_style_template', t_path='buttons.html')}">${_('Buttons')}</a></li> | |
58 | <li class="${'active' if c.active=='labels' else ''}"><a href="${h.route_path('debug_style_template', t_path='labels.html')}">${_('Labels')}</a></li> |
|
59 | <li class="${'active' if c.active=='labels' else ''}"><a href="${h.route_path('debug_style_template', t_path='labels.html')}">${_('Labels')}</a></li> | |
59 | <li class="${'active' if c.active=='alerts' else ''}"><a href="${h.route_path('debug_style_template', t_path='alerts.html')}">${_('Alerts')}</a></li> |
|
60 | <li class="${'active' if c.active=='alerts' else ''}"><a href="${h.route_path('debug_style_template', t_path='alerts.html')}">${_('Alerts')}</a></li> | |
60 | <li class="${'active' if c.active=='tables' else ''}"><a href="${h.route_path('debug_style_template', t_path='tables.html')}">${_('Tables')}</a></li> |
|
61 | <li class="${'active' if c.active=='tables' else ''}"><a href="${h.route_path('debug_style_template', t_path='tables.html')}">${_('Tables')}</a></li> | |
61 | <li class="${'active' if c.active=='tables-wide' else ''}"><a href="${h.route_path('debug_style_template', t_path='tables-wide.html')}">${_('Tables wide')}</a></li> |
|
62 | <li class="${'active' if c.active=='tables-wide' else ''}"><a href="${h.route_path('debug_style_template', t_path='tables-wide.html')}">${_('Tables wide')}</a></li> | |
62 | <li class="${'active' if c.active=='collapsable-content' else ''}"><a href="${h.route_path('debug_style_template', t_path='collapsable-content.html')}">${_('Collapsable Content')}</a></li> |
|
63 | <li class="${'active' if c.active=='collapsable-content' else ''}"><a href="${h.route_path('debug_style_template', t_path='collapsable-content.html')}">${_('Collapsable Content')}</a></li> | |
63 | <li class="${'active' if c.active=='icons' else ''}"><a href="${h.route_path('debug_style_template', t_path='icons.html')}">${_('Icons')}</a></li> |
|
64 | <li class="${'active' if c.active=='icons' else ''}"><a href="${h.route_path('debug_style_template', t_path='icons.html')}">${_('Icons')}</a></li> | |
64 | <li class="${'active' if c.active=='layout-form-sidebar' else ''}"><a href="${h.route_path('debug_style_template', t_path='layout-form-sidebar.html')}">${_('Layout form with sidebar')}</a></li> |
|
65 | <li class="${'active' if c.active=='layout-form-sidebar' else ''}"><a href="${h.route_path('debug_style_template', t_path='layout-form-sidebar.html')}">${_('Layout form with sidebar')}</a></li> | |
65 | <li class="${'active' if c.active=='login' else ''}"><a href="${h.route_path('debug_style_template', t_path='login.html')}">${_('Login')}</a></li> |
|
66 | <li class="${'active' if c.active=='login' else ''}"><a href="${h.route_path('debug_style_template', t_path='login.html')}">${_('Login')}</a></li> | |
66 | <li class="${'active' if c.active=='login2' else ''}"><a href="${h.route_path('debug_style_template', t_path='login2.html')}">${_('Login 2')}</a></li> |
|
67 | <li class="${'active' if c.active=='login2' else ''}"><a href="${h.route_path('debug_style_template', t_path='login2.html')}">${_('Login 2')}</a></li> | |
67 | <li class="${'active' if c.active=='code-block' else ''}"><a href="${h.route_path('debug_style_template', t_path='code-block.html')}">${_('Code blocks')}</a></li> |
|
68 | <li class="${'active' if c.active=='code-block' else ''}"><a href="${h.route_path('debug_style_template', t_path='code-block.html')}">${_('Code blocks')}</a></li> | |
68 |
|
69 | |||
69 | <li class="divider"><strong>Experimental</strong></li> |
|
70 | <li class="divider"><strong>Experimental</strong></li> | |
70 | <li class="${'active' if c.active=='panels' else ''}"><a href="${h.route_path('debug_style_template', t_path='panels.html')}">${_('Panels')}</a></li> |
|
71 | <li class="${'active' if c.active=='panels' else ''}"><a href="${h.route_path('debug_style_template', t_path='panels.html')}">${_('Panels')}</a></li> | |
71 |
|
72 | |||
72 | <li class="divider"><strong>Depreciated</strong></li> |
|
73 | <li class="divider"><strong>Depreciated</strong></li> | |
73 | <li class="${'active' if c.active=='form-elements' else ''}"><a href="${h.route_path('debug_style_template', t_path='form-elements.html')}">${_('Form elements')}</a></li> |
|
74 | <li class="${'active' if c.active=='form-elements' else ''}"><a href="${h.route_path('debug_style_template', t_path='form-elements.html')}">${_('Form elements')}</a></li> | |
74 | <li class="${'active' if c.active=='form-elements-small' else ''}"><a href="${h.route_path('debug_style_template', t_path='form-elements-small.html')}">${_('Form elements small')}</a></li> |
|
75 | <li class="${'active' if c.active=='form-elements-small' else ''}"><a href="${h.route_path('debug_style_template', t_path='form-elements-small.html')}">${_('Form elements small')}</a></li> | |
75 | <li class="${'active' if c.active=='form-inline' else ''}"><a href="${h.route_path('debug_style_template', t_path='form-inline.html')}">${_('Form inline elements')}</a></li> |
|
76 | <li class="${'active' if c.active=='form-inline' else ''}"><a href="${h.route_path('debug_style_template', t_path='form-inline.html')}">${_('Form inline elements')}</a></li> | |
76 | <li class="${'active' if c.active=='form-vertical' else ''}"><a href="${h.route_path('debug_style_template', t_path='form-vertical.html')}">${_('Form vertical')}</a></li> |
|
77 | <li class="${'active' if c.active=='form-vertical' else ''}"><a href="${h.route_path('debug_style_template', t_path='form-vertical.html')}">${_('Form vertical')}</a></li> | |
77 | </ul> |
|
78 | </ul> | |
78 | </div> |
|
79 | </div> | |
79 | </%def> No newline at end of file |
|
80 | </%def> |
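
Every entry in the sidebar above follows the same pattern: the controller sets c.active to highlight the current page, and h.route_path('debug_style_template', ...) builds the link from the registered route. A minimal sketch of one more entry under that assumption; the page name and template path below are illustrative, not part of this change:

    ## illustrative sidebar entry; 'my-widgets' / my-widgets.html are placeholder names
    <li class="${'active' if c.active=='my-widgets' else ''}">
        <a href="${h.route_path('debug_style_template', t_path='my-widgets.html')}">${_('My Widgets')}</a>
    </li>
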
@@ -1,142 +1,525 b'' | |||||
1 | ## -*- coding: utf-8 -*- |
|
1 | ## -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | ## helpers |
|
3 | ## helpers | |
4 | <%def name="tag_button(text, tag_type=None)"> |
|
4 | <%def name="tag_button(text, tag_type=None)"> | |
5 | <% |
|
5 | <% | |
6 |
|
|
6 | color_scheme = { | |
7 |
|
|
7 | 'default': 'border:1px solid #979797;color:#666666;background-color:#f9f9f9', | |
8 |
|
|
8 | 'approved': 'border:1px solid #0ac878;color:#0ac878;background-color:#f9f9f9', | |
9 |
|
|
9 | 'rejected': 'border:1px solid #e85e4d;color:#e85e4d;background-color:#f9f9f9', | |
10 |
|
|
10 | 'under_review': 'border:1px solid #ffc854;color:#ffc854;background-color:#f9f9f9', | |
11 | } |
|
11 | } | |
12 | %> |
|
12 | ||
13 | <pre style="display:inline;border-radius:2px;font-size:12px;padding:.2em;${color_scheme.get(tag_type, color_scheme['default'])}">${text}</pre> |
|
13 | css_style = ';'.join([ | |
|
14 | 'display:inline', | |||
|
15 | 'border-radius:2px', | |||
|
16 | 'font-size:12px', | |||
|
17 | 'padding:.2em', | |||
|
18 | ]) | |||
|
19 | ||||
|
20 | %> | |||
|
21 | <pre style="${css_style}; ${color_scheme.get(tag_type, color_scheme['default'])}">${text}</pre> | |||
14 | </%def> |
|
22 | </%def> | |
15 |
|
23 | |||
16 | <%def name="status_text(text, tag_type=None)"> |
|
24 | <%def name="status_text(text, tag_type=None)"> | |
17 | <% |
|
25 | <% | |
18 | color_scheme = { |
|
26 | color_scheme = { | |
19 | 'default': 'color:#666666', |
|
27 | 'default': 'color:#666666', | |
20 | 'approved': 'color:#0ac878', |
|
28 | 'approved': 'color:#0ac878', | |
21 | 'rejected': 'color:#e85e4d', |
|
29 | 'rejected': 'color:#e85e4d', | |
22 | 'under_review': 'color:#ffc854', |
|
30 | 'under_review': 'color:#ffc854', | |
23 | } |
|
31 | } | |
24 | %> |
|
32 | %> | |
25 | <span style="font-weight:bold;font-size:12px;padding:.2em;${color_scheme.get(tag_type, color_scheme['default'])}">${text}</span> |
|
33 | <span style="font-weight:bold;font-size:12px;padding:.2em;${color_scheme.get(tag_type, color_scheme['default'])}">${text}</span> | |
26 | </%def> |
|
34 | </%def> | |
27 |
|
35 | |||
|
36 | <%def name="gravatar_img(email, size=16)"> | |||
|
37 | <% | |||
|
38 | css_style = ';'.join([ | |||
|
39 | 'padding: 0', | |||
|
40 | 'margin: -4px 0', | |||
|
41 | 'border-radius: 50%', | |||
|
42 | 'box-sizing: content-box', | |||
|
43 | 'display: inline', | |||
|
44 | 'line-height: 1em', | |||
|
45 | 'min-width: 16px', | |||
|
46 | 'min-height: 16px', | |||
|
47 | ]) | |||
|
48 | %> | |||
|
49 | ||||
|
50 | <img alt="gravatar" style="${css_style}" src="${h.gravatar_url(email, size)}" height="${size}" width="${size}"> | |||
|
51 | </%def> | |||
|
52 | ||||
|
53 | <%def name="link_css()">\ | |||
|
54 | <% | |||
|
55 | css_style = ';'.join([ | |||
|
56 | 'color:#427cc9', | |||
|
57 | 'text-decoration:none', | |||
|
58 | 'cursor:pointer' | |||
|
59 | ]) | |||
|
60 | %>\ | |||
|
61 | ${css_style}\ | |||
|
62 | </%def> | |||
|
63 | ||||
28 | ## Constants |
|
64 | ## Constants | |
29 | <% |
|
65 | <% | |
30 | text_regular = "-apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen-Sans, Ubuntu, Cantarell, 'Helvetica Neue', sans-serif;" |
|
66 | text_regular = "-apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen-Sans, Ubuntu, Cantarell, 'Helvetica Neue', sans-serif;" | |
31 | text_monospace = "'Menlo', 'Liberation Mono', 'Consolas', 'DejaVu Sans Mono', 'Ubuntu Mono', 'Courier New', 'andale mono', 'lucida console', monospace;" |
|
67 | text_monospace = "'Menlo', 'Liberation Mono', 'Consolas', 'DejaVu Sans Mono', 'Ubuntu Mono', 'Courier New', 'andale mono', 'lucida console', monospace;" | |
32 |
|
68 | |||
33 | %> |
|
69 | %> | |
34 |
|
70 | |||
35 | ## headers we additionally can set for email |
|
71 | ## headers we additionally can set for email | |
36 | <%def name="headers()" filter="n,trim"></%def> |
|
72 | <%def name="headers()" filter="n,trim"></%def> | |
37 |
|
73 | |||
38 | <%def name="plaintext_footer()"> |
|
74 | <%def name="plaintext_footer()" filter="trim"> | |
39 |
${_('This is a notification from RhodeCode. |
|
75 | ${_('This is a notification from RhodeCode.')} ${instance_url} | |
40 | </%def> |
|
76 | </%def> | |
41 |
|
77 | |||
42 | <%def name="body_plaintext()" filter="n,trim"> |
|
78 | <%def name="body_plaintext()" filter="n,trim"> | |
43 | ## this example is not called itself but overridden in each template |
|
79 | ## this example is not called itself but overridden in each template | |
44 | ## the plaintext_footer should be at the bottom of both html and text emails |
|
80 | ## the plaintext_footer should be at the bottom of both html and text emails | |
45 | ${self.plaintext_footer()} |
|
81 | ${self.plaintext_footer()} | |
46 | </%def> |
|
82 | </%def> | |
47 |
|
83 | |||
48 | <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> |
|
84 | <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> | |
49 | <html xmlns="http://www.w3.org/1999/xhtml"> |
|
85 | <html xmlns="http://www.w3.org/1999/xhtml"> | |
50 | <head> |
|
86 | <head> | |
51 | <meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> |
|
87 | <meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> | |
52 | <meta name="viewport" content="width=device-width, initial-scale=1.0"/> |
|
88 | <meta name="viewport" content="width=device-width, initial-scale=1.0"/> | |
53 | <title>${self.subject()}</title> |
|
89 | <title>${self.subject()}</title> | |
54 | <style type="text/css"> |
|
90 | <style type="text/css"> | |
55 | /* Based on The MailChimp Reset INLINE: Yes. */ |
|
91 | /* Based on The MailChimp Reset INLINE: Yes. */ | |
56 | #outlook a {padding:0;} /* Force Outlook to provide a "view in browser" menu link. */ |
|
92 | #outlook a { | |
57 | body{width:100% !important; -webkit-text-size-adjust:100%; -ms-text-size-adjust:100%; margin:0; padding:0; font-family: ${text_regular|n}} |
|
93 | padding: 0; | |
58 | /* Prevent Webkit and Windows Mobile platforms from changing default font sizes.*/ |
|
94 | } | |
59 | .ExternalClass {width:100%;} /* Force Hotmail to display emails at full width */ |
|
95 | ||
60 | .ExternalClass, .ExternalClass p, .ExternalClass span, .ExternalClass font, .ExternalClass td, .ExternalClass div {line-height: 100%;} |
|
96 | /* Force Outlook to provide a "view in browser" menu link. */ | |
61 | /* Forces Hotmail to display normal line spacing. More on that: http://www.emailonacid.com/forum/viewthread/43/ */ |
|
97 | body { | |
62 | #backgroundTable {margin:0; padding:0; line-height: 100% !important;} |
|
98 | width: 100% !important; | |
|
99 | -webkit-text-size-adjust: 100%; | |||
|
100 | -ms-text-size-adjust: 100%; | |||
|
101 | margin: 0; | |||
|
102 | padding: 0; | |||
|
103 | font-family: ${text_regular|n} | |||
|
104 | } | |||
|
105 | ||||
|
106 | /* Prevent Webkit and Windows Mobile platforms from changing default font sizes.*/ | |||
|
107 | .ExternalClass { | |||
|
108 | width: 100%; | |||
|
109 | } | |||
|
110 | ||||
|
111 | /* Force Hotmail to display emails at full width */ | |||
|
112 | .ExternalClass, .ExternalClass p, .ExternalClass span, .ExternalClass font, .ExternalClass td, .ExternalClass div { | |||
|
113 | line-height: 100%; | |||
|
114 | } | |||
|
115 | ||||
|
116 | /* Forces Hotmail to display normal line spacing. More on that: http://www.emailonacid.com/forum/viewthread/43/ */ | |||
|
117 | #backgroundTable { | |||
|
118 | margin: 0; | |||
|
119 | padding: 0; | |||
|
120 | line-height: 100% !important; | |||
|
121 | } | |||
|
122 | ||||
63 | /* End reset */ |
|
123 | /* End reset */ | |
64 |
|
124 | |||
65 | /* defaults for images*/ |
|
125 | /* defaults for images*/ | |
66 | img {outline:none; text-decoration:none; -ms-interpolation-mode: bicubic;} |
|
126 | img { | |
67 | a img {border:none;} |
|
127 | outline: none; | |
68 | .image_fix {display:block;} |
|
128 | text-decoration: none; | |
|
129 | -ms-interpolation-mode: bicubic; | |||
|
130 | } | |||
|
131 | ||||
|
132 | a img { | |||
|
133 | border: none; | |||
|
134 | } | |||
|
135 | ||||
|
136 | .image_fix { | |||
|
137 | display: block; | |||
|
138 | } | |||
|
139 | ||||
|
140 | body { | |||
|
141 | line-height: 1.2em; | |||
|
142 | } | |||
|
143 | ||||
|
144 | p { | |||
|
145 | margin: 0 0 20px; | |||
|
146 | } | |||
|
147 | ||||
|
148 | h1, h2, h3, h4, h5, h6 { | |||
|
149 | color: #323232 !important; | |||
|
150 | } | |||
|
151 | ||||
|
152 | a { | |||
|
153 | color: #427cc9; | |||
|
154 | text-decoration: none; | |||
|
155 | outline: none; | |||
|
156 | cursor: pointer; | |||
|
157 | } | |||
|
158 | ||||
|
159 | a:focus { | |||
|
160 | outline: none; | |||
|
161 | } | |||
|
162 | ||||
|
163 | a:hover { | |||
|
164 | color: #305b91; | |||
|
165 | } | |||
69 |
|
166 | |||
70 | body {line-height:1.2em;} |
|
167 | h1 a, h2 a, h3 a, h4 a, h5 a, h6 a { | |
71 | p {margin: 0 0 20px;} |
|
168 | color: #427cc9 !important; | |
72 | h1, h2, h3, h4, h5, h6 {color:#323232!important;} |
|
169 | text-decoration: none !important; | |
73 | a {color:#427cc9;text-decoration:none;outline:none;cursor:pointer;} |
|
170 | } | |
74 | a:focus {outline:none;} |
|
171 | ||
75 | a:hover {color: #305b91;} |
|
172 | h1 a:active, h2 a:active, h3 a:active, h4 a:active, h5 a:active, h6 a:active { | |
76 | h1 a, h2 a, h3 a, h4 a, h5 a, h6 a {color:#427cc9!important;text-decoration:none!important;} |
|
173 | color: #305b91 !important; | |
77 | h1 a:active, h2 a:active, h3 a:active, h4 a:active, h5 a:active, h6 a:active {color: #305b91!important;} |
|
174 | } | |
78 | h1 a:visited, h2 a:visited, h3 a:visited, h4 a:visited, h5 a:visited, h6 a:visited {color: #305b91!important;} |
|
175 | ||
79 | table {font-size:13px;border-collapse:collapse;mso-table-lspace:0pt;mso-table-rspace:0pt;} |
|
176 | h1 a:visited, h2 a:visited, h3 a:visited, h4 a:visited, h5 a:visited, h6 a:visited { | |
80 | table td {padding:.65em 1em .65em 0;border-collapse:collapse;vertical-align:top;text-align:left;} |
|
177 | color: #305b91 !important; | |
81 | input {display:inline;border-radius:2px;border-style:solid;border: 1px solid #dbd9da;padding:.5em;} |
|
178 | } | |
82 | input:focus {outline: 1px solid #979797} |
|
179 | ||
|
180 | table { | |||
|
181 | font-size: 13px; | |||
|
182 | border-collapse: collapse; | |||
|
183 | mso-table-lspace: 0pt; | |||
|
184 | mso-table-rspace: 0pt; | |||
|
185 | } | |||
|
186 | ||||
|
187 | table td { | |||
|
188 | padding: .65em 1em .65em 0; | |||
|
189 | border-collapse: collapse; | |||
|
190 | vertical-align: top; | |||
|
191 | text-align: left; | |||
|
192 | } | |||
|
193 | ||||
|
194 | input { | |||
|
195 | display: inline; | |||
|
196 | border-radius: 2px; | |||
|
197 | border: 1px solid #dbd9da; | |||
|
198 | padding: .5em; | |||
|
199 | } | |||
|
200 | ||||
|
201 | input:focus { | |||
|
202 | outline: 1px solid #979797 | |||
|
203 | } | |||
|
204 | ||||
83 | @media only screen and (-webkit-min-device-pixel-ratio: 2) { |
|
205 | @media only screen and (-webkit-min-device-pixel-ratio: 2) { | |
84 | /* Put your iPhone 4g styles in here */ |
|
206 | /* Put your iPhone 4g styles in here */ | |
85 | } |
|
207 | } | |
86 |
|
208 | |||
87 | /* Android targeting */ |
|
209 | /* Android targeting */ | |
88 | @media only screen and (-webkit-device-pixel-ratio:.75){ |
|
210 | @media only screen and (-webkit-device-pixel-ratio:.75){ | |
89 | /* Put CSS for low density (ldpi) Android layouts in here */ |
|
211 | /* Put CSS for low density (ldpi) Android layouts in here */ | |
90 | } |
|
212 | } | |
91 | @media only screen and (-webkit-device-pixel-ratio:1){ |
|
213 | @media only screen and (-webkit-device-pixel-ratio:1){ | |
92 | /* Put CSS for medium density (mdpi) Android layouts in here */ |
|
214 | /* Put CSS for medium density (mdpi) Android layouts in here */ | |
93 | } |
|
215 | } | |
94 | @media only screen and (-webkit-device-pixel-ratio:1.5){ |
|
216 | @media only screen and (-webkit-device-pixel-ratio:1.5){ | |
95 | /* Put CSS for high density (hdpi) Android layouts in here */ |
|
217 | /* Put CSS for high density (hdpi) Android layouts in here */ | |
96 | } |
|
218 | } | |
97 | /* end Android targeting */ |
|
219 | /* end Android targeting */ | |
98 |
|
220 | |||
|
221 | /** MARKDOWN styling **/ | |||
|
222 | div.markdown-block { | |||
|
223 | clear: both; | |||
|
224 | overflow: hidden; | |||
|
225 | margin: 0; | |||
|
226 | padding: 3px 5px 3px | |||
|
227 | } | |||
|
228 | ||||
|
229 | div.markdown-block h1, div.markdown-block h2, div.markdown-block h3, div.markdown-block h4, div.markdown-block h5, div.markdown-block h6 { | |||
|
230 | border-bottom: none !important; | |||
|
231 | padding: 0 !important; | |||
|
232 | overflow: visible !important | |||
|
233 | } | |||
|
234 | ||||
|
235 | div.markdown-block h1, div.markdown-block h2 { | |||
|
236 | border-bottom: 1px #e6e5e5 solid !important | |||
|
237 | } | |||
|
238 | ||||
|
239 | div.markdown-block h1 { | |||
|
240 | font-size: 32px; | |||
|
241 | margin: 15px 0 15px 0 !important; | |||
|
242 | padding-bottom: 5px !important | |||
|
243 | } | |||
|
244 | ||||
|
245 | div.markdown-block h2 { | |||
|
246 | font-size: 24px !important; | |||
|
247 | margin: 34px 0 10px 0 !important; | |||
|
248 | padding-top: 15px !important; | |||
|
249 | padding-bottom: 8px !important | |||
|
250 | } | |||
|
251 | ||||
|
252 | div.markdown-block h3 { | |||
|
253 | font-size: 18px !important; | |||
|
254 | margin: 30px 0 8px 0 !important; | |||
|
255 | padding-bottom: 2px !important | |||
|
256 | } | |||
|
257 | ||||
|
258 | div.markdown-block h4 { | |||
|
259 | font-size: 13px !important; | |||
|
260 | margin: 18px 0 3px 0 !important | |||
|
261 | } | |||
|
262 | ||||
|
263 | div.markdown-block h5 { | |||
|
264 | font-size: 12px !important; | |||
|
265 | margin: 15px 0 3px 0 !important | |||
|
266 | } | |||
|
267 | ||||
|
268 | div.markdown-block h6 { | |||
|
269 | font-size: 12px; | |||
|
270 | color: #777777; | |||
|
271 | margin: 15px 0 3px 0 !important | |||
|
272 | } | |||
|
273 | ||||
|
274 | div.markdown-block hr { | |||
|
275 | border: 0; | |||
|
276 | color: #e6e5e5; | |||
|
277 | background-color: #e6e5e5; | |||
|
278 | height: 3px; | |||
|
279 | margin-bottom: 13px | |||
|
280 | } | |||
|
281 | ||||
|
282 | div.markdown-block ol, div.markdown-block ul, div.markdown-block p, div.markdown-block blockquote, div.markdown-block dl, div.markdown-block li, div.markdown-block table { | |||
|
283 | margin: 3px 0 13px 0 !important; | |||
|
284 | color: #424242 !important; | |||
|
285 | font-size: 13px !important; | |||
|
286 | font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen-Sans, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; | |||
|
287 | font-weight: normal !important; | |||
|
288 | overflow: visible !important; | |||
|
289 | line-height: 140% !important | |||
|
290 | } | |||
|
291 | ||||
|
292 | div.markdown-block pre { | |||
|
293 | margin: 3px 0 13px 0 !important; | |||
|
294 | padding: .5em; | |||
|
295 | color: #424242 !important; | |||
|
296 | font-size: 13px !important; | |||
|
297 | overflow: visible !important; | |||
|
298 | line-height: 140% !important; | |||
|
299 | background-color: #F5F5F5 | |||
|
300 | } | |||
|
301 | ||||
|
302 | div.markdown-block img { | |||
|
303 | border-style: none; | |||
|
304 | background-color: #fff; | |||
|
305 | padding-right: 20px; | |||
|
306 | max-width: 100% | |||
|
307 | } | |||
|
308 | ||||
|
309 | div.markdown-block strong { | |||
|
310 | font-weight: 600; | |||
|
311 | margin: 0 | |||
|
312 | } | |||
|
313 | ||||
|
314 | div.markdown-block ul.checkbox, div.markdown-block ol.checkbox { | |||
|
315 | padding-left: 20px !important; | |||
|
316 | margin-top: 0 !important; | |||
|
317 | margin-bottom: 18px !important | |||
|
318 | } | |||
|
319 | ||||
|
320 | div.markdown-block ul, div.markdown-block ol { | |||
|
321 | padding-left: 30px !important; | |||
|
322 | margin-top: 0 !important; | |||
|
323 | margin-bottom: 18px !important | |||
|
324 | } | |||
|
325 | ||||
|
326 | div.markdown-block ul.checkbox li, div.markdown-block ol.checkbox li { | |||
|
327 | list-style: none !important; | |||
|
328 | margin: 6px !important; | |||
|
329 | padding: 0 !important | |||
|
330 | } | |||
|
331 | ||||
|
332 | div.markdown-block ul li, div.markdown-block ol li { | |||
|
333 | list-style: disc !important; | |||
|
334 | margin: 6px !important; | |||
|
335 | padding: 0 !important | |||
|
336 | } | |||
|
337 | ||||
|
338 | div.markdown-block ol li { | |||
|
339 | list-style: decimal !important | |||
|
340 | } | |||
|
341 | ||||
|
342 | div.markdown-block #message { | |||
|
343 | -webkit-border-radius: 2px; | |||
|
344 | -moz-border-radius: 2px; | |||
|
345 | border-radius: 2px; | |||
|
346 | border: 1px solid #dbd9da; | |||
|
347 | display: block; | |||
|
348 | width: 100%; | |||
|
349 | height: 60px; | |||
|
350 | margin: 6px 0 | |||
|
351 | } | |||
|
352 | ||||
|
353 | div.markdown-block button, div.markdown-block #ws { | |||
|
354 | font-size: 13px; | |||
|
355 | padding: 4px 6px; | |||
|
356 | -webkit-border-radius: 2px; | |||
|
357 | -moz-border-radius: 2px; | |||
|
358 | border-radius: 2px; | |||
|
359 | border: 1px solid #dbd9da; | |||
|
360 | background-color: #eeeeee | |||
|
361 | } | |||
|
362 | ||||
|
363 | div.markdown-block code, div.markdown-block pre, div.markdown-block #ws, div.markdown-block #message { | |||
|
364 | font-family: 'Menlo', 'Liberation Mono', 'Consolas', 'DejaVu Sans Mono', 'Ubuntu Mono', 'Courier New', 'andale mono', 'lucida console', monospace; | |||
|
365 | font-size: 11px; | |||
|
366 | -webkit-border-radius: 2px; | |||
|
367 | -moz-border-radius: 2px; | |||
|
368 | border-radius: 2px; | |||
|
369 | background-color: white; | |||
|
370 | color: #7E7F7F | |||
|
371 | } | |||
|
372 | ||||
|
373 | div.markdown-block code { | |||
|
374 | border: 1px solid #eeeeee; | |||
|
375 | margin: 0 2px; | |||
|
376 | padding: 0 5px | |||
|
377 | } | |||
|
378 | ||||
|
379 | div.markdown-block pre { | |||
|
380 | border: 1px solid #dbd9da; | |||
|
381 | overflow: auto; | |||
|
382 | padding: .5em; | |||
|
383 | background-color: #F5F5F5 | |||
|
384 | } | |||
|
385 | ||||
|
386 | div.markdown-block pre > code { | |||
|
387 | border: 0; | |||
|
388 | margin: 0; | |||
|
389 | padding: 0 | |||
|
390 | } | |||
|
391 | ||||
|
392 | div.rst-block { | |||
|
393 | clear: both; | |||
|
394 | overflow: hidden; | |||
|
395 | margin: 0; | |||
|
396 | padding: 3px 5px 3px | |||
|
397 | } | |||
|
398 | ||||
|
399 | div.rst-block h2 { | |||
|
400 | font-weight: normal | |||
|
401 | } | |||
|
402 | ||||
|
403 | div.rst-block h1, div.rst-block h2, div.rst-block h3, div.rst-block h4, div.rst-block h5, div.rst-block h6 { | |||
|
404 | border-bottom: 0 !important; | |||
|
405 | margin: 0 !important; | |||
|
406 | padding: 0 !important; | |||
|
407 | line-height: 1.5em !important | |||
|
408 | } | |||
|
409 | ||||
|
410 | div.rst-block h1:first-child { | |||
|
411 | padding-top: .25em !important | |||
|
412 | } | |||
|
413 | ||||
|
414 | div.rst-block h2, div.rst-block h3 { | |||
|
415 | margin: 1em 0 !important | |||
|
416 | } | |||
|
417 | ||||
|
418 | div.rst-block h1, div.rst-block h2 { | |||
|
419 | border-bottom: 1px #e6e5e5 solid !important | |||
|
420 | } | |||
|
421 | ||||
|
422 | div.rst-block h2 { | |||
|
423 | margin-top: 1.5em !important; | |||
|
424 | padding-top: .5em !important | |||
|
425 | } | |||
|
426 | ||||
|
427 | div.rst-block p { | |||
|
428 | color: black !important; | |||
|
429 | margin: 1em 0 !important; | |||
|
430 | line-height: 1.5em !important | |||
|
431 | } | |||
|
432 | ||||
|
433 | div.rst-block ul { | |||
|
434 | list-style: disc !important; | |||
|
435 | margin: 1em 0 1em 2em !important; | |||
|
436 | clear: both | |||
|
437 | } | |||
|
438 | ||||
|
439 | div.rst-block ol { | |||
|
440 | list-style: decimal; | |||
|
441 | margin: 1em 0 1em 2em !important | |||
|
442 | } | |||
|
443 | ||||
|
444 | div.rst-block pre, div.rst-block code { | |||
|
445 | font: 12px "Bitstream Vera Sans Mono", "Courier", monospace | |||
|
446 | } | |||
|
447 | ||||
|
448 | div.rst-block code { | |||
|
449 | font-size: 12px !important; | |||
|
450 | background-color: ghostWhite !important; | |||
|
451 | color: #444 !important; | |||
|
452 | padding: 0 .2em !important; | |||
|
453 | border: 1px solid #dedede !important | |||
|
454 | } | |||
|
455 | ||||
|
456 | div.rst-block pre code { | |||
|
457 | padding: 0 !important; | |||
|
458 | font-size: 12px !important; | |||
|
459 | background-color: #eee !important; | |||
|
460 | border: none !important | |||
|
461 | } | |||
|
462 | ||||
|
463 | div.rst-block pre { | |||
|
464 | margin: 1em 0; | |||
|
465 | padding: 15px; | |||
|
466 | border: 1px solid #eeeeee; | |||
|
467 | -webkit-border-radius: 2px; | |||
|
468 | -moz-border-radius: 2px; | |||
|
469 | border-radius: 2px; | |||
|
470 | overflow: auto; | |||
|
471 | font-size: 12px; | |||
|
472 | color: #444; | |||
|
473 | background-color: #F5F5F5 | |||
|
474 | } | |||
|
475 | ||||
|
476 | ||||
99 | </style> |
|
477 | </style> | |
100 |
|
478 | |||
101 | <!-- Targeting Windows Mobile --> |
|
479 | <!-- Targeting Windows Mobile --> | |
102 | <!--[if IEMobile 7]> |
|
480 | <!--[if IEMobile 7]> | |
103 | <style type="text/css"> |
|
481 | <style type="text/css"> | |
104 |
|
482 | |||
105 | </style> |
|
483 | </style> | |
106 | <![endif]--> |
|
484 | <![endif]--> | |
107 |
|
485 | |||
108 | <!--[if gte mso 9]> |
|
486 | <!--[if gte mso 9]> | |
109 |
|
|
487 | <style> | |
110 |
|
|
488 | /* Target Outlook 2007 and 2010 */ | |
111 |
|
|
489 | </style> | |
112 | <![endif]--> |
|
490 | <![endif]--> | |
113 | </head> |
|
491 | </head> | |
114 | <body> |
|
492 | <body> | |
115 | <!-- Wrapper/Container Table: Use a wrapper table to control the width and the background color consistently of your email. Use this approach instead of setting attributes on the body tag. --> |
|
493 | <!-- Wrapper/Container Table: Use a wrapper table to control the width and the background color consistently of your email. Use this approach instead of setting attributes on the body tag. --> | |
116 | <table cellpadding="0" cellspacing="0" border="0" id="backgroundTable" align="left" style="margin:1%;width:97%;padding:0;font-family:sans-serif;font-weight:100;border:1px solid #dbd9da"> |
|
494 | <table cellpadding="0" cellspacing="0" border="0" id="backgroundTable" align="left" style="margin:1%;width:97%;padding:0;font-family:sans-serif;font-weight:100;border:1px solid #dbd9da"> | |
117 | <tr> |
|
495 | <tr> | |
118 | <td valign="top" style="padding:0;"> |
|
496 | <td valign="top" style="padding:0;"> | |
119 | <table cellpadding="0" cellspacing="0" border="0" align="left" width="100%"> |
|
497 | <table cellpadding="0" cellspacing="0" border="0" align="left" width="100%"> | |
120 | <tr><td style="width:100%;padding:7px;background-color:#202020" valign="top"> |
|
498 | <tr> | |
121 | <a style="color:#eeeeee;text-decoration:none;" href="${instance_url}"> |
|
499 | <td style="width:100%;padding:10px 15px;background-color:#202020" valign="top"> | |
122 | ${_('RhodeCode')} |
|
500 | <a style="color:#eeeeee;text-decoration:none;" href="${instance_url}"> | |
123 |
|
|
501 | ${_('RhodeCode')} | |
124 |
|
|
502 | % if rhodecode_instance_name: | |
125 | % endif |
|
503 | - ${rhodecode_instance_name} | |
126 |
|
|
504 | % endif | |
127 |
</ |
|
505 | </a> | |
128 | <tr><td style="padding:15px;" valign="top">${self.body()}</td></tr> |
|
506 | </td> | |
|
507 | </tr> | |||
|
508 | <tr> | |||
|
509 | <td style="padding:15px;" valign="top">${self.body()}</td> | |||
|
510 | </tr> | |||
129 | </table> |
|
511 | </table> | |
130 | </td> |
|
512 | </td> | |
131 | </tr> |
|
513 | </tr> | |
132 | </table> |
|
514 | </table> | |
133 | <!-- End of wrapper table --> |
|
515 | <!-- End of wrapper table --> | |
134 |
|
516 | |||
135 | <div style="clear: both"></div> |
|
517 | <div style="clear: both"></div> | |
136 | <p> |
|
518 | <div style="margin-left:1%;font-weight:100;font-size:11px;color:#666666;text-decoration:none;font-family:${text_monospace}"> | |
137 | <a style="margin-top:15px;margin-left:1%;font-weight:100;font-size:11px;color:#666666;text-decoration:none;font-family:${text_monospace} " href="${instance_url}"> |
|
519 | ${_('This is a notification from RhodeCode.')} | |
138 | ${self.plaintext_footer()} |
|
520 | <a style="font-weight:100;font-size:11px;color:#666666;text-decoration:none;font-family:${text_monospace}" href="${instance_url}"> | |
139 | </a> |
|
521 | ${instance_url} | |
140 | </p> |
|
522 | </a> | |
|
523 | </div> | |||
141 | </body> |
|
524 | </body> | |
142 | </html> |
|
525 | </html> |
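
The reworked base.mako above exposes small reusable defs (tag_button, status_text, gravatar_img and link_css) so that child email templates can share the inline styles that email clients require. A minimal sketch of how a child template can pull them in through a Mako namespace; the email address and the example_url variable are placeholders, not values from this change:

    <%inherit file="base.mako"/>
    <%namespace name="base" file="base.mako"/>

    ## renders an inline-styled avatar, a colored status label and a styled link
    <p>
        ${base.gravatar_img('user@example.com', 24)}
        ${base.status_text(_('Approved'), tag_type='approved')}
        <a href="${example_url}" style="${base.link_css()}">${_('View the change')}</a>
    </p>

The tag_type keys ('approved', 'rejected', 'under_review') fall back to the 'default' entry when the status is unknown, mirroring the color_scheme.get(tag_type, color_scheme['default']) lookup in the helpers above.
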
@@ -1,108 +1,161 b'' | |||||
1 | ## -*- coding: utf-8 -*- |
|
1 | ## -*- coding: utf-8 -*- | |
2 | <%inherit file="base.mako"/> |
|
2 | <%inherit file="base.mako"/> | |
3 | <%namespace name="base" file="base.mako"/> |
|
3 | <%namespace name="base" file="base.mako"/> | |
4 |
|
4 | |||
5 | ## EMAIL SUBJECT |
|
5 | ## EMAIL SUBJECT | |
6 | <%def name="subject()" filter="n,trim,whitespace_filter"> |
|
6 | <%def name="subject()" filter="n,trim,whitespace_filter"> | |
7 | <% |
|
7 | <% | |
8 | data = { |
|
8 | data = { | |
9 | 'user': h.person(user), |
|
9 | 'user': '@'+h.person(user), | |
10 | 'repo_name': repo_name, |
|
10 | 'repo_name': repo_name, | |
11 | 'commit_id': h.show_id(commit), |
|
|||
12 | 'status': status_change, |
|
11 | 'status': status_change, | |
13 | 'comment_file': comment_file, |
|
12 | 'comment_file': comment_file, | |
14 | 'comment_line': comment_line, |
|
13 | 'comment_line': comment_line, | |
15 | 'comment_type': comment_type, |
|
14 | 'comment_type': comment_type, | |
|
15 | ||||
|
16 | 'commit_id': h.show_id(commit), | |||
16 | } |
|
17 | } | |
17 | %> |
|
18 | %> | |
18 | ${_('[mention]') if mention else ''} \ |
|
19 | ||
19 |
|
|
20 | ||
20 | % if comment_file: |
|
21 | % if comment_file: | |
21 | ${_('{user} left a {comment_type} on file `{comment_file}` in commit `{commit_id}`').format(**data)} ${_('in the {repo_name} repository').format(**data) |n} |
|
22 | ${(_('[mention]') if mention else '')} ${_('{user} left a {comment_type} on file `{comment_file}` in commit `{commit_id}`').format(**data)} ${_('in the `{repo_name}` repository').format(**data) |n} | |
22 | % else: |
|
23 | % else: | |
23 | % if status_change: |
|
24 | % if status_change: | |
24 | ${_('[status: {status}] {user} left a {comment_type} on commit `{commit_id}`').format(**data) |n} ${_('in the {repo_name} repository').format(**data) |n} |
|
25 | ${(_('[mention]') if mention else '')} ${_('[status: {status}] {user} left a {comment_type} on commit `{commit_id}`').format(**data) |n} ${_('in the `{repo_name}` repository').format(**data) |n} | |
25 | % else: |
|
26 | % else: | |
26 | ${_('{user} left a {comment_type} on commit `{commit_id}`').format(**data) |n} ${_('in the {repo_name} repository').format(**data) |n} |
|
27 | ${(_('[mention]') if mention else '')} ${_('{user} left a {comment_type} on commit `{commit_id}`').format(**data) |n} ${_('in the `{repo_name}` repository').format(**data) |n} | |
27 | % endif |
|
28 | % endif | |
28 | % endif |
|
29 | % endif | |
29 |
|
30 | |||
30 | </%def> |
|
31 | </%def> | |
31 |
|
32 | |||
32 | ## PLAINTEXT VERSION OF BODY |
|
33 | ## PLAINTEXT VERSION OF BODY | |
33 | <%def name="body_plaintext()" filter="n,trim"> |
|
34 | <%def name="body_plaintext()" filter="n,trim"> | |
34 | <% |
|
35 | <% | |
35 | data = { |
|
36 | data = { | |
36 | 'user': h.person(user), |
|
37 | 'user': h.person(user), | |
37 | 'repo_name': repo_name, |
|
38 | 'repo_name': repo_name, | |
38 | 'commit_id': h.show_id(commit), |
|
|||
39 | 'status': status_change, |
|
39 | 'status': status_change, | |
40 | 'comment_file': comment_file, |
|
40 | 'comment_file': comment_file, | |
41 | 'comment_line': comment_line, |
|
41 | 'comment_line': comment_line, | |
42 | 'comment_type': comment_type, |
|
42 | 'comment_type': comment_type, | |
|
43 | ||||
|
44 | 'commit_id': h.show_id(commit), | |||
43 | } |
|
45 | } | |
44 | %> |
|
46 | %> | |
45 | ${self.subject()} |
|
|||
46 |
|
47 | |||
47 | * ${_('Comment link')}: ${commit_comment_url} |
|
48 | * ${_('Comment link')}: ${commit_comment_url} | |
48 |
|
49 | |||
|
50 | %if status_change: | |||
|
51 | * ${_('Commit status')}: ${_('Status was changed to')}: *${status_change}* | |||
|
52 | ||||
|
53 | %endif | |||
49 | * ${_('Commit')}: ${h.show_id(commit)} |
|
54 | * ${_('Commit')}: ${h.show_id(commit)} | |
50 |
|
55 | |||
|
56 | * ${_('Commit message')}: ${commit.message} | |||
|
57 | ||||
51 | %if comment_file: |
|
58 | %if comment_file: | |
52 | * ${_('File: {comment_file} on line {comment_line}').format(**data)} |
|
59 | * ${_('File: {comment_file} on line {comment_line}').format(**data)} | |
|
60 | ||||
53 | %endif |
|
61 | %endif | |
|
62 | % if comment_type == 'todo': | |||
|
63 | ${_('`TODO` comment')}: | |||
|
64 | % else: | |||
|
65 | ${_('`Note` comment')}: | |||
|
66 | % endif | |||
|
67 | ||||
|
68 | ${comment_body |n, trim} | |||
54 |
|
69 | |||
55 | --- |
|
70 | --- | |
56 |
|
||||
57 | %if status_change: |
|
|||
58 | ${_('Commit status was changed to')}: *${status_change}* |
|
|||
59 | %endif |
|
|||
60 |
|
||||
61 | ${comment_body|n} |
|
|||
62 |
|
||||
63 | ${self.plaintext_footer()} |
|
71 | ${self.plaintext_footer()} | |
64 | </%def> |
|
72 | </%def> | |
65 |
|
73 | |||
66 |
|
74 | |||
67 | <% |
|
75 | <% | |
68 | data = { |
|
76 | data = { | |
69 | 'user': h.person(user), |
|
77 | 'user': h.person(user), | |
70 | 'repo': commit_target_repo, |
|
|||
71 | 'repo_name': repo_name, |
|
|||
72 | 'commit_id': h.show_id(commit), |
|
|||
73 | 'comment_file': comment_file, |
|
78 | 'comment_file': comment_file, | |
74 | 'comment_line': comment_line, |
|
79 | 'comment_line': comment_line, | |
75 | 'comment_type': comment_type, |
|
80 | 'comment_type': comment_type, | |
|
81 | 'renderer_type': renderer_type or 'plain', | |||
|
82 | ||||
|
83 | 'repo': commit_target_repo_url, | |||
|
84 | 'repo_name': repo_name, | |||
|
85 | 'commit_id': h.show_id(commit), | |||
76 | } |
|
86 | } | |
77 | %> |
|
87 | %> | |
78 | <table style="text-align:left;vertical-align:middle;"> |
|
88 | ||
79 | <tr><td colspan="2" style="width:100%;padding-bottom:15px;border-bottom:1px solid #dbd9da;"> |
|
89 | <table style="text-align:left;vertical-align:middle;width: 100%"> | |
|
90 | <tr> | |||
|
91 | <td style="width:100%;border-bottom:1px solid #dbd9da;"> | |||
80 |
|
|
92 | ||
81 | % if comment_file: |
|
93 | <h4 style="margin: 0"> | |
82 | <h4><a href="${commit_comment_url}" style="color:#427cc9;text-decoration:none;cursor:pointer">${_('{user} left a {comment_type} on file `{comment_file}` in commit `{commit_id}`').format(**data)}</a> ${_('in the {repo} repository').format(**data) |n}</h4> |
|
94 | <div style="margin-bottom: 4px; color:#7E7F7F"> | |
83 | % else: |
|
95 | @${h.person(user.username)} | |
84 | <h4><a href="${commit_comment_url}" style="color:#427cc9;text-decoration:none;cursor:pointer">${_('{user} left a {comment_type} on commit `{commit_id}`').format(**data) |n}</a> ${_('in the {repo} repository').format(**data) |n}</h4> |
|
96 | </div> | |
85 | % endif |
|
97 | ${_('left a')} | |
86 | </td></tr> |
|
98 | <a href="${commit_comment_url}" style="${base.link_css()}"> | |
|
99 | % if comment_file: | |||
|
100 | ${_('{comment_type} on file `{comment_file}` in commit.').format(**data)} | |||
|
101 | % else: | |||
|
102 | ${_('{comment_type} on commit.').format(**data) |n} | |||
|
103 | % endif | |||
|
104 | </a> | |||
|
105 | <div style="margin-top: 10px"></div> | |||
|
106 | ${_('Commit')} <code>${data['commit_id']}</code> ${_('of repository')}: ${data['repo_name']} | |||
|
107 | </h4> | |||
87 |
|
108 | |||
88 | <tr><td style="padding-right:20px;padding-top:15px;">${_('Commit')}</td><td style="padding-top:15px;"><a href="${commit_comment_url}" style="color:#427cc9;text-decoration:none;cursor:pointer">${h.show_id(commit)}</a></td></tr> |
|
109 | </td> | |
89 | <tr><td style="padding-right:20px;">${_('Description')}</td><td style="white-space:pre-wrap">${h.urlify_commit_message(commit.message, repo_name)}</td></tr> |
|
110 | </tr> | |
|
111 | ||||
|
112 | </table> | |||
|
113 | ||||
|
114 | <table style="text-align:left;vertical-align:middle;width: 100%"> | |||
|
115 | ||||
|
116 | ## spacing def | |||
|
117 | <tr> | |||
|
118 | <td style="width: 130px"></td> | |||
|
119 | <td></td> | |||
|
120 | </tr> | |||
90 |
|
121 | |||
91 | % if status_change: |
|
122 | % if status_change: | |
92 | <tr> |
|
123 | <tr> | |
93 | <td style="padding-right:20px;">${_('Status')}</td> |
|
124 | <td style="padding-right:20px;">${_('Commit Status')}:</td> | |
94 | <td> |
|
125 | <td> | |
95 |
${_(' |
|
126 | ${_('Status was changed to')}: ${base.status_text(status_change, tag_type=status_change_type)} | |
96 | </td> |
|
127 | </td> | |
97 | </tr> |
|
128 | </tr> | |
98 | % endif |
|
129 | % endif | |
|
130 | ||||
99 | <tr> |
|
131 | <tr> | |
100 | <td style="padding-right:20px;"> |
|
132 | <td style="padding-right:20px;">${_('Commit')}:</td> | |
|
133 | <td> | |||
|
134 | <a href="${commit_comment_url}" style="${base.link_css()}">${h.show_id(commit)}</a> | |||
|
135 | </td> | |||
|
136 | </tr> | |||
|
137 | <tr> | |||
|
138 | <td style="padding-right:20px;">${_('Commit message')}:</td> | |||
|
139 | <td style="white-space:pre-wrap">${h.urlify_commit_message(commit.message, repo_name)}</td> | |||
|
140 | </tr> | |||
|
141 | ||||
|
142 | % if comment_file: | |||
|
143 | <tr> | |||
|
144 | <td style="padding-right:20px;">${_('File')}:</td> | |||
|
145 | <td><a href="${commit_comment_url}" style="${base.link_css()}">${_('`{comment_file}` on line {comment_line}').format(**data)}</a></td> | |||
|
146 | </tr> | |||
|
147 | % endif | |||
|
148 | ||||
|
149 | <tr style="background-image: linear-gradient(to right, black 33%, rgba(255,255,255,0) 0%);background-position: bottom;background-size: 3px 1px;background-repeat: repeat-x;"> | |||
|
150 | <td colspan="2" style="padding-right:20px;"> | |||
101 | % if comment_type == 'todo': |
|
151 | % if comment_type == 'todo': | |
102 | ${(_('TODO comment on line: {comment_line}') if comment_file else _('TODO comment')).format(**data)} |
|
152 | ${_('`TODO` comment')}: | |
103 | % else: |
|
153 | % else: | |
104 | ${(_('Note comment on line: {comment_line}') if comment_file else _('Note comment')).format(**data)} |
|
154 | ${_('`Note` comment')}: | |
105 | % endif |
|
155 | % endif | |
106 | </td> |
|
156 | </td> | |
107 | <td style="line-height:1.2em;white-space:pre-wrap">${h.render(comment_body, renderer=renderer_type, mentions=True)}</td></tr> |
|
157 | </tr> | |
|
158 | ||||
|
159 | <td colspan="2" style="background: #F7F7F7">${h.render(comment_body, renderer=data['renderer_type'], mentions=True)}</td> | |||
|
160 | </tr> | |||
108 | </table> |
|
161 | </table> |
@@ -1,13 +1,20 b'' | |||||
1 | ## -*- coding: utf-8 -*- |
|
1 | ## -*- coding: utf-8 -*- | |
2 | <%inherit file="base.mako"/> |
|
2 | <%inherit file="base.mako"/> | |
|
3 | <%namespace name="base" file="base.mako"/> | |||
3 |
|
4 | |||
4 | <%def name="subject()" filter="n,trim,whitespace_filter"> |
|
5 | <%def name="subject()" filter="n,trim,whitespace_filter"> | |
5 | RhodeCode test email: ${h.format_date(date)} |
|
6 | RhodeCode test email: ${h.format_date(date)} | |
6 | </%def> |
|
7 | </%def> | |
7 |
|
8 | |||
8 | ## plain text version of the email. Empty by default |
|
9 | ## plain text version of the email. Empty by default | |
9 | <%def name="body_plaintext()" filter="n,trim"> |
|
10 | <%def name="body_plaintext()" filter="n,trim"> | |
10 | Test Email from RhodeCode version: ${rhodecode_version} |
|
11 | Test Email from RhodeCode version: ${rhodecode_version} | |
|
12 | Email sent by: ${h.person(user)} | |||
|
13 | ||||
|
14 | --- | |||
|
15 | ${self.plaintext_footer()} | |||
11 | </%def> |
|
16 | </%def> | |
12 |
|
17 | |||
13 | ${body_plaintext()} No newline at end of file |
|
18 | Test Email from RhodeCode version: ${rhodecode_version} | |
|
19 | <br/><br/> | |||
|
20 | Email sent by: <strong>${h.person(user)}</strong> |
@@ -1,21 +1,21 b'' | |||||
1 | ## -*- coding: utf-8 -*- |
|
1 | ## -*- coding: utf-8 -*- | |
2 | <%inherit file="base.mako"/> |
|
2 | <%inherit file="base.mako"/> | |
3 |
|
3 | |||
4 | <%def name="subject()" filter="n,trim,whitespace_filter"> |
|
4 | <%def name="subject()" filter="n,trim,whitespace_filter"> | |
5 | </%def> |
|
5 | </%def> | |
6 |
|
6 | |||
7 |
|
7 | |||
8 | ## plain text version of the email. Empty by default |
|
8 | ## plain text version of the email. Empty by default | |
9 | <%def name="body_plaintext()" filter="n,trim"> |
|
9 | <%def name="body_plaintext()" filter="n,trim"> | |
10 | ${body} |
|
10 | ${body} | |
11 |
|
11 | |||
|
12 | --- | |||
12 | ${self.plaintext_footer()} |
|
13 | ${self.plaintext_footer()} | |
13 | </%def> |
|
14 | </%def> | |
14 |
|
15 | |||
15 | ## BODY GOES BELOW |
|
16 | ## BODY GOES BELOW | |
16 | <table style="text-align:left;vertical-align:top;"> |
|
17 | <table style="text-align:left;vertical-align:top;"> | |
17 | <tr><td style="padding-right:20px;padding-top:15px;white-space:pre-wrap">${body}</td></tr> |
|
18 | <tr> | |
|
19 | <td style="padding-right:20px;padding-top:15px;white-space:pre-wrap">${body}</td> | |||
|
20 | </tr> | |||
18 | </table> |
|
21 | </table> | |
19 | <p><a style="margin-top:15px;margin-left:1%;font-family:sans-serif;font-weight:100;font-size:11px;display:block;color:#666666;text-decoration:none;" href="${instance_url}"> |
|
|||
20 | ${self.plaintext_footer()} |
|
|||
21 | </a></p> No newline at end of file |
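
After this change the notification templates share one skeleton: inherit from base.mako, define subject(), define body_plaintext() ending with a '---' separator plus the shared plaintext_footer(), then put the HTML body at module level, while the wrapper table and footer link are rendered once by base.mako. A minimal sketch of a new email type under those assumptions; the subject and body strings are placeholders:

    <%inherit file="base.mako"/>

    <%def name="subject()" filter="n,trim,whitespace_filter">
        ${_('Example subject')}
    </%def>

    ## plain text alternative of the message
    <%def name="body_plaintext()" filter="n,trim">
    ${_('Example body text')}

    ---
    ${self.plaintext_footer()}
    </%def>

    ## HTML BODY GOES BELOW
    <p>${_('Example body text')}</p>
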
|
@@ -1,33 +1,37 b'' | |||||
1 | ## -*- coding: utf-8 -*- |
|
1 | ## -*- coding: utf-8 -*- | |
2 | <%inherit file="base.mako"/> |
|
2 | <%inherit file="base.mako"/> | |
|
3 | <%namespace name="base" file="base.mako"/> | |||
3 |
|
4 | |||
4 | <%def name="subject()" filter="n,trim,whitespace_filter"> |
|
5 | <%def name="subject()" filter="n,trim,whitespace_filter"> | |
5 | RhodeCode Password reset |
|
6 | RhodeCode Password reset | |
6 | </%def> |
|
7 | </%def> | |
7 |
|
8 | |||
8 | ## plain text version of the email. Empty by default |
|
9 | ## plain text version of the email. Empty by default | |
9 | <%def name="body_plaintext()" filter="n,trim"> |
|
10 | <%def name="body_plaintext()" filter="n,trim"> | |
10 |
H |
|
11 | Hello ${user.username}, | |
11 |
|
12 | |||
12 |
|
|
13 | On ${h.format_date(date)} there was a request to reset your password using the email address `${email}` | |
13 |
|
14 | |||
14 |
*If you did |
|
15 | *If you did not request a password reset, please contact your RhodeCode administrator at: ${first_admin_email}* | |
15 |
|
16 | |||
16 | You can continue and generate a new password by clicking the following URL: |
|
17 | You can continue and generate a new password by clicking the following URL: | 
17 | ${password_reset_url} |
|
18 | ${password_reset_url} | |
18 |
|
19 | |||
19 | This link will be active for 10 minutes. |
|
20 | This link will be active for 10 minutes. | |
|
21 | ||||
|
22 | --- | |||
20 | ${self.plaintext_footer()} |
|
23 | ${self.plaintext_footer()} | |
21 | </%def> |
|
24 | </%def> | |
22 |
|
25 | |||
23 | ## BODY GOES BELOW |
|
26 | ## BODY GOES BELOW | |
24 | <p> |
|
27 | <p> | |
25 | Hello ${user.username}, |
|
28 | Hello ${user.username}, | |
26 | </p><p> |
|
29 | </p><p> | |
27 |
|
|
30 | On ${h.format_date(date)} there was a request to reset your password using the email address `${email}` | |
28 | <br/> |
|
31 | <br/><br/> | |
29 | <strong>If you did not request a password reset, please contact your RhodeCode administrator.</strong> |
|
32 | <strong>If you did not request a password reset, please contact your RhodeCode administrator at: ${first_admin_email}.</strong> | |
30 | </p><p> |
|
33 | </p><p> | |
31 | <a href="${password_reset_url}">${_('Generate new password here')}.</a> |
|
34 | You can continue, and generate new password by clicking following URL:<br/><br/> | |
32 | This link will be active for 10 minutes. |
|
35 | <a href="${password_reset_url}" style="${base.link_css()}">${password_reset_url}</a> | |
|
36 | <br/><br/>This link will be active for 10 minutes. | |||
33 | </p> |
|
37 | </p> |
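The password-reset template above now surfaces `first_admin_email` in the "did not request" warning, in both the plaintext and HTML variants. A tiny plain-Python sketch of that warning line, with a hypothetical admin address standing in for the real template variable:

    # Hypothetical value; the Mako template substitutes ${first_admin_email} here.
    first_admin_email = 'admin@example.com'

    warning = ('*If you did not request a password reset, please contact '
               'your RhodeCode administrator at: {}*'.format(first_admin_email))
    print(warning)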
@@ -1,29 +1,31 b'' | |||||
1 | ## -*- coding: utf-8 -*- |
|
1 | ## -*- coding: utf-8 -*- | |
2 | <%inherit file="base.mako"/> |
|
2 | <%inherit file="base.mako"/> | |
|
3 | <%namespace name="base" file="base.mako"/> | |||
3 |
|
4 | |||
4 | <%def name="subject()" filter="n,trim,whitespace_filter"> |
|
5 | <%def name="subject()" filter="n,trim,whitespace_filter"> | |
5 | Your new RhodeCode password |
|
6 | Your new RhodeCode password | |
6 | </%def> |
|
7 | </%def> | |
7 |
|
8 | |||
8 | ## plain text version of the email. Empty by default |
|
9 | ## plain text version of the email. Empty by default | |
9 | <%def name="body_plaintext()" filter="n,trim"> |
|
10 | <%def name="body_plaintext()" filter="n,trim"> | |
10 |
H |
|
11 | Hello ${user.username}, | |
11 |
|
12 | |||
12 | Below is your new access password for RhodeCode. |
|
13 | Below is your new access password for RhodeCode requested via password reset link. | |
13 |
|
14 | |||
14 |
*If you did |
|
15 | *If you did not request a password reset, please contact your RhodeCode administrator at: ${first_admin_email}.* | |
15 |
|
16 | |||
16 | password: ${new_password} |
|
17 | new password: ${new_password} | |
17 |
|
18 | |||
|
19 | --- | |||
18 | ${self.plaintext_footer()} |
|
20 | ${self.plaintext_footer()} | |
19 | </%def> |
|
21 | </%def> | |
20 |
|
22 | |||
21 | ## BODY GOES BELOW |
|
23 | ## BODY GOES BELOW | |
22 | <p> |
|
24 | <p> | |
23 | Hello ${user.username}, |
|
25 | Hello ${user.username}, | |
24 | </p><p> |
|
26 | </p><p> | |
25 | Below is your new access password for RhodeCode. |
|
27 | Below is your new access password for RhodeCode requested via password reset link. | |
26 | <br/> |
|
28 | <br/><br/> | |
27 |
<strong>If you did |
|
29 | <strong>If you did not request a password reset, please contact your RhodeCode administrator at: ${first_admin_email}.</strong> | |
28 | </p> |
|
30 | </p> | |
29 |
<p>password: < |
|
31 | <p>new password: <code>${new_password}</code> |
@@ -1,114 +1,191 b'' | |||||
1 | ## -*- coding: utf-8 -*- |
|
1 | ## -*- coding: utf-8 -*- | |
2 | <%inherit file="base.mako"/> |
|
2 | <%inherit file="base.mako"/> | |
3 | <%namespace name="base" file="base.mako"/> |
|
3 | <%namespace name="base" file="base.mako"/> | |
4 |
|
4 | |||
5 | ## EMAIL SUBJECT |
|
5 | ## EMAIL SUBJECT | |
6 | <%def name="subject()" filter="n,trim,whitespace_filter"> |
|
6 | <%def name="subject()" filter="n,trim,whitespace_filter"> | |
7 | <% |
|
7 | <% | |
8 | data = { |
|
8 | data = { | |
9 | 'user': h.person(user), |
|
9 | 'user': '@'+h.person(user), | |
10 | 'pr_title': pull_request.title, |
|
10 | 'repo_name': repo_name, | |
11 | 'pr_id': pull_request.pull_request_id, |
|
|||
12 | 'status': status_change, |
|
11 | 'status': status_change, | |
13 | 'comment_file': comment_file, |
|
12 | 'comment_file': comment_file, | |
14 | 'comment_line': comment_line, |
|
13 | 'comment_line': comment_line, | |
15 | 'comment_type': comment_type, |
|
14 | 'comment_type': comment_type, | |
|
15 | ||||
|
16 | 'pr_title': pull_request.title, | |||
|
17 | 'pr_id': pull_request.pull_request_id, | |||
16 | } |
|
18 | } | |
17 | %> |
|
19 | %> | |
18 |
|
20 | |||
19 | ${(_('[mention]') if mention else '')} \ |
|
|||
20 |
|
|
21 | ||
21 | % if comment_file: |
|
22 | % if comment_file: | |
22 |
${_('{user} left a {comment_type} on file `{comment_file}` in pull request |
|
23 | ${(_('[mention]') if mention else '')} ${_('{user} left a {comment_type} on file `{comment_file}` in pull request !{pr_id}: "{pr_title}"').format(**data) |n} | |
23 | % else: |
|
24 | % else: | |
24 | % if status_change: |
|
25 | % if status_change: | |
25 |
${_('[status: {status}] {user} left a {comment_type} on pull request |
|
26 | ${(_('[mention]') if mention else '')} ${_('[status: {status}] {user} left a {comment_type} on pull request !{pr_id}: "{pr_title}"').format(**data) |n} | |
26 | % else: |
|
27 | % else: | |
27 |
${_('{user} left a {comment_type} on pull request |
|
28 | ${(_('[mention]') if mention else '')} ${_('{user} left a {comment_type} on pull request !{pr_id}: "{pr_title}"').format(**data) |n} | |
28 | % endif |
|
29 | % endif | |
29 | % endif |
|
30 | % endif | |
|
31 | ||||
30 | </%def> |
|
32 | </%def> | |
31 |
|
33 | |||
32 | ## PLAINTEXT VERSION OF BODY |
|
34 | ## PLAINTEXT VERSION OF BODY | |
33 | <%def name="body_plaintext()" filter="n,trim"> |
|
35 | <%def name="body_plaintext()" filter="n,trim"> | |
34 | <% |
|
36 | <% | |
35 | data = { |
|
37 | data = { | |
36 | 'user': h.person(user), |
|
38 | 'user': h.person(user), | |
37 | 'pr_title': pull_request.title, |
|
39 | 'repo_name': repo_name, | |
38 | 'pr_id': pull_request.pull_request_id, |
|
|||
39 | 'status': status_change, |
|
40 | 'status': status_change, | |
40 | 'comment_file': comment_file, |
|
41 | 'comment_file': comment_file, | |
41 | 'comment_line': comment_line, |
|
42 | 'comment_line': comment_line, | |
42 | 'comment_type': comment_type, |
|
43 | 'comment_type': comment_type, | |
|
44 | ||||
|
45 | 'pr_title': pull_request.title, | |||
|
46 | 'pr_id': pull_request.pull_request_id, | |||
|
47 | 'source_ref_type': pull_request.source_ref_parts.type, | |||
|
48 | 'source_ref_name': pull_request.source_ref_parts.name, | |||
|
49 | 'target_ref_type': pull_request.target_ref_parts.type, | |||
|
50 | 'target_ref_name': pull_request.target_ref_parts.name, | |||
|
51 | 'source_repo': pull_request_source_repo.repo_name, | |||
|
52 | 'target_repo': pull_request_target_repo.repo_name, | |||
|
53 | 'source_repo_url': pull_request_source_repo_url, | |||
|
54 | 'target_repo_url': pull_request_target_repo_url, | |||
43 | } |
|
55 | } | |
44 | %> |
|
56 | %> | |
45 | ${self.subject()} |
|
57 | ||
|
58 | ${h.literal(_('Pull request !{pr_id}: `{pr_title}`').format(**data))} | |||
|
59 | ||||
|
60 | * ${h.literal(_('Commit flow: {source_ref_type}:{source_ref_name} of {source_repo_url} into {target_ref_type}:{target_ref_name} of {target_repo_url}').format(**data))} | |||
46 |
|
61 | |||
47 | * ${_('Comment link')}: ${pr_comment_url} |
|
62 | * ${_('Comment link')}: ${pr_comment_url} | |
48 |
|
63 | |||
49 | * ${_('Source repository')}: ${pr_source_repo_url} |
|
64 | %if status_change and not closing_pr: | |
|
65 | * ${_('{user} submitted pull request !{pr_id} status: *{status}*').format(**data)} | |||
50 |
|
66 | |||
|
67 | %elif status_change and closing_pr: | |||
|
68 | * ${_('{user} submitted pull request !{pr_id} status: *{status} and closed*').format(**data)} | |||
|
69 | ||||
|
70 | %endif | |||
51 | %if comment_file: |
|
71 | %if comment_file: | |
52 |
* ${_('File: {comment_file} on line {comment_line}').format( |
|
72 | * ${_('File: {comment_file} on line {comment_line}').format(**data)} | |
|
73 | ||||
53 | %endif |
|
74 | %endif | |
|
75 | % if comment_type == 'todo': | |||
|
76 | ${_('`TODO` comment')}: | |||
|
77 | % else: | |||
|
78 | ${_('`Note` comment')}: | |||
|
79 | % endif | |||
|
80 | ||||
|
81 | ${comment_body |n, trim} | |||
54 |
|
82 | |||
55 | --- |
|
83 | --- | |
56 |
|
||||
57 | %if status_change and not closing_pr: |
|
|||
58 | ${_('{user} submitted pull request #{pr_id} status: *{status}*').format(**data)} |
|
|||
59 | %elif status_change and closing_pr: |
|
|||
60 | ${_('{user} submitted pull request #{pr_id} status: *{status} and closed*').format(**data)} |
|
|||
61 | %endif |
|
|||
62 |
|
||||
63 | ${comment_body |n} |
|
|||
64 |
|
||||
65 | ${self.plaintext_footer()} |
|
84 | ${self.plaintext_footer()} | |
66 | </%def> |
|
85 | </%def> | |
67 |
|
86 | |||
68 |
|
87 | |||
69 | <% |
|
88 | <% | |
70 | data = { |
|
89 | data = { | |
71 | 'user': h.person(user), |
|
90 | 'user': h.person(user), | |
72 | 'pr_title': pull_request.title, |
|
|||
73 | 'pr_id': pull_request.pull_request_id, |
|
|||
74 | 'status': status_change, |
|
|||
75 | 'comment_file': comment_file, |
|
91 | 'comment_file': comment_file, | |
76 | 'comment_line': comment_line, |
|
92 | 'comment_line': comment_line, | |
77 | 'comment_type': comment_type, |
|
93 | 'comment_type': comment_type, | |
|
94 | 'renderer_type': renderer_type or 'plain', | |||
|
95 | ||||
|
96 | 'pr_title': pull_request.title, | |||
|
97 | 'pr_id': pull_request.pull_request_id, | |||
|
98 | 'status': status_change, | |||
|
99 | 'source_ref_type': pull_request.source_ref_parts.type, | |||
|
100 | 'source_ref_name': pull_request.source_ref_parts.name, | |||
|
101 | 'target_ref_type': pull_request.target_ref_parts.type, | |||
|
102 | 'target_ref_name': pull_request.target_ref_parts.name, | |||
|
103 | 'source_repo': pull_request_source_repo.repo_name, | |||
|
104 | 'target_repo': pull_request_target_repo.repo_name, | |||
|
105 | 'source_repo_url': h.link_to(pull_request_source_repo.repo_name, pull_request_source_repo_url), | |||
|
106 | 'target_repo_url': h.link_to(pull_request_target_repo.repo_name, pull_request_target_repo_url), | |||
78 | } |
|
107 | } | |
79 | %> |
|
108 | %> | |
80 | <table style="text-align:left;vertical-align:middle;"> |
|
109 | ||
81 | <tr><td colspan="2" style="width:100%;padding-bottom:15px;border-bottom:1px solid #dbd9da;"> |
|
110 | <table style="text-align:left;vertical-align:middle;width: 100%"> | |
|
111 | <tr> | |||
|
112 | <td style="width:100%;border-bottom:1px solid #dbd9da;"> | |||
82 |
|
|
113 | ||
83 | % if comment_file: |
|
114 | <h4 style="margin: 0"> | |
84 | <h4><a href="${pr_comment_url}" style="color:#427cc9;text-decoration:none;cursor:pointer">${_('{user} left a {comment_type} on file `{comment_file}` in pull request #{pr_id} "{pr_title}"').format(**data) |n}</a></h4> |
|
115 | <div style="margin-bottom: 4px; color:#7E7F7F"> | |
85 | % else: |
|
116 | @${h.person(user.username)} | |
86 | <h4><a href="${pr_comment_url}" style="color:#427cc9;text-decoration:none;cursor:pointer">${_('{user} left a {comment_type} on pull request #{pr_id} "{pr_title}"').format(**data) |n}</a></h4> |
|
117 | </div> | |
87 | % endif |
|
118 | ${_('left a')} | |
|
119 | <a href="${pr_comment_url}" style="${base.link_css()}"> | |||
|
120 | % if comment_file: | |||
|
121 | ${_('{comment_type} on file `{comment_file}` in pull request.').format(**data)} | |||
|
122 | % else: | |||
|
123 | ${_('{comment_type} on pull request.').format(**data) |n} | |||
|
124 | % endif | |||
|
125 | </a> | |||
|
126 | <div style="margin-top: 10px"></div> | |||
|
127 | ${_('Pull request')} <code>!${data['pr_id']}: ${data['pr_title']}</code> | |||
|
128 | </h4> | |||
88 |
|
129 | |||
89 | </td> |

|
130 | </td> | |
90 | <tr><td style="padding-right:20px;padding-top:15px;">${_('Source')}</td><td style="padding-top:15px;"><a style="color:#427cc9;text-decoration:none;cursor:pointer" href="${pr_source_repo_url}">${pr_source_repo.repo_name}</a></td></tr> |
|
131 | </tr> | |
|
132 | ||||
|
133 | </table> | |||
|
134 | ||||
|
135 | <table style="text-align:left;vertical-align:middle;width: 100%"> | |||
|
136 | ||||
|
137 | ## spacing def | |||
|
138 | <tr> | |||
|
139 | <td style="width: 130px"></td> | |||
|
140 | <td></td> | |||
|
141 | </tr> | |||
91 |
|
142 | |||
92 | % if status_change: |
|
143 | % if status_change: | |
|
144 | <tr> | |||
|
145 | <td style="padding-right:20px;">${_('Review Status')}:</td> | |||
|
146 | <td> | |||
|
147 | % if closing_pr: | |||
|
148 | ${_('Closed pull request with status')}: ${base.status_text(status_change, tag_type=status_change_type)} | |||
|
149 | % else: | |||
|
150 | ${_('Submitted review status')}: ${base.status_text(status_change, tag_type=status_change_type)} | |||
|
151 | % endif | |||
|
152 | </td> | |||
|
153 | </tr> | |||
|
154 | % endif | |||
|
155 | ||||
|
156 | <tr> | |||
|
157 | <td style="padding-right:20px;line-height:20px;">${_('Commit Flow')}:</td> | |||
|
158 | <td style="line-height:20px;"> | |||
|
159 | ${base.tag_button('{}:{}'.format(data['source_ref_type'], pull_request.source_ref_parts.name))} ${_('of')} ${data['source_repo_url']} | |||
|
160 | → | |||
|
161 | ${base.tag_button('{}:{}'.format(data['target_ref_type'], pull_request.target_ref_parts.name))} ${_('of')} ${data['target_repo_url']} | |||
|
162 | </td> | |||
|
163 | </tr> | |||
|
164 | <tr> | |||
|
165 | <td style="padding-right:20px;">${_('Pull request')}:</td> | |||
|
166 | <td> | |||
|
167 | <a href="${pull_request_url}" style="${base.link_css()}"> | |||
|
168 | !${pull_request.pull_request_id} | |||
|
169 | </a> | |||
|
170 | </td> | |||
|
171 | </tr> | |||
|
172 | % if comment_file: | |||
93 | <tr> |
|
173 | <tr> | |
94 |
<td style="padding-right:20px;">${_(' |
|
174 | <td style="padding-right:20px;">${_('File')}:</td> | |
95 | <td> |
|
175 | <td><a href="${pr_comment_url}" style="${base.link_css()}">${_('`{comment_file}` on line {comment_line}').format(**data)}</a></td> | |
96 | % if closing_pr: |
|
|||
97 | ${_('Closed pull request with status')}: ${base.status_text(status_change, tag_type=status_change_type)} |
|
|||
98 | % else: |
|
|||
99 | ${_('Submitted review status')}: ${base.status_text(status_change, tag_type=status_change_type)} |
|
|||
100 | % endif |
|
|||
101 | </td> |
|
|||
102 | </tr> |
|
176 | </tr> | |
103 | % endif |
|
177 | % endif | |
104 | <tr> |
|
178 | ||
105 | <td style="padding-right:20px;"> |
|
179 | <tr style="background-image: linear-gradient(to right, black 33%, rgba(255,255,255,0) 0%);background-position: bottom;background-size: 3px 1px;background-repeat: repeat-x;"> | |
|
180 | <td colspan="2" style="padding-right:20px;"> | |||
106 | % if comment_type == 'todo': |
|
181 | % if comment_type == 'todo': | |
107 | ${(_('TODO comment on line: {comment_line}') if comment_file else _('TODO comment')).format(**data)} |
|
182 | ${_('`TODO` comment')}: | |
108 | % else: |
|
183 | % else: | |
109 | ${(_('Note comment on line: {comment_line}') if comment_file else _('Note comment')).format(**data)} |
|
184 | ${_('`Note` comment')}: | |
110 | % endif |
|
185 | % endif | |
111 | </td> |
|
186 | </td> | |
112 | <td style="line-height:1.2em;white-space:pre-wrap">${h.render(comment_body, renderer=renderer_type, mentions=True)}</td> |
|
187 | </tr> | |
|
188 | ||||
|
189 | <td colspan="2" style="background: #F7F7F7">${h.render(comment_body, renderer=data['renderer_type'], mentions=True)}</td> | |||
113 | </tr> |
|
190 | </tr> | |
114 | </table> |
|
191 | </table> |
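The comment-notification template above assembles its subject and body by collecting the relevant values into a `data` dict inside a Mako `<% %>` block and interpolating them with `str.format(**data)`. A minimal standalone sketch of that pattern in plain Python, using made-up values in place of the real `user`, `pull_request`, and comment context:

    # Sketch only: hypothetical stand-ins for the template context.
    data = {
        'user': '@marcin',                  # '@' + h.person(user) in the template
        'comment_type': 'note',
        'comment_file': 'setup.py',
        'pr_id': 200,
        'pr_title': 'Example Pull Request',
    }

    subject = ('{user} left a {comment_type} on file `{comment_file}` '
               'in pull request !{pr_id}: "{pr_title}"').format(**data)
    print(subject)
    # @marcin left a note on file `setup.py` in pull request !200: "Example Pull Request"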
@@ -1,85 +1,144 b'' | |||||
1 | ## -*- coding: utf-8 -*- |
|
1 | ## -*- coding: utf-8 -*- | |
2 | <%inherit file="base.mako"/> |
|
2 | <%inherit file="base.mako"/> | |
3 | <%namespace name="base" file="base.mako"/> |
|
3 | <%namespace name="base" file="base.mako"/> | |
4 |
|
4 | |||
|
5 | ## EMAIL SUBJECT | |||
5 | <%def name="subject()" filter="n,trim,whitespace_filter"> |
|
6 | <%def name="subject()" filter="n,trim,whitespace_filter"> | |
6 | <% |
|
7 | <% | |
7 | data = { |
|
8 | data = { | |
8 | 'user': h.person(user), |
|
9 | 'user': '@'+h.person(user), | |
9 | 'pr_id': pull_request.pull_request_id, |
|
10 | 'pr_id': pull_request.pull_request_id, | |
10 | 'pr_title': pull_request.title, |
|
11 | 'pr_title': pull_request.title, | |
11 | } |
|
12 | } | |
12 | %> |
|
13 | %> | |
13 |
|
14 | |||
14 |
${_(' |
|
15 | ${_('{user} requested a pull request review. !{pr_id}: "{pr_title}"').format(**data) |n} | |
15 | </%def> |
|
16 | </%def> | |
16 |
|
17 | |||
17 |
|
18 | ## PLAINTEXT VERSION OF BODY | ||
18 | <%def name="body_plaintext()" filter="n,trim"> |
|
19 | <%def name="body_plaintext()" filter="n,trim"> | |
19 | <% |
|
20 | <% | |
20 | data = { |
|
21 | data = { | |
21 | 'user': h.person(user), |
|
22 | 'user': h.person(user), | |
22 | 'pr_id': pull_request.pull_request_id, |
|
23 | 'pr_id': pull_request.pull_request_id, | |
23 | 'pr_title': pull_request.title, |
|
24 | 'pr_title': pull_request.title, | |
24 | 'source_ref_type': pull_request.source_ref_parts.type, |
|
25 | 'source_ref_type': pull_request.source_ref_parts.type, | |
25 | 'source_ref_name': pull_request.source_ref_parts.name, |
|
26 | 'source_ref_name': pull_request.source_ref_parts.name, | |
26 | 'target_ref_type': pull_request.target_ref_parts.type, |
|
27 | 'target_ref_type': pull_request.target_ref_parts.type, | |
27 | 'target_ref_name': pull_request.target_ref_parts.name, |
|
28 | 'target_ref_name': pull_request.target_ref_parts.name, | |
28 | 'repo_url': pull_request_source_repo_url |
|
29 | 'repo_url': pull_request_source_repo_url, | |
|
30 | 'source_repo': pull_request_source_repo.repo_name, | |||
|
31 | 'target_repo': pull_request_target_repo.repo_name, | |||
|
32 | 'source_repo_url': pull_request_source_repo_url, | |||
|
33 | 'target_repo_url': pull_request_target_repo_url, | |||
29 | } |
|
34 | } | |
30 | %> |
|
35 | %> | |
31 | ${self.subject()} |
|
|||
32 |
|
36 | |||
|
37 | ${h.literal(_('Pull request !{pr_id}: `{pr_title}`').format(**data))} | |||
33 |
|
38 | |||
34 |
${h.literal(_(' |
|
39 | * ${h.literal(_('Commit flow: {source_ref_type}:{source_ref_name} of {source_repo_url} into {target_ref_type}:{target_ref_name} of {target_repo_url}').format(**data))} | |
35 |
|
40 | |||
36 |
|
41 | * ${_('Pull Request link')}: ${pull_request_url} | ||
37 | * ${_('Link')}: ${pull_request_url} |
|
|||
38 |
|
42 | |||
39 | * ${_('Title')}: ${pull_request.title} |
|
43 | * ${_('Title')}: ${pull_request.title} | |
40 |
|
44 | |||
41 | * ${_('Description')}: |
|
45 | * ${_('Description')}: | |
42 |
|
46 | |||
43 | ${pull_request.description} |
|
47 | ${pull_request.description | trim} | |
44 |
|
48 | |||
45 |
|
49 | |||
46 | * ${_ungettext('Commit (%(num)s)', 'Commits (%(num)s)', len(pull_request_commits) ) % {'num': len(pull_request_commits)}}: |
|
50 | * ${_ungettext('Commit (%(num)s)', 'Commits (%(num)s)', len(pull_request_commits) ) % {'num': len(pull_request_commits)}}: | |
47 |
|
51 | |||
48 | % for commit_id, message in pull_request_commits: |
|
52 | % for commit_id, message in pull_request_commits: | |
49 |
|
|
53 | - ${h.short_id(commit_id)} | |
50 |
|
|
54 | ${h.chop_at_smart(message, '\n', suffix_if_chopped='...')} | |
51 |
|
55 | |||
52 | % endfor |
|
56 | % endfor | |
53 |
|
57 | |||
|
58 | --- | |||
54 | ${self.plaintext_footer()} |
|
59 | ${self.plaintext_footer()} | |
55 | </%def> |
|
60 | </%def> | |
56 | <% |
|
61 | <% | |
57 | data = { |
|
62 | data = { | |
58 | 'user': h.person(user), |
|
63 | 'user': h.person(user), | |
59 | 'pr_id': pull_request.pull_request_id, |
|
64 | 'pr_id': pull_request.pull_request_id, | |
60 | 'pr_title': pull_request.title, |
|
65 | 'pr_title': pull_request.title, | |
61 | 'source_ref_type': pull_request.source_ref_parts.type, |
|
66 | 'source_ref_type': pull_request.source_ref_parts.type, | |
62 | 'source_ref_name': pull_request.source_ref_parts.name, |
|
67 | 'source_ref_name': pull_request.source_ref_parts.name, | |
63 | 'target_ref_type': pull_request.target_ref_parts.type, |
|
68 | 'target_ref_type': pull_request.target_ref_parts.type, | |
64 | 'target_ref_name': pull_request.target_ref_parts.name, |
|
69 | 'target_ref_name': pull_request.target_ref_parts.name, | |
65 | 'repo_url': pull_request_source_repo_url, |
|
70 | 'repo_url': pull_request_source_repo_url, | |
|
71 | 'source_repo': pull_request_source_repo.repo_name, | |||
|
72 | 'target_repo': pull_request_target_repo.repo_name, | |||
66 | 'source_repo_url': h.link_to(pull_request_source_repo.repo_name, pull_request_source_repo_url), |
|
73 | 'source_repo_url': h.link_to(pull_request_source_repo.repo_name, pull_request_source_repo_url), | |
67 | 'target_repo_url': h.link_to(pull_request_target_repo.repo_name, pull_request_target_repo_url) |
|
74 | 'target_repo_url': h.link_to(pull_request_target_repo.repo_name, pull_request_target_repo_url), | |
68 | } |
|
75 | } | |
69 | %> |
|
76 | %> | |
70 | <table style="text-align:left;vertical-align:middle;"> |
|
77 | ||
71 | <tr><td colspan="2" style="width:100%;padding-bottom:15px;border-bottom:1px solid #dbd9da;"><h4><a href="${pull_request_url}" style="color:#427cc9;text-decoration:none;cursor:pointer">${_('%(user)s wants you to review pull request #%(pr_id)s: "%(pr_title)s".') % data }</a></h4></td></tr> |
|
78 | <table style="text-align:left;vertical-align:middle;width: 100%"> | |
72 | <tr><td style="padding-right:20px;padding-top:15px;">${_('Title')}</td><td style="padding-top:15px;">${pull_request.title}</td></tr> |
|
79 | <tr> | |
73 | <tr><td style="padding-right:20px;">${_('Source')}</td><td>${base.tag_button(pull_request.source_ref_parts.name)} ${h.literal(_('%(source_ref_type)s of %(source_repo_url)s') % data)}</td></tr> |
|
80 | <td style="width:100%;border-bottom:1px solid #dbd9da;"> | |
74 | <tr><td style="padding-right:20px;">${_('Target')}</td><td>${base.tag_button(pull_request.target_ref_parts.name)} ${h.literal(_('%(target_ref_type)s of %(target_repo_url)s') % data)}</td></tr> |
|
81 | ||
75 | <tr><td style="padding-right:20px;">${_('Description')}</td><td style="white-space:pre-wrap">${pull_request.description}</td></tr> |
|
82 | <h4 style="margin: 0"> | |
76 | <tr><td style="padding-right:20px;">${_ungettext('%(num)s Commit', '%(num)s Commits', len(pull_request_commits)) % {'num': len(pull_request_commits)}}</td> |
|
83 | <div style="margin-bottom: 4px; color:#7E7F7F"> | |
77 | <td><ol style="margin:0 0 0 1em;padding:0;text-align:left;"> |
|
84 | @${h.person(user.username)} | |
78 | % for commit_id, message in pull_request_commits: |
|
85 | </div> | |
79 | <li style="margin:0 0 1em;"><pre style="margin:0 0 .5em">${h.short_id(commit_id)}</pre> |
|
86 | ${_('requested a')} | |
80 | ${h.chop_at_smart(message, '\n', suffix_if_chopped='...')} |
|
87 | <a href="${pull_request_url}" style="${base.link_css()}"> | |
81 | </li> |
|
88 | ${_('pull request review.').format(**data) } | |
82 |
|
|
89 | </a> | |
83 | </ol></td> |
|
90 | <div style="margin-top: 10px"></div> | |
|
91 | ${_('Pull request')} <code>!${data['pr_id']}: ${data['pr_title']}</code> | |||
|
92 | </h4> | |||
|
93 | ||||
|
94 | </td> | |||
|
95 | </tr> | |||
|
96 | ||||
|
97 | </table> | |||
|
98 | ||||
|
99 | <table style="text-align:left;vertical-align:middle;width: 100%"> | |||
|
100 | ## spacing def | |||
|
101 | <tr> | |||
|
102 | <td style="width: 130px"></td> | |||
|
103 | <td></td> | |||
|
104 | </tr> | |||
|
105 | ||||
|
106 | <tr> | |||
|
107 | <td style="padding-right:20px;line-height:20px;">${_('Commit Flow')}:</td> | |||
|
108 | <td style="line-height:20px;"> | |||
|
109 | ${base.tag_button('{}:{}'.format(data['source_ref_type'], pull_request.source_ref_parts.name))} ${_('of')} ${data['source_repo_url']} | |||
|
110 | → | |||
|
111 | ${base.tag_button('{}:{}'.format(data['target_ref_type'], pull_request.target_ref_parts.name))} ${_('of')} ${data['target_repo_url']} | |||
|
112 | </td> | |||
|
113 | </tr> | |||
|
114 | ||||
|
115 | <tr> | |||
|
116 | <td style="padding-right:20px;">${_('Pull request')}:</td> | |||
|
117 | <td> | |||
|
118 | <a href="${pull_request_url}" style="${base.link_css()}"> | |||
|
119 | !${pull_request.pull_request_id} | |||
|
120 | </a> | |||
|
121 | </td> | |||
|
122 | </tr> | |||
|
123 | <tr> | |||
|
124 | <td style="padding-right:20px;">${_('Description')}:</td> | |||
|
125 | <td style="white-space:pre-wrap"><code>${pull_request.description | trim}</code></td> | |||
|
126 | </tr> | |||
|
127 | <tr> | |||
|
128 | <td style="padding-right:20px;">${_ungettext('Commit (%(num)s)', 'Commits (%(num)s)', len(pull_request_commits)) % {'num': len(pull_request_commits)}}:</td> | |||
|
129 | <td></td> | |||
|
130 | </tr> | |||
|
131 | ||||
|
132 | <tr> | |||
|
133 | <td colspan="2"> | |||
|
134 | <ol style="margin:0 0 0 1em;padding:0;text-align:left;"> | |||
|
135 | % for commit_id, message in pull_request_commits: | |||
|
136 | <li style="margin:0 0 1em;"> | |||
|
137 | <pre style="margin:0 0 .5em"><a href="${h.route_path('repo_commit', repo_name=pull_request_source_repo.repo_name, commit_id=commit_id)}" style="${base.link_css()}">${h.short_id(commit_id)}</a></pre> | |||
|
138 | ${h.chop_at_smart(message, '\n', suffix_if_chopped='...')} | |||
|
139 | </li> | |||
|
140 | % endfor | |||
|
141 | </ol> | |||
|
142 | </td> | |||
84 | </tr> |
|
143 | </tr> | |
85 | </table> |
|
144 | </table> |
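Both the plaintext and HTML bodies of the review-request email above describe the merge direction with a single "Commit flow" string built from the source and target ref parts. A short sketch of that string assembly in plain Python, with hypothetical ref and repo values standing in for `pull_request.source_ref_parts`, `pull_request.target_ref_parts`, and the repo URLs:

    # Sketch only: hypothetical values for the ref/repo fields used by the template.
    data = {
        'source_ref_type': 'branch',
        'source_ref_name': 'feature-x',
        'target_ref_type': 'branch',
        'target_ref_name': 'default',
        'source_repo_url': 'http://example.com/repo-fork',
        'target_repo_url': 'http://example.com/repo',
    }

    commit_flow = ('Commit flow: {source_ref_type}:{source_ref_name} of {source_repo_url} '
                   'into {target_ref_type}:{target_ref_name} of {target_repo_url}').format(**data)
    print(commit_flow)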
@@ -1,21 +1,22 b'' | |||||
1 | ## -*- coding: utf-8 -*- |
|
1 | ## -*- coding: utf-8 -*- | |
2 | <%inherit file="base.mako"/> |
|
2 | <%inherit file="base.mako"/> | |
3 |
|
3 | |||
4 | <%def name="subject()" filter="n,trim,whitespace_filter"> |
|
4 | <%def name="subject()" filter="n,trim,whitespace_filter"> | |
5 | Test "Subject" ${_('hello "world"')|n} |
|
5 | Test "Subject" ${_('hello "world"')|n} | |
6 | </%def> |
|
6 | </%def> | |
7 |
|
7 | |||
8 | <%def name="headers()" filter="n,trim"> |
|
8 | <%def name="headers()" filter="n,trim"> | |
9 | X=Y |
|
9 | X=Y | |
10 | </%def> |
|
10 | </%def> | |
11 |
|
11 | |||
12 | ## plain text version of the email. Empty by default |
|
12 | ## plain text version of the email. Empty by default | |
13 | <%def name="body_plaintext()" filter="n,trim"> |
|
13 | <%def name="body_plaintext()" filter="n,trim"> | |
14 | Email Plaintext Body |
|
14 | Email Plaintext Body | |
15 | </%def> |
|
15 | </%def> | |
16 |
|
16 | |||
17 | ## BODY GOES BELOW |
|
17 | ## BODY GOES BELOW | |
18 |
< |
|
18 | <strong>Email Body</strong> | |
19 |
|
19 | <br/> | ||
20 | ${h.short_id('0' * 40)} |
|
20 | <br/> | |
21 | ${_('Translation')} No newline at end of file |
|
21 | `h.short_id()`: ${h.short_id('0' * 40)}<br/> | |
|
22 | ${_('Translation String')}<br/> |
@@ -1,27 +1,59 b'' | |||||
1 | ## -*- coding: utf-8 -*- |
|
1 | ## -*- coding: utf-8 -*- | |
2 | <%inherit file="base.mako"/> |
|
2 | <%inherit file="base.mako"/> | |
|
3 | <%namespace name="base" file="base.mako"/> | |||
3 |
|
4 | |||
4 | <%def name="subject()" filter="n,trim,whitespace_filter"> |
|
5 | <%def name="subject()" filter="n,trim,whitespace_filter"> | |
5 | RhodeCode new user registration: ${user.username} |
|
6 | RhodeCode new user registration: ${user.username} | |
6 | </%def> |
|
7 | </%def> | |
7 |
|
8 | |||
8 | <%def name="body_plaintext()" filter="n,trim"> |
|
9 | <%def name="body_plaintext()" filter="n,trim"> | |
9 |
|
10 | |||
10 | A new user `${user.username}` has registered on ${h.format_date(date)} |
|
11 | A new user `${user.username}` has registered on ${h.format_date(date)} | |
11 |
|
12 | |||
12 | - Username: ${user.username} |
|
13 | - Username: ${user.username} | |
13 | - Full Name: ${user.first_name} ${user.last_name} |
|
14 | - Full Name: ${user.first_name} ${user.last_name} | |
14 | - Email: ${user.email} |
|
15 | - Email: ${user.email} | |
15 | - Profile link: ${h.route_url('user_profile', username=user.username)} |
|
16 | - Profile link: ${h.route_url('user_profile', username=user.username)} | |
16 |
|
17 | |||
|
18 | --- | |||
17 | ${self.plaintext_footer()} |
|
19 | ${self.plaintext_footer()} | |
18 | </%def> |
|
20 | </%def> | |
19 |
|
21 | |||
20 | ## BODY GOES BELOW |
|
22 | ||
21 | <table style="text-align:left;vertical-align:middle;"> |
|
23 | <table style="text-align:left;vertical-align:middle;width: 100%"> | |
22 | <tr><td colspan="2" style="width:100%;padding-bottom:15px;border-bottom:1px solid #dbd9da;"><h4><a href="${h.route_url('user_profile', username=user.username)}" style="color:#427cc9;text-decoration:none;cursor:pointer">${_('New user %(user)s has registered on %(date)s') % {'user': user.username, 'date': h.format_date(date)}}</a></h4></td></tr> |
|
24 | <tr> | |
23 | <tr><td style="padding-right:20px;padding-top:20px;">${_('Username')}</td><td style="line-height:1;padding-top:20px;"><img style="margin-bottom:-5px;text-align:left;border:1px solid #dbd9da" src="${h.gravatar_url(user.email, 16)}" height="16" width="16"> ${user.username}</td></tr> |
|
25 | <td style="width:100%;border-bottom:1px solid #dbd9da;"> | |
24 | <tr><td style="padding-right:20px;">${_('Full Name')}</td><td>${user.first_name} ${user.last_name}</td></tr> |
|
26 | <h4 style="margin: 0"> | |
25 | <tr><td style="padding-right:20px;">${_('Email')}</td><td>${user.email}</td></tr> |
|
27 | <a href="${h.route_url('user_profile', username=user.username)}" style="${base.link_css()}"> | |
26 | <tr><td style="padding-right:20px;">${_('Profile')}</td><td><a href="${h.route_url('user_profile', username=user.username)}">${h.route_url('user_profile', username=user.username)}</a></td></tr> |
|
28 | ${_('New user {user} has registered on {date}').format(user=user.username, date=h.format_date(date))} | |
27 | </table> No newline at end of file |
|
29 | </a> | |
|
30 | </h4> | |||
|
31 | </td> | |||
|
32 | </tr> | |||
|
33 | </table> | |||
|
34 | ||||
|
35 | <table style="text-align:left;vertical-align:middle;width: 100%"> | |||
|
36 | ## spacing def | |||
|
37 | <tr> | |||
|
38 | <td style="width: 130px"></td> | |||
|
39 | <td></td> | |||
|
40 | </tr> | |||
|
41 | <tr> | |||
|
42 | <td style="padding-right:20px;padding-top:20px;">${_('Username')}:</td> | |||
|
43 | <td style="line-height:1;padding-top:20px;">${user.username}</td> | |||
|
44 | </tr> | |||
|
45 | <tr> | |||
|
46 | <td style="padding-right:20px;">${_('Full Name')}:</td> | |||
|
47 | <td>${user.first_name} ${user.last_name}</td> | |||
|
48 | </tr> | |||
|
49 | <tr> | |||
|
50 | <td style="padding-right:20px;">${_('Email')}:</td> | |||
|
51 | <td>${user.email}</td> | |||
|
52 | </tr> | |||
|
53 | <tr> | |||
|
54 | <td style="padding-right:20px;">${_('Profile')}:</td> | |||
|
55 | <td> | |||
|
56 | <a href="${h.route_url('user_profile', username=user.username)}">${h.route_url('user_profile', username=user.username)}</a> | |||
|
57 | </td> | |||
|
58 | </tr> | |||
|
59 | </table> |
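The registration template above replaces the old multi-cell header with one linked headline built via `str.format`. A small sketch of that headline string, assuming a hypothetical username and date in place of `user.username` and `h.format_date(date)`:

    import datetime

    # Hypothetical stand-ins for the template context.
    username = 'new_user'
    registered_on = datetime.date(2019, 1, 1).isoformat()

    headline = 'New user {user} has registered on {date}'.format(
        user=username, date=registered_on)
    print(headline)   # New user new_user has registered on 2019-01-01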
@@ -1,143 +1,138 b'' | |||||
1 | import collections |
|
|||
2 |
|
|
1 | # -*- coding: utf-8 -*- | |
3 |
|
2 | |||
4 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
3 | # Copyright (C) 2010-2019 RhodeCode GmbH | |
5 | # |
|
4 | # | |
6 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
7 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
8 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
9 | # |
|
8 | # | |
10 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
13 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
14 | # |
|
13 | # | |
15 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
16 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
17 | # |
|
16 | # | |
18 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
19 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
20 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
21 |
|
20 | |||
22 | import pytest |
|
21 | import pytest | |
|
22 | import collections | |||
23 |
|
23 | |||
24 | from rhodecode.lib.partial_renderer import PyramidPartialRenderer |
|
24 | from rhodecode.lib.partial_renderer import PyramidPartialRenderer | |
25 | from rhodecode.lib.utils2 import AttributeDict |
|
25 | from rhodecode.lib.utils2 import AttributeDict | |
|
26 | from rhodecode.model.db import User | |||
26 | from rhodecode.model.notification import EmailNotificationModel |
|
27 | from rhodecode.model.notification import EmailNotificationModel | |
27 |
|
28 | |||
28 |
|
29 | |||
29 | def test_get_template_obj(app, request_stub): |
|
30 | def test_get_template_obj(app, request_stub): | |
30 | template = EmailNotificationModel().get_renderer( |
|
31 | template = EmailNotificationModel().get_renderer( | |
31 | EmailNotificationModel.TYPE_TEST, request_stub) |
|
32 | EmailNotificationModel.TYPE_TEST, request_stub) | |
32 | assert isinstance(template, PyramidPartialRenderer) |
|
33 | assert isinstance(template, PyramidPartialRenderer) | |
33 |
|
34 | |||
34 |
|
35 | |||
35 | def test_render_email(app, http_host_only_stub): |
|
36 | def test_render_email(app, http_host_only_stub): | |
36 | kwargs = {} |
|
37 | kwargs = {} | |
37 | subject, headers, body, body_plaintext = EmailNotificationModel().render_email( |
|
38 | subject, headers, body, body_plaintext = EmailNotificationModel().render_email( | |
38 | EmailNotificationModel.TYPE_TEST, **kwargs) |
|
39 | EmailNotificationModel.TYPE_TEST, **kwargs) | |
39 |
|
40 | |||
40 | # subject |
|
41 | # subject | |
41 | assert subject == 'Test "Subject" hello "world"' |
|
42 | assert subject == 'Test "Subject" hello "world"' | |
42 |
|
43 | |||
43 | # headers |
|
44 | # headers | |
44 | assert headers == 'X=Y' |
|
45 | assert headers == 'X=Y' | |
45 |
|
46 | |||
46 | # body plaintext |
|
47 | # body plaintext | |
47 | assert body_plaintext == 'Email Plaintext Body' |
|
48 | assert body_plaintext == 'Email Plaintext Body' | |
48 |
|
49 | |||
49 | # body |
|
50 | # body | |
50 |
notification_footer = 'This is a notification from RhodeCode. |
|
51 | notification_footer1 = 'This is a notification from RhodeCode.' | |
51 | % http_host_only_stub |
|
52 | notification_footer2 = 'http://{}/'.format(http_host_only_stub) | |
52 | assert notification_footer in body |
|
53 | assert notification_footer1 in body | |
|
54 | assert notification_footer2 in body | |||
53 | assert 'Email Body' in body |
|
55 | assert 'Email Body' in body | |
54 |
|
56 | |||
55 |
|
57 | |||
56 | def test_render_pr_email(app, user_admin): |
|
58 | def test_render_pr_email(app, user_admin): | |
57 |
|
59 | ref = collections.namedtuple( | ||
58 | ref = collections.namedtuple('Ref', |
|
60 | 'Ref', 'name, type')('fxies123', 'book') | |
59 | 'name, type')( |
|
|||
60 | 'fxies123', 'book' |
|
|||
61 | ) |
|
|||
62 |
|
61 | |||
63 | pr = collections.namedtuple('PullRequest', |
|
62 | pr = collections.namedtuple('PullRequest', | |
64 | 'pull_request_id, title, description, source_ref_parts, source_ref_name, target_ref_parts, target_ref_name')( |
|
63 | 'pull_request_id, title, description, source_ref_parts, source_ref_name, target_ref_parts, target_ref_name')( | |
65 | 200, 'Example Pull Request', 'Desc of PR', ref, 'bookmark', ref, 'Branch') |
|
64 | 200, 'Example Pull Request', 'Desc of PR', ref, 'bookmark', ref, 'Branch') | |
66 |
|
65 | |||
67 | source_repo = target_repo = collections.namedtuple( |
|
66 | source_repo = target_repo = collections.namedtuple( | |
68 | 'type, repo_name')( |
|
67 | 'Repo', 'type, repo_name')('hg', 'pull_request_1') | |
69 | 'hg', 'pull_request_1') |
|
|||
70 |
|
68 | |||
71 | kwargs = { |
|
69 | kwargs = { | |
72 | 'user': '<marcin@rhodecode.com> Marcin Kuzminski', |
|
70 | 'user': User.get_first_super_admin(), | |
73 | 'pull_request': pr, |
|
71 | 'pull_request': pr, | |
74 | 'pull_request_commits': [], |
|
72 | 'pull_request_commits': [], | |
75 |
|
73 | |||
76 | 'pull_request_target_repo': target_repo, |
|
74 | 'pull_request_target_repo': target_repo, | |
77 | 'pull_request_target_repo_url': 'x', |
|
75 | 'pull_request_target_repo_url': 'x', | |
78 |
|
76 | |||
79 | 'pull_request_source_repo': source_repo, |
|
77 | 'pull_request_source_repo': source_repo, | |
80 | 'pull_request_source_repo_url': 'x', |
|
78 | 'pull_request_source_repo_url': 'x', | |
81 |
|
79 | |||
82 | 'pull_request_url': 'http://localhost/pr1', |
|
80 | 'pull_request_url': 'http://localhost/pr1', | |
83 | } |
|
81 | } | |
84 |
|
82 | |||
85 | subject, headers, body, body_plaintext = EmailNotificationModel().render_email( |
|
83 | subject, headers, body, body_plaintext = EmailNotificationModel().render_email( | |
86 | EmailNotificationModel.TYPE_PULL_REQUEST, **kwargs) |
|
84 | EmailNotificationModel.TYPE_PULL_REQUEST, **kwargs) | |
87 |
|
85 | |||
88 | # subject |
|
86 | # subject | |
89 |
assert subject == ' |
|
87 | assert subject == '@test_admin (RhodeCode Admin) requested a pull request review. !200: "Example Pull Request"' | |
90 |
|
88 | |||
91 |
|
89 | |||
92 | @pytest.mark.parametrize('mention', [ |
|
90 | @pytest.mark.parametrize('mention', [ | |
93 | True, |
|
91 | True, | |
94 | False |
|
92 | False | |
95 | ]) |
|
93 | ]) | |
96 | @pytest.mark.parametrize('email_type', [ |
|
94 | @pytest.mark.parametrize('email_type', [ | |
97 | EmailNotificationModel.TYPE_COMMIT_COMMENT, |
|
95 | EmailNotificationModel.TYPE_COMMIT_COMMENT, | |
98 | EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT |
|
96 | EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT | |
99 | ]) |
|
97 | ]) | |
100 | def test_render_comment_subject_no_newlines(app, mention, email_type): |
|
98 | def test_render_comment_subject_no_newlines(app, mention, email_type): | |
101 | ref = collections.namedtuple( |
|
99 | ref = collections.namedtuple( | |
102 | 'name, type')( |
|
100 | 'Ref', 'name, type')('fxies123', 'book') | |
103 | 'fxies123', 'book' |
|
|||
104 | ) |
|
|||
105 |
|
101 | |||
106 | pr = collections.namedtuple('PullRequest', |
|
102 | pr = collections.namedtuple('PullRequest', | |
107 | 'pull_request_id, title, description, source_ref_parts, source_ref_name, target_ref_parts, target_ref_name')( |
|
103 | 'pull_request_id, title, description, source_ref_parts, source_ref_name, target_ref_parts, target_ref_name')( | |
108 | 200, 'Example Pull Request', 'Desc of PR', ref, 'bookmark', ref, 'Branch') |
|
104 | 200, 'Example Pull Request', 'Desc of PR', ref, 'bookmark', ref, 'Branch') | |
109 |
|
105 | |||
110 | source_repo = target_repo = collections.namedtuple( |
|
106 | source_repo = target_repo = collections.namedtuple( | |
111 | 'type, repo_name')( |
|
107 | 'Repo', 'type, repo_name')('hg', 'pull_request_1') | |
112 | 'hg', 'pull_request_1') |
|
|||
113 |
|
108 | |||
114 | kwargs = { |
|
109 | kwargs = { | |
115 | 'user': '<marcin@rhodecode.com> Marcin Kuzminski', |
|
110 | 'user': User.get_first_super_admin(), | |
116 | 'commit': AttributeDict(raw_id='a'*40, message='Commit message'), |
|
111 | 'commit': AttributeDict(raw_id='a'*40, message='Commit message'), | |
117 | 'status_change': 'approved', |
|
112 | 'status_change': 'approved', | |
118 | 'commit_target_repo': AttributeDict(), |
|
113 | 'commit_target_repo_url': 'http://foo.example.com/#comment1', | |
119 | 'repo_name': 'test-repo', |
|
114 | 'repo_name': 'test-repo', | |
120 | 'comment_file': 'test-file.py', |
|
115 | 'comment_file': 'test-file.py', | |
121 | 'comment_line': 'n100', |
|
116 | 'comment_line': 'n100', | |
122 | 'comment_type': 'note', |
|
117 | 'comment_type': 'note', | |
123 | 'commit_comment_url': 'http://comment-url', |
|
118 | 'commit_comment_url': 'http://comment-url', | |
124 | 'instance_url': 'http://rc-instance', |
|
119 | 'instance_url': 'http://rc-instance', | |
125 | 'comment_body': 'hello world', |
|
120 | 'comment_body': 'hello world', | |
126 | 'mention': mention, |
|
121 | 'mention': mention, | |
127 |
|
122 | |||
128 | 'pr_comment_url': 'http://comment-url', |
|
123 | 'pr_comment_url': 'http://comment-url', | |
129 | 'pr_source_repo': AttributeDict(repo_name='foobar'), |
|
|||
130 | 'pr_source_repo_url': 'http://soirce-repo/url', |
|
|||
131 | 'pull_request': pr, |
|
124 | 'pull_request': pr, | |
132 | 'pull_request_commits': [], |
|
125 | 'pull_request_commits': [], | |
133 |
|
126 | |||
134 | 'pull_request_target_repo': target_repo, |
|
127 | 'pull_request_target_repo': target_repo, | |
135 | 'pull_request_target_repo_url': 'x', |
|
128 | 'pull_request_target_repo_url': 'x', | |
136 |
|
129 | |||
137 | 'pull_request_source_repo': source_repo, |
|
130 | 'pull_request_source_repo': source_repo, | |
138 | 'pull_request_source_repo_url': 'x', |
|
131 | 'pull_request_source_repo_url': 'x', | |
|
132 | ||||
|
133 | 'pull_request_url': 'http://code.rc.com/_pr/123' | |||
139 | } |
|
134 | } | |
140 | subject, headers, body, body_plaintext = EmailNotificationModel().render_email( |
|
135 | subject, headers, body, body_plaintext = EmailNotificationModel().render_email( | |
141 | email_type, **kwargs) |
|
136 | email_type, **kwargs) | |
142 |
|
137 | |||
143 | assert '\n' not in subject |
|
138 | assert '\n' not in subject |
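The updated tests above replace ad-hoc tuples with named `Ref`, `PullRequest`, and `Repo` namedtuples so the templates can reach attributes such as `source_ref_parts.type`. A self-contained sketch of those fixtures; the field lists mirror the test module, while the concrete values are illustrative only:

    import collections

    Ref = collections.namedtuple('Ref', 'name, type')
    ref = Ref('fxies123', 'book')

    PullRequest = collections.namedtuple(
        'PullRequest',
        'pull_request_id, title, description, source_ref_parts, source_ref_name, '
        'target_ref_parts, target_ref_name')
    pr = PullRequest(200, 'Example Pull Request', 'Desc of PR',
                     ref, 'bookmark', ref, 'Branch')

    Repo = collections.namedtuple('Repo', 'type, repo_name')
    source_repo = target_repo = Repo('hg', 'pull_request_1')

    assert pr.source_ref_parts.type == 'book'
    assert source_repo.repo_name == 'pull_request_1'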