##// END OF EJS Templates
rcextensions: new builtin rcextensions....
marcink -
r3133:6afdd8e7 default
parent child Browse files
Show More
@@ -0,0 +1,44 b''
1 .. _integrations-rcextensions:
2
3
4 rcextensions integrations
5 =========================
6
7
8 Since RhodeCode 4.14 release rcextensions aren't part of rhodecode-tools, and instead
9 they are shipped with the new or upgraded installations.
10
11 The rcextensions template `rcextensions.tmpl` is created in the `etc/` directory
12 of enterprise or community installation. It's always re-created and updated on upgrades.
13
14
15 Activating rcextensions
16 +++++++++++++++++++++++
17
18 To activate rcextensions simply copy or rename the created template rcextensions
19 into the path where the rhodecode.ini file is located::
20
21 pushd ~/rccontrol/enterprise-1/
22 or
23 pushd ~/rccontrol/community-1/
24
25 mv etc/rcextensions.tmpl rcextensions
26
27
28 rcextensions are loaded when |RCE| starts. So a restart is required after activation or
29 change of code in rcextensions.
30
31 Simply restart only the enterprise/community instance::
32
33 rccontrol restart enterprise-1
34 or
35 rccontrol restart community-1
36
37
38 Example usage
39 +++++++++++++
40
41
42 To see examples of usage please check the examples directory under:
43
44 https://code.rhodecode.com/rhodecode-enterprise-ce/files/stable/rhodecode/config/rcextensions/examples
@@ -0,0 +1,56 b''
1 # Copyright (C) 2016-2018 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 """
20 rcextensions module, please edit `hooks.py` to overwrite hooks logic
21 """
22
23 from .hooks import (
24 _create_repo_hook,
25 _create_repo_group_hook,
26 _pre_create_user_hook,
27 _create_user_hook,
28 _delete_repo_hook,
29 _delete_user_hook,
30 _pre_push_hook,
31 _push_hook,
32 _pre_pull_hook,
33 _pull_hook,
34 _create_pull_request_hook,
35 _review_pull_request_hook,
36 _update_pull_request_hook,
37 _merge_pull_request_hook,
38 _close_pull_request_hook,
39 )
40
41 # set as module attributes, we use those to call hooks. *do not change this*
42 CREATE_REPO_HOOK = _create_repo_hook
43 CREATE_REPO_GROUP_HOOK = _create_repo_group_hook
44 PRE_CREATE_USER_HOOK = _pre_create_user_hook
45 CREATE_USER_HOOK = _create_user_hook
46 DELETE_REPO_HOOK = _delete_repo_hook
47 DELETE_USER_HOOK = _delete_user_hook
48 PRE_PUSH_HOOK = _pre_push_hook
49 PUSH_HOOK = _push_hook
50 PRE_PULL_HOOK = _pre_pull_hook
51 PULL_HOOK = _pull_hook
52 CREATE_PULL_REQUEST = _create_pull_request_hook
53 REVIEW_PULL_REQUEST = _review_pull_request_hook
54 UPDATE_PULL_REQUEST = _update_pull_request_hook
55 MERGE_PULL_REQUEST = _merge_pull_request_hook
56 CLOSE_PULL_REQUEST = _close_pull_request_hook
@@ -0,0 +1,36 b''
1 # Example to trigger an HTTP call via an HTTP helper via post_push hook
2
3
4 @has_kwargs({
5 'server_url': 'url of instance that triggered this hook',
6 'config': 'path to .ini config used',
7 'scm': 'type of version control "git", "hg", "svn"',
8 'username': 'username of actor who triggered this event',
9 'ip': 'ip address of actor who triggered this hook',
10 'action': '',
11 'repository': 'repository name',
12 'repo_store_path': 'full path to where repositories are stored',
13 'commit_ids': '',
14 'hook_type': '',
15 'user_agent': '',
16 })
17 def _push_hook(*args, **kwargs):
18 """
19 POST PUSH HOOK, this function will be executed after each push it's
20 executed after the build-in hook that RhodeCode uses for logging pushes
21 """
22
23 from .helpers import http_call, extra_fields
24 # returns list of dicts with key-val fetched from extra fields
25 repo_extra_fields = extra_fields.run(**kwargs)
26
27 if repo_extra_fields.get('endpoint_url'):
28 endpoint = repo_extra_fields['endpoint_url']
29 if endpoint:
30 data = {
31 'some_key': 'val'
32 }
33 response = http_call.run(url=endpoint, json_data=data)
34 return HookResponse(0, 'Called endpoint {}, with response {}'.format(endpoint, response))
35
36 return HookResponse(0, '')
@@ -0,0 +1,36 b''
1 # Example to trigger a CI call via an HTTP helper via post_push hook
2
3
4 @has_kwargs({
5 'server_url': 'url of instance that triggered this hook',
6 'config': 'path to .ini config used',
7 'scm': 'type of version control "git", "hg", "svn"',
8 'username': 'username of actor who triggered this event',
9 'ip': 'ip address of actor who triggered this hook',
10 'action': '',
11 'repository': 'repository name',
12 'repo_store_path': 'full path to where repositories are stored',
13 'commit_ids': '',
14 'hook_type': '',
15 'user_agent': '',
16 })
17 def _push_hook(*args, **kwargs):
18 """
19 POST PUSH HOOK, this function will be executed after each push it's
20 executed after the build-in hook that RhodeCode uses for logging pushes
21 """
22
23 from .helpers import http_call, extra_fields
24 # returns list of dicts with key-val fetched from extra fields
25 repo_extra_fields = extra_fields.run(**kwargs)
26
27 if repo_extra_fields.get('endpoint_url'):
28 endpoint = repo_extra_fields['endpoint_url']
29 if endpoint:
30 data = {
31 'some_key': 'val'
32 }
33 response = http_call.run(url=endpoint, json_data=data)
34 return HookResponse(0, 'Called endpoint {}, with response {}'.format(endpoint, response))
35
36 return HookResponse(0, '')
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
@@ -0,0 +1,17 b''
1 # Copyright (C) 2016-2018 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
@@ -0,0 +1,40 b''
1 # -*- coding: utf-8 -*-
2 # Copyright (C) 2016-2018 RhodeCode GmbH
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
7 #
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
12 #
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
20 """
21 use in hooks::
22
23 from .helpers import extra_fields
24 # returns list of dicts with key-val fetched from extra fields
25 repo_extra_fields = extra_fields.run(**kwargs)
26
27 """
28
29
30 def run(*args, **kwargs):
31 from rhodecode.model.db import Repository
32 # use temp name then the main one propagated
33 repo_name = kwargs.pop('REPOSITORY', None) or kwargs['repository']
34 repo = Repository.get_by_repo_name(repo_name)
35
36 fields = {}
37 for field in repo.extra_fields:
38 fields[field.field_key] = field.get_dict()
39
40 return fields
@@ -0,0 +1,61 b''
1 # -*- coding: utf-8 -*-
2 # Copyright (C) 2016-2018 RhodeCode GmbH
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
7 #
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
12 #
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
20 """
21 Extract and serialize commits taken from a list of commit_ids. This should
22 be used in post_push hook
23
24 use in hooks::
25
26 from .helpers import extract_post_commits
27 # returns a list of serialized commits extracted from the pushed commit_ids
28 commit_list = extract_post_commits.run(**kwargs)
29 """
30 import traceback
31
32
33 def run(*args, **kwargs):
34 from rhodecode.lib.utils2 import extract_mentioned_users
35 from rhodecode.model.db import Repository
36
37 commit_ids = kwargs.get('commit_ids')
38 if not commit_ids:
39 return 0
40
41 # use temp name then the main one propagated
42 repo_name = kwargs.pop('REPOSITORY', None) or kwargs['repository']
43
44 repo = Repository.get_by_repo_name(repo_name)
45 commits = []
46
47 vcs_repo = repo.scm_instance(cache=False)
48 try:
49 for commit_id in commit_ids:
50 cs = vcs_repo.get_changeset(commit_id)
51 cs_data = cs.__json__()
52 cs_data['mentions'] = extract_mentioned_users(cs_data['message'])
53 # optionally add more logic to parse the commits, like reading extra
54 # fields of repository to read managers of reviewers ?
55 commits.append(cs_data)
56 except Exception:
57 print(traceback.format_exc())
58 # we don't send any commits when crash happens, only full list matters
59 # we short circuit then.
60 return []
61 return commits
@@ -0,0 +1,63 b''
1 # -*- coding: utf-8 -*-
2 # Copyright (C) 2016-2018 RhodeCode GmbH
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
7 #
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
12 #
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
20 """
21 use in hooks::
22
23 from .helpers import extract_pre_commits
24 # returns a list of commits extracted from the pre-push transaction metadata
25 commit_list = extract_pre_commits.run(**kwargs)
26
27 """
28 import re
29 import collections
30
31
32 def get_hg_commits(repo, refs):
33 commits = []
34 return commits
35
36
37 def get_git_commits(repo, refs):
38 commits = []
39 return commits
40
41
42 def run(*args, **kwargs):
43 from rhodecode.model.db import Repository
44
45 vcs_type = kwargs['scm']
46 # use temp name then the main one propagated
47 repo_name = kwargs.pop('REPOSITORY', None) or kwargs['repository']
48
49 repo = Repository.get_by_repo_name(repo_name)
50 vcs_repo = repo.scm_instance(cache=False)
51
52 commits = []
53
54 for rev_data in kwargs['commit_ids']:
55 new_environ = dict((k, v) for k, v in rev_data['hg_env'])
56
57 if vcs_type == 'git':
58 commits = get_git_commits(vcs_repo, kwargs['commit_ids'])
59
60 if vcs_type == 'hg':
61 commits = get_hg_commits(vcs_repo, kwargs['commit_ids'])
62
63 return commits
@@ -0,0 +1,36 b''
1 # -*- coding: utf-8 -*-
2 # Copyright (C) 2016-2018 RhodeCode GmbH
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
7 #
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
12 #
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
20 """
21 use in hooks::
22
23 from .helpers import http_call
24 # returns response after making a POST call
25 response = http_call.run(url=url, json_data=data)
26
27 """
28
29 from rhodecode.integrations.types.base import requests_retry_call
30
31
32 def run(url, json_data, method='post'):
33 requests_session = requests_retry_call()
34 requests_session.verify = True # Verify SSL
35 resp = requests_session.post(url, json=json_data, timeout=60)
36 return resp.raise_for_status() # raise exception on a failed request
@@ -0,0 +1,431 b''
1 # Copyright (C) 2016-2018 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 from .utils import DotDict, HookResponse, has_kwargs
20
21
22 # Config shortcut to keep, all configuration in one place
23 # Example: api_key = CONFIG.my_config.api_key
24 CONFIG = DotDict(
25 my_config=DotDict(
26 api_key='<secret>',
27 ),
28
29 )
30
31
32 @has_kwargs({
33 'repo_name': '',
34 'repo_type': '',
35 'description': '',
36 'private': '',
37 'created_on': '',
38 'enable_downloads': '',
39 'repo_id': '',
40 'user_id': '',
41 'enable_statistics': '',
42 'clone_uri': '',
43 'fork_id': '',
44 'group_id': '',
45 'created_by': ''
46 })
47 def _create_repo_hook(*args, **kwargs):
48 """
49 POST CREATE REPOSITORY HOOK. This function will be executed after
50 each repository is created. kwargs available:
51
52 """
53 return HookResponse(0, '')
54
55
56 @has_kwargs({
57 'group_name': '',
58 'group_parent_id': '',
59 'group_description': '',
60 'group_id': '',
61 'user_id': '',
62 'created_by': '',
63 'created_on': '',
64 'enable_locking': ''
65 })
66 def _create_repo_group_hook(*args, **kwargs):
67 """
68 POST CREATE REPOSITORY GROUP HOOK, this function will be
69 executed after each repository group is created. kwargs available:
70 """
71 return HookResponse(0, '')
72
73
74 @has_kwargs({
75 'username': '',
76 'password': '',
77 'email': '',
78 'firstname': '',
79 'lastname': '',
80 'active': '',
81 'admin': '',
82 'created_by': '',
83 })
84 def _pre_create_user_hook(*args, **kwargs):
85 """
86 PRE CREATE USER HOOK, this function will be executed before each
87 user is created, it returns a tuple of bool, reason.
88 If bool is False the user creation will be stopped and reason
89 will be displayed to the user.
90
91 Return HookResponse(1, reason) to block user creation
92
93 """
94
95 reason = 'allowed'
96 return HookResponse(0, reason)
97
98
99 @has_kwargs({
100 'username': '',
101 'full_name_or_username': '',
102 'full_contact': '',
103 'user_id': '',
104 'name': '',
105 'firstname': '',
106 'short_contact': '',
107 'admin': '',
108 'lastname': '',
109 'ip_addresses': '',
110 'extern_type': '',
111 'extern_name': '',
112 'email': '',
113 'api_key': '',
114 'api_keys': '',
115 'last_login': '',
116 'full_name': '',
117 'active': '',
118 'password': '',
119 'emails': '',
120 'inherit_default_permissions': '',
121 'created_by': '',
122 'created_on': '',
123 })
124 def _create_user_hook(*args, **kwargs):
125 """
126 POST CREATE USER HOOK, this function will be executed after each user is created
127 """
128 return HookResponse(0, '')
129
130
131 @has_kwargs({
132 'repo_name': '',
133 'repo_type': '',
134 'description': '',
135 'private': '',
136 'created_on': '',
137 'enable_downloads': '',
138 'repo_id': '',
139 'user_id': '',
140 'enable_statistics': '',
141 'clone_uri': '',
142 'fork_id': '',
143 'group_id': '',
144 'deleted_by': '',
145 'deleted_on': '',
146 })
147 def _delete_repo_hook(*args, **kwargs):
148 """
149 POST DELETE REPOSITORY HOOK, this function will be executed after
150 each repository deletion
151 """
152 return HookResponse(0, '')
153
154
155 @has_kwargs({
156 'username': '',
157 'full_name_or_username': '',
158 'full_contact': '',
159 'user_id': '',
160 'name': '',
161 'short_contact': '',
162 'admin': '',
163 'firstname': '',
164 'lastname': '',
165 'ip_addresses': '',
166 'email': '',
167 'api_key': '',
168 'last_login': '',
169 'full_name': '',
170 'active': '',
171 'password': '',
172 'emails': '',
173 'inherit_default_permissions': '',
174 'deleted_by': '',
175 })
176 def _delete_user_hook(*args, **kwargs):
177 """
178 POST DELETE USER HOOK, this function will be executed after each
179 user is deleted kwargs available:
180 """
181 return HookResponse(0, '')
182
183
184 # =============================================================================
185 # PUSH/PULL RELATED HOOKS
186 # =============================================================================
187 @has_kwargs({
188 'server_url': 'url of instance that triggered this hook',
189 'config': 'path to .ini config used',
190 'scm': 'type of version control "git", "hg", "svn"',
191 'username': 'username of actor who triggered this event',
192 'ip': 'ip address of actor who triggered this hook',
193 'action': '',
194 'repository': 'repository name',
195 'repo_store_path': 'full path to where repositories are stored',
196 'commit_ids': 'pre transaction metadata for commit ids',
197 'hook_type': '',
198 'user_agent': 'Client user agent, e.g git or mercurial CLI version',
199 })
200 def _pre_push_hook(*args, **kwargs):
201 """
202 Post push hook
203 To stop version control from storing the transaction and send a message to user
204 use non-zero HookResponse with a message, e.g return HookResponse(1, 'Not allowed')
205
206 This message will be shown back to client during PUSH operation
207
208 Commit ids might look like that::
209
210 [{u'hg_env|git_env': ...,
211 u'multiple_heads': [],
212 u'name': u'default',
213 u'new_rev': u'd0befe0692e722e01d5677f27a104631cf798b69',
214 u'old_rev': u'd0befe0692e722e01d5677f27a104631cf798b69',
215 u'ref': u'',
216 u'total_commits': 2,
217 u'type': u'branch'}]
218 """
219 return HookResponse(0, '')
220
221
222 @has_kwargs({
223 'server_url': 'url of instance that triggered this hook',
224 'config': 'path to .ini config used',
225 'scm': 'type of version control "git", "hg", "svn"',
226 'username': 'username of actor who triggered this event',
227 'ip': 'ip address of actor who triggered this hook',
228 'action': '',
229 'repository': 'repository name',
230 'repo_store_path': 'full path to where repositories are stored',
231 'commit_ids': 'list of pushed commit_ids (sha1)',
232 'hook_type': '',
233 'user_agent': 'Client user agent, e.g git or mercurial CLI version',
234 })
235 def _push_hook(*args, **kwargs):
236 """
237 POST PUSH HOOK, this function will be executed after each push it's
238 executed after the build-in hook that RhodeCode uses for logging pushes
239 """
240 return HookResponse(0, '')
241
242
243 @has_kwargs({
244 'server_url': 'url of instance that triggered this hook',
245 'repo_store_path': 'full path to where repositories are stored',
246 'config': 'path to .ini config used',
247 'scm': 'type of version control "git", "hg", "svn"',
248 'username': 'username of actor who triggered this event',
249 'ip': 'ip address of actor who triggered this hook',
250 'action': '',
251 'repository': 'repository name',
252 'hook_type': '',
253 'user_agent': 'Client user agent, e.g git or mercurial CLI version',
254 })
255 def _pre_pull_hook(*args, **kwargs):
256 """
257 Post pull hook
258 """
259 return HookResponse(0, '')
260
261
262 @has_kwargs({
263 'server_url': 'url of instance that triggered this hook',
264 'repo_store_path': 'full path to where repositories are stored',
265 'config': 'path to .ini config used',
266 'scm': 'type of version control "git", "hg", "svn"',
267 'username': 'username of actor who triggered this event',
268 'ip': 'ip address of actor who triggered this hook',
269 'action': '',
270 'repository': 'repository name',
271 'hook_type': '',
272 'user_agent': 'Client user agent, e.g git or mercurial CLI version',
273 })
274 def _pull_hook(*args, **kwargs):
275 """
276 This hook will be executed after each code pull.
277 """
278 return HookResponse(0, '')
279
280
281 # =============================================================================
282 # PULL REQUEST RELATED HOOKS
283 # =============================================================================
284 @has_kwargs({
285 'server_url': 'url of instance that triggered this hook',
286 'config': 'path to .ini config used',
287 'scm': 'type of version control "git", "hg", "svn"',
288 'username': 'username of actor who triggered this event',
289 'ip': 'ip address of actor who triggered this hook',
290 'action': '',
291 'repository': 'repository name',
292 'pull_request_id': '',
293 'url': '',
294 'title': '',
295 'description': '',
296 'status': '',
297 'created_on': '',
298 'updated_on': '',
299 'commit_ids': '',
300 'review_status': '',
301 'mergeable': '',
302 'source': '',
303 'target': '',
304 'author': '',
305 'reviewers': '',
306 })
307 def _create_pull_request_hook(*args, **kwargs):
308 """
309 This hook will be executed after creation of a pull request.
310 """
311 return HookResponse(0, '')
312
313
314 @has_kwargs({
315 'server_url': 'url of instance that triggered this hook',
316 'config': 'path to .ini config used',
317 'scm': 'type of version control "git", "hg", "svn"',
318 'username': 'username of actor who triggered this event',
319 'ip': 'ip address of actor who triggered this hook',
320 'action': '',
321 'repository': 'repository name',
322 'pull_request_id': '',
323 'url': '',
324 'title': '',
325 'description': '',
326 'status': '',
327 'created_on': '',
328 'updated_on': '',
329 'commit_ids': '',
330 'review_status': '',
331 'mergeable': '',
332 'source': '',
333 'target': '',
334 'author': '',
335 'reviewers': '',
336 })
337 def _review_pull_request_hook(*args, **kwargs):
338 """
339 This hook will be executed after review action was made on a pull request.
340 """
341 return HookResponse(0, '')
342
343
344 @has_kwargs({
345 'server_url': 'url of instance that triggered this hook',
346 'config': 'path to .ini config used',
347 'scm': 'type of version control "git", "hg", "svn"',
348 'username': 'username of actor who triggered this event',
349 'ip': 'ip address of actor who triggered this hook',
350 'action': '',
351 'repository': 'repository name',
352 'pull_request_id': '',
353 'url': '',
354 'title': '',
355 'description': '',
356 'status': '',
357 'created_on': '',
358 'updated_on': '',
359 'commit_ids': '',
360 'review_status': '',
361 'mergeable': '',
362 'source': '',
363 'target': '',
364 'author': '',
365 'reviewers': '',
366 })
367 def _update_pull_request_hook(*args, **kwargs):
368 """
369 This hook will be executed after pull requests has been updated with new commits.
370 """
371 return HookResponse(0, '')
372
373
374 @has_kwargs({
375 'server_url': 'url of instance that triggered this hook',
376 'config': 'path to .ini config used',
377 'scm': 'type of version control "git", "hg", "svn"',
378 'username': 'username of actor who triggered this event',
379 'ip': 'ip address of actor who triggered this hook',
380 'action': '',
381 'repository': 'repository name',
382 'pull_request_id': '',
383 'url': '',
384 'title': '',
385 'description': '',
386 'status': '',
387 'created_on': '',
388 'updated_on': '',
389 'commit_ids': '',
390 'review_status': '',
391 'mergeable': '',
392 'source': '',
393 'target': '',
394 'author': '',
395 'reviewers': '',
396 })
397 def _merge_pull_request_hook(*args, **kwargs):
398 """
399 This hook will be executed after merge of a pull request.
400 """
401 return HookResponse(0, '')
402
403
404 @has_kwargs({
405 'server_url': 'url of instance that triggered this hook',
406 'config': 'path to .ini config used',
407 'scm': 'type of version control "git", "hg", "svn"',
408 'username': 'username of actor who triggered this event',
409 'ip': 'ip address of actor who triggered this hook',
410 'action': '',
411 'repository': 'repository name',
412 'pull_request_id': '',
413 'url': '',
414 'title': '',
415 'description': '',
416 'status': '',
417 'created_on': '',
418 'updated_on': '',
419 'commit_ids': '',
420 'review_status': '',
421 'mergeable': '',
422 'source': '',
423 'target': '',
424 'author': '',
425 'reviewers': '',
426 })
427 def _close_pull_request_hook(*args, **kwargs):
428 """
429 This hook will be executed after close of a pull request.
430 """
431 return HookResponse(0, '')
@@ -0,0 +1,21 b''
1 # =============================================================================
2 # END OF UTILITY FUNCTIONS HERE
3 # =============================================================================
4
5 # Additional mappings that are not present in the pygments lexers
6 # used for building stats
7 # format is {'ext':['Names']} eg. {'py':['Python']} note: there can be
8 # more than one name for extension
9 # NOTE: that this will override any mappings in LANGUAGES_EXTENSIONS_MAP
10 # built by pygments
11 EXTRA_MAPPINGS = {'html': ['Text']}
12
13 # additional lexer definitions for custom files; it overrides pygments lexers,
14 # and uses defined name of lexer to colorize the files. Format is {'ext':
15 # 'lexer_name'} List of lexers can be printed running:
16 # >> python -c "import pprint;from pygments import lexers;
17 # pprint.pprint([(x[0], x[1]) for x in lexers.get_all_lexers()]);"
18
19 EXTRA_LEXERS = {
20 'tt': 'vbnet'
21 }
@@ -0,0 +1,147 b''
1 # Copyright (C) 2016-2018 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 import os
20 import functools
21 import collections
22
23
24 class HookResponse(object):
25 def __init__(self, status, output):
26 self.status = status
27 self.output = output
28
29 def __add__(self, other):
30 other_status = getattr(other, 'status', 0)
31 new_status = max(self.status, other_status)
32 other_output = getattr(other, 'output', '')
33 new_output = self.output + other_output
34
35 return HookResponse(new_status, new_output)
36
37 def __bool__(self):
38 return self.status == 0
39
40
41 class DotDict(dict):
42
43 def __contains__(self, k):
44 try:
45 return dict.__contains__(self, k) or hasattr(self, k)
46 except:
47 return False
48
49 # only called if k not found in normal places
50 def __getattr__(self, k):
51 try:
52 return object.__getattribute__(self, k)
53 except AttributeError:
54 try:
55 return self[k]
56 except KeyError:
57 raise AttributeError(k)
58
59 def __setattr__(self, k, v):
60 try:
61 object.__getattribute__(self, k)
62 except AttributeError:
63 try:
64 self[k] = v
65 except:
66 raise AttributeError(k)
67 else:
68 object.__setattr__(self, k, v)
69
70 def __delattr__(self, k):
71 try:
72 object.__getattribute__(self, k)
73 except AttributeError:
74 try:
75 del self[k]
76 except KeyError:
77 raise AttributeError(k)
78 else:
79 object.__delattr__(self, k)
80
81 def toDict(self):
82 return unserialize(self)
83
84 def __repr__(self):
85 keys = list(self.keys())
86 keys.sort()
87 args = ', '.join(['%s=%r' % (key, self[key]) for key in keys])
88 return '%s(%s)' % (self.__class__.__name__, args)
89
90 @staticmethod
91 def fromDict(d):
92 return serialize(d)
93
94
95 def serialize(x):
96 if isinstance(x, dict):
97 return DotDict((k, serialize(v)) for k, v in x.items())
98 elif isinstance(x, (list, tuple)):
99 return type(x)(serialize(v) for v in x)
100 else:
101 return x
102
103
104 def unserialize(x):
105 if isinstance(x, dict):
106 return dict((k, unserialize(v)) for k, v in x.items())
107 elif isinstance(x, (list, tuple)):
108 return type(x)(unserialize(v) for v in x)
109 else:
110 return x
111
112
113 def _verify_kwargs(func_name, expected_parameters, kwargs):
114 """
115 Verify that exactly `expected_parameters` are passed in as `kwargs`.
116 """
117 expected_parameters = set(expected_parameters)
118 kwargs_keys = set(kwargs.keys())
119 if kwargs_keys != expected_parameters:
120 missing_kwargs = expected_parameters - kwargs_keys
121 unexpected_kwargs = kwargs_keys - expected_parameters
122 raise AssertionError(
123 "func:%s: missing parameters: %r, unexpected parameters: %s" %
124 (func_name, missing_kwargs, unexpected_kwargs))
125
126
127 def has_kwargs(required_args):
128 """
129 decorator to verify extension calls arguments.
130
131 :param required_args:
132 """
133 def wrap(func):
134 def wrapper(*args, **kwargs):
135 _verify_kwargs(func.func_name, required_args.keys(), kwargs)
136 # in case there's `calls` defined on module we store the data
137 maybe_log_call(func.func_name, args, kwargs)
138 return func(*args, **kwargs)
139 return wrapper
140 return wrap
141
142
143 def maybe_log_call(name, args, kwargs):
144 from rhodecode.config import rcextensions
145 if hasattr(rcextensions, 'calls'):
146 calls = rcextensions.calls
147 calls[name].append((args, kwargs))
@@ -1,272 +1,275 b''
1 # Nix environment for the community edition
1 # Nix environment for the community edition
2 #
2 #
3 # This shall be as lean as possible, just producing the enterprise-ce
3 # This shall be as lean as possible, just producing the enterprise-ce
4 # derivation. For advanced tweaks to pimp up the development environment we use
4 # derivation. For advanced tweaks to pimp up the development environment we use
5 # "shell.nix" so that it does not have to clutter this file.
5 # "shell.nix" so that it does not have to clutter this file.
6 #
6 #
7 # Configuration, set values in "~/.nixpkgs/config.nix".
7 # Configuration, set values in "~/.nixpkgs/config.nix".
8 # example
8 # example
9 # {
9 # {
10 # # Thoughts on how to configure the dev environment
10 # # Thoughts on how to configure the dev environment
11 # rc = {
11 # rc = {
12 # codeInternalUrl = "https://usr:token@internal-code.rhodecode.com";
12 # codeInternalUrl = "https://usr:token@internal-code.rhodecode.com";
13 # sources = {
13 # sources = {
14 # rhodecode-vcsserver = "/home/user/work/rhodecode-vcsserver";
14 # rhodecode-vcsserver = "/home/user/work/rhodecode-vcsserver";
15 # rhodecode-enterprise-ce = "/home/user/work/rhodecode-enterprise-ce";
15 # rhodecode-enterprise-ce = "/home/user/work/rhodecode-enterprise-ce";
16 # rhodecode-enterprise-ee = "/home/user/work/rhodecode-enterprise-ee";
16 # rhodecode-enterprise-ee = "/home/user/work/rhodecode-enterprise-ee";
17 # };
17 # };
18 # };
18 # };
19 # }
19 # }
20
20
21 args@
21 args@
22 { pythonPackages ? "python27Packages"
22 { pythonPackages ? "python27Packages"
23 , pythonExternalOverrides ? self: super: {}
23 , pythonExternalOverrides ? self: super: {}
24 , doCheck ? false
24 , doCheck ? false
25 , ...
25 , ...
26 }:
26 }:
27
27
28 let
28 let
29 # Use nixpkgs from args or import them. We use this indirect approach
29 # Use nixpkgs from args or import them. We use this indirect approach
30 # through args to be able to use the name `pkgs` for our customized packages.
30 # through args to be able to use the name `pkgs` for our customized packages.
31 # Otherwise we will end up with an infinite recursion.
31 # Otherwise we will end up with an infinite recursion.
32 pkgs = args.pkgs or (import <nixpkgs> { });
32 pkgs = args.pkgs or (import <nixpkgs> { });
33
33
34 # Works with the new python-packages, still can fallback to the old
34 # Works with the new python-packages, still can fallback to the old
35 # variant.
35 # variant.
36 basePythonPackagesUnfix = basePythonPackages.__unfix__ or (
36 basePythonPackagesUnfix = basePythonPackages.__unfix__ or (
37 self: basePythonPackages.override (a: { inherit self; }));
37 self: basePythonPackages.override (a: { inherit self; }));
38
38
39 # Evaluates to the last segment of a file system path.
39 # Evaluates to the last segment of a file system path.
40 basename = path: with pkgs.lib; last (splitString "/" path);
40 basename = path: with pkgs.lib; last (splitString "/" path);
41
41
42 # source code filter used as argument to builtins.filterSource.
42 # source code filter used as argument to builtins.filterSource.
43 src-filter = path: type: with pkgs.lib;
43 src-filter = path: type: with pkgs.lib;
44 let
44 let
45 ext = last (splitString "." path);
45 ext = last (splitString "." path);
46 in
46 in
47 !builtins.elem (basename path) [
47 !builtins.elem (basename path) [
48 ".git" ".hg" "__pycache__" ".eggs" ".idea" ".dev"
48 ".git" ".hg" "__pycache__" ".eggs" ".idea" ".dev"
49 "bower_components" "node_modules"
49 "bower_components" "node_modules"
50 "build" "data" "result" "tmp"] &&
50 "build" "data" "result" "tmp"] &&
51 !builtins.elem ext ["egg-info" "pyc"] &&
51 !builtins.elem ext ["egg-info" "pyc"] &&
52 # TODO: johbo: This check is wrong, since "path" contains an absolute path,
52 # TODO: johbo: This check is wrong, since "path" contains an absolute path,
53 # it would still be good to restore it since we want to ignore "result-*".
53 # it would still be good to restore it since we want to ignore "result-*".
54 !hasPrefix "result" path;
54 !hasPrefix "result" path;
55
55
56 sources =
56 sources =
57 let
57 let
58 inherit (pkgs.lib) all isString attrValues;
58 inherit (pkgs.lib) all isString attrValues;
59 sourcesConfig = pkgs.config.rc.sources or {};
59 sourcesConfig = pkgs.config.rc.sources or {};
60 in
60 in
61 # Ensure that sources are configured as strings. Using a path
61 # Ensure that sources are configured as strings. Using a path
62 # would result in a copy into the nix store.
62 # would result in a copy into the nix store.
63 assert all isString (attrValues sourcesConfig);
63 assert all isString (attrValues sourcesConfig);
64 sourcesConfig;
64 sourcesConfig;
65
65
66 version = builtins.readFile "${rhodecode-enterprise-ce-src}/rhodecode/VERSION";
66 version = builtins.readFile "${rhodecode-enterprise-ce-src}/rhodecode/VERSION";
67 rhodecode-enterprise-ce-src = builtins.filterSource src-filter ./.;
67 rhodecode-enterprise-ce-src = builtins.filterSource src-filter ./.;
68
68
69 buildBowerComponents = pkgs.buildBowerComponents;
69 buildBowerComponents = pkgs.buildBowerComponents;
70 nodeEnv = import ./pkgs/node-default.nix {
70 nodeEnv = import ./pkgs/node-default.nix {
71 inherit pkgs;
71 inherit pkgs;
72 };
72 };
73 nodeDependencies = nodeEnv.shell.nodeDependencies;
73 nodeDependencies = nodeEnv.shell.nodeDependencies;
74
74
75 bowerComponents = buildBowerComponents {
75 bowerComponents = buildBowerComponents {
76 name = "enterprise-ce-${version}";
76 name = "enterprise-ce-${version}";
77 generated = ./pkgs/bower-packages.nix;
77 generated = ./pkgs/bower-packages.nix;
78 src = rhodecode-enterprise-ce-src;
78 src = rhodecode-enterprise-ce-src;
79 };
79 };
80
80
81 rhodecode-testdata-src = sources.rhodecode-testdata or (
81 rhodecode-testdata-src = sources.rhodecode-testdata or (
82 pkgs.fetchhg {
82 pkgs.fetchhg {
83 url = "https://code.rhodecode.com/upstream/rc_testdata";
83 url = "https://code.rhodecode.com/upstream/rc_testdata";
84 rev = "v0.10.0";
84 rev = "v0.10.0";
85 sha256 = "0zn9swwvx4vgw4qn8q3ri26vvzgrxn15x6xnjrysi1bwmz01qjl0";
85 sha256 = "0zn9swwvx4vgw4qn8q3ri26vvzgrxn15x6xnjrysi1bwmz01qjl0";
86 });
86 });
87
87
88 rhodecode-testdata = import "${rhodecode-testdata-src}/default.nix" {
88 rhodecode-testdata = import "${rhodecode-testdata-src}/default.nix" {
89 inherit
89 inherit
90 doCheck
90 doCheck
91 pkgs
91 pkgs
92 pythonPackages;
92 pythonPackages;
93 };
93 };
94
94
95 pythonLocalOverrides = self: super: {
95 pythonLocalOverrides = self: super: {
96 rhodecode-enterprise-ce =
96 rhodecode-enterprise-ce =
97 let
97 let
98 linkNodeAndBowerPackages = ''
98 linkNodeAndBowerPackages = ''
99 export RHODECODE_CE_PATH=${rhodecode-enterprise-ce-src}
99 export RHODECODE_CE_PATH=${rhodecode-enterprise-ce-src}
100
100
101 echo "[BEGIN]: Link node packages"
101 echo "[BEGIN]: Link node packages"
102 rm -fr node_modules
102 rm -fr node_modules
103 mkdir node_modules
103 mkdir node_modules
104 # johbo: Linking individual packages allows us to run "npm install"
104 # johbo: Linking individual packages allows us to run "npm install"
105 # inside of a shell to try things out. Re-entering the shell will
105 # inside of a shell to try things out. Re-entering the shell will
106 # restore a clean environment.
106 # restore a clean environment.
107 ln -s ${nodeDependencies}/lib/node_modules/* node_modules/
107 ln -s ${nodeDependencies}/lib/node_modules/* node_modules/
108 echo "[DONE]: Link node packages"
108 echo "[DONE]: Link node packages"
109
109
110 echo "[BEGIN]: Link bower packages"
110 echo "[BEGIN]: Link bower packages"
111 rm -fr bower_components
111 rm -fr bower_components
112 mkdir bower_components
112 mkdir bower_components
113 ln -s ${bowerComponents}/bower_components/* bower_components/
113 ln -s ${bowerComponents}/bower_components/* bower_components/
114 echo "[DONE]: Link bower packages"
114 echo "[DONE]: Link bower packages"
115 '';
115 '';
116
116
117 releaseName = "RhodeCodeEnterpriseCE-${version}";
117 releaseName = "RhodeCodeEnterpriseCE-${version}";
118 in super.rhodecode-enterprise-ce.override (attrs: {
118 in super.rhodecode-enterprise-ce.override (attrs: {
119 inherit
119 inherit
120 doCheck
120 doCheck
121 version;
121 version;
122
122
123 name = "rhodecode-enterprise-ce-${version}";
123 name = "rhodecode-enterprise-ce-${version}";
124 releaseName = releaseName;
124 releaseName = releaseName;
125 src = rhodecode-enterprise-ce-src;
125 src = rhodecode-enterprise-ce-src;
126 dontStrip = true; # prevent strip, we don't need it.
126 dontStrip = true; # prevent strip, we don't need it.
127
127
128 # expose following attributes outside
128 # expose following attributes outside
129 passthru = {
129 passthru = {
130 inherit
130 inherit
131 rhodecode-testdata
131 rhodecode-testdata
132 bowerComponents
132 bowerComponents
133 linkNodeAndBowerPackages
133 linkNodeAndBowerPackages
134 myPythonPackagesUnfix
134 myPythonPackagesUnfix
135 pythonLocalOverrides
135 pythonLocalOverrides
136 pythonCommunityOverrides;
136 pythonCommunityOverrides;
137
137
138 pythonPackages = self;
138 pythonPackages = self;
139 };
139 };
140
140
141 buildInputs =
141 buildInputs =
142 attrs.buildInputs or [] ++ [
142 attrs.buildInputs or [] ++ [
143 rhodecode-testdata
143 rhodecode-testdata
144 pkgs.nodePackages.bower
144 pkgs.nodePackages.bower
145 pkgs.nodePackages.grunt-cli
145 pkgs.nodePackages.grunt-cli
146 ];
146 ];
147
147
148 #NOTE: option to inject additional propagatedBuildInputs
148 #NOTE: option to inject additional propagatedBuildInputs
149 propagatedBuildInputs =
149 propagatedBuildInputs =
150 attrs.propagatedBuildInputs or [] ++ [
150 attrs.propagatedBuildInputs or [] ++ [
151
151
152 ];
152 ];
153
153
154 LC_ALL = "en_US.UTF-8";
154 LC_ALL = "en_US.UTF-8";
155 LOCALE_ARCHIVE =
155 LOCALE_ARCHIVE =
156 if pkgs.stdenv.isLinux
156 if pkgs.stdenv.isLinux
157 then "${pkgs.glibcLocales}/lib/locale/locale-archive"
157 then "${pkgs.glibcLocales}/lib/locale/locale-archive"
158 else "";
158 else "";
159
159
160 # Add bin directory to path so that tests can find 'rhodecode'.
160 # Add bin directory to path so that tests can find 'rhodecode'.
161 preCheck = ''
161 preCheck = ''
162 export PATH="$out/bin:$PATH"
162 export PATH="$out/bin:$PATH"
163 '';
163 '';
164
164
165 # custom check phase for testing
165 # custom check phase for testing
166 checkPhase = ''
166 checkPhase = ''
167 runHook preCheck
167 runHook preCheck
168 PYTHONHASHSEED=random py.test -vv -p no:sugar -r xw --cov-config=.coveragerc --cov=rhodecode --cov-report=term-missing rhodecode
168 PYTHONHASHSEED=random py.test -vv -p no:sugar -r xw --cov-config=.coveragerc --cov=rhodecode --cov-report=term-missing rhodecode
169 runHook postCheck
169 runHook postCheck
170 '';
170 '';
171
171
172 postCheck = ''
172 postCheck = ''
173 echo "Cleanup of rhodecode/tests"
173 echo "Cleanup of rhodecode/tests"
174 rm -rf $out/lib/${self.python.libPrefix}/site-packages/rhodecode/tests
174 rm -rf $out/lib/${self.python.libPrefix}/site-packages/rhodecode/tests
175 '';
175 '';
176
176
177 preBuild = ''
177 preBuild = ''
178 echo "Building frontend assets"
178 echo "Building frontend assets"
179 ${linkNodeAndBowerPackages}
179 ${linkNodeAndBowerPackages}
180 grunt
180 grunt
181 rm -fr node_modules
181 rm -fr node_modules
182 '';
182 '';
183
183
184 postInstall = ''
184 postInstall = ''
185 # check required files
185 # check required files
186 if [ ! -f rhodecode/public/js/scripts.js ]; then
186 if [ ! -f rhodecode/public/js/scripts.js ]; then
187 echo "Missing scripts.js"
187 echo "Missing scripts.js"
188 exit 1
188 exit 1
189 fi
189 fi
190 if [ ! -f rhodecode/public/css/style.css ]; then
190 if [ ! -f rhodecode/public/css/style.css ]; then
191 echo "Missing style.css"
191 echo "Missing style.css"
192 exit 1
192 exit 1
193 fi
193 fi
194
194
195 echo "Writing enterprise-ce meta information for rccontrol to nix-support/rccontrol"
195 echo "Writing enterprise-ce meta information for rccontrol to nix-support/rccontrol"
196 mkdir -p $out/nix-support/rccontrol
196 mkdir -p $out/nix-support/rccontrol
197 cp -v rhodecode/VERSION $out/nix-support/rccontrol/version
197 cp -v rhodecode/VERSION $out/nix-support/rccontrol/version
198 echo "[DONE]: enterprise-ce meta information for rccontrol written"
198 echo "[DONE]: enterprise-ce meta information for rccontrol written"
199
199
200 mkdir -p $out/etc
200 mkdir -p $out/etc
201 cp configs/production.ini $out/etc
201 cp configs/production.ini $out/etc
202 echo "[DONE]: saved enterprise-ce production.ini into $out/etc"
202 echo "[DONE]: saved enterprise-ce production.ini into $out/etc"
203
203
204 cp -r rhodecode/config/rcextensions $out/etc/rcextensions.tmpl
205 echo "[DONE]: saved enterprise-ce rcextensions into $out/etc/rcextensions.tmpl"
206
204 # python based programs need to be wrapped
207 # python based programs need to be wrapped
205 mkdir -p $out/bin
208 mkdir -p $out/bin
206
209
207 # required binaries from dependencies
210 # required binaries from dependencies
208 ln -s ${self.supervisor}/bin/supervisorctl $out/bin/
211 ln -s ${self.supervisor}/bin/supervisorctl $out/bin/
209 ln -s ${self.supervisor}/bin/supervisord $out/bin/
212 ln -s ${self.supervisor}/bin/supervisord $out/bin/
210 ln -s ${self.pastescript}/bin/paster $out/bin/
213 ln -s ${self.pastescript}/bin/paster $out/bin/
211 ln -s ${self.channelstream}/bin/channelstream $out/bin/
214 ln -s ${self.channelstream}/bin/channelstream $out/bin/
212 ln -s ${self.celery}/bin/celery $out/bin/
215 ln -s ${self.celery}/bin/celery $out/bin/
213 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
216 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
214 ln -s ${self.pyramid}/bin/prequest $out/bin/
217 ln -s ${self.pyramid}/bin/prequest $out/bin/
215 ln -s ${self.pyramid}/bin/pserve $out/bin/
218 ln -s ${self.pyramid}/bin/pserve $out/bin/
216
219
217 echo "[DONE]: created symlinks into $out/bin"
220 echo "[DONE]: created symlinks into $out/bin"
218 DEPS="$out/bin/supervisorctl \
221 DEPS="$out/bin/supervisorctl \
219 $out/bin/supervisord \
222 $out/bin/supervisord \
220 $out/bin/paster \
223 $out/bin/paster \
221 $out/bin/channelstream \
224 $out/bin/channelstream \
222 $out/bin/celery \
225 $out/bin/celery \
223 $out/bin/gunicorn \
226 $out/bin/gunicorn \
224 $out/bin/prequest \
227 $out/bin/prequest \
225 $out/bin/pserve"
228 $out/bin/pserve"
226
229
227 # wrap only dependency scripts, they require to have full PYTHONPATH set
230 # wrap only dependency scripts, they require to have full PYTHONPATH set
228 # to be able to import all packages
231 # to be able to import all packages
229 for file in $DEPS;
232 for file in $DEPS;
230 do
233 do
231 wrapProgram $file \
234 wrapProgram $file \
232 --prefix PATH : $PATH \
235 --prefix PATH : $PATH \
233 --prefix PYTHONPATH : $PYTHONPATH \
236 --prefix PYTHONPATH : $PYTHONPATH \
234 --set PYTHONHASHSEED random
237 --set PYTHONHASHSEED random
235 done
238 done
236
239
237 echo "[DONE]: enterprise-ce binary wrapping"
240 echo "[DONE]: enterprise-ce binary wrapping"
238
241
239 # rhodecode-tools don't need wrapping
242 # rhodecode-tools don't need wrapping
240 ln -s ${self.rhodecode-tools}/bin/rhodecode-* $out/bin/
243 ln -s ${self.rhodecode-tools}/bin/rhodecode-* $out/bin/
241
244
242 '';
245 '';
243 });
246 });
244
247
245 };
248 };
246
249
247 basePythonPackages = with builtins;
250 basePythonPackages = with builtins;
248 if isAttrs pythonPackages then
251 if isAttrs pythonPackages then
249 pythonPackages
252 pythonPackages
250 else
253 else
251 getAttr pythonPackages pkgs;
254 getAttr pythonPackages pkgs;
252
255
253 pythonGeneratedPackages = import ./pkgs/python-packages.nix {
256 pythonGeneratedPackages = import ./pkgs/python-packages.nix {
254 inherit pkgs;
257 inherit pkgs;
255 inherit (pkgs) fetchurl fetchgit fetchhg;
258 inherit (pkgs) fetchurl fetchgit fetchhg;
256 };
259 };
257
260
258 pythonCommunityOverrides = import ./pkgs/python-packages-overrides.nix {
261 pythonCommunityOverrides = import ./pkgs/python-packages-overrides.nix {
259 inherit pkgs basePythonPackages;
262 inherit pkgs basePythonPackages;
260 };
263 };
261
264
262 # Apply all overrides and fix the final package set
265 # Apply all overrides and fix the final package set
263 myPythonPackagesUnfix = with pkgs.lib;
266 myPythonPackagesUnfix = with pkgs.lib;
264 (extends pythonExternalOverrides
267 (extends pythonExternalOverrides
265 (extends pythonLocalOverrides
268 (extends pythonLocalOverrides
266 (extends pythonCommunityOverrides
269 (extends pythonCommunityOverrides
267 (extends pythonGeneratedPackages
270 (extends pythonGeneratedPackages
268 basePythonPackagesUnfix))));
271 basePythonPackagesUnfix))));
269
272
270 myPythonPackages = (pkgs.lib.fix myPythonPackagesUnfix);
273 myPythonPackages = (pkgs.lib.fix myPythonPackagesUnfix);
271
274
272 in myPythonPackages.rhodecode-enterprise-ce
275 in myPythonPackages.rhodecode-enterprise-ce
@@ -1,25 +1,36 b''
1 .. _extensions-hooks-ref:
1 .. _extensions-hooks-ref:
2
2
3 Extensions & Hooks
3 Extensions & Hooks
4 ==================
4 ==================
5
5
6 The extensions & hooks section references three concepts regularly,
6 The extensions & hooks section references three concepts regularly,
7 so to clarify what is meant each time, read the following definitions:
7 so to clarify what is meant each time, read the following definitions:
8
8
9 * **Plugin**: A Plugin is software that adds a specific feature to
9 * **Plugin**: A Plugin is software that adds a specific feature to
10 an existing software application.
10 an existing software application.
11 * **Extension**: An extension extends the capabilities of,
11 * **Extension**: An extension extends the capabilities of,
12 or the data available to, an existing software application.
12 or the data available to, an existing software application.
13 * **Hook**: A hook intercepts function calls, messages, or events passed
13 * **Hook**: A hook intercepts function calls, messages, or events passed
14 between software components and can be used to trigger plugins, or their
14 between software components and can be used to trigger plugins, or their
15 extensions.
15 extensions.
16
16
17 .. toctree::
17
18 Hooks
19 -----
20
21 Within |RCM| there are two types of supported hooks.
18
22
19 rcx
23 * **Internal built-in hooks**: The internal |hg|, |git| or |svn| hooks are
20 install-ext
24 triggered by different VCS operations, like push, pull,
21 config-ext
25 or clone and are non-configurable, but you can add your own VCS hooks,
22 extensions
26 see :ref:`custom-hooks`.
23 hooks
27 * **Custom rcextensions hooks**: User defined hooks centre around the lifecycle of
24 full-blown-example
28 certain actions such as |repo| creation, user creation etc. The actions
25 int-slack
29 these hooks trigger can be rejected based on the API permissions of the
30 user calling them.
31
32 On instructions how to use the custom `rcextensions`
33 see :ref:`integrations-rcextensions` section.
34
35
36
@@ -1,57 +1,60 b''
1 .. _integrations:
1 .. _integrations:
2
2
3 Integrations
3 Integrations
4 ------------
4 ------------
5
5
6 Rhodecode supports integrations with external services for various events,
6 |RCE| supports integrations with external services for various events,
7 such as commit pushes and pull requests. Multiple integrations of the same type
7 such as commit pushes and pull requests. Multiple integrations of the same type
8 can be added at the same time; this is useful for posting different events to
8 can be added at the same time; this is useful for posting different events to
9 different Slack channels, for example.
9 different Slack channels, for example.
10
10
11 Supported integrations
11 Supported integrations
12 ^^^^^^^^^^^^^^^^^^^^^^
12 ^^^^^^^^^^^^^^^^^^^^^^
13
13
14 ============================ ============ =====================================
14 ================================ ============ ========================================
15 Type/Name |RC| Edition Description
15 Type/Name |RC| Edition Description
16 ============================ ============ =====================================
16 ================================ ============ ========================================
17 :ref:`integrations-slack` |RCCEshort| https://slack.com/
17 :ref:`integrations-webhook` |RCCEshort| Trigger events as `json` to a custom url
18 :ref:`integrations-hipchat` |RCCEshort| https://www.hipchat.com/
18 :ref:`integrations-slack` |RCCEshort| Integrate with https://slack.com/
19 :ref:`integrations-webhook` |RCCEshort| POST events as `json` to a custom url
19 :ref:`integrations-hipchat` |RCCEshort| Integrate with https://www.hipchat.com/
20 :ref:`integrations-ci` |RCCEshort| Trigger Builds for Common CI Systems
20 :ref:`integrations-email` |RCCEshort| Send repo push commits by email
21 :ref:`integrations-email` |RCCEshort| Send repo push commits by email
21 :ref:`integrations-ci` |RCCEshort| Trigger Builds for Common CI Systems
22 :ref:`integrations-jenkins` |RCEEshort| Trigger Builds for Jenkins CI System
22 :ref:`integrations-rcextensions` |RCCEshort| Advanced low-level integration framework
23 :ref:`integrations-redmine` |RCEEshort| Close/Resolve/Reference Redmine issues
23
24 :ref:`integrations-jira` |RCEEshort| Close/Resolve/Reference JIRA issues
24 :ref:`integrations-jenkins` |RCEEshort| Trigger Builds for Jenkins CI System
25 ============================ ============ =====================================
25 :ref:`integrations-redmine` |RCEEshort| Close/Resolve/Reference Redmine issues
26 :ref:`integrations-jira` |RCEEshort| Close/Resolve/Reference JIRA issues
27 ================================ ============ ========================================
26
28
27 .. _creating-integrations:
29 .. _creating-integrations:
28
30
29 Creating an Integration
31 Creating an Integration
30 ^^^^^^^^^^^^^^^^^^^^^^^
32 ^^^^^^^^^^^^^^^^^^^^^^^
31
33
32 Integrations can be added globally via the admin UI:
34 Integrations can be added globally via the admin UI:
33
35
34 :menuselection:`Admin --> Integrations`
36 :menuselection:`Admin --> Integrations`
35
37
36 or per repository in each repository's settings:
38 or per repository in each repository's settings:
37
39
38 :menuselection:`Admin --> Repositories --> Edit --> Integrations`
40 :menuselection:`Admin --> Repositories --> Edit --> Integrations`
39
41
40 To create an integration, select the type from the list in the *Create New
42 To create an integration, select the type from the list in the *Create New
41 Integration* section.
43 Integration* section.
42
44
43 The *Current Integrations* section shows existing integrations that have been
45 The *Current Integrations* section shows existing integrations that have been
44 created along with their type (eg. Slack) and enabled status.
46 created along with their type (eg. Slack) and enabled status.
45
47
46 See pages specific to each type of integration for more instructions:
48 See pages specific to each type of integration for more instructions:
47
49
48 .. toctree::
50 .. toctree::
49
51
50 slack
52 slack
51 hipchat
53 hipchat
52 redmine
54 redmine
53 jira
55 jira
54 webhook
56 webhook
55 email
57 email
56 ci
58 ci
57 jenkins
59 jenkins
60 integrations-rcextensions
@@ -1,677 +1,578 b''
1 .. _tools-cli:
1 .. _tools-cli:
2
2
3 |RCT| CLI
3 |RCT| CLI
4 ---------
4 ---------
5
5
6 The commands available with |RCT| can be split into three categories:
6 The commands available with |RCT| can be split into three categories:
7
7
8 - Remotely executable commands that can be run from your local machine once you
8 - Remotely executable commands that can be run from your local machine once you
9 have your connection details to |RCE| configured.
9 have your connection details to |RCE| configured.
10 - Locally executable commands that can be run on the server to carry out
10 - Locally executable commands that can be run on the server to carry out
11 general maintenance.
11 general maintenance.
12 - Local configuration commands used to help set up your |RCT| configuration.
12 - Local configuration commands used to help set up your |RCT| configuration.
13
13
14
14
15 rhodecode-tools
15 rhodecode-tools
16 ---------------
16 ---------------
17
17
18 Use |RCT| to setup automation, run the indexer, and install extensions for
18 Use |RCT| to setup automation, run the indexer, and install extensions for
19 your |RCM| instances. Options:
19 your |RCM| instances. Options:
20
20
21 .. rst-class:: dl-horizontal
21 .. rst-class:: dl-horizontal
22
22
23 \ - -apihost <api_host>
23 \ - -apihost <api_host>
24 Set the API host value.
24 Set the API host value.
25
25
26 \ - -apikey <apikey_value>
26 \ - -apikey <apikey_value>
27 Set the API key value.
27 Set the API key value.
28
28
29 \-c, - -config <config_file>
29 \-c, - -config <config_file>
30 Create a configuration file. The default file is created
30 Create a configuration file. The default file is created
31 in ``~/.rhoderc``
31 in ``~/.rhoderc``
32
32
33 \ - -save-config
33 \ - -save-config
34 Save the configuration file.
34 Save the configuration file.
35
35
36 \ - -show-config
36 \ - -show-config
37 Show the current configuration values.
37 Show the current configuration values.
38
38
39 \ - -format {json,pretty}
39 \ - -format {json,pretty}
40 Set the formatted representation.
40 Set the formatted representation.
41
41
42 Example usage:
42 Example usage:
43
43
44 .. code-block:: bash
44 .. code-block:: bash
45
45
46 $ rhodecode-tools --apikey=key --apihost=http://rhodecode.server \
46 $ rhodecode-tools --apikey=key --apihost=http://rhodecode.server \
47 --save-config
47 --save-config
48
48
49 rhodecode-api
49 rhodecode-api
50 -------------
50 -------------
51
51
52 The |RC| API lets you connect to |RCE| and carry out management tasks from a
52 The |RC| API lets you connect to |RCE| and carry out management tasks from a
53 remote machine, for more information about the API, see the :ref:`api`. To
53 remote machine, for more information about the API, see the :ref:`api`. To
54 pass arguments on the command-line use the ``method:option`` syntax.
54 pass arguments on the command-line use the ``method:option`` syntax.
55
55
56 Example usage:
56 Example usage:
57
57
58 .. code-block:: bash
58 .. code-block:: bash
59
59
60 # Run the get_repos API call and sample output
60 # Run the get_repos API call and sample output
61 $ rhodecode-api --instance-name=enterprise-1 create_repo \
61 $ rhodecode-api --instance-name=enterprise-1 create_repo \
62 repo_name:brand-new repo_type:hg description:repo-desc
62 repo_name:brand-new repo_type:hg description:repo-desc
63
63
64 {
64 {
65 "error": null,
65 "error": null,
66 "id": 1110,
66 "id": 1110,
67 "result": {
67 "result": {
68 "msg": "Created new repository `brand-new`",
68 "msg": "Created new repository `brand-new`",
69 "success": true,
69 "success": true,
70 "task": null
70 "task": null
71 }
71 }
72 }
72 }
73
73
74 Options:
74 Options:
75
75
76 .. rst-class:: dl-horizontal
76 .. rst-class:: dl-horizontal
77
77
78 \ - -api-cache-only
78 \ - -api-cache-only
79 Requires a cache to be present when running this call
79 Requires a cache to be present when running this call
80
80
81 \ - -api-cache-rebuild
81 \ - -api-cache-rebuild
82 Replaces existing cached values with new ones from server
82 Replaces existing cached values with new ones from server
83
83
84 \ - -api-cache <PATH>
84 \ - -api-cache <PATH>
85 Use a special cache dir to read responses from instead of the server
85 Use a special cache dir to read responses from instead of the server
86
86
87 \ - -api-cert-verify
87 \ - -api-cert-verify
88 Verify the endpoint ssl certificate
88 Verify the endpoint ssl certificate
89
89
90 \ - -api-cert <PATH>
90 \ - -api-cert <PATH>
91 Path to alternate CA bundle.
91 Path to alternate CA bundle.
92
92
93 \ - -apihost <api_host>
93 \ - -apihost <api_host>
94 Set the API host value.
94 Set the API host value.
95
95
96 \ - -apikey <apikey_value>
96 \ - -apikey <apikey_value>
97 Set the API key value.
97 Set the API key value.
98
98
99 \ - -instance-name <instance-id>
99 \ - -instance-name <instance-id>
100 Set the instance name
100 Set the instance name
101
101
102 \-I, - -install-dir <DIR>
102 \-I, - -install-dir <DIR>
103 Location of application instances
103 Location of application instances
104
104
105 \-c, - -config <.rhoderc-file>
105 \-c, - -config <.rhoderc-file>
106 Location of the :file:`.rhoderc`
106 Location of the :file:`.rhoderc`
107
107
108 \-F, - -format {json,pretty}
108 \-F, - -format {json,pretty}
109 Set the formatted representation.
109 Set the formatted representation.
110
110
111 \-h, - -help
111 \-h, - -help
112 Show help messages.
112 Show help messages.
113
113
114 \-v, - -verbose
114 \-v, - -verbose
115 Enable verbose messaging
115 Enable verbose messaging
116
116
117 rhodecode-cleanup-gists
117 rhodecode-cleanup-gists
118 -----------------------
118 -----------------------
119
119
120 Use this to delete gists within |RCM|. Options:
120 Use this to delete gists within |RCM|. Options:
121
121
122 .. rst-class:: dl-horizontal
122 .. rst-class:: dl-horizontal
123
123
124 \-c, - -config <config_file>
124 \-c, - -config <config_file>
125 Set the file path to the configuration file. The default file is
125 Set the file path to the configuration file. The default file is
126 :file:`/home/{user}/.rhoderc`
126 :file:`/home/{user}/.rhoderc`
127
127
128 \ - -corrupted
128 \ - -corrupted
129 Remove gists with corrupted metadata.
129 Remove gists with corrupted metadata.
130
130
131 \ - -dont-ask
131 \ - -dont-ask
132 Remove gists without asking for confirmation.
132 Remove gists without asking for confirmation.
133
133
134 \-h, - -help
134 \-h, - -help
135 Show help messages. current configuration values.
135 Show help messages. current configuration values.
136
136
137 \ - -instance-name <instance-id>
137 \ - -instance-name <instance-id>
138 Set the instance name.
138 Set the instance name.
139
139
140 \-R, - -repo-dir
140 \-R, - -repo-dir
141 Set the repository file path.
141 Set the repository file path.
142
142
143 \ - -version
143 \ - -version
144 Display your |RCT| version.
144 Display your |RCT| version.
145
145
146 Example usage:
146 Example usage:
147
147
148 .. code-block:: bash
148 .. code-block:: bash
149
149
150 # Clean up gists related to an instance
150 # Clean up gists related to an instance
151 $ rhodecode-cleanup-gists --instance-name=enterprise-1
151 $ rhodecode-cleanup-gists --instance-name=enterprise-1
152 Scanning for gists in /home/brian/repos/.rc_gist_store...
152 Scanning for gists in /home/brian/repos/.rc_gist_store...
153 preparing to remove [3] found gists
153 preparing to remove [3] found gists
154
154
155 # Clean up corrupted gists in an instance
155 # Clean up corrupted gists in an instance
156 $ rhodecode-cleanup-gists --instance-name=enterprise-1 --corrupted
156 $ rhodecode-cleanup-gists --instance-name=enterprise-1 --corrupted
157 Scanning for gists in /home/brian/repos/.rc_gist_store...
157 Scanning for gists in /home/brian/repos/.rc_gist_store...
158 preparing to remove [2] found gists
158 preparing to remove [2] found gists
159 the following gists will be archived:
159 the following gists will be archived:
160 * EXPIRED: BAD METADATA | /home/brian/repos/.rc_gist_store/5
160 * EXPIRED: BAD METADATA | /home/brian/repos/.rc_gist_store/5
161 * EXPIRED: BAD METADATA | /home/brian/repos/.rc_gist_store/8FtC
161 * EXPIRED: BAD METADATA | /home/brian/repos/.rc_gist_store/8FtC
162 are you sure you want to archive them? [y/N]: y
162 are you sure you want to archive them? [y/N]: y
163 removing gist /home/brian/repos/.rc_gist_store/5
163 removing gist /home/brian/repos/.rc_gist_store/5
164 removing gist /home/brian/repos/.rc_gist_store/8FtCKdcbRKmEvRzTVsEt
164 removing gist /home/brian/repos/.rc_gist_store/8FtCKdcbRKmEvRzTVsEt
165
165
166 rhodecode-cleanup-repos
166 rhodecode-cleanup-repos
167 -----------------------
167 -----------------------
168
168
169 Use this to manage |repos| and |repo| groups within |RCM|. Options:
169 Use this to manage |repos| and |repo| groups within |RCM|. Options:
170
170
171 .. rst-class:: dl-horizontal
171 .. rst-class:: dl-horizontal
172
172
173 \-c, - -config <config_file>
173 \-c, - -config <config_file>
174 Set the file path to the configuration file. The default file is
174 Set the file path to the configuration file. The default file is
175 :file:`/home/{user}/.rhoderc`.
175 :file:`/home/{user}/.rhoderc`.
176
176
177 \-h, - -help
177 \-h, - -help
178 Show help messages and current configuration values.
178 Show help messages and current configuration values.
179
179
180 \ - -interactive
180 \ - -interactive
181 Enable an interactive prompt for each repository when deleting.
181 Enable an interactive prompt for each repository when deleting.
182
182
183 \ - -include-groups
183 \ - -include-groups
184 Remove repository groups.
184 Remove repository groups.
185
185
186 \ - -instance-name <instance-id>
186 \ - -instance-name <instance-id>
187 Set the instance name.
187 Set the instance name.
188
188
189 \ - -list-only
189 \ - -list-only
190 Display repositories selected for deletion.
190 Display repositories selected for deletion.
191
191
192 \ - -older-than <str>
192 \ - -older-than <str>
193 Delete repositories older than a specified time.
193 Delete repositories older than a specified time.
194 You can use the following suffixes: d for days, h for hours,
194 You can use the following suffixes: d for days, h for hours,
195 m for minutes, s for seconds.
195 m for minutes, s for seconds.
196
196
197 \-R, - -repo-dir
197 \-R, - -repo-dir
198 Set the repository file path.
198 Set the repository file path.
199
199
200 Example usage:
200 Example usage:
201
201
202 .. code-block:: bash
202 .. code-block:: bash
203
203
204 # Cleaning up repos using tools installed with RCE 350 and above
204 # Cleaning up repos using tools installed with RCE 350 and above
205 $ ~/.rccontrol/enterprise-4/profile/bin/rhodecode-cleanup-repos \
205 $ ~/.rccontrol/enterprise-4/profile/bin/rhodecode-cleanup-repos \
206 --instance-name=enterprise-4 --older-than=1d
206 --instance-name=enterprise-4 --older-than=1d
207 Scanning for repositories in /home/brian/repos...
207 Scanning for repositories in /home/brian/repos...
208 preparing to remove [2] found repositories older than 1 day, 0:00:00 (1d)
208 preparing to remove [2] found repositories older than 1 day, 0:00:00 (1d)
209
209
210 the following repositories will be deleted completely:
210 the following repositories will be deleted completely:
211 * REMOVED: 2015-08-05 00:23:18 | /home/brian/repos/rm__20150805_002318_831
211 * REMOVED: 2015-08-05 00:23:18 | /home/brian/repos/rm__20150805_002318_831
212 * REMOVED: 2015-08-04 01:22:10 | /home/brian/repos/rm__20150804_012210_336
212 * REMOVED: 2015-08-04 01:22:10 | /home/brian/repos/rm__20150804_012210_336
213 are you sure you want to remove them? [y/N]:
213 are you sure you want to remove them? [y/N]:
214
214
215 # Clean up repos older than 1 year
215 # Clean up repos older than 1 year
216 # If using virtualenv and pre RCE 350 tools installation
216 # If using virtualenv and pre RCE 350 tools installation
217 (venv)$ rhodecode-cleanup-repos --instance-name=enterprise-1 \
217 (venv)$ rhodecode-cleanup-repos --instance-name=enterprise-1 \
218 --older-than=365d
218 --older-than=365d
219
219
220 Scanning for repositories in /home/brian/repos...
220 Scanning for repositories in /home/brian/repos...
221 preparing to remove [343] found repositories older than 365 days
221 preparing to remove [343] found repositories older than 365 days
222
222
223 # clean up repos older than 3 days
223 # clean up repos older than 3 days
224 # If using virtualenv and pre RCE 350 tools installation
224 # If using virtualenv and pre RCE 350 tools installation
225 (venv)$ rhodecode-cleanup-repos --instance-name=enterprise-1 \
225 (venv)$ rhodecode-cleanup-repos --instance-name=enterprise-1 \
226 --older-than=3d
226 --older-than=3d
227 Scanning for repositories in /home/brian/repos...
227 Scanning for repositories in /home/brian/repos...
228 preparing to remove [3] found repositories older than 3 days
228 preparing to remove [3] found repositories older than 3 days
229
229
230 .. _tools-config:
230 .. _tools-config:
231
231
232 rhodecode-config
232 rhodecode-config
233 ----------------
233 ----------------
234
234
235 Use this to create or update a |RCE| configuration file on the local machine.
235 Use this to create or update a |RCE| configuration file on the local machine.
236
236
237 .. rst-class:: dl-horizontal
237 .. rst-class:: dl-horizontal
238
238
239 \- -filename </path/to/config_file>
239 \- -filename </path/to/config_file>
240 Set the file path to the |RCE| configuration file.
240 Set the file path to the |RCE| configuration file.
241
241
242 \- -show-defaults
242 \- -show-defaults
243 Display the defaults set in the |RCE| configuration file.
243 Display the defaults set in the |RCE| configuration file.
244
244
245 \- -update
245 \- -update
246 Update the configuration with the new settings passed on the command
246 Update the configuration with the new settings passed on the command
247 line.
247 line.
248
248
249 .. code-block:: bash
249 .. code-block:: bash
250
250
251 # Create a new config file
251 # Create a new config file
252 $ rhodecode-config --filename=dev.ini
252 $ rhodecode-config --filename=dev.ini
253 Wrote new config file in /Users/user/dev.ini
253 Wrote new config file in /Users/user/dev.ini
254
254
255 # Update config value for given section:
255 # Update config value for given section:
256 $ rhodecode-config --update --filename=prod.ini [handler_console]level=INFO
256 $ rhodecode-config --update --filename=prod.ini [handler_console]level=INFO
257
257
258 $ rhodecode-config --filename=dev.ini --show-defaults
258 $ rhodecode-config --filename=dev.ini --show-defaults
259 lang=en
259 lang=en
260 cpu_number=4
260 cpu_number=4
261 uuid=<function <lambda> at 0x10d86ac08>
261 uuid=<function <lambda> at 0x10d86ac08>
262 license_token=ff1e-aa9c-bb66-11e5
262 license_token=ff1e-aa9c-bb66-11e5
263 host=127.0.0.1
263 host=127.0.0.1
264 here=/Users/brian
264 here=/Users/brian
265 error_aggregation_service=None
265 error_aggregation_service=None
266 database_url=sqlite:///%(here)s/rhodecode.db?timeout=30
266 database_url=sqlite:///%(here)s/rhodecode.db?timeout=30
267 git_path=git
267 git_path=git
268 http_server=waitress
268 http_server=waitress
269 port=5000
269 port=5000
270
270
271 .. _tools-rhodecode-extensions:
271 .. _tools-rhodecode-extensions:
272
272
273 rhodecode-extensions
273 rhodecode-extensions
274 --------------------
274 --------------------
275
275
276 |RCT| adds additional mapping for :ref:`indexing-ref`, statistics, and adds
276 Since version 4.14, `rcextensions` are shipped together with |RCE|; please check
277 additional code for push/pull/create/delete |repo| hooks. These hooks can be
277 the :ref:`integrations-rcextensions` section.
278 used to send signals to build-bots such as jenkins. Options:
279
280 .. rst-class:: dl-horizontal
281
282 \-c, - -config <config_file>
283 Create a configuration file. The default file is created
284 in ``~/.rhoderc``
285
286 \-h, - -help
287 Show help messages.
288
289 \-F, - -format {json,pretty}
290 Set the formatted representation.
291
292 \-I, - -install-dir <str>
293 Set the location of the |RCE| installation. The default location is
294 :file:`/home/{user}/.rccontrol/`.
295
296 \ - -ini-file <str>
297 Path to the :file:`rhodecode.ini` file for that instance.
298
299 \ - -instance-name <instance-id>
300 Set the instance name.
301
302 \ - -plugins
303 Add plugins to your |RCE| installation. See the
304 :ref:`extensions-hooks-ref` section for more details.
305
306 \ - -version
307 Display your |RCT| version.
308
309
310 Once installed, you will see a :file:`rcextensions` folder in the instance
311 directory, for example :file:`home/{user}/.rccontrol/{instance-id}/rcextensions`
312
313 To install ``rcextensions``, use the following example:
314
315 .. code-block:: bash
316
317 # install extensions on the given instance
318 # If using virtualenv prior to RCE 350
319 (venv)$ rhodecode-extensions --instance-name=enterprise-1 \
320 --ini-file=rhodecode.ini
321 Writen new extensions file to rcextensions
322
323 # install extensions with additional plugins on the given instance
324 (venv)$ rhodecode-extensions --instance-name=enterprise-1 \
325 --ini-file=rhodecode.ini --plugins
326 Writen new extensions file to rcextensions
327
328 # installing extensions from 350 onwards
329 # as they are packaged with RCE
330 $ .rccontrol/enterprise-4/profile/bin/rhodecode-extensions --plugins \
331 --instance-name=enterprise-4 --ini-file=rhodecode.ini
332
333 Writen new extensions file to rcextensions
334
335 See the new extensions inside this directory for more details about the
336 additional hooks available, for example see the ``push_post.py`` file.
337
338 .. code-block:: python
339
340 import urllib
341 import urllib2
342
343 def run(*args, **kwargs):
344 """
345 Extra params
346
347 :param URL: url to send the data to
348 """
349
350 url = kwargs.pop('URL', None)
351 if url:
352 from rhodecode.lib.compat import json
353 from rhodecode.model.db import Repository
354
355 repo = Repository.get_by_repo_name(kwargs['repository'])
356 changesets = []
357 vcs_repo = repo.scm_instance_no_cache()
358 for r in kwargs['pushed_revs']:
359 cs = vcs_repo.get_changeset(r)
360 changesets.append(json.dumps(cs))
361
362 kwargs['pushed_revs'] = changesets
363 headers = {
364 'User-Agent': 'RhodeCode-SCM web hook',
365 'Content-type': 'application/x-www-form-urlencoded; charset=UTF-8',
366 'Accept': 'text/javascript, text/html, application/xml, '
367 'text/xml, */*',
368 'Accept-Encoding': 'gzip,deflate,sdch',
369 }
370
371 data = kwargs
372 data = urllib.urlencode(data)
373 req = urllib2.Request(url, data, headers)
374 response = urllib2.urlopen(req)
375 response.read()
376 return 0
377
278
378
279
379 rhodecode-gist
280 rhodecode-gist
380 --------------
281 --------------
381
282
382 Use this to create, list, show, or delete gists within |RCM|. Options:
283 Use this to create, list, show, or delete gists within |RCM|. Options:
383
284
384 .. rst-class:: dl-horizontal
285 .. rst-class:: dl-horizontal
385
286
386 \ - -api-cache-only
287 \ - -api-cache-only
387 Requires a cache to be present when running this call
288 Requires a cache to be present when running this call
388
289
389 \ - -api-cache-rebuild
290 \ - -api-cache-rebuild
390 Replaces existing cached values with new ones from server
291 Replaces existing cached values with new ones from server
391
292
392 \ - -api-cache PATH
293 \ - -api-cache PATH
393 Use a special cache dir to read responses from instead of the server
294 Use a special cache dir to read responses from instead of the server
394
295
395 \ - -api-cert-verify
296 \ - -api-cert-verify
396 Verify the endpoint ssl certificate
297 Verify the endpoint ssl certificate
397
298
398 \ - -api-cert PATH
299 \ - -api-cert PATH
399 Path to alternate CA bundle.
300 Path to alternate CA bundle.
400
301
401 \ - -apihost <api_host>
302 \ - -apihost <api_host>
402 Set the API host value.
303 Set the API host value.
403
304
404 \ - -apikey <apikey_value>
305 \ - -apikey <apikey_value>
405 Set the API key value.
306 Set the API key value.
406
307
407 \-c, - -config <config_file>
308 \-c, - -config <config_file>
408 Create a configuration file.
309 Create a configuration file.
409 The default file is created in :file:`~/.rhoderc`
310 The default file is created in :file:`~/.rhoderc`
410
311
411 \ - -create <gistname>
312 \ - -create <gistname>
412 create the gist
313 create the gist
413
314
414 \-d, - -description <str>
315 \-d, - -description <str>
415 Set gist description
316 Set gist description
416
317
417 \ - -delete <gistid>
318 \ - -delete <gistid>
418 Delete the gist
319 Delete the gist
419
320
420 \-f, - -file
321 \-f, - -file
421 Specify the filename. The file extension will enable syntax highlighting.
322 Specify the filename. The file extension will enable syntax highlighting.
422
323
423 \-F, - -format {json,pretty}
324 \-F, - -format {json,pretty}
424 Set the formatted representation.
325 Set the formatted representation.
425
326
426 \ - -help
327 \ - -help
427 Show help messages.
328 Show help messages.
428
329
429 \-I, - -install-dir <DIR>
330 \-I, - -install-dir <DIR>
430 Location of application instances
331 Location of application instances
431
332
432 \ - -instance-name <instance-id>
333 \ - -instance-name <instance-id>
433 Set the instance name.
334 Set the instance name.
434
335
435 \ - -list
336 \ - -list
436 Display instance gists.
337 Display instance gists.
437
338
438 \-l, --lifetime <minutes>
339 \-l, --lifetime <minutes>
439 Set the gist lifetime. The default value is (-1) forever
340 Set the gist lifetime. The default value is (-1) forever
440
341
441 \ - -show <gistname>
342 \ - -show <gistname>
442 Show the content of the gist
343 Show the content of the gist
443
344
444 \-o, - -open
345 \-o, - -open
445 After creating Gist open it in browser
346 After creating Gist open it in browser
446
347
447 \-p, - -private
348 \-p, - -private
448 Create a private gist
349 Create a private gist
449
350
450 \ - -version
351 \ - -version
451 Display your |RCT| version.
352 Display your |RCT| version.
452
353
453 Example usage:
354 Example usage:
454
355
455 .. code-block:: bash
356 .. code-block:: bash
456
357
457 # List the gists in an instance
358 # List the gists in an instance
458 (venv)brian@ubuntu:~$ rhodecode-gist --instance-name=enterprise-1 list
359 (venv)brian@ubuntu:~$ rhodecode-gist --instance-name=enterprise-1 list
459 {
360 {
460 "error": null,
361 "error": null,
461 "id": 7102,
362 "id": 7102,
462 "result": [
363 "result": [
463 {
364 {
464 "access_id": "2",
365 "access_id": "2",
465 "content": null,
366 "content": null,
466 "created_on": "2015-01-19T12:52:26.494",
367 "created_on": "2015-01-19T12:52:26.494",
467 "description": "A public gist",
368 "description": "A public gist",
468 "expires": -1.0,
369 "expires": -1.0,
469 "gist_id": 2,
370 "gist_id": 2,
470 "type": "public",
371 "type": "public",
471 "url": "http://127.0.0.1:10003/_admin/gists/2"
372 "url": "http://127.0.0.1:10003/_admin/gists/2"
472 },
373 },
473 {
374 {
474 "access_id": "7gs6BsSEC4pKUEPLz5AB",
375 "access_id": "7gs6BsSEC4pKUEPLz5AB",
475 "content": null,
376 "content": null,
476 "created_on": "2015-01-19T11:27:40.812",
377 "created_on": "2015-01-19T11:27:40.812",
477 "description": "Gist testing API",
378 "description": "Gist testing API",
478 "expires": -1.0,
379 "expires": -1.0,
479 "gist_id": 1,
380 "gist_id": 1,
480 "type": "private",
381 "type": "private",
481 "url": "http://127.0.0.1:10003/_admin/gists/7gs6BsSEC4pKUEPLz5AB"
382 "url": "http://127.0.0.1:10003/_admin/gists/7gs6BsSEC4pKUEPLz5AB"
482 }
383 }
483 ]
384 ]
484 }
385 }
485
386
486 # delete a particular gist
387 # delete a particular gist
487 # You use the access_id to specify the gist to delete
388 # You use the access_id to specify the gist to delete
488 (venv)brian@ubuntu:~$ rhodecode-gist delete 2 --instance-name=enterprise-1
389 (venv)brian@ubuntu:~$ rhodecode-gist delete 2 --instance-name=enterprise-1
489 {
390 {
490 "error": null,
391 "error": null,
491 "id": 6284,
392 "id": 6284,
492 "result": {
393 "result": {
493 "gist": null,
394 "gist": null,
494 "msg": "deleted gist ID:2"
395 "msg": "deleted gist ID:2"
495 }
396 }
496 }
397 }
497
398
498 # cat a file and pipe to new gist
399 # cat a file and pipe to new gist
499 # This is if you are using virtualenv
400 # This is if you are using virtualenv
500 (venv)$ cat ~/.rhoderc | rhodecode-gist --instance-name=enterprise-1 \
401 (venv)$ cat ~/.rhoderc | rhodecode-gist --instance-name=enterprise-1 \
501 -d '.rhoderc copy' create
402 -d '.rhoderc copy' create
502
403
503 {
404 {
504 "error": null,
405 "error": null,
505 "id": 5374,
406 "id": 5374,
506 "result": {
407 "result": {
507 "gist": {
408 "gist": {
508 "access_id": "7",
409 "access_id": "7",
509 "content": null,
410 "content": null,
510 "created_on": "2015-01-26T11:31:58.774",
411 "created_on": "2015-01-26T11:31:58.774",
511 "description": ".rhoderc copy",
412 "description": ".rhoderc copy",
512 "expires": -1.0,
413 "expires": -1.0,
513 "gist_id": 7,
414 "gist_id": 7,
514 "type": "public",
415 "type": "public",
515 "url": "http://127.0.0.1:10003/_admin/gists/7"
416 "url": "http://127.0.0.1:10003/_admin/gists/7"
516 },
417 },
517 "msg": "created new gist"
418 "msg": "created new gist"
518 }
419 }
519 }
420 }
520
421
521 # Cat a file and pipe to gist
422 # Cat a file and pipe to gist
522 # in RCE 3.5.0 tools and above
423 # in RCE 3.5.0 tools and above
523 $ cat ~/.rhoderc | ~/.rccontrol/{instance-id}/profile/bin/rhodecode-gist \
424 $ cat ~/.rhoderc | ~/.rccontrol/{instance-id}/profile/bin/rhodecode-gist \
524 --instance-name=enterprise-4 -d '.rhoderc copy' create
425 --instance-name=enterprise-4 -d '.rhoderc copy' create
525 {
426 {
526 "error": null,
427 "error": null,
527 "id": 9253,
428 "id": 9253,
528 "result": {
429 "result": {
529 "gist": {
430 "gist": {
530 "access_id": "4",
431 "access_id": "4",
531 "acl_level": "acl_public",
432 "acl_level": "acl_public",
532 "content": null,
433 "content": null,
533 "created_on": "2015-08-20T05:54:11.250",
434 "created_on": "2015-08-20T05:54:11.250",
534 "description": ".rhoderc copy",
435 "description": ".rhoderc copy",
535 "expires": -1.0,
436 "expires": -1.0,
536 "gist_id": 4,
437 "gist_id": 4,
537 "modified_at": "2015-08-20T05:54:11.250",
438 "modified_at": "2015-08-20T05:54:11.250",
538 "type": "public",
439 "type": "public",
539 "url": "http://127.0.0.1:10000/_admin/gists/4"
440 "url": "http://127.0.0.1:10000/_admin/gists/4"
540 },
441 },
541 "msg": "created new gist"
442 "msg": "created new gist"
542 }
443 }
543 }
444 }
544
445
545
446
546 rhodecode-index
447 rhodecode-index
547 ---------------
448 ---------------
548
449
549 More detailed information regarding setting up the indexer is available in
450 More detailed information regarding setting up the indexer is available in
550 the :ref:`indexing-ref` section. Options:
451 the :ref:`indexing-ref` section. Options:
551
452
552 .. rst-class:: dl-horizontal
453 .. rst-class:: dl-horizontal
553
454
554 \ - -api-cache-only
455 \ - -api-cache-only
555 Requires a cache to be present when running this call
456 Requires a cache to be present when running this call
556
457
557 \ - -api-cache-rebuild
458 \ - -api-cache-rebuild
558 Replaces existing cached values with new ones from server
459 Replaces existing cached values with new ones from server
559
460
560 \ - -api-cache PATH
461 \ - -api-cache PATH
561 Use a special cache dir to read responses from instead of the server
462 Use a special cache dir to read responses from instead of the server
562
463
563 \ - -api-cert-verify
464 \ - -api-cert-verify
564 Verify the endpoint ssl certificate
465 Verify the endpoint ssl certificate
565
466
566 \ - -api-cert PATH
467 \ - -api-cert PATH
567 Path to alternate CA bundle.
468 Path to alternate CA bundle.
568
469
569 \ - -apihost <api_host>
470 \ - -apihost <api_host>
570 Set the API host value.
471 Set the API host value.
571
472
572 \ - -apikey <apikey_value>
473 \ - -apikey <apikey_value>
573 Set the API key value.
474 Set the API key value.
574
475
575 \-c, --config <config_file>
476 \-c, --config <config_file>
576 Create a configuration file.
477 Create a configuration file.
577 The default file is created in :file:`~/.rhoderc`
478 The default file is created in :file:`~/.rhoderc`
578
479
579 \ - -create-mapping <PATH>
480 \ - -create-mapping <PATH>
580 Creates an example mapping configuration for indexer.
481 Creates an example mapping configuration for indexer.
581
482
582 \-F, - -format {json,pretty}
483 \-F, - -format {json,pretty}
583 Set the formatted representation.
484 Set the formatted representation.
584
485
585 \-h, - -help
486 \-h, - -help
586 Show help messages.
487 Show help messages.
587
488
588 \ - -instance-name <instance-id>
489 \ - -instance-name <instance-id>
589 Set the instance name
490 Set the instance name
590
491
591 \-I, - -install-dir <DIR>
492 \-I, - -install-dir <DIR>
592 Location of application instances
493 Location of application instances
593
494
594 \-m, - -mapping <file_name>
495 \-m, - -mapping <file_name>
595 Parse the output to the .ini mapping file.
496 Parse the output to the .ini mapping file.
596
497
597 \ - -optimize
498 \ - -optimize
598 Optimize index for performance by amalgamating multiple index files
499 Optimize index for performance by amalgamating multiple index files
599 into one. Greatly increases incremental indexing speed.
500 into one. Greatly increases incremental indexing speed.
600
501
601 \-R, - -repo-dir <DIRECTORY>
502 \-R, - -repo-dir <DIRECTORY>
602 Location of repositories
503 Location of repositories
603
504
604 \ - -source <PATH>
505 \ - -source <PATH>
605 Use a special source JSON file to feed the indexer
506 Use a special source JSON file to feed the indexer
606
507
607 \ - -version
508 \ - -version
608 Display your |RCT| version.
509 Display your |RCT| version.
609
510
610 Example usage:
511 Example usage:
611
512
612 .. code-block:: bash
513 .. code-block:: bash
613
514
614 # Run the indexer
515 # Run the indexer
615 $ ~/.rccontrol/enterprise-4/profile/bin/rhodecode-index \
516 $ ~/.rccontrol/enterprise-4/profile/bin/rhodecode-index \
616 --instance-name=enterprise-4
517 --instance-name=enterprise-4
617
518
618 # Run indexer based on mapping.ini file
519 # Run indexer based on mapping.ini file
619 # This is using pre-350 virtualenv
520 # This is using pre-350 virtualenv
620 (venv)$ rhodecode-index --instance-name=enterprise-1
521 (venv)$ rhodecode-index --instance-name=enterprise-1
621
522
622 # Index from the command line without creating
523 # Index from the command line without creating
623 # the .rhoderc file
524 # the .rhoderc file
624 $ rhodecode-index --apikey=key --apihost=http://rhodecode.server \
525 $ rhodecode-index --apikey=key --apihost=http://rhodecode.server \
625 --instance-name=enterprise-2 --save-config
526 --instance-name=enterprise-2 --save-config
626
527
627 # Create the indexing mapping file
528 # Create the indexing mapping file
628 $ ~/.rccontrol/enterprise-4/profile/bin/rhodecode-index \
529 $ ~/.rccontrol/enterprise-4/profile/bin/rhodecode-index \
629 --create-mapping mapping.ini --instance-name=enterprise-4
530 --create-mapping mapping.ini --instance-name=enterprise-4
630
531
631 .. _tools-rhodecode-list-instance:
532 .. _tools-rhodecode-list-instance:
632
533
633 rhodecode-list-instances
534 rhodecode-list-instances
634 ------------------------
535 ------------------------
635
536
636 Use this command to list the instance details configured in the
537 Use this command to list the instance details configured in the
637 :file:`~/.rhoderc` file.
538 :file:`~/.rhoderc` file.
638
539
639 .. code-block:: bash
540 .. code-block:: bash
640
541
641 $ .rccontrol/enterprise-1/profile/bin/rhodecode-list-instances
542 $ .rccontrol/enterprise-1/profile/bin/rhodecode-list-instances
642 [instance:production] - Config only
543 [instance:production] - Config only
643 API-HOST: https://some.url.com
544 API-HOST: https://some.url.com
644 API-KEY: some.auth.token
545 API-KEY: some.auth.token
645
546
646 [instance:development] - Config only
547 [instance:development] - Config only
647 API-HOST: http://some.ip.address
548 API-HOST: http://some.ip.address
648 API-KEY: some.auth.token
549 API-KEY: some.auth.token
649
550
650
551
651 .. _tools-setup-config:
552 .. _tools-setup-config:
652
553
653 rhodecode-setup-config
554 rhodecode-setup-config
654 ----------------------
555 ----------------------
655
556
656 Use this command to create the ``~/.rhoderc`` file required by |RCT| to access
557 Use this command to create the ``~/.rhoderc`` file required by |RCT| to access
657 remote instances.
558 remote instances.
658
559
659 .. rst-class:: dl-horizontal
560 .. rst-class:: dl-horizontal
660
561
661 \- -instance-name <name>
562 \- -instance-name <name>
662 Specify the instance name in the :file:`~/.rhoderc`
563 Specify the instance name in the :file:`~/.rhoderc`
663
564
664 \api_host <hostname>
565 \api_host <hostname>
665 Create a configuration file. The default file is created
566 Create a configuration file. The default file is created
666 in ``~/.rhoderc``
567 in ``~/.rhoderc``
667
568
668 \api_key <auth-token>
569 \api_key <auth-token>
669 Create a configuration file. The default file is created
570 Create a configuration file. The default file is created
670 in ``~/.rhoderc``
571 in ``~/.rhoderc``
671
572
672
573
673 .. code-block:: bash
574 .. code-block:: bash
674
575
675 (venv)$ rhodecode-setup-config --instance-name=tea api_host=URL api_key=xyz
576 (venv)$ rhodecode-setup-config --instance-name=tea api_host=URL api_key=xyz
676 Config not found under /Users/username/.rhoderc, creating a new one
577 Config not found under /Users/username/.rhoderc, creating a new one
677 Wrote new configuration into /Users/username/.rhoderc
578 Wrote new configuration into /Users/username/.rhoderc
@@ -1,76 +1,66 b''
1 .. _tools-overview:
1 .. _tools-overview:
2
2
3 |RCT| Overview
3 |RCT| Overview
4 --------------
4 --------------
5
5
6 To install |RCT| correctly, see the installation steps covered in
6 To install |RCT| correctly, see the installation steps covered in
7 :ref:`install-tools`, and :ref:`config-rhoderc`.
7 :ref:`install-tools`, and :ref:`config-rhoderc`.
8
8
9 Once |RCT| is installed, and the :file:`/home/{user}/.rhoderc` file is
9 Once |RCT| is installed, and the :file:`/home/{user}/.rhoderc` file is
10 configured you can then use |RCT| on each |RCM| instance to carry out admin
10 configured you can then use |RCT| on each |RCM| instance to carry out admin
11 tasks. Use the following example to configure that file,
11 tasks. Use the following example to configure that file,
12 and once configured see the :ref:`tools-cli` for more details.
12 and once configured see the :ref:`tools-cli` for more details.
13
13
14 .. note::
14 .. note::
15
15
16 |RCT| require |PY| 2.7 to run.
16 |RCT| require |PY| 2.7 to run.
17
17
18 .. code-block:: bash
18 .. code-block:: bash
19
19
20 # Get the status of each instance you wish to use with Tools
20 # Get the status of each instance you wish to use with Tools
21 (venv)brian@ubuntu:~$ rccontrol status
21 (venv)brian@ubuntu:~$ rccontrol status
22
22
23 - NAME: momentum-1
23 - NAME: momentum-1
24 - STATUS: RUNNING
24 - STATUS: RUNNING
25 - TYPE: Momentum
25 - TYPE: Momentum
26 - VERSION: 3.0.0-nightly-momentum
26 - VERSION: 3.0.0-nightly-momentum
27 - URL: http://127.0.0.1:10003
27 - URL: http://127.0.0.1:10003
28
28
29 - NAME: momentum-3
29 - NAME: momentum-3
30 - STATUS: RUNNING
30 - STATUS: RUNNING
31 - TYPE: Momentum
31 - TYPE: Momentum
32 - VERSION: 3.0.0-nightly-momentum
32 - VERSION: 3.0.0-nightly-momentum
33 - URL: http://127.0.0.1:10007
33 - URL: http://127.0.0.1:10007
34
34
35 Example :file:`/home/{user}/.rhoderc` file.
35 Example :file:`/home/{user}/.rhoderc` file.
36
36
37 .. code-block:: ini
37 .. code-block:: ini
38
38
39 # Configure the .rhoderc file for each instance
39 # Configure the .rhoderc file for each instance
40 # API keys found in your instance
40 # API keys found in your instance
41 [instance:enterprise-1]
41 [instance:enterprise-1]
42 api_host = http://127.0.0.1:10003/
42 api_host = http://127.0.0.1:10003/
43 api_key = 91fdbdc257289c46633ef5aab274412911de1ba9
43 api_key = 91fdbdc257289c46633ef5aab274412911de1ba9
44 repo_dir = /home/brian/repos
44 repo_dir = /home/brian/repos
45
45
46 [instance:enterprise-3]
46 [instance:enterprise-3]
47 api_host = http://127.0.0.1:10007/
47 api_host = http://127.0.0.1:10007/
48 api_key = 5a925f65438d29f8d6ced8ab8e8c3d305998d1d9
48 api_key = 5a925f65438d29f8d6ced8ab8e8c3d305998d1d9
49 repo_dir = /home/brian/testing-repos/
49 repo_dir = /home/brian/testing-repos/
50
50
51
51
52 Example usage of |RCT| after |RCE| 3.5.0. From this version onwards |RCT| is
52 Example usage of |RCT| after |RCE| 3.5.0. From this version onwards |RCT| is
53 packaged with |RCE| by default.
53 packaged with |RCE| by default.
54
54
55 .. code-block:: bash
55 .. code-block:: bash
56
56
57 $ .rccontrol/enterprise-4/profile/bin/rhodecode-extensions --plugins \
57 $ .rccontrol/enterprise-4/profile/bin/rhodecode-api --instance-name=enterprise-4 get_ip [11:56:57 on 05/10/2018]
58 --instance-name=enterprise-4 --ini-file=rhodecode.ini
59
58
60 Writen new extensions file to rcextensions
59 {
61 Copied hipchat_push_notify.py plugin to rcextensions
60 "error": null,
62 Copied jira_pr_flow.py plugin to rcextensions
61 "id": 1000,
63 Copied default_reviewers.py plugin to rcextensions
62 "result": {
64 Copied extract_commits.py plugin to rcextensions
63 "server_ip_addr": "1.2.3.4",
65 Copied extract_issues.py plugin to rcextensions
64 "user_ips": []
66 Copied redmine_pr_flow.py plugin to rcextensions
65 }
67 Copied extra_fields.py plugin to rcextensions
66 }
68 Copied jira_smart_commits.py plugin to rcextensions
69 Copied http_notify.py plugin to rcextensions
70 Copied slack_push_notify.py plugin to rcextensions
71 Copied slack_message.py plugin to rcextensions
72 Copied extract_jira_issues.py plugin to rcextensions
73 Copied extract_redmine_issues.py plugin to rcextensions
74 Copied redmine_smart_commits.py plugin to rcextensions
75 Copied send_mail.py plugin to rcextensions
76
@@ -1,1233 +1,1233 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import mock
20 import mock
21 import pytest
21 import pytest
22
22
23 import rhodecode
23 import rhodecode
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 from rhodecode.lib.vcs.nodes import FileNode
25 from rhodecode.lib.vcs.nodes import FileNode
26 from rhodecode.lib import helpers as h
26 from rhodecode.lib import helpers as h
27 from rhodecode.model.changeset_status import ChangesetStatusModel
27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 from rhodecode.model.db import (
28 from rhodecode.model.db import (
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
30 from rhodecode.model.meta import Session
30 from rhodecode.model.meta import Session
31 from rhodecode.model.pull_request import PullRequestModel
31 from rhodecode.model.pull_request import PullRequestModel
32 from rhodecode.model.user import UserModel
32 from rhodecode.model.user import UserModel
33 from rhodecode.tests import (
33 from rhodecode.tests import (
34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35 from rhodecode.tests.utils import AssertResponse
35 from rhodecode.tests.utils import AssertResponse
36
36
37
37
38 def route_path(name, params=None, **kwargs):
38 def route_path(name, params=None, **kwargs):
39 import urllib
39 import urllib
40
40
41 base_url = {
41 base_url = {
42 'repo_changelog': '/{repo_name}/changelog',
42 'repo_changelog': '/{repo_name}/changelog',
43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
44 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
44 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
45 'pullrequest_show_all': '/{repo_name}/pull-request',
45 'pullrequest_show_all': '/{repo_name}/pull-request',
46 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
46 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
47 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
47 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
48 'pullrequest_repo_destinations': '/{repo_name}/pull-request/repo-destinations',
48 'pullrequest_repo_destinations': '/{repo_name}/pull-request/repo-destinations',
49 'pullrequest_new': '/{repo_name}/pull-request/new',
49 'pullrequest_new': '/{repo_name}/pull-request/new',
50 'pullrequest_create': '/{repo_name}/pull-request/create',
50 'pullrequest_create': '/{repo_name}/pull-request/create',
51 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
51 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
52 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
52 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
53 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
53 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
54 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
54 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
55 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
55 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
56 }[name].format(**kwargs)
56 }[name].format(**kwargs)
57
57
58 if params:
58 if params:
59 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
59 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
60 return base_url
60 return base_url
61
61
62
62
63 @pytest.mark.usefixtures('app', 'autologin_user')
63 @pytest.mark.usefixtures('app', 'autologin_user')
64 @pytest.mark.backends("git", "hg")
64 @pytest.mark.backends("git", "hg")
65 class TestPullrequestsView(object):
65 class TestPullrequestsView(object):
66
66
67 def test_index(self, backend):
67 def test_index(self, backend):
68 self.app.get(route_path(
68 self.app.get(route_path(
69 'pullrequest_new',
69 'pullrequest_new',
70 repo_name=backend.repo_name))
70 repo_name=backend.repo_name))
71
71
72 def test_option_menu_create_pull_request_exists(self, backend):
72 def test_option_menu_create_pull_request_exists(self, backend):
73 repo_name = backend.repo_name
73 repo_name = backend.repo_name
74 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
74 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
75
75
76 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
76 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
77 'pullrequest_new', repo_name=repo_name)
77 'pullrequest_new', repo_name=repo_name)
78 response.mustcontain(create_pr_link)
78 response.mustcontain(create_pr_link)
79
79
80 def test_create_pr_form_with_raw_commit_id(self, backend):
80 def test_create_pr_form_with_raw_commit_id(self, backend):
81 repo = backend.repo
81 repo = backend.repo
82
82
83 self.app.get(
83 self.app.get(
84 route_path('pullrequest_new', repo_name=repo.repo_name,
84 route_path('pullrequest_new', repo_name=repo.repo_name,
85 commit=repo.get_commit().raw_id),
85 commit=repo.get_commit().raw_id),
86 status=200)
86 status=200)
87
87
88 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
88 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
89 @pytest.mark.parametrize('range_diff', ["0", "1"])
89 @pytest.mark.parametrize('range_diff', ["0", "1"])
90 def test_show(self, pr_util, pr_merge_enabled, range_diff):
90 def test_show(self, pr_util, pr_merge_enabled, range_diff):
91 pull_request = pr_util.create_pull_request(
91 pull_request = pr_util.create_pull_request(
92 mergeable=pr_merge_enabled, enable_notifications=False)
92 mergeable=pr_merge_enabled, enable_notifications=False)
93
93
94 response = self.app.get(route_path(
94 response = self.app.get(route_path(
95 'pullrequest_show',
95 'pullrequest_show',
96 repo_name=pull_request.target_repo.scm_instance().name,
96 repo_name=pull_request.target_repo.scm_instance().name,
97 pull_request_id=pull_request.pull_request_id,
97 pull_request_id=pull_request.pull_request_id,
98 params={'range-diff': range_diff}))
98 params={'range-diff': range_diff}))
99
99
100 for commit_id in pull_request.revisions:
100 for commit_id in pull_request.revisions:
101 response.mustcontain(commit_id)
101 response.mustcontain(commit_id)
102
102
103 assert pull_request.target_ref_parts.type in response
103 assert pull_request.target_ref_parts.type in response
104 assert pull_request.target_ref_parts.name in response
104 assert pull_request.target_ref_parts.name in response
105 target_clone_url = pull_request.target_repo.clone_url()
105 target_clone_url = pull_request.target_repo.clone_url()
106 assert target_clone_url in response
106 assert target_clone_url in response
107
107
108 assert 'class="pull-request-merge"' in response
108 assert 'class="pull-request-merge"' in response
109 if pr_merge_enabled:
109 if pr_merge_enabled:
110 response.mustcontain('Pull request reviewer approval is pending')
110 response.mustcontain('Pull request reviewer approval is pending')
111 else:
111 else:
112 response.mustcontain('Server-side pull request merging is disabled.')
112 response.mustcontain('Server-side pull request merging is disabled.')
113
113
114 if range_diff == "1":
114 if range_diff == "1":
115 response.mustcontain('Turn off: Show the diff as commit range')
115 response.mustcontain('Turn off: Show the diff as commit range')
116
116
117 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
117 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
118 # Logout
118 # Logout
119 response = self.app.post(
119 response = self.app.post(
120 h.route_path('logout'),
120 h.route_path('logout'),
121 params={'csrf_token': csrf_token})
121 params={'csrf_token': csrf_token})
122 # Login as regular user
122 # Login as regular user
123 response = self.app.post(h.route_path('login'),
123 response = self.app.post(h.route_path('login'),
124 {'username': TEST_USER_REGULAR_LOGIN,
124 {'username': TEST_USER_REGULAR_LOGIN,
125 'password': 'test12'})
125 'password': 'test12'})
126
126
127 pull_request = pr_util.create_pull_request(
127 pull_request = pr_util.create_pull_request(
128 author=TEST_USER_REGULAR_LOGIN)
128 author=TEST_USER_REGULAR_LOGIN)
129
129
130 response = self.app.get(route_path(
130 response = self.app.get(route_path(
131 'pullrequest_show',
131 'pullrequest_show',
132 repo_name=pull_request.target_repo.scm_instance().name,
132 repo_name=pull_request.target_repo.scm_instance().name,
133 pull_request_id=pull_request.pull_request_id))
133 pull_request_id=pull_request.pull_request_id))
134
134
135 response.mustcontain('Server-side pull request merging is disabled.')
135 response.mustcontain('Server-side pull request merging is disabled.')
136
136
137 assert_response = response.assert_response()
137 assert_response = response.assert_response()
138 # for regular user without a merge permissions, we don't see it
138 # for regular user without a merge permissions, we don't see it
139 assert_response.no_element_exists('#close-pull-request-action')
139 assert_response.no_element_exists('#close-pull-request-action')
140
140
141 user_util.grant_user_permission_to_repo(
141 user_util.grant_user_permission_to_repo(
142 pull_request.target_repo,
142 pull_request.target_repo,
143 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
143 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
144 'repository.write')
144 'repository.write')
145 response = self.app.get(route_path(
145 response = self.app.get(route_path(
146 'pullrequest_show',
146 'pullrequest_show',
147 repo_name=pull_request.target_repo.scm_instance().name,
147 repo_name=pull_request.target_repo.scm_instance().name,
148 pull_request_id=pull_request.pull_request_id))
148 pull_request_id=pull_request.pull_request_id))
149
149
150 response.mustcontain('Server-side pull request merging is disabled.')
150 response.mustcontain('Server-side pull request merging is disabled.')
151
151
152 assert_response = response.assert_response()
152 assert_response = response.assert_response()
153 # now regular user has a merge permissions, we have CLOSE button
153 # now regular user has a merge permissions, we have CLOSE button
154 assert_response.one_element_exists('#close-pull-request-action')
154 assert_response.one_element_exists('#close-pull-request-action')
155
155
156 def test_show_invalid_commit_id(self, pr_util):
156 def test_show_invalid_commit_id(self, pr_util):
157 # Simulating invalid revisions which will cause a lookup error
157 # Simulating invalid revisions which will cause a lookup error
158 pull_request = pr_util.create_pull_request()
158 pull_request = pr_util.create_pull_request()
159 pull_request.revisions = ['invalid']
159 pull_request.revisions = ['invalid']
160 Session().add(pull_request)
160 Session().add(pull_request)
161 Session().commit()
161 Session().commit()
162
162
163 response = self.app.get(route_path(
163 response = self.app.get(route_path(
164 'pullrequest_show',
164 'pullrequest_show',
165 repo_name=pull_request.target_repo.scm_instance().name,
165 repo_name=pull_request.target_repo.scm_instance().name,
166 pull_request_id=pull_request.pull_request_id))
166 pull_request_id=pull_request.pull_request_id))
167
167
168 for commit_id in pull_request.revisions:
168 for commit_id in pull_request.revisions:
169 response.mustcontain(commit_id)
169 response.mustcontain(commit_id)
170
170
171 def test_show_invalid_source_reference(self, pr_util):
171 def test_show_invalid_source_reference(self, pr_util):
172 pull_request = pr_util.create_pull_request()
172 pull_request = pr_util.create_pull_request()
173 pull_request.source_ref = 'branch:b:invalid'
173 pull_request.source_ref = 'branch:b:invalid'
174 Session().add(pull_request)
174 Session().add(pull_request)
175 Session().commit()
175 Session().commit()
176
176
177 self.app.get(route_path(
177 self.app.get(route_path(
178 'pullrequest_show',
178 'pullrequest_show',
179 repo_name=pull_request.target_repo.scm_instance().name,
179 repo_name=pull_request.target_repo.scm_instance().name,
180 pull_request_id=pull_request.pull_request_id))
180 pull_request_id=pull_request.pull_request_id))
181
181
182 def test_edit_title_description(self, pr_util, csrf_token):
182 def test_edit_title_description(self, pr_util, csrf_token):
183 pull_request = pr_util.create_pull_request()
183 pull_request = pr_util.create_pull_request()
184 pull_request_id = pull_request.pull_request_id
184 pull_request_id = pull_request.pull_request_id
185
185
186 response = self.app.post(
186 response = self.app.post(
187 route_path('pullrequest_update',
187 route_path('pullrequest_update',
188 repo_name=pull_request.target_repo.repo_name,
188 repo_name=pull_request.target_repo.repo_name,
189 pull_request_id=pull_request_id),
189 pull_request_id=pull_request_id),
190 params={
190 params={
191 'edit_pull_request': 'true',
191 'edit_pull_request': 'true',
192 'title': 'New title',
192 'title': 'New title',
193 'description': 'New description',
193 'description': 'New description',
194 'csrf_token': csrf_token})
194 'csrf_token': csrf_token})
195
195
196 assert_session_flash(
196 assert_session_flash(
197 response, u'Pull request title & description updated.',
197 response, u'Pull request title & description updated.',
198 category='success')
198 category='success')
199
199
200 pull_request = PullRequest.get(pull_request_id)
200 pull_request = PullRequest.get(pull_request_id)
201 assert pull_request.title == 'New title'
201 assert pull_request.title == 'New title'
202 assert pull_request.description == 'New description'
202 assert pull_request.description == 'New description'
203
203
204 def test_edit_title_description_closed(self, pr_util, csrf_token):
204 def test_edit_title_description_closed(self, pr_util, csrf_token):
205 pull_request = pr_util.create_pull_request()
205 pull_request = pr_util.create_pull_request()
206 pull_request_id = pull_request.pull_request_id
206 pull_request_id = pull_request.pull_request_id
207 repo_name = pull_request.target_repo.repo_name
207 repo_name = pull_request.target_repo.repo_name
208 pr_util.close()
208 pr_util.close()
209
209
210 response = self.app.post(
210 response = self.app.post(
211 route_path('pullrequest_update',
211 route_path('pullrequest_update',
212 repo_name=repo_name, pull_request_id=pull_request_id),
212 repo_name=repo_name, pull_request_id=pull_request_id),
213 params={
213 params={
214 'edit_pull_request': 'true',
214 'edit_pull_request': 'true',
215 'title': 'New title',
215 'title': 'New title',
216 'description': 'New description',
216 'description': 'New description',
217 'csrf_token': csrf_token}, status=200)
217 'csrf_token': csrf_token}, status=200)
218 assert_session_flash(
218 assert_session_flash(
219 response, u'Cannot update closed pull requests.',
219 response, u'Cannot update closed pull requests.',
220 category='error')
220 category='error')
221
221
222 def test_update_invalid_source_reference(self, pr_util, csrf_token):
222 def test_update_invalid_source_reference(self, pr_util, csrf_token):
223 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
223 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
224
224
225 pull_request = pr_util.create_pull_request()
225 pull_request = pr_util.create_pull_request()
226 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
226 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
227 Session().add(pull_request)
227 Session().add(pull_request)
228 Session().commit()
228 Session().commit()
229
229
230 pull_request_id = pull_request.pull_request_id
230 pull_request_id = pull_request.pull_request_id
231
231
232 response = self.app.post(
232 response = self.app.post(
233 route_path('pullrequest_update',
233 route_path('pullrequest_update',
234 repo_name=pull_request.target_repo.repo_name,
234 repo_name=pull_request.target_repo.repo_name,
235 pull_request_id=pull_request_id),
235 pull_request_id=pull_request_id),
236 params={'update_commits': 'true',
236 params={'update_commits': 'true',
237 'csrf_token': csrf_token})
237 'csrf_token': csrf_token})
238
238
239 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
239 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
240 UpdateFailureReason.MISSING_SOURCE_REF])
240 UpdateFailureReason.MISSING_SOURCE_REF])
241 assert_session_flash(response, expected_msg, category='error')
241 assert_session_flash(response, expected_msg, category='error')
242
242
243 def test_missing_target_reference(self, pr_util, csrf_token):
243 def test_missing_target_reference(self, pr_util, csrf_token):
244 from rhodecode.lib.vcs.backends.base import MergeFailureReason
244 from rhodecode.lib.vcs.backends.base import MergeFailureReason
245 pull_request = pr_util.create_pull_request(
245 pull_request = pr_util.create_pull_request(
246 approved=True, mergeable=True)
246 approved=True, mergeable=True)
247 pull_request.target_ref = 'branch:invalid-branch:invalid-commit-id'
247 pull_request.target_ref = 'branch:invalid-branch:invalid-commit-id'
248 Session().add(pull_request)
248 Session().add(pull_request)
249 Session().commit()
249 Session().commit()
250
250
251 pull_request_id = pull_request.pull_request_id
251 pull_request_id = pull_request.pull_request_id
252 pull_request_url = route_path(
252 pull_request_url = route_path(
253 'pullrequest_show',
253 'pullrequest_show',
254 repo_name=pull_request.target_repo.repo_name,
254 repo_name=pull_request.target_repo.repo_name,
255 pull_request_id=pull_request_id)
255 pull_request_id=pull_request_id)
256
256
257 response = self.app.get(pull_request_url)
257 response = self.app.get(pull_request_url)
258
258
259 assertr = AssertResponse(response)
259 assertr = AssertResponse(response)
260 expected_msg = PullRequestModel.MERGE_STATUS_MESSAGES[
260 expected_msg = PullRequestModel.MERGE_STATUS_MESSAGES[
261 MergeFailureReason.MISSING_TARGET_REF]
261 MergeFailureReason.MISSING_TARGET_REF]
262 assertr.element_contains(
262 assertr.element_contains(
263 'span[data-role="merge-message"]', str(expected_msg))
263 'span[data-role="merge-message"]', str(expected_msg))
264
264
265 def test_comment_and_close_pull_request_custom_message_approved(
265 def test_comment_and_close_pull_request_custom_message_approved(
266 self, pr_util, csrf_token, xhr_header):
266 self, pr_util, csrf_token, xhr_header):
267
267
268 pull_request = pr_util.create_pull_request(approved=True)
268 pull_request = pr_util.create_pull_request(approved=True)
269 pull_request_id = pull_request.pull_request_id
269 pull_request_id = pull_request.pull_request_id
270 author = pull_request.user_id
270 author = pull_request.user_id
271 repo = pull_request.target_repo.repo_id
271 repo = pull_request.target_repo.repo_id
272
272
273 self.app.post(
273 self.app.post(
274 route_path('pullrequest_comment_create',
274 route_path('pullrequest_comment_create',
275 repo_name=pull_request.target_repo.scm_instance().name,
275 repo_name=pull_request.target_repo.scm_instance().name,
276 pull_request_id=pull_request_id),
276 pull_request_id=pull_request_id),
277 params={
277 params={
278 'close_pull_request': '1',
278 'close_pull_request': '1',
279 'text': 'Closing a PR',
279 'text': 'Closing a PR',
280 'csrf_token': csrf_token},
280 'csrf_token': csrf_token},
281 extra_environ=xhr_header,)
281 extra_environ=xhr_header,)
282
282
283 journal = UserLog.query()\
283 journal = UserLog.query()\
284 .filter(UserLog.user_id == author)\
284 .filter(UserLog.user_id == author)\
285 .filter(UserLog.repository_id == repo) \
285 .filter(UserLog.repository_id == repo) \
286 .order_by('user_log_id') \
286 .order_by('user_log_id') \
287 .all()
287 .all()
288 assert journal[-1].action == 'repo.pull_request.close'
288 assert journal[-1].action == 'repo.pull_request.close'
289
289
290 pull_request = PullRequest.get(pull_request_id)
290 pull_request = PullRequest.get(pull_request_id)
291 assert pull_request.is_closed()
291 assert pull_request.is_closed()
292
292
293 status = ChangesetStatusModel().get_status(
293 status = ChangesetStatusModel().get_status(
294 pull_request.source_repo, pull_request=pull_request)
294 pull_request.source_repo, pull_request=pull_request)
295 assert status == ChangesetStatus.STATUS_APPROVED
295 assert status == ChangesetStatus.STATUS_APPROVED
296 comments = ChangesetComment().query() \
296 comments = ChangesetComment().query() \
297 .filter(ChangesetComment.pull_request == pull_request) \
297 .filter(ChangesetComment.pull_request == pull_request) \
298 .order_by(ChangesetComment.comment_id.asc())\
298 .order_by(ChangesetComment.comment_id.asc())\
299 .all()
299 .all()
300 assert comments[-1].text == 'Closing a PR'
300 assert comments[-1].text == 'Closing a PR'
301
301
302 def test_comment_force_close_pull_request_rejected(
302 def test_comment_force_close_pull_request_rejected(
303 self, pr_util, csrf_token, xhr_header):
303 self, pr_util, csrf_token, xhr_header):
304 pull_request = pr_util.create_pull_request()
304 pull_request = pr_util.create_pull_request()
305 pull_request_id = pull_request.pull_request_id
305 pull_request_id = pull_request.pull_request_id
306 PullRequestModel().update_reviewers(
306 PullRequestModel().update_reviewers(
307 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
307 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
308 pull_request.author)
308 pull_request.author)
309 author = pull_request.user_id
309 author = pull_request.user_id
310 repo = pull_request.target_repo.repo_id
310 repo = pull_request.target_repo.repo_id
311
311
312 self.app.post(
312 self.app.post(
313 route_path('pullrequest_comment_create',
313 route_path('pullrequest_comment_create',
314 repo_name=pull_request.target_repo.scm_instance().name,
314 repo_name=pull_request.target_repo.scm_instance().name,
315 pull_request_id=pull_request_id),
315 pull_request_id=pull_request_id),
316 params={
316 params={
317 'close_pull_request': '1',
317 'close_pull_request': '1',
318 'csrf_token': csrf_token},
318 'csrf_token': csrf_token},
319 extra_environ=xhr_header)
319 extra_environ=xhr_header)
320
320
321 pull_request = PullRequest.get(pull_request_id)
321 pull_request = PullRequest.get(pull_request_id)
322
322
323 journal = UserLog.query()\
323 journal = UserLog.query()\
324 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
324 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
325 .order_by('user_log_id') \
325 .order_by('user_log_id') \
326 .all()
326 .all()
327 assert journal[-1].action == 'repo.pull_request.close'
327 assert journal[-1].action == 'repo.pull_request.close'
328
328
329 # check only the latest status, not the review status
329 # check only the latest status, not the review status
330 status = ChangesetStatusModel().get_status(
330 status = ChangesetStatusModel().get_status(
331 pull_request.source_repo, pull_request=pull_request)
331 pull_request.source_repo, pull_request=pull_request)
332 assert status == ChangesetStatus.STATUS_REJECTED
332 assert status == ChangesetStatus.STATUS_REJECTED
333
333
334 def test_comment_and_close_pull_request(
334 def test_comment_and_close_pull_request(
335 self, pr_util, csrf_token, xhr_header):
335 self, pr_util, csrf_token, xhr_header):
336 pull_request = pr_util.create_pull_request()
336 pull_request = pr_util.create_pull_request()
337 pull_request_id = pull_request.pull_request_id
337 pull_request_id = pull_request.pull_request_id
338
338
339 response = self.app.post(
339 response = self.app.post(
340 route_path('pullrequest_comment_create',
340 route_path('pullrequest_comment_create',
341 repo_name=pull_request.target_repo.scm_instance().name,
341 repo_name=pull_request.target_repo.scm_instance().name,
342 pull_request_id=pull_request.pull_request_id),
342 pull_request_id=pull_request.pull_request_id),
343 params={
343 params={
344 'close_pull_request': 'true',
344 'close_pull_request': 'true',
345 'csrf_token': csrf_token},
345 'csrf_token': csrf_token},
346 extra_environ=xhr_header)
346 extra_environ=xhr_header)
347
347
348 assert response.json
348 assert response.json
349
349
350 pull_request = PullRequest.get(pull_request_id)
350 pull_request = PullRequest.get(pull_request_id)
351 assert pull_request.is_closed()
351 assert pull_request.is_closed()
352
352
353 # check only the latest status, not the review status
353 # check only the latest status, not the review status
354 status = ChangesetStatusModel().get_status(
354 status = ChangesetStatusModel().get_status(
355 pull_request.source_repo, pull_request=pull_request)
355 pull_request.source_repo, pull_request=pull_request)
356 assert status == ChangesetStatus.STATUS_REJECTED
356 assert status == ChangesetStatus.STATUS_REJECTED
357
357
358 def test_create_pull_request(self, backend, csrf_token):
358 def test_create_pull_request(self, backend, csrf_token):
359 commits = [
359 commits = [
360 {'message': 'ancestor'},
360 {'message': 'ancestor'},
361 {'message': 'change'},
361 {'message': 'change'},
362 {'message': 'change2'},
362 {'message': 'change2'},
363 ]
363 ]
364 commit_ids = backend.create_master_repo(commits)
364 commit_ids = backend.create_master_repo(commits)
365 target = backend.create_repo(heads=['ancestor'])
365 target = backend.create_repo(heads=['ancestor'])
366 source = backend.create_repo(heads=['change2'])
366 source = backend.create_repo(heads=['change2'])
367
367
368 response = self.app.post(
368 response = self.app.post(
369 route_path('pullrequest_create', repo_name=source.repo_name),
369 route_path('pullrequest_create', repo_name=source.repo_name),
370 [
370 [
371 ('source_repo', source.repo_name),
371 ('source_repo', source.repo_name),
372 ('source_ref', 'branch:default:' + commit_ids['change2']),
372 ('source_ref', 'branch:default:' + commit_ids['change2']),
373 ('target_repo', target.repo_name),
373 ('target_repo', target.repo_name),
374 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
374 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
375 ('common_ancestor', commit_ids['ancestor']),
375 ('common_ancestor', commit_ids['ancestor']),
376 ('pullrequest_title', 'Title'),
376 ('pullrequest_title', 'Title'),
377 ('pullrequest_desc', 'Description'),
377 ('pullrequest_desc', 'Description'),
378 ('description_renderer', 'markdown'),
378 ('description_renderer', 'markdown'),
379 ('__start__', 'review_members:sequence'),
379 ('__start__', 'review_members:sequence'),
380 ('__start__', 'reviewer:mapping'),
380 ('__start__', 'reviewer:mapping'),
381 ('user_id', '1'),
381 ('user_id', '1'),
382 ('__start__', 'reasons:sequence'),
382 ('__start__', 'reasons:sequence'),
383 ('reason', 'Some reason'),
383 ('reason', 'Some reason'),
384 ('__end__', 'reasons:sequence'),
384 ('__end__', 'reasons:sequence'),
385 ('__start__', 'rules:sequence'),
385 ('__start__', 'rules:sequence'),
386 ('__end__', 'rules:sequence'),
386 ('__end__', 'rules:sequence'),
387 ('mandatory', 'False'),
387 ('mandatory', 'False'),
388 ('__end__', 'reviewer:mapping'),
388 ('__end__', 'reviewer:mapping'),
389 ('__end__', 'review_members:sequence'),
389 ('__end__', 'review_members:sequence'),
390 ('__start__', 'revisions:sequence'),
390 ('__start__', 'revisions:sequence'),
391 ('revisions', commit_ids['change']),
391 ('revisions', commit_ids['change']),
392 ('revisions', commit_ids['change2']),
392 ('revisions', commit_ids['change2']),
393 ('__end__', 'revisions:sequence'),
393 ('__end__', 'revisions:sequence'),
394 ('user', ''),
394 ('user', ''),
395 ('csrf_token', csrf_token),
395 ('csrf_token', csrf_token),
396 ],
396 ],
397 status=302)
397 status=302)
398
398
399 location = response.headers['Location']
399 location = response.headers['Location']
400 pull_request_id = location.rsplit('/', 1)[1]
400 pull_request_id = location.rsplit('/', 1)[1]
401 assert pull_request_id != 'new'
401 assert pull_request_id != 'new'
402 pull_request = PullRequest.get(int(pull_request_id))
402 pull_request = PullRequest.get(int(pull_request_id))
403
403
404 # check that we have now both revisions
404 # check that we have now both revisions
405 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
405 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
406 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
406 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
407 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
407 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
408 assert pull_request.target_ref == expected_target_ref
408 assert pull_request.target_ref == expected_target_ref
409
409
410 def test_reviewer_notifications(self, backend, csrf_token):
410 def test_reviewer_notifications(self, backend, csrf_token):
411 # We have to use the app.post for this test so it will create the
411 # We have to use the app.post for this test so it will create the
412 # notifications properly with the new PR
412 # notifications properly with the new PR
413 commits = [
413 commits = [
414 {'message': 'ancestor',
414 {'message': 'ancestor',
415 'added': [FileNode('file_A', content='content_of_ancestor')]},
415 'added': [FileNode('file_A', content='content_of_ancestor')]},
416 {'message': 'change',
416 {'message': 'change',
417 'added': [FileNode('file_a', content='content_of_change')]},
417 'added': [FileNode('file_a', content='content_of_change')]},
418 {'message': 'change-child'},
418 {'message': 'change-child'},
419 {'message': 'ancestor-child', 'parents': ['ancestor'],
419 {'message': 'ancestor-child', 'parents': ['ancestor'],
420 'added': [
420 'added': [
421 FileNode('file_B', content='content_of_ancestor_child')]},
421 FileNode('file_B', content='content_of_ancestor_child')]},
422 {'message': 'ancestor-child-2'},
422 {'message': 'ancestor-child-2'},
423 ]
423 ]
424 commit_ids = backend.create_master_repo(commits)
424 commit_ids = backend.create_master_repo(commits)
425 target = backend.create_repo(heads=['ancestor-child'])
425 target = backend.create_repo(heads=['ancestor-child'])
426 source = backend.create_repo(heads=['change'])
426 source = backend.create_repo(heads=['change'])
427
427
428 response = self.app.post(
428 response = self.app.post(
429 route_path('pullrequest_create', repo_name=source.repo_name),
429 route_path('pullrequest_create', repo_name=source.repo_name),
430 [
430 [
431 ('source_repo', source.repo_name),
431 ('source_repo', source.repo_name),
432 ('source_ref', 'branch:default:' + commit_ids['change']),
432 ('source_ref', 'branch:default:' + commit_ids['change']),
433 ('target_repo', target.repo_name),
433 ('target_repo', target.repo_name),
434 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
434 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
435 ('common_ancestor', commit_ids['ancestor']),
435 ('common_ancestor', commit_ids['ancestor']),
436 ('pullrequest_title', 'Title'),
436 ('pullrequest_title', 'Title'),
437 ('pullrequest_desc', 'Description'),
437 ('pullrequest_desc', 'Description'),
438 ('description_renderer', 'markdown'),
438 ('description_renderer', 'markdown'),
439 ('__start__', 'review_members:sequence'),
439 ('__start__', 'review_members:sequence'),
440 ('__start__', 'reviewer:mapping'),
440 ('__start__', 'reviewer:mapping'),
441 ('user_id', '2'),
441 ('user_id', '2'),
442 ('__start__', 'reasons:sequence'),
442 ('__start__', 'reasons:sequence'),
443 ('reason', 'Some reason'),
443 ('reason', 'Some reason'),
444 ('__end__', 'reasons:sequence'),
444 ('__end__', 'reasons:sequence'),
445 ('__start__', 'rules:sequence'),
445 ('__start__', 'rules:sequence'),
446 ('__end__', 'rules:sequence'),
446 ('__end__', 'rules:sequence'),
447 ('mandatory', 'False'),
447 ('mandatory', 'False'),
448 ('__end__', 'reviewer:mapping'),
448 ('__end__', 'reviewer:mapping'),
449 ('__end__', 'review_members:sequence'),
449 ('__end__', 'review_members:sequence'),
450 ('__start__', 'revisions:sequence'),
450 ('__start__', 'revisions:sequence'),
451 ('revisions', commit_ids['change']),
451 ('revisions', commit_ids['change']),
452 ('__end__', 'revisions:sequence'),
452 ('__end__', 'revisions:sequence'),
453 ('user', ''),
453 ('user', ''),
454 ('csrf_token', csrf_token),
454 ('csrf_token', csrf_token),
455 ],
455 ],
456 status=302)
456 status=302)
457
457
458 location = response.headers['Location']
458 location = response.headers['Location']
459
459
460 pull_request_id = location.rsplit('/', 1)[1]
460 pull_request_id = location.rsplit('/', 1)[1]
461 assert pull_request_id != 'new'
461 assert pull_request_id != 'new'
462 pull_request = PullRequest.get(int(pull_request_id))
462 pull_request = PullRequest.get(int(pull_request_id))
463
463
464 # Check that a notification was made
464 # Check that a notification was made
465 notifications = Notification.query()\
465 notifications = Notification.query()\
466 .filter(Notification.created_by == pull_request.author.user_id,
466 .filter(Notification.created_by == pull_request.author.user_id,
467 Notification.type_ == Notification.TYPE_PULL_REQUEST,
467 Notification.type_ == Notification.TYPE_PULL_REQUEST,
468 Notification.subject.contains(
468 Notification.subject.contains(
469 "wants you to review pull request #%s" % pull_request_id))
469 "wants you to review pull request #%s" % pull_request_id))
470 assert len(notifications.all()) == 1
470 assert len(notifications.all()) == 1
471
471
472 # Change reviewers and check that a notification was made
472 # Change reviewers and check that a notification was made
473 PullRequestModel().update_reviewers(
473 PullRequestModel().update_reviewers(
474 pull_request.pull_request_id, [(1, [], False, [])],
474 pull_request.pull_request_id, [(1, [], False, [])],
475 pull_request.author)
475 pull_request.author)
476 assert len(notifications.all()) == 2
476 assert len(notifications.all()) == 2
477
477
478 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
478 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
479 csrf_token):
479 csrf_token):
480 commits = [
480 commits = [
481 {'message': 'ancestor',
481 {'message': 'ancestor',
482 'added': [FileNode('file_A', content='content_of_ancestor')]},
482 'added': [FileNode('file_A', content='content_of_ancestor')]},
483 {'message': 'change',
483 {'message': 'change',
484 'added': [FileNode('file_a', content='content_of_change')]},
484 'added': [FileNode('file_a', content='content_of_change')]},
485 {'message': 'change-child'},
485 {'message': 'change-child'},
486 {'message': 'ancestor-child', 'parents': ['ancestor'],
486 {'message': 'ancestor-child', 'parents': ['ancestor'],
487 'added': [
487 'added': [
488 FileNode('file_B', content='content_of_ancestor_child')]},
488 FileNode('file_B', content='content_of_ancestor_child')]},
489 {'message': 'ancestor-child-2'},
489 {'message': 'ancestor-child-2'},
490 ]
490 ]
491 commit_ids = backend.create_master_repo(commits)
491 commit_ids = backend.create_master_repo(commits)
492 target = backend.create_repo(heads=['ancestor-child'])
492 target = backend.create_repo(heads=['ancestor-child'])
493 source = backend.create_repo(heads=['change'])
493 source = backend.create_repo(heads=['change'])
494
494
495 response = self.app.post(
495 response = self.app.post(
496 route_path('pullrequest_create', repo_name=source.repo_name),
496 route_path('pullrequest_create', repo_name=source.repo_name),
497 [
497 [
498 ('source_repo', source.repo_name),
498 ('source_repo', source.repo_name),
499 ('source_ref', 'branch:default:' + commit_ids['change']),
499 ('source_ref', 'branch:default:' + commit_ids['change']),
500 ('target_repo', target.repo_name),
500 ('target_repo', target.repo_name),
501 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
501 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
502 ('common_ancestor', commit_ids['ancestor']),
502 ('common_ancestor', commit_ids['ancestor']),
503 ('pullrequest_title', 'Title'),
503 ('pullrequest_title', 'Title'),
504 ('pullrequest_desc', 'Description'),
504 ('pullrequest_desc', 'Description'),
505 ('description_renderer', 'markdown'),
505 ('description_renderer', 'markdown'),
506 ('__start__', 'review_members:sequence'),
506 ('__start__', 'review_members:sequence'),
507 ('__start__', 'reviewer:mapping'),
507 ('__start__', 'reviewer:mapping'),
508 ('user_id', '1'),
508 ('user_id', '1'),
509 ('__start__', 'reasons:sequence'),
509 ('__start__', 'reasons:sequence'),
510 ('reason', 'Some reason'),
510 ('reason', 'Some reason'),
511 ('__end__', 'reasons:sequence'),
511 ('__end__', 'reasons:sequence'),
512 ('__start__', 'rules:sequence'),
512 ('__start__', 'rules:sequence'),
513 ('__end__', 'rules:sequence'),
513 ('__end__', 'rules:sequence'),
514 ('mandatory', 'False'),
514 ('mandatory', 'False'),
515 ('__end__', 'reviewer:mapping'),
515 ('__end__', 'reviewer:mapping'),
516 ('__end__', 'review_members:sequence'),
516 ('__end__', 'review_members:sequence'),
517 ('__start__', 'revisions:sequence'),
517 ('__start__', 'revisions:sequence'),
518 ('revisions', commit_ids['change']),
518 ('revisions', commit_ids['change']),
519 ('__end__', 'revisions:sequence'),
519 ('__end__', 'revisions:sequence'),
520 ('user', ''),
520 ('user', ''),
521 ('csrf_token', csrf_token),
521 ('csrf_token', csrf_token),
522 ],
522 ],
523 status=302)
523 status=302)
524
524
525 location = response.headers['Location']
525 location = response.headers['Location']
526
526
527 pull_request_id = location.rsplit('/', 1)[1]
527 pull_request_id = location.rsplit('/', 1)[1]
528 assert pull_request_id != 'new'
528 assert pull_request_id != 'new'
529 pull_request = PullRequest.get(int(pull_request_id))
529 pull_request = PullRequest.get(int(pull_request_id))
530
530
531 # target_ref has to point to the ancestor's commit_id in order to
531 # target_ref has to point to the ancestor's commit_id in order to
532 # show the correct diff
532 # show the correct diff
533 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
533 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
534 assert pull_request.target_ref == expected_target_ref
534 assert pull_request.target_ref == expected_target_ref
535
535
536 # Check generated diff contents
536 # Check generated diff contents
537 response = response.follow()
537 response = response.follow()
538 assert 'content_of_ancestor' not in response.body
538 assert 'content_of_ancestor' not in response.body
539 assert 'content_of_ancestor-child' not in response.body
539 assert 'content_of_ancestor-child' not in response.body
540 assert 'content_of_change' in response.body
540 assert 'content_of_change' in response.body
541
541
542 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
542 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
543 # Clear any previous calls to rcextensions
543 # Clear any previous calls to rcextensions
544 rhodecode.EXTENSIONS.calls.clear()
544 rhodecode.EXTENSIONS.calls.clear()
545
545
546 pull_request = pr_util.create_pull_request(
546 pull_request = pr_util.create_pull_request(
547 approved=True, mergeable=True)
547 approved=True, mergeable=True)
548 pull_request_id = pull_request.pull_request_id
548 pull_request_id = pull_request.pull_request_id
549 repo_name = pull_request.target_repo.scm_instance().name,
549 repo_name = pull_request.target_repo.scm_instance().name,
550
550
551 response = self.app.post(
551 response = self.app.post(
552 route_path('pullrequest_merge',
552 route_path('pullrequest_merge',
553 repo_name=str(repo_name[0]),
553 repo_name=str(repo_name[0]),
554 pull_request_id=pull_request_id),
554 pull_request_id=pull_request_id),
555 params={'csrf_token': csrf_token}).follow()
555 params={'csrf_token': csrf_token}).follow()
556
556
557 pull_request = PullRequest.get(pull_request_id)
557 pull_request = PullRequest.get(pull_request_id)
558
558
559 assert response.status_int == 200
559 assert response.status_int == 200
560 assert pull_request.is_closed()
560 assert pull_request.is_closed()
561 assert_pull_request_status(
561 assert_pull_request_status(
562 pull_request, ChangesetStatus.STATUS_APPROVED)
562 pull_request, ChangesetStatus.STATUS_APPROVED)
563
563
564 # Check the relevant log entries were added
564 # Check the relevant log entries were added
565 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
565 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
566 actions = [log.action for log in user_logs]
566 actions = [log.action for log in user_logs]
567 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
567 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
568 expected_actions = [
568 expected_actions = [
569 u'repo.pull_request.close',
569 u'repo.pull_request.close',
570 u'repo.pull_request.merge',
570 u'repo.pull_request.merge',
571 u'repo.pull_request.comment.create'
571 u'repo.pull_request.comment.create'
572 ]
572 ]
573 assert actions == expected_actions
573 assert actions == expected_actions
574
574
575 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
575 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
576 actions = [log for log in user_logs]
576 actions = [log for log in user_logs]
577 assert actions[-1].action == 'user.push'
577 assert actions[-1].action == 'user.push'
578 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
578 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
579
579
580 # Check post_push rcextension was really executed
580 # Check post_push rcextension was really executed
581 push_calls = rhodecode.EXTENSIONS.calls['post_push']
581 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
582 assert len(push_calls) == 1
582 assert len(push_calls) == 1
583 unused_last_call_args, last_call_kwargs = push_calls[0]
583 unused_last_call_args, last_call_kwargs = push_calls[0]
584 assert last_call_kwargs['action'] == 'push'
584 assert last_call_kwargs['action'] == 'push'
585 assert last_call_kwargs['pushed_revs'] == pr_commit_ids
585 assert last_call_kwargs['commit_ids'] == pr_commit_ids
586
586
587 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
587 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
588 pull_request = pr_util.create_pull_request(mergeable=False)
588 pull_request = pr_util.create_pull_request(mergeable=False)
589 pull_request_id = pull_request.pull_request_id
589 pull_request_id = pull_request.pull_request_id
590 pull_request = PullRequest.get(pull_request_id)
590 pull_request = PullRequest.get(pull_request_id)
591
591
592 response = self.app.post(
592 response = self.app.post(
593 route_path('pullrequest_merge',
593 route_path('pullrequest_merge',
594 repo_name=pull_request.target_repo.scm_instance().name,
594 repo_name=pull_request.target_repo.scm_instance().name,
595 pull_request_id=pull_request.pull_request_id),
595 pull_request_id=pull_request.pull_request_id),
596 params={'csrf_token': csrf_token}).follow()
596 params={'csrf_token': csrf_token}).follow()
597
597
598 assert response.status_int == 200
598 assert response.status_int == 200
599 response.mustcontain(
599 response.mustcontain(
600 'Merge is not currently possible because of below failed checks.')
600 'Merge is not currently possible because of below failed checks.')
601 response.mustcontain('Server-side pull request merging is disabled.')
601 response.mustcontain('Server-side pull request merging is disabled.')
602
602
603 @pytest.mark.skip_backends('svn')
603 @pytest.mark.skip_backends('svn')
604 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
604 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
605 pull_request = pr_util.create_pull_request(mergeable=True)
605 pull_request = pr_util.create_pull_request(mergeable=True)
606 pull_request_id = pull_request.pull_request_id
606 pull_request_id = pull_request.pull_request_id
607 repo_name = pull_request.target_repo.scm_instance().name
607 repo_name = pull_request.target_repo.scm_instance().name
608
608
609 response = self.app.post(
609 response = self.app.post(
610 route_path('pullrequest_merge',
610 route_path('pullrequest_merge',
611 repo_name=repo_name,
611 repo_name=repo_name,
612 pull_request_id=pull_request_id),
612 pull_request_id=pull_request_id),
613 params={'csrf_token': csrf_token}).follow()
613 params={'csrf_token': csrf_token}).follow()
614
614
615 assert response.status_int == 200
615 assert response.status_int == 200
616
616
617 response.mustcontain(
617 response.mustcontain(
618 'Merge is not currently possible because of below failed checks.')
618 'Merge is not currently possible because of below failed checks.')
619 response.mustcontain('Pull request reviewer approval is pending.')
619 response.mustcontain('Pull request reviewer approval is pending.')
620
620
621 def test_merge_pull_request_renders_failure_reason(
621 def test_merge_pull_request_renders_failure_reason(
622 self, user_regular, csrf_token, pr_util):
622 self, user_regular, csrf_token, pr_util):
623 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
623 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
624 pull_request_id = pull_request.pull_request_id
624 pull_request_id = pull_request.pull_request_id
625 repo_name = pull_request.target_repo.scm_instance().name
625 repo_name = pull_request.target_repo.scm_instance().name
626
626
627 model_patcher = mock.patch.multiple(
627 model_patcher = mock.patch.multiple(
628 PullRequestModel,
628 PullRequestModel,
629 merge_repo=mock.Mock(return_value=MergeResponse(
629 merge_repo=mock.Mock(return_value=MergeResponse(
630 True, False, 'STUB_COMMIT_ID', MergeFailureReason.PUSH_FAILED)),
630 True, False, 'STUB_COMMIT_ID', MergeFailureReason.PUSH_FAILED)),
631 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
631 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
632
632
633 with model_patcher:
633 with model_patcher:
634 response = self.app.post(
634 response = self.app.post(
635 route_path('pullrequest_merge',
635 route_path('pullrequest_merge',
636 repo_name=repo_name,
636 repo_name=repo_name,
637 pull_request_id=pull_request_id),
637 pull_request_id=pull_request_id),
638 params={'csrf_token': csrf_token}, status=302)
638 params={'csrf_token': csrf_token}, status=302)
639
639
640 assert_session_flash(response, PullRequestModel.MERGE_STATUS_MESSAGES[
640 assert_session_flash(response, PullRequestModel.MERGE_STATUS_MESSAGES[
641 MergeFailureReason.PUSH_FAILED])
641 MergeFailureReason.PUSH_FAILED])
642
642
643 def test_update_source_revision(self, backend, csrf_token):
643 def test_update_source_revision(self, backend, csrf_token):
644 commits = [
644 commits = [
645 {'message': 'ancestor'},
645 {'message': 'ancestor'},
646 {'message': 'change'},
646 {'message': 'change'},
647 {'message': 'change-2'},
647 {'message': 'change-2'},
648 ]
648 ]
649 commit_ids = backend.create_master_repo(commits)
649 commit_ids = backend.create_master_repo(commits)
650 target = backend.create_repo(heads=['ancestor'])
650 target = backend.create_repo(heads=['ancestor'])
651 source = backend.create_repo(heads=['change'])
651 source = backend.create_repo(heads=['change'])
652
652
653 # create pr from a in source to A in target
653 # create pr from a in source to A in target
654 pull_request = PullRequest()
654 pull_request = PullRequest()
655 pull_request.source_repo = source
655 pull_request.source_repo = source
656 # TODO: johbo: Make sure that we write the source ref this way!
656 # TODO: johbo: Make sure that we write the source ref this way!
657 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
657 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
658 branch=backend.default_branch_name, commit_id=commit_ids['change'])
658 branch=backend.default_branch_name, commit_id=commit_ids['change'])
659 pull_request.target_repo = target
659 pull_request.target_repo = target
660
660
661 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
661 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
662 branch=backend.default_branch_name,
662 branch=backend.default_branch_name,
663 commit_id=commit_ids['ancestor'])
663 commit_id=commit_ids['ancestor'])
664 pull_request.revisions = [commit_ids['change']]
664 pull_request.revisions = [commit_ids['change']]
665 pull_request.title = u"Test"
665 pull_request.title = u"Test"
666 pull_request.description = u"Description"
666 pull_request.description = u"Description"
667 pull_request.author = UserModel().get_by_username(
667 pull_request.author = UserModel().get_by_username(
668 TEST_USER_ADMIN_LOGIN)
668 TEST_USER_ADMIN_LOGIN)
669 Session().add(pull_request)
669 Session().add(pull_request)
670 Session().commit()
670 Session().commit()
671 pull_request_id = pull_request.pull_request_id
671 pull_request_id = pull_request.pull_request_id
672
672
673 # source has ancestor - change - change-2
673 # source has ancestor - change - change-2
674 backend.pull_heads(source, heads=['change-2'])
674 backend.pull_heads(source, heads=['change-2'])
675
675
676 # update PR
676 # update PR
677 self.app.post(
677 self.app.post(
678 route_path('pullrequest_update',
678 route_path('pullrequest_update',
679 repo_name=target.repo_name,
679 repo_name=target.repo_name,
680 pull_request_id=pull_request_id),
680 pull_request_id=pull_request_id),
681 params={'update_commits': 'true',
681 params={'update_commits': 'true',
682 'csrf_token': csrf_token})
682 'csrf_token': csrf_token})
683
683
684 # check that we have now both revisions
684 # check that we have now both revisions
685 pull_request = PullRequest.get(pull_request_id)
685 pull_request = PullRequest.get(pull_request_id)
686 assert pull_request.revisions == [
686 assert pull_request.revisions == [
687 commit_ids['change-2'], commit_ids['change']]
687 commit_ids['change-2'], commit_ids['change']]
688
688
689 # TODO: johbo: this should be a test on its own
689 # TODO: johbo: this should be a test on its own
690 response = self.app.get(route_path(
690 response = self.app.get(route_path(
691 'pullrequest_new',
691 'pullrequest_new',
692 repo_name=target.repo_name))
692 repo_name=target.repo_name))
693 assert response.status_int == 200
693 assert response.status_int == 200
694 assert 'Pull request updated to' in response.body
694 assert 'Pull request updated to' in response.body
695 assert 'with 1 added, 0 removed commits.' in response.body
695 assert 'with 1 added, 0 removed commits.' in response.body
696
696
697 def test_update_target_revision(self, backend, csrf_token):
697 def test_update_target_revision(self, backend, csrf_token):
698 commits = [
698 commits = [
699 {'message': 'ancestor'},
699 {'message': 'ancestor'},
700 {'message': 'change'},
700 {'message': 'change'},
701 {'message': 'ancestor-new', 'parents': ['ancestor']},
701 {'message': 'ancestor-new', 'parents': ['ancestor']},
702 {'message': 'change-rebased'},
702 {'message': 'change-rebased'},
703 ]
703 ]
704 commit_ids = backend.create_master_repo(commits)
704 commit_ids = backend.create_master_repo(commits)
705 target = backend.create_repo(heads=['ancestor'])
705 target = backend.create_repo(heads=['ancestor'])
706 source = backend.create_repo(heads=['change'])
706 source = backend.create_repo(heads=['change'])
707
707
708 # create pr from a in source to A in target
708 # create pr from a in source to A in target
709 pull_request = PullRequest()
709 pull_request = PullRequest()
710 pull_request.source_repo = source
710 pull_request.source_repo = source
711 # TODO: johbo: Make sure that we write the source ref this way!
711 # TODO: johbo: Make sure that we write the source ref this way!
712 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
712 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
713 branch=backend.default_branch_name, commit_id=commit_ids['change'])
713 branch=backend.default_branch_name, commit_id=commit_ids['change'])
714 pull_request.target_repo = target
714 pull_request.target_repo = target
715 # TODO: johbo: Target ref should be branch based, since tip can jump
715 # TODO: johbo: Target ref should be branch based, since tip can jump
716 # from branch to branch
716 # from branch to branch
717 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
717 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
718 branch=backend.default_branch_name,
718 branch=backend.default_branch_name,
719 commit_id=commit_ids['ancestor'])
719 commit_id=commit_ids['ancestor'])
720 pull_request.revisions = [commit_ids['change']]
720 pull_request.revisions = [commit_ids['change']]
721 pull_request.title = u"Test"
721 pull_request.title = u"Test"
722 pull_request.description = u"Description"
722 pull_request.description = u"Description"
723 pull_request.author = UserModel().get_by_username(
723 pull_request.author = UserModel().get_by_username(
724 TEST_USER_ADMIN_LOGIN)
724 TEST_USER_ADMIN_LOGIN)
725 Session().add(pull_request)
725 Session().add(pull_request)
726 Session().commit()
726 Session().commit()
727 pull_request_id = pull_request.pull_request_id
727 pull_request_id = pull_request.pull_request_id
728
728
729 # target has ancestor - ancestor-new
729 # target has ancestor - ancestor-new
730 # source has ancestor - ancestor-new - change-rebased
730 # source has ancestor - ancestor-new - change-rebased
731 backend.pull_heads(target, heads=['ancestor-new'])
731 backend.pull_heads(target, heads=['ancestor-new'])
732 backend.pull_heads(source, heads=['change-rebased'])
732 backend.pull_heads(source, heads=['change-rebased'])
733
733
734 # update PR
734 # update PR
735 self.app.post(
735 self.app.post(
736 route_path('pullrequest_update',
736 route_path('pullrequest_update',
737 repo_name=target.repo_name,
737 repo_name=target.repo_name,
738 pull_request_id=pull_request_id),
738 pull_request_id=pull_request_id),
739 params={'update_commits': 'true',
739 params={'update_commits': 'true',
740 'csrf_token': csrf_token},
740 'csrf_token': csrf_token},
741 status=200)
741 status=200)
742
742
743 # check that we have now both revisions
743 # check that we have now both revisions
744 pull_request = PullRequest.get(pull_request_id)
744 pull_request = PullRequest.get(pull_request_id)
745 assert pull_request.revisions == [commit_ids['change-rebased']]
745 assert pull_request.revisions == [commit_ids['change-rebased']]
746 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
746 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
747 branch=backend.default_branch_name,
747 branch=backend.default_branch_name,
748 commit_id=commit_ids['ancestor-new'])
748 commit_id=commit_ids['ancestor-new'])
749
749
750 # TODO: johbo: This should be a test on its own
750 # TODO: johbo: This should be a test on its own
751 response = self.app.get(route_path(
751 response = self.app.get(route_path(
752 'pullrequest_new',
752 'pullrequest_new',
753 repo_name=target.repo_name))
753 repo_name=target.repo_name))
754 assert response.status_int == 200
754 assert response.status_int == 200
755 assert 'Pull request updated to' in response.body
755 assert 'Pull request updated to' in response.body
756 assert 'with 1 added, 1 removed commits.' in response.body
756 assert 'with 1 added, 1 removed commits.' in response.body
757
757
758 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
758 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
759 backend = backend_git
759 backend = backend_git
760 commits = [
760 commits = [
761 {'message': 'master-commit-1'},
761 {'message': 'master-commit-1'},
762 {'message': 'master-commit-2-change-1'},
762 {'message': 'master-commit-2-change-1'},
763 {'message': 'master-commit-3-change-2'},
763 {'message': 'master-commit-3-change-2'},
764
764
765 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
765 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
766 {'message': 'feat-commit-2'},
766 {'message': 'feat-commit-2'},
767 ]
767 ]
768 commit_ids = backend.create_master_repo(commits)
768 commit_ids = backend.create_master_repo(commits)
769 target = backend.create_repo(heads=['master-commit-3-change-2'])
769 target = backend.create_repo(heads=['master-commit-3-change-2'])
770 source = backend.create_repo(heads=['feat-commit-2'])
770 source = backend.create_repo(heads=['feat-commit-2'])
771
771
772 # create pr from a in source to A in target
772 # create pr from a in source to A in target
773 pull_request = PullRequest()
773 pull_request = PullRequest()
774 pull_request.source_repo = source
774 pull_request.source_repo = source
775 # TODO: johbo: Make sure that we write the source ref this way!
775 # TODO: johbo: Make sure that we write the source ref this way!
776 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
776 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
777 branch=backend.default_branch_name,
777 branch=backend.default_branch_name,
778 commit_id=commit_ids['master-commit-3-change-2'])
778 commit_id=commit_ids['master-commit-3-change-2'])
779
779
780 pull_request.target_repo = target
780 pull_request.target_repo = target
781 # TODO: johbo: Target ref should be branch based, since tip can jump
781 # TODO: johbo: Target ref should be branch based, since tip can jump
782 # from branch to branch
782 # from branch to branch
783 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
783 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
784 branch=backend.default_branch_name,
784 branch=backend.default_branch_name,
785 commit_id=commit_ids['feat-commit-2'])
785 commit_id=commit_ids['feat-commit-2'])
786
786
787 pull_request.revisions = [
787 pull_request.revisions = [
788 commit_ids['feat-commit-1'],
788 commit_ids['feat-commit-1'],
789 commit_ids['feat-commit-2']
789 commit_ids['feat-commit-2']
790 ]
790 ]
791 pull_request.title = u"Test"
791 pull_request.title = u"Test"
792 pull_request.description = u"Description"
792 pull_request.description = u"Description"
793 pull_request.author = UserModel().get_by_username(
793 pull_request.author = UserModel().get_by_username(
794 TEST_USER_ADMIN_LOGIN)
794 TEST_USER_ADMIN_LOGIN)
795 Session().add(pull_request)
795 Session().add(pull_request)
796 Session().commit()
796 Session().commit()
797 pull_request_id = pull_request.pull_request_id
797 pull_request_id = pull_request.pull_request_id
798
798
799 # PR is created, now we simulate a force-push into target,
799 # PR is created, now we simulate a force-push into target,
800 # that drops a 2 last commits
800 # that drops a 2 last commits
801 vcsrepo = target.scm_instance()
801 vcsrepo = target.scm_instance()
802 vcsrepo.config.clear_section('hooks')
802 vcsrepo.config.clear_section('hooks')
803 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
803 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
804
804
805 # update PR
805 # update PR
806 self.app.post(
806 self.app.post(
807 route_path('pullrequest_update',
807 route_path('pullrequest_update',
808 repo_name=target.repo_name,
808 repo_name=target.repo_name,
809 pull_request_id=pull_request_id),
809 pull_request_id=pull_request_id),
810 params={'update_commits': 'true',
810 params={'update_commits': 'true',
811 'csrf_token': csrf_token},
811 'csrf_token': csrf_token},
812 status=200)
812 status=200)
813
813
814 response = self.app.get(route_path(
814 response = self.app.get(route_path(
815 'pullrequest_new',
815 'pullrequest_new',
816 repo_name=target.repo_name))
816 repo_name=target.repo_name))
817 assert response.status_int == 200
817 assert response.status_int == 200
818 response.mustcontain('Pull request updated to')
818 response.mustcontain('Pull request updated to')
819 response.mustcontain('with 0 added, 0 removed commits.')
819 response.mustcontain('with 0 added, 0 removed commits.')
820
820
821 def test_update_of_ancestor_reference(self, backend, csrf_token):
821 def test_update_of_ancestor_reference(self, backend, csrf_token):
822 commits = [
822 commits = [
823 {'message': 'ancestor'},
823 {'message': 'ancestor'},
824 {'message': 'change'},
824 {'message': 'change'},
825 {'message': 'change-2'},
825 {'message': 'change-2'},
826 {'message': 'ancestor-new', 'parents': ['ancestor']},
826 {'message': 'ancestor-new', 'parents': ['ancestor']},
827 {'message': 'change-rebased'},
827 {'message': 'change-rebased'},
828 ]
828 ]
829 commit_ids = backend.create_master_repo(commits)
829 commit_ids = backend.create_master_repo(commits)
830 target = backend.create_repo(heads=['ancestor'])
830 target = backend.create_repo(heads=['ancestor'])
831 source = backend.create_repo(heads=['change'])
831 source = backend.create_repo(heads=['change'])
832
832
833 # create pr from a in source to A in target
833 # create pr from a in source to A in target
834 pull_request = PullRequest()
834 pull_request = PullRequest()
835 pull_request.source_repo = source
835 pull_request.source_repo = source
836 # TODO: johbo: Make sure that we write the source ref this way!
836 # TODO: johbo: Make sure that we write the source ref this way!
837 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
837 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
838 branch=backend.default_branch_name,
838 branch=backend.default_branch_name,
839 commit_id=commit_ids['change'])
839 commit_id=commit_ids['change'])
840 pull_request.target_repo = target
840 pull_request.target_repo = target
841 # TODO: johbo: Target ref should be branch based, since tip can jump
841 # TODO: johbo: Target ref should be branch based, since tip can jump
842 # from branch to branch
842 # from branch to branch
843 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
843 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
844 branch=backend.default_branch_name,
844 branch=backend.default_branch_name,
845 commit_id=commit_ids['ancestor'])
845 commit_id=commit_ids['ancestor'])
846 pull_request.revisions = [commit_ids['change']]
846 pull_request.revisions = [commit_ids['change']]
847 pull_request.title = u"Test"
847 pull_request.title = u"Test"
848 pull_request.description = u"Description"
848 pull_request.description = u"Description"
849 pull_request.author = UserModel().get_by_username(
849 pull_request.author = UserModel().get_by_username(
850 TEST_USER_ADMIN_LOGIN)
850 TEST_USER_ADMIN_LOGIN)
851 Session().add(pull_request)
851 Session().add(pull_request)
852 Session().commit()
852 Session().commit()
853 pull_request_id = pull_request.pull_request_id
853 pull_request_id = pull_request.pull_request_id
854
854
855 # target has ancestor - ancestor-new
855 # target has ancestor - ancestor-new
856 # source has ancestor - ancestor-new - change-rebased
856 # source has ancestor - ancestor-new - change-rebased
857 backend.pull_heads(target, heads=['ancestor-new'])
857 backend.pull_heads(target, heads=['ancestor-new'])
858 backend.pull_heads(source, heads=['change-rebased'])
858 backend.pull_heads(source, heads=['change-rebased'])
859
859
860 # update PR
860 # update PR
861 self.app.post(
861 self.app.post(
862 route_path('pullrequest_update',
862 route_path('pullrequest_update',
863 repo_name=target.repo_name,
863 repo_name=target.repo_name,
864 pull_request_id=pull_request_id),
864 pull_request_id=pull_request_id),
865 params={'update_commits': 'true',
865 params={'update_commits': 'true',
866 'csrf_token': csrf_token},
866 'csrf_token': csrf_token},
867 status=200)
867 status=200)
868
868
869 # Expect the target reference to be updated correctly
869 # Expect the target reference to be updated correctly
870 pull_request = PullRequest.get(pull_request_id)
870 pull_request = PullRequest.get(pull_request_id)
871 assert pull_request.revisions == [commit_ids['change-rebased']]
871 assert pull_request.revisions == [commit_ids['change-rebased']]
872 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
872 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
873 branch=backend.default_branch_name,
873 branch=backend.default_branch_name,
874 commit_id=commit_ids['ancestor-new'])
874 commit_id=commit_ids['ancestor-new'])
875 assert pull_request.target_ref == expected_target_ref
875 assert pull_request.target_ref == expected_target_ref
876
876
877 def test_remove_pull_request_branch(self, backend_git, csrf_token):
877 def test_remove_pull_request_branch(self, backend_git, csrf_token):
878 branch_name = 'development'
878 branch_name = 'development'
879 commits = [
879 commits = [
880 {'message': 'initial-commit'},
880 {'message': 'initial-commit'},
881 {'message': 'old-feature'},
881 {'message': 'old-feature'},
882 {'message': 'new-feature', 'branch': branch_name},
882 {'message': 'new-feature', 'branch': branch_name},
883 ]
883 ]
884 repo = backend_git.create_repo(commits)
884 repo = backend_git.create_repo(commits)
885 commit_ids = backend_git.commit_ids
885 commit_ids = backend_git.commit_ids
886
886
887 pull_request = PullRequest()
887 pull_request = PullRequest()
888 pull_request.source_repo = repo
888 pull_request.source_repo = repo
889 pull_request.target_repo = repo
889 pull_request.target_repo = repo
890 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
890 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
891 branch=branch_name, commit_id=commit_ids['new-feature'])
891 branch=branch_name, commit_id=commit_ids['new-feature'])
892 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
892 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
893 branch=backend_git.default_branch_name,
893 branch=backend_git.default_branch_name,
894 commit_id=commit_ids['old-feature'])
894 commit_id=commit_ids['old-feature'])
895 pull_request.revisions = [commit_ids['new-feature']]
895 pull_request.revisions = [commit_ids['new-feature']]
896 pull_request.title = u"Test"
896 pull_request.title = u"Test"
897 pull_request.description = u"Description"
897 pull_request.description = u"Description"
898 pull_request.author = UserModel().get_by_username(
898 pull_request.author = UserModel().get_by_username(
899 TEST_USER_ADMIN_LOGIN)
899 TEST_USER_ADMIN_LOGIN)
900 Session().add(pull_request)
900 Session().add(pull_request)
901 Session().commit()
901 Session().commit()
902
902
903 vcs = repo.scm_instance()
903 vcs = repo.scm_instance()
904 vcs.remove_ref('refs/heads/{}'.format(branch_name))
904 vcs.remove_ref('refs/heads/{}'.format(branch_name))
905
905
906 response = self.app.get(route_path(
906 response = self.app.get(route_path(
907 'pullrequest_show',
907 'pullrequest_show',
908 repo_name=repo.repo_name,
908 repo_name=repo.repo_name,
909 pull_request_id=pull_request.pull_request_id))
909 pull_request_id=pull_request.pull_request_id))
910
910
911 assert response.status_int == 200
911 assert response.status_int == 200
912 assert_response = AssertResponse(response)
912 assert_response = AssertResponse(response)
913 assert_response.element_contains(
913 assert_response.element_contains(
914 '#changeset_compare_view_content .alert strong',
914 '#changeset_compare_view_content .alert strong',
915 'Missing commits')
915 'Missing commits')
916 assert_response.element_contains(
916 assert_response.element_contains(
917 '#changeset_compare_view_content .alert',
917 '#changeset_compare_view_content .alert',
918 'This pull request cannot be displayed, because one or more'
918 'This pull request cannot be displayed, because one or more'
919 ' commits no longer exist in the source repository.')
919 ' commits no longer exist in the source repository.')
920
920
921 def test_strip_commits_from_pull_request(
921 def test_strip_commits_from_pull_request(
922 self, backend, pr_util, csrf_token):
922 self, backend, pr_util, csrf_token):
923 commits = [
923 commits = [
924 {'message': 'initial-commit'},
924 {'message': 'initial-commit'},
925 {'message': 'old-feature'},
925 {'message': 'old-feature'},
926 {'message': 'new-feature', 'parents': ['initial-commit']},
926 {'message': 'new-feature', 'parents': ['initial-commit']},
927 ]
927 ]
928 pull_request = pr_util.create_pull_request(
928 pull_request = pr_util.create_pull_request(
929 commits, target_head='initial-commit', source_head='new-feature',
929 commits, target_head='initial-commit', source_head='new-feature',
930 revisions=['new-feature'])
930 revisions=['new-feature'])
931
931
932 vcs = pr_util.source_repository.scm_instance()
932 vcs = pr_util.source_repository.scm_instance()
933 if backend.alias == 'git':
933 if backend.alias == 'git':
934 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
934 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
935 else:
935 else:
936 vcs.strip(pr_util.commit_ids['new-feature'])
936 vcs.strip(pr_util.commit_ids['new-feature'])
937
937
938 response = self.app.get(route_path(
938 response = self.app.get(route_path(
939 'pullrequest_show',
939 'pullrequest_show',
940 repo_name=pr_util.target_repository.repo_name,
940 repo_name=pr_util.target_repository.repo_name,
941 pull_request_id=pull_request.pull_request_id))
941 pull_request_id=pull_request.pull_request_id))
942
942
943 assert response.status_int == 200
943 assert response.status_int == 200
944 assert_response = AssertResponse(response)
944 assert_response = AssertResponse(response)
945 assert_response.element_contains(
945 assert_response.element_contains(
946 '#changeset_compare_view_content .alert strong',
946 '#changeset_compare_view_content .alert strong',
947 'Missing commits')
947 'Missing commits')
948 assert_response.element_contains(
948 assert_response.element_contains(
949 '#changeset_compare_view_content .alert',
949 '#changeset_compare_view_content .alert',
950 'This pull request cannot be displayed, because one or more'
950 'This pull request cannot be displayed, because one or more'
951 ' commits no longer exist in the source repository.')
951 ' commits no longer exist in the source repository.')
952 assert_response.element_contains(
952 assert_response.element_contains(
953 '#update_commits',
953 '#update_commits',
954 'Update commits')
954 'Update commits')
955
955
956 def test_strip_commits_and_update(
956 def test_strip_commits_and_update(
957 self, backend, pr_util, csrf_token):
957 self, backend, pr_util, csrf_token):
958 commits = [
958 commits = [
959 {'message': 'initial-commit'},
959 {'message': 'initial-commit'},
960 {'message': 'old-feature'},
960 {'message': 'old-feature'},
961 {'message': 'new-feature', 'parents': ['old-feature']},
961 {'message': 'new-feature', 'parents': ['old-feature']},
962 ]
962 ]
963 pull_request = pr_util.create_pull_request(
963 pull_request = pr_util.create_pull_request(
964 commits, target_head='old-feature', source_head='new-feature',
964 commits, target_head='old-feature', source_head='new-feature',
965 revisions=['new-feature'], mergeable=True)
965 revisions=['new-feature'], mergeable=True)
966
966
967 vcs = pr_util.source_repository.scm_instance()
967 vcs = pr_util.source_repository.scm_instance()
968 if backend.alias == 'git':
968 if backend.alias == 'git':
969 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
969 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
970 else:
970 else:
971 vcs.strip(pr_util.commit_ids['new-feature'])
971 vcs.strip(pr_util.commit_ids['new-feature'])
972
972
973 response = self.app.post(
973 response = self.app.post(
974 route_path('pullrequest_update',
974 route_path('pullrequest_update',
975 repo_name=pull_request.target_repo.repo_name,
975 repo_name=pull_request.target_repo.repo_name,
976 pull_request_id=pull_request.pull_request_id),
976 pull_request_id=pull_request.pull_request_id),
977 params={'update_commits': 'true',
977 params={'update_commits': 'true',
978 'csrf_token': csrf_token})
978 'csrf_token': csrf_token})
979
979
980 assert response.status_int == 200
980 assert response.status_int == 200
981 assert response.body == 'true'
981 assert response.body == 'true'
982
982
983 # Make sure that after update, it won't raise 500 errors
983 # Make sure that after update, it won't raise 500 errors
984 response = self.app.get(route_path(
984 response = self.app.get(route_path(
985 'pullrequest_show',
985 'pullrequest_show',
986 repo_name=pr_util.target_repository.repo_name,
986 repo_name=pr_util.target_repository.repo_name,
987 pull_request_id=pull_request.pull_request_id))
987 pull_request_id=pull_request.pull_request_id))
988
988
989 assert response.status_int == 200
989 assert response.status_int == 200
990 assert_response = AssertResponse(response)
990 assert_response = AssertResponse(response)
991 assert_response.element_contains(
991 assert_response.element_contains(
992 '#changeset_compare_view_content .alert strong',
992 '#changeset_compare_view_content .alert strong',
993 'Missing commits')
993 'Missing commits')
994
994
995 def test_branch_is_a_link(self, pr_util):
995 def test_branch_is_a_link(self, pr_util):
996 pull_request = pr_util.create_pull_request()
996 pull_request = pr_util.create_pull_request()
997 pull_request.source_ref = 'branch:origin:1234567890abcdef'
997 pull_request.source_ref = 'branch:origin:1234567890abcdef'
998 pull_request.target_ref = 'branch:target:abcdef1234567890'
998 pull_request.target_ref = 'branch:target:abcdef1234567890'
999 Session().add(pull_request)
999 Session().add(pull_request)
1000 Session().commit()
1000 Session().commit()
1001
1001
1002 response = self.app.get(route_path(
1002 response = self.app.get(route_path(
1003 'pullrequest_show',
1003 'pullrequest_show',
1004 repo_name=pull_request.target_repo.scm_instance().name,
1004 repo_name=pull_request.target_repo.scm_instance().name,
1005 pull_request_id=pull_request.pull_request_id))
1005 pull_request_id=pull_request.pull_request_id))
1006 assert response.status_int == 200
1006 assert response.status_int == 200
1007 assert_response = AssertResponse(response)
1007 assert_response = AssertResponse(response)
1008
1008
1009 origin = assert_response.get_element('.pr-origininfo .tag')
1009 origin = assert_response.get_element('.pr-origininfo .tag')
1010 origin_children = origin.getchildren()
1010 origin_children = origin.getchildren()
1011 assert len(origin_children) == 1
1011 assert len(origin_children) == 1
1012 target = assert_response.get_element('.pr-targetinfo .tag')
1012 target = assert_response.get_element('.pr-targetinfo .tag')
1013 target_children = target.getchildren()
1013 target_children = target.getchildren()
1014 assert len(target_children) == 1
1014 assert len(target_children) == 1
1015
1015
1016 expected_origin_link = route_path(
1016 expected_origin_link = route_path(
1017 'repo_changelog',
1017 'repo_changelog',
1018 repo_name=pull_request.source_repo.scm_instance().name,
1018 repo_name=pull_request.source_repo.scm_instance().name,
1019 params=dict(branch='origin'))
1019 params=dict(branch='origin'))
1020 expected_target_link = route_path(
1020 expected_target_link = route_path(
1021 'repo_changelog',
1021 'repo_changelog',
1022 repo_name=pull_request.target_repo.scm_instance().name,
1022 repo_name=pull_request.target_repo.scm_instance().name,
1023 params=dict(branch='target'))
1023 params=dict(branch='target'))
1024 assert origin_children[0].attrib['href'] == expected_origin_link
1024 assert origin_children[0].attrib['href'] == expected_origin_link
1025 assert origin_children[0].text == 'branch: origin'
1025 assert origin_children[0].text == 'branch: origin'
1026 assert target_children[0].attrib['href'] == expected_target_link
1026 assert target_children[0].attrib['href'] == expected_target_link
1027 assert target_children[0].text == 'branch: target'
1027 assert target_children[0].text == 'branch: target'
1028
1028
1029 def test_bookmark_is_not_a_link(self, pr_util):
1029 def test_bookmark_is_not_a_link(self, pr_util):
1030 pull_request = pr_util.create_pull_request()
1030 pull_request = pr_util.create_pull_request()
1031 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1031 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1032 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1032 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1033 Session().add(pull_request)
1033 Session().add(pull_request)
1034 Session().commit()
1034 Session().commit()
1035
1035
1036 response = self.app.get(route_path(
1036 response = self.app.get(route_path(
1037 'pullrequest_show',
1037 'pullrequest_show',
1038 repo_name=pull_request.target_repo.scm_instance().name,
1038 repo_name=pull_request.target_repo.scm_instance().name,
1039 pull_request_id=pull_request.pull_request_id))
1039 pull_request_id=pull_request.pull_request_id))
1040 assert response.status_int == 200
1040 assert response.status_int == 200
1041 assert_response = AssertResponse(response)
1041 assert_response = AssertResponse(response)
1042
1042
1043 origin = assert_response.get_element('.pr-origininfo .tag')
1043 origin = assert_response.get_element('.pr-origininfo .tag')
1044 assert origin.text.strip() == 'bookmark: origin'
1044 assert origin.text.strip() == 'bookmark: origin'
1045 assert origin.getchildren() == []
1045 assert origin.getchildren() == []
1046
1046
1047 target = assert_response.get_element('.pr-targetinfo .tag')
1047 target = assert_response.get_element('.pr-targetinfo .tag')
1048 assert target.text.strip() == 'bookmark: target'
1048 assert target.text.strip() == 'bookmark: target'
1049 assert target.getchildren() == []
1049 assert target.getchildren() == []
1050
1050
1051 def test_tag_is_not_a_link(self, pr_util):
1051 def test_tag_is_not_a_link(self, pr_util):
1052 pull_request = pr_util.create_pull_request()
1052 pull_request = pr_util.create_pull_request()
1053 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1053 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1054 pull_request.target_ref = 'tag:target:abcdef1234567890'
1054 pull_request.target_ref = 'tag:target:abcdef1234567890'
1055 Session().add(pull_request)
1055 Session().add(pull_request)
1056 Session().commit()
1056 Session().commit()
1057
1057
1058 response = self.app.get(route_path(
1058 response = self.app.get(route_path(
1059 'pullrequest_show',
1059 'pullrequest_show',
1060 repo_name=pull_request.target_repo.scm_instance().name,
1060 repo_name=pull_request.target_repo.scm_instance().name,
1061 pull_request_id=pull_request.pull_request_id))
1061 pull_request_id=pull_request.pull_request_id))
1062 assert response.status_int == 200
1062 assert response.status_int == 200
1063 assert_response = AssertResponse(response)
1063 assert_response = AssertResponse(response)
1064
1064
1065 origin = assert_response.get_element('.pr-origininfo .tag')
1065 origin = assert_response.get_element('.pr-origininfo .tag')
1066 assert origin.text.strip() == 'tag: origin'
1066 assert origin.text.strip() == 'tag: origin'
1067 assert origin.getchildren() == []
1067 assert origin.getchildren() == []
1068
1068
1069 target = assert_response.get_element('.pr-targetinfo .tag')
1069 target = assert_response.get_element('.pr-targetinfo .tag')
1070 assert target.text.strip() == 'tag: target'
1070 assert target.text.strip() == 'tag: target'
1071 assert target.getchildren() == []
1071 assert target.getchildren() == []
1072
1072
1073 @pytest.mark.parametrize('mergeable', [True, False])
1073 @pytest.mark.parametrize('mergeable', [True, False])
1074 def test_shadow_repository_link(
1074 def test_shadow_repository_link(
1075 self, mergeable, pr_util, http_host_only_stub):
1075 self, mergeable, pr_util, http_host_only_stub):
1076 """
1076 """
1077 Check that the pull request summary page displays a link to the shadow
1077 Check that the pull request summary page displays a link to the shadow
1078 repository if the pull request is mergeable. If it is not mergeable
1078 repository if the pull request is mergeable. If it is not mergeable
1079 the link should not be displayed.
1079 the link should not be displayed.
1080 """
1080 """
1081 pull_request = pr_util.create_pull_request(
1081 pull_request = pr_util.create_pull_request(
1082 mergeable=mergeable, enable_notifications=False)
1082 mergeable=mergeable, enable_notifications=False)
1083 target_repo = pull_request.target_repo.scm_instance()
1083 target_repo = pull_request.target_repo.scm_instance()
1084 pr_id = pull_request.pull_request_id
1084 pr_id = pull_request.pull_request_id
1085 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1085 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1086 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1086 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1087
1087
1088 response = self.app.get(route_path(
1088 response = self.app.get(route_path(
1089 'pullrequest_show',
1089 'pullrequest_show',
1090 repo_name=target_repo.name,
1090 repo_name=target_repo.name,
1091 pull_request_id=pr_id))
1091 pull_request_id=pr_id))
1092
1092
1093 assertr = AssertResponse(response)
1093 assertr = AssertResponse(response)
1094 if mergeable:
1094 if mergeable:
1095 assertr.element_value_contains('input.pr-mergeinfo', shadow_url)
1095 assertr.element_value_contains('input.pr-mergeinfo', shadow_url)
1096 assertr.element_value_contains('input.pr-mergeinfo ', 'pr-merge')
1096 assertr.element_value_contains('input.pr-mergeinfo ', 'pr-merge')
1097 else:
1097 else:
1098 assertr.no_element_exists('.pr-mergeinfo')
1098 assertr.no_element_exists('.pr-mergeinfo')
1099
1099
1100
1100
1101 @pytest.mark.usefixtures('app')
1101 @pytest.mark.usefixtures('app')
1102 @pytest.mark.backends("git", "hg")
1102 @pytest.mark.backends("git", "hg")
1103 class TestPullrequestsControllerDelete(object):
1103 class TestPullrequestsControllerDelete(object):
1104 def test_pull_request_delete_button_permissions_admin(
1104 def test_pull_request_delete_button_permissions_admin(
1105 self, autologin_user, user_admin, pr_util):
1105 self, autologin_user, user_admin, pr_util):
1106 pull_request = pr_util.create_pull_request(
1106 pull_request = pr_util.create_pull_request(
1107 author=user_admin.username, enable_notifications=False)
1107 author=user_admin.username, enable_notifications=False)
1108
1108
1109 response = self.app.get(route_path(
1109 response = self.app.get(route_path(
1110 'pullrequest_show',
1110 'pullrequest_show',
1111 repo_name=pull_request.target_repo.scm_instance().name,
1111 repo_name=pull_request.target_repo.scm_instance().name,
1112 pull_request_id=pull_request.pull_request_id))
1112 pull_request_id=pull_request.pull_request_id))
1113
1113
1114 response.mustcontain('id="delete_pullrequest"')
1114 response.mustcontain('id="delete_pullrequest"')
1115 response.mustcontain('Confirm to delete this pull request')
1115 response.mustcontain('Confirm to delete this pull request')
1116
1116
1117 def test_pull_request_delete_button_permissions_owner(
1117 def test_pull_request_delete_button_permissions_owner(
1118 self, autologin_regular_user, user_regular, pr_util):
1118 self, autologin_regular_user, user_regular, pr_util):
1119 pull_request = pr_util.create_pull_request(
1119 pull_request = pr_util.create_pull_request(
1120 author=user_regular.username, enable_notifications=False)
1120 author=user_regular.username, enable_notifications=False)
1121
1121
1122 response = self.app.get(route_path(
1122 response = self.app.get(route_path(
1123 'pullrequest_show',
1123 'pullrequest_show',
1124 repo_name=pull_request.target_repo.scm_instance().name,
1124 repo_name=pull_request.target_repo.scm_instance().name,
1125 pull_request_id=pull_request.pull_request_id))
1125 pull_request_id=pull_request.pull_request_id))
1126
1126
1127 response.mustcontain('id="delete_pullrequest"')
1127 response.mustcontain('id="delete_pullrequest"')
1128 response.mustcontain('Confirm to delete this pull request')
1128 response.mustcontain('Confirm to delete this pull request')
1129
1129
1130 def test_pull_request_delete_button_permissions_forbidden(
1130 def test_pull_request_delete_button_permissions_forbidden(
1131 self, autologin_regular_user, user_regular, user_admin, pr_util):
1131 self, autologin_regular_user, user_regular, user_admin, pr_util):
1132 pull_request = pr_util.create_pull_request(
1132 pull_request = pr_util.create_pull_request(
1133 author=user_admin.username, enable_notifications=False)
1133 author=user_admin.username, enable_notifications=False)
1134
1134
1135 response = self.app.get(route_path(
1135 response = self.app.get(route_path(
1136 'pullrequest_show',
1136 'pullrequest_show',
1137 repo_name=pull_request.target_repo.scm_instance().name,
1137 repo_name=pull_request.target_repo.scm_instance().name,
1138 pull_request_id=pull_request.pull_request_id))
1138 pull_request_id=pull_request.pull_request_id))
1139 response.mustcontain(no=['id="delete_pullrequest"'])
1139 response.mustcontain(no=['id="delete_pullrequest"'])
1140 response.mustcontain(no=['Confirm to delete this pull request'])
1140 response.mustcontain(no=['Confirm to delete this pull request'])
1141
1141
1142 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1142 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1143 self, autologin_regular_user, user_regular, user_admin, pr_util,
1143 self, autologin_regular_user, user_regular, user_admin, pr_util,
1144 user_util):
1144 user_util):
1145
1145
1146 pull_request = pr_util.create_pull_request(
1146 pull_request = pr_util.create_pull_request(
1147 author=user_admin.username, enable_notifications=False)
1147 author=user_admin.username, enable_notifications=False)
1148
1148
1149 user_util.grant_user_permission_to_repo(
1149 user_util.grant_user_permission_to_repo(
1150 pull_request.target_repo, user_regular,
1150 pull_request.target_repo, user_regular,
1151 'repository.write')
1151 'repository.write')
1152
1152
1153 response = self.app.get(route_path(
1153 response = self.app.get(route_path(
1154 'pullrequest_show',
1154 'pullrequest_show',
1155 repo_name=pull_request.target_repo.scm_instance().name,
1155 repo_name=pull_request.target_repo.scm_instance().name,
1156 pull_request_id=pull_request.pull_request_id))
1156 pull_request_id=pull_request.pull_request_id))
1157
1157
1158 response.mustcontain('id="open_edit_pullrequest"')
1158 response.mustcontain('id="open_edit_pullrequest"')
1159 response.mustcontain('id="delete_pullrequest"')
1159 response.mustcontain('id="delete_pullrequest"')
1160 response.mustcontain(no=['Confirm to delete this pull request'])
1160 response.mustcontain(no=['Confirm to delete this pull request'])
1161
1161
1162 def test_delete_comment_returns_404_if_comment_does_not_exist(
1162 def test_delete_comment_returns_404_if_comment_does_not_exist(
1163 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1163 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1164
1164
1165 pull_request = pr_util.create_pull_request(
1165 pull_request = pr_util.create_pull_request(
1166 author=user_admin.username, enable_notifications=False)
1166 author=user_admin.username, enable_notifications=False)
1167
1167
1168 self.app.post(
1168 self.app.post(
1169 route_path(
1169 route_path(
1170 'pullrequest_comment_delete',
1170 'pullrequest_comment_delete',
1171 repo_name=pull_request.target_repo.scm_instance().name,
1171 repo_name=pull_request.target_repo.scm_instance().name,
1172 pull_request_id=pull_request.pull_request_id,
1172 pull_request_id=pull_request.pull_request_id,
1173 comment_id=1024404),
1173 comment_id=1024404),
1174 extra_environ=xhr_header,
1174 extra_environ=xhr_header,
1175 params={'csrf_token': csrf_token},
1175 params={'csrf_token': csrf_token},
1176 status=404
1176 status=404
1177 )
1177 )
1178
1178
1179 def test_delete_comment(
1179 def test_delete_comment(
1180 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1180 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1181
1181
1182 pull_request = pr_util.create_pull_request(
1182 pull_request = pr_util.create_pull_request(
1183 author=user_admin.username, enable_notifications=False)
1183 author=user_admin.username, enable_notifications=False)
1184 comment = pr_util.create_comment()
1184 comment = pr_util.create_comment()
1185 comment_id = comment.comment_id
1185 comment_id = comment.comment_id
1186
1186
1187 response = self.app.post(
1187 response = self.app.post(
1188 route_path(
1188 route_path(
1189 'pullrequest_comment_delete',
1189 'pullrequest_comment_delete',
1190 repo_name=pull_request.target_repo.scm_instance().name,
1190 repo_name=pull_request.target_repo.scm_instance().name,
1191 pull_request_id=pull_request.pull_request_id,
1191 pull_request_id=pull_request.pull_request_id,
1192 comment_id=comment_id),
1192 comment_id=comment_id),
1193 extra_environ=xhr_header,
1193 extra_environ=xhr_header,
1194 params={'csrf_token': csrf_token},
1194 params={'csrf_token': csrf_token},
1195 status=200
1195 status=200
1196 )
1196 )
1197 assert response.body == 'true'
1197 assert response.body == 'true'
1198
1198
1199 @pytest.mark.parametrize('url_type', [
1199 @pytest.mark.parametrize('url_type', [
1200 'pullrequest_new',
1200 'pullrequest_new',
1201 'pullrequest_create',
1201 'pullrequest_create',
1202 'pullrequest_update',
1202 'pullrequest_update',
1203 'pullrequest_merge',
1203 'pullrequest_merge',
1204 ])
1204 ])
1205 def test_pull_request_is_forbidden_on_archived_repo(
1205 def test_pull_request_is_forbidden_on_archived_repo(
1206 self, autologin_user, backend, xhr_header, user_util, url_type):
1206 self, autologin_user, backend, xhr_header, user_util, url_type):
1207
1207
1208 # create a temporary repo
1208 # create a temporary repo
1209 source = user_util.create_repo(repo_type=backend.alias)
1209 source = user_util.create_repo(repo_type=backend.alias)
1210 repo_name = source.repo_name
1210 repo_name = source.repo_name
1211 repo = Repository.get_by_repo_name(repo_name)
1211 repo = Repository.get_by_repo_name(repo_name)
1212 repo.archived = True
1212 repo.archived = True
1213 Session().commit()
1213 Session().commit()
1214
1214
1215 response = self.app.get(
1215 response = self.app.get(
1216 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1216 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1217
1217
1218 msg = 'Action not supported for archived repository.'
1218 msg = 'Action not supported for archived repository.'
1219 assert_session_flash(response, msg)
1219 assert_session_flash(response, msg)
1220
1220
1221
1221
1222 def assert_pull_request_status(pull_request, expected_status):
1222 def assert_pull_request_status(pull_request, expected_status):
1223 status = ChangesetStatusModel().calculated_review_status(
1223 status = ChangesetStatusModel().calculated_review_status(
1224 pull_request=pull_request)
1224 pull_request=pull_request)
1225 assert status == expected_status
1225 assert status == expected_status
1226
1226
1227
1227
1228 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1228 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1229 @pytest.mark.usefixtures("autologin_user")
1229 @pytest.mark.usefixtures("autologin_user")
1230 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1230 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1231 response = app.get(
1231 response = app.get(
1232 route_path(route, repo_name=backend_svn.repo_name), status=404)
1232 route_path(route, repo_name=backend_svn.repo_name), status=404)
1233
1233
@@ -1,465 +1,493 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2013-2018 RhodeCode GmbH
3 # Copyright (C) 2013-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Set of hooks run by RhodeCode Enterprise
23 Set of hooks run by RhodeCode Enterprise
24 """
24 """
25
25
26 import os
26 import os
27 import collections
27 import collections
28 import logging
28 import logging
29
29
30 import rhodecode
30 import rhodecode
31 from rhodecode import events
31 from rhodecode import events
32 from rhodecode.lib import helpers as h
32 from rhodecode.lib import helpers as h
33 from rhodecode.lib import audit_logger
33 from rhodecode.lib import audit_logger
34 from rhodecode.lib.utils2 import safe_str
34 from rhodecode.lib.utils2 import safe_str
35 from rhodecode.lib.exceptions import (
35 from rhodecode.lib.exceptions import (
36 HTTPLockedRC, HTTPBranchProtected, UserCreationError)
36 HTTPLockedRC, HTTPBranchProtected, UserCreationError)
37 from rhodecode.model.db import Repository, User
37 from rhodecode.model.db import Repository, User
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
42 class HookResponse(object):
43 def __init__(self, status, output):
44 self.status = status
45 self.output = output
46
47 def __add__(self, other):
48 other_status = getattr(other, 'status', 0)
49 new_status = max(self.status, other_status)
50 other_output = getattr(other, 'output', '')
51 new_output = self.output + other_output
52
53 return HookResponse(new_status, new_output)
54
55 def __bool__(self):
56 return self.status == 0
43
57
44
58
45 def is_shadow_repo(extras):
59 def is_shadow_repo(extras):
46 """
60 """
47 Returns ``True`` if this is an action executed against a shadow repository.
61 Returns ``True`` if this is an action executed against a shadow repository.
48 """
62 """
49 return extras['is_shadow_repo']
63 return extras['is_shadow_repo']
50
64
51
65
52 def _get_scm_size(alias, root_path):
66 def _get_scm_size(alias, root_path):
53
67
54 if not alias.startswith('.'):
68 if not alias.startswith('.'):
55 alias += '.'
69 alias += '.'
56
70
57 size_scm, size_root = 0, 0
71 size_scm, size_root = 0, 0
58 for path, unused_dirs, files in os.walk(safe_str(root_path)):
72 for path, unused_dirs, files in os.walk(safe_str(root_path)):
59 if path.find(alias) != -1:
73 if path.find(alias) != -1:
60 for f in files:
74 for f in files:
61 try:
75 try:
62 size_scm += os.path.getsize(os.path.join(path, f))
76 size_scm += os.path.getsize(os.path.join(path, f))
63 except OSError:
77 except OSError:
64 pass
78 pass
65 else:
79 else:
66 for f in files:
80 for f in files:
67 try:
81 try:
68 size_root += os.path.getsize(os.path.join(path, f))
82 size_root += os.path.getsize(os.path.join(path, f))
69 except OSError:
83 except OSError:
70 pass
84 pass
71
85
72 size_scm_f = h.format_byte_size_binary(size_scm)
86 size_scm_f = h.format_byte_size_binary(size_scm)
73 size_root_f = h.format_byte_size_binary(size_root)
87 size_root_f = h.format_byte_size_binary(size_root)
74 size_total_f = h.format_byte_size_binary(size_root + size_scm)
88 size_total_f = h.format_byte_size_binary(size_root + size_scm)
75
89
76 return size_scm_f, size_root_f, size_total_f
90 return size_scm_f, size_root_f, size_total_f
77
91
78
92
79 # actual hooks called by Mercurial internally, and GIT by our Python Hooks
93 # actual hooks called by Mercurial internally, and GIT by our Python Hooks
80 def repo_size(extras):
94 def repo_size(extras):
81 """Present size of repository after push."""
95 """Present size of repository after push."""
82 repo = Repository.get_by_repo_name(extras.repository)
96 repo = Repository.get_by_repo_name(extras.repository)
83 vcs_part = safe_str(u'.%s' % repo.repo_type)
97 vcs_part = safe_str(u'.%s' % repo.repo_type)
84 size_vcs, size_root, size_total = _get_scm_size(vcs_part,
98 size_vcs, size_root, size_total = _get_scm_size(vcs_part,
85 repo.repo_full_path)
99 repo.repo_full_path)
86 msg = ('Repository `%s` size summary %s:%s repo:%s total:%s\n'
100 msg = ('Repository `%s` size summary %s:%s repo:%s total:%s\n'
87 % (repo.repo_name, vcs_part, size_vcs, size_root, size_total))
101 % (repo.repo_name, vcs_part, size_vcs, size_root, size_total))
88 return HookResponse(0, msg)
102 return HookResponse(0, msg)
89
103
90
104
91 def pre_push(extras):
105 def pre_push(extras):
92 """
106 """
93 Hook executed before pushing code.
107 Hook executed before pushing code.
94
108
95 It bans pushing when the repository is locked.
109 It bans pushing when the repository is locked.
96 """
110 """
97
111
98 user = User.get_by_username(extras.username)
112 user = User.get_by_username(extras.username)
99 output = ''
113 output = ''
100 if extras.locked_by[0] and user.user_id != int(extras.locked_by[0]):
114 if extras.locked_by[0] and user.user_id != int(extras.locked_by[0]):
101 locked_by = User.get(extras.locked_by[0]).username
115 locked_by = User.get(extras.locked_by[0]).username
102 reason = extras.locked_by[2]
116 reason = extras.locked_by[2]
103 # this exception is interpreted in git/hg middlewares and based
117 # this exception is interpreted in git/hg middlewares and based
104 # on that proper return code is server to client
118 # on that proper return code is server to client
105 _http_ret = HTTPLockedRC(
119 _http_ret = HTTPLockedRC(
106 _locked_by_explanation(extras.repository, locked_by, reason))
120 _locked_by_explanation(extras.repository, locked_by, reason))
107 if str(_http_ret.code).startswith('2'):
121 if str(_http_ret.code).startswith('2'):
108 # 2xx Codes don't raise exceptions
122 # 2xx Codes don't raise exceptions
109 output = _http_ret.title
123 output = _http_ret.title
110 else:
124 else:
111 raise _http_ret
125 raise _http_ret
112
126
127 hook_response = ''
113 if not is_shadow_repo(extras):
128 if not is_shadow_repo(extras):
114 if extras.commit_ids and extras.check_branch_perms:
129 if extras.commit_ids and extras.check_branch_perms:
115
130
116 auth_user = user.AuthUser()
131 auth_user = user.AuthUser()
117 repo = Repository.get_by_repo_name(extras.repository)
132 repo = Repository.get_by_repo_name(extras.repository)
118 affected_branches = []
133 affected_branches = []
119 if repo.repo_type == 'hg':
134 if repo.repo_type == 'hg':
120 for entry in extras.commit_ids:
135 for entry in extras.commit_ids:
121 if entry['type'] == 'branch':
136 if entry['type'] == 'branch':
122 is_forced = bool(entry['multiple_heads'])
137 is_forced = bool(entry['multiple_heads'])
123 affected_branches.append([entry['name'], is_forced])
138 affected_branches.append([entry['name'], is_forced])
124 elif repo.repo_type == 'git':
139 elif repo.repo_type == 'git':
125 for entry in extras.commit_ids:
140 for entry in extras.commit_ids:
126 if entry['type'] == 'heads':
141 if entry['type'] == 'heads':
127 is_forced = bool(entry['pruned_sha'])
142 is_forced = bool(entry['pruned_sha'])
128 affected_branches.append([entry['name'], is_forced])
143 affected_branches.append([entry['name'], is_forced])
129
144
130 for branch_name, is_forced in affected_branches:
145 for branch_name, is_forced in affected_branches:
131
146
132 rule, branch_perm = auth_user.get_rule_and_branch_permission(
147 rule, branch_perm = auth_user.get_rule_and_branch_permission(
133 extras.repository, branch_name)
148 extras.repository, branch_name)
134 if not branch_perm:
149 if not branch_perm:
135 # no branch permission found for this branch, just keep checking
150 # no branch permission found for this branch, just keep checking
136 continue
151 continue
137
152
138 if branch_perm == 'branch.push_force':
153 if branch_perm == 'branch.push_force':
139 continue
154 continue
140 elif branch_perm == 'branch.push' and is_forced is False:
155 elif branch_perm == 'branch.push' and is_forced is False:
141 continue
156 continue
142 elif branch_perm == 'branch.push' and is_forced is True:
157 elif branch_perm == 'branch.push' and is_forced is True:
143 halt_message = 'Branch `{}` changes rejected by rule {}. ' \
158 halt_message = 'Branch `{}` changes rejected by rule {}. ' \
144 'FORCE PUSH FORBIDDEN.'.format(branch_name, rule)
159 'FORCE PUSH FORBIDDEN.'.format(branch_name, rule)
145 else:
160 else:
146 halt_message = 'Branch `{}` changes rejected by rule {}.'.format(
161 halt_message = 'Branch `{}` changes rejected by rule {}.'.format(
147 branch_name, rule)
162 branch_name, rule)
148
163
149 if halt_message:
164 if halt_message:
150 _http_ret = HTTPBranchProtected(halt_message)
165 _http_ret = HTTPBranchProtected(halt_message)
151 raise _http_ret
166 raise _http_ret
152
167
153 # Propagate to external components. This is done after checking the
168 # Propagate to external components. This is done after checking the
154 # lock, for consistent behavior.
169 # lock, for consistent behavior.
155 pre_push_extension(repo_store_path=Repository.base_path(), **extras)
170 hook_response = pre_push_extension(
171 repo_store_path=Repository.base_path(), **extras)
156 events.trigger(events.RepoPrePushEvent(
172 events.trigger(events.RepoPrePushEvent(
157 repo_name=extras.repository, extras=extras))
173 repo_name=extras.repository, extras=extras))
158
174
159 return HookResponse(0, output)
175 return HookResponse(0, output) + hook_response
160
176
161
177
162 def pre_pull(extras):
178 def pre_pull(extras):
163 """
179 """
164 Hook executed before pulling the code.
180 Hook executed before pulling the code.
165
181
166 It bans pulling when the repository is locked.
182 It bans pulling when the repository is locked.
167 """
183 """
168
184
169 output = ''
185 output = ''
170 if extras.locked_by[0]:
186 if extras.locked_by[0]:
171 locked_by = User.get(extras.locked_by[0]).username
187 locked_by = User.get(extras.locked_by[0]).username
172 reason = extras.locked_by[2]
188 reason = extras.locked_by[2]
173 # this exception is interpreted in git/hg middlewares and based
189 # this exception is interpreted in git/hg middlewares and based
174 # on that proper return code is server to client
190 # on that proper return code is server to client
175 _http_ret = HTTPLockedRC(
191 _http_ret = HTTPLockedRC(
176 _locked_by_explanation(extras.repository, locked_by, reason))
192 _locked_by_explanation(extras.repository, locked_by, reason))
177 if str(_http_ret.code).startswith('2'):
193 if str(_http_ret.code).startswith('2'):
178 # 2xx Codes don't raise exceptions
194 # 2xx Codes don't raise exceptions
179 output = _http_ret.title
195 output = _http_ret.title
180 else:
196 else:
181 raise _http_ret
197 raise _http_ret
182
198
183 # Propagate to external components. This is done after checking the
199 # Propagate to external components. This is done after checking the
184 # lock, for consistent behavior.
200 # lock, for consistent behavior.
201 hook_response = ''
185 if not is_shadow_repo(extras):
202 if not is_shadow_repo(extras):
186 pre_pull_extension(**extras)
203 extras.hook_type = extras.hook_type or 'pre_pull'
204 hook_response = pre_pull_extension(
205 repo_store_path=Repository.base_path(), **extras)
187 events.trigger(events.RepoPrePullEvent(
206 events.trigger(events.RepoPrePullEvent(
188 repo_name=extras.repository, extras=extras))
207 repo_name=extras.repository, extras=extras))
189
208
190 return HookResponse(0, output)
209 return HookResponse(0, output) + hook_response
191
210
192
211
193 def post_pull(extras):
212 def post_pull(extras):
194 """Hook executed after client pulls the code."""
213 """Hook executed after client pulls the code."""
195
214
196 audit_user = audit_logger.UserWrap(
215 audit_user = audit_logger.UserWrap(
197 username=extras.username,
216 username=extras.username,
198 ip_addr=extras.ip)
217 ip_addr=extras.ip)
199 repo = audit_logger.RepoWrap(repo_name=extras.repository)
218 repo = audit_logger.RepoWrap(repo_name=extras.repository)
200 audit_logger.store(
219 audit_logger.store(
201 'user.pull', action_data={
220 'user.pull', action_data={'user_agent': extras.user_agent},
202 'user_agent': extras.user_agent},
203 user=audit_user, repo=repo, commit=True)
221 user=audit_user, repo=repo, commit=True)
204
222
205 # Propagate to external components.
206 if not is_shadow_repo(extras):
207 post_pull_extension(**extras)
208 events.trigger(events.RepoPullEvent(
209 repo_name=extras.repository, extras=extras))
210
211 output = ''
223 output = ''
212 # make lock is a tri state False, True, None. We only make lock on True
224 # make lock is a tri state False, True, None. We only make lock on True
213 if extras.make_lock is True and not is_shadow_repo(extras):
225 if extras.make_lock is True and not is_shadow_repo(extras):
214 user = User.get_by_username(extras.username)
226 user = User.get_by_username(extras.username)
215 Repository.lock(Repository.get_by_repo_name(extras.repository),
227 Repository.lock(Repository.get_by_repo_name(extras.repository),
216 user.user_id,
228 user.user_id,
217 lock_reason=Repository.LOCK_PULL)
229 lock_reason=Repository.LOCK_PULL)
218 msg = 'Made lock on repo `%s`' % (extras.repository,)
230 msg = 'Made lock on repo `%s`' % (extras.repository,)
219 output += msg
231 output += msg
220
232
221 if extras.locked_by[0]:
233 if extras.locked_by[0]:
222 locked_by = User.get(extras.locked_by[0]).username
234 locked_by = User.get(extras.locked_by[0]).username
223 reason = extras.locked_by[2]
235 reason = extras.locked_by[2]
224 _http_ret = HTTPLockedRC(
236 _http_ret = HTTPLockedRC(
225 _locked_by_explanation(extras.repository, locked_by, reason))
237 _locked_by_explanation(extras.repository, locked_by, reason))
226 if str(_http_ret.code).startswith('2'):
238 if str(_http_ret.code).startswith('2'):
227 # 2xx Codes don't raise exceptions
239 # 2xx Codes don't raise exceptions
228 output += _http_ret.title
240 output += _http_ret.title
229
241
230 return HookResponse(0, output)
242 # Propagate to external components.
243 hook_response = ''
244 if not is_shadow_repo(extras):
245 extras.hook_type = extras.hook_type or 'post_pull'
246 hook_response = post_pull_extension(
247 repo_store_path=Repository.base_path(), **extras)
248 events.trigger(events.RepoPullEvent(
249 repo_name=extras.repository, extras=extras))
250
251 return HookResponse(0, output) + hook_response
231
252
232
253
233 def post_push(extras):
254 def post_push(extras):
234 """Hook executed after user pushes to the repository."""
255 """Hook executed after user pushes to the repository."""
235 commit_ids = extras.commit_ids
256 commit_ids = extras.commit_ids
236
257
237 # log the push call
258 # log the push call
238 audit_user = audit_logger.UserWrap(
259 audit_user = audit_logger.UserWrap(
239 username=extras.username, ip_addr=extras.ip)
260 username=extras.username, ip_addr=extras.ip)
240 repo = audit_logger.RepoWrap(repo_name=extras.repository)
261 repo = audit_logger.RepoWrap(repo_name=extras.repository)
241 audit_logger.store(
262 audit_logger.store(
242 'user.push', action_data={
263 'user.push', action_data={
243 'user_agent': extras.user_agent,
264 'user_agent': extras.user_agent,
244 'commit_ids': commit_ids[:400]},
265 'commit_ids': commit_ids[:400]},
245 user=audit_user, repo=repo, commit=True)
266 user=audit_user, repo=repo, commit=True)
246
267
247 # Propagate to external components.
268 # Propagate to external components.
248 if not is_shadow_repo(extras):
249 post_push_extension(
250 repo_store_path=Repository.base_path(),
251 pushed_revs=commit_ids,
252 **extras)
253 events.trigger(events.RepoPushEvent(
254 repo_name=extras.repository,
255 pushed_commit_ids=commit_ids,
256 extras=extras))
257
258 output = ''
269 output = ''
259 # make lock is a tri state False, True, None. We only release lock on False
270 # make lock is a tri state False, True, None. We only release lock on False
260 if extras.make_lock is False and not is_shadow_repo(extras):
271 if extras.make_lock is False and not is_shadow_repo(extras):
261 Repository.unlock(Repository.get_by_repo_name(extras.repository))
272 Repository.unlock(Repository.get_by_repo_name(extras.repository))
262 msg = 'Released lock on repo `%s`\n' % extras.repository
273 msg = 'Released lock on repo `%s`\n' % extras.repository
263 output += msg
274 output += msg
264
275
265 if extras.locked_by[0]:
276 if extras.locked_by[0]:
266 locked_by = User.get(extras.locked_by[0]).username
277 locked_by = User.get(extras.locked_by[0]).username
267 reason = extras.locked_by[2]
278 reason = extras.locked_by[2]
268 _http_ret = HTTPLockedRC(
279 _http_ret = HTTPLockedRC(
269 _locked_by_explanation(extras.repository, locked_by, reason))
280 _locked_by_explanation(extras.repository, locked_by, reason))
270 # TODO: johbo: if not?
281 # TODO: johbo: if not?
271 if str(_http_ret.code).startswith('2'):
282 if str(_http_ret.code).startswith('2'):
272 # 2xx Codes don't raise exceptions
283 # 2xx Codes don't raise exceptions
273 output += _http_ret.title
284 output += _http_ret.title
274
285
275 if extras.new_refs:
286 if extras.new_refs:
276 tmpl = \
287 tmpl = \
277 extras.server_url + '/' + \
288 extras.server_url + '/' + \
278 extras.repository + \
289 extras.repository + \
279 "/pull-request/new?{ref_type}={ref_name}"
290 "/pull-request/new?{ref_type}={ref_name}"
280 for branch_name in extras.new_refs['branches']:
291 for branch_name in extras.new_refs['branches']:
281 output += 'RhodeCode: open pull request link: {}\n'.format(
292 output += 'RhodeCode: open pull request link: {}\n'.format(
282 tmpl.format(ref_type='branch', ref_name=branch_name))
293 tmpl.format(ref_type='branch', ref_name=branch_name))
283
294
284 for book_name in extras.new_refs['bookmarks']:
295 for book_name in extras.new_refs['bookmarks']:
285 output += 'RhodeCode: open pull request link: {}\n'.format(
296 output += 'RhodeCode: open pull request link: {}\n'.format(
286 tmpl.format(ref_type='bookmark', ref_name=book_name))
297 tmpl.format(ref_type='bookmark', ref_name=book_name))
287
298
299 hook_response = ''
300 if not is_shadow_repo(extras):
301 hook_response = post_push_extension(
302 repo_store_path=Repository.base_path(),
303 **extras)
304 events.trigger(events.RepoPushEvent(
305 repo_name=extras.repository, pushed_commit_ids=commit_ids, extras=extras))
306
288 output += 'RhodeCode: push completed\n'
307 output += 'RhodeCode: push completed\n'
289 return HookResponse(0, output)
308 return HookResponse(0, output) + hook_response
290
309
291
310
292 def _locked_by_explanation(repo_name, user_name, reason):
311 def _locked_by_explanation(repo_name, user_name, reason):
293 message = (
312 message = (
294 'Repository `%s` locked by user `%s`. Reason:`%s`'
313 'Repository `%s` locked by user `%s`. Reason:`%s`'
295 % (repo_name, user_name, reason))
314 % (repo_name, user_name, reason))
296 return message
315 return message
297
316
298
317
299 def check_allowed_create_user(user_dict, created_by, **kwargs):
318 def check_allowed_create_user(user_dict, created_by, **kwargs):
300 # pre create hooks
319 # pre create hooks
301 if pre_create_user.is_active():
320 if pre_create_user.is_active():
302 allowed, reason = pre_create_user(created_by=created_by, **user_dict)
321 hook_result = pre_create_user(created_by=created_by, **user_dict)
322 allowed = hook_result.status == 0
303 if not allowed:
323 if not allowed:
324 reason = hook_result.output
304 raise UserCreationError(reason)
325 raise UserCreationError(reason)
305
326
306
327
307 class ExtensionCallback(object):
328 class ExtensionCallback(object):
308 """
329 """
309 Forwards a given call to rcextensions, sanitizes keyword arguments.
330 Forwards a given call to rcextensions, sanitizes keyword arguments.
310
331
311 Does check if there is an extension active for that hook. If it is
332 Does check if there is an extension active for that hook. If it is
312 there, it will forward all `kwargs_keys` keyword arguments to the
333 there, it will forward all `kwargs_keys` keyword arguments to the
313 extension callback.
334 extension callback.
314 """
335 """
315
336
316 def __init__(self, hook_name, kwargs_keys):
337 def __init__(self, hook_name, kwargs_keys):
317 self._hook_name = hook_name
338 self._hook_name = hook_name
318 self._kwargs_keys = set(kwargs_keys)
339 self._kwargs_keys = set(kwargs_keys)
319
340
320 def __call__(self, *args, **kwargs):
341 def __call__(self, *args, **kwargs):
321 log.debug('Calling extension callback for %s', self._hook_name)
342 log.debug('Calling extension callback for %s', self._hook_name)
343 kwargs_to_pass = {}
344 for key in self._kwargs_keys:
345 try:
346 kwargs_to_pass[key] = kwargs[key]
347 except KeyError:
348 log.error('Failed to fetch %s key. Expected keys: %s',
349 key, self._kwargs_keys)
350 raise
322
351
323 kwargs_to_pass = dict((key, kwargs[key]) for key in self._kwargs_keys)
324 # backward compat for removed api_key for old hooks. THis was it works
352 # backward compat for removed api_key for old hooks. THis was it works
325 # with older rcextensions that require api_key present
353 # with older rcextensions that require api_key present
326 if self._hook_name in ['CREATE_USER_HOOK', 'DELETE_USER_HOOK']:
354 if self._hook_name in ['CREATE_USER_HOOK', 'DELETE_USER_HOOK']:
327 kwargs_to_pass['api_key'] = '_DEPRECATED_'
355 kwargs_to_pass['api_key'] = '_DEPRECATED_'
328
356
329 callback = self._get_callback()
357 callback = self._get_callback()
330 if callback:
358 if callback:
331 return callback(**kwargs_to_pass)
359 return callback(**kwargs_to_pass)
332 else:
360 else:
333 log.debug('extensions callback not found skipping...')
361 log.debug('extensions callback not found skipping...')
334
362
335 def is_active(self):
363 def is_active(self):
336 return hasattr(rhodecode.EXTENSIONS, self._hook_name)
364 return hasattr(rhodecode.EXTENSIONS, self._hook_name)
337
365
338 def _get_callback(self):
366 def _get_callback(self):
339 return getattr(rhodecode.EXTENSIONS, self._hook_name, None)
367 return getattr(rhodecode.EXTENSIONS, self._hook_name, None)
340
368
341
369
342 pre_pull_extension = ExtensionCallback(
370 pre_pull_extension = ExtensionCallback(
343 hook_name='PRE_PULL_HOOK',
371 hook_name='PRE_PULL_HOOK',
344 kwargs_keys=(
372 kwargs_keys=(
345 'server_url', 'config', 'scm', 'username', 'ip', 'action',
373 'server_url', 'config', 'scm', 'username', 'ip', 'action',
346 'repository'))
374 'repository', 'hook_type', 'user_agent', 'repo_store_path',))
347
375
348
376
349 post_pull_extension = ExtensionCallback(
377 post_pull_extension = ExtensionCallback(
350 hook_name='PULL_HOOK',
378 hook_name='PULL_HOOK',
351 kwargs_keys=(
379 kwargs_keys=(
352 'server_url', 'config', 'scm', 'username', 'ip', 'action',
380 'server_url', 'config', 'scm', 'username', 'ip', 'action',
353 'repository'))
381 'repository', 'hook_type', 'user_agent', 'repo_store_path',))
354
382
355
383
356 pre_push_extension = ExtensionCallback(
384 pre_push_extension = ExtensionCallback(
357 hook_name='PRE_PUSH_HOOK',
385 hook_name='PRE_PUSH_HOOK',
358 kwargs_keys=(
386 kwargs_keys=(
359 'server_url', 'config', 'scm', 'username', 'ip', 'action',
387 'server_url', 'config', 'scm', 'username', 'ip', 'action',
360 'repository', 'repo_store_path', 'commit_ids'))
388 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',))
361
389
362
390
363 post_push_extension = ExtensionCallback(
391 post_push_extension = ExtensionCallback(
364 hook_name='PUSH_HOOK',
392 hook_name='PUSH_HOOK',
365 kwargs_keys=(
393 kwargs_keys=(
366 'server_url', 'config', 'scm', 'username', 'ip', 'action',
394 'server_url', 'config', 'scm', 'username', 'ip', 'action',
367 'repository', 'repo_store_path', 'pushed_revs'))
395 'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',))
368
396
369
397
370 pre_create_user = ExtensionCallback(
398 pre_create_user = ExtensionCallback(
371 hook_name='PRE_CREATE_USER_HOOK',
399 hook_name='PRE_CREATE_USER_HOOK',
372 kwargs_keys=(
400 kwargs_keys=(
373 'username', 'password', 'email', 'firstname', 'lastname', 'active',
401 'username', 'password', 'email', 'firstname', 'lastname', 'active',
374 'admin', 'created_by'))
402 'admin', 'created_by'))
375
403
376
404
377 log_create_pull_request = ExtensionCallback(
405 log_create_pull_request = ExtensionCallback(
378 hook_name='CREATE_PULL_REQUEST',
406 hook_name='CREATE_PULL_REQUEST',
379 kwargs_keys=(
407 kwargs_keys=(
380 'server_url', 'config', 'scm', 'username', 'ip', 'action',
408 'server_url', 'config', 'scm', 'username', 'ip', 'action',
381 'repository', 'pull_request_id', 'url', 'title', 'description',
409 'repository', 'pull_request_id', 'url', 'title', 'description',
382 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
410 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
383 'mergeable', 'source', 'target', 'author', 'reviewers'))
411 'mergeable', 'source', 'target', 'author', 'reviewers'))
384
412
385
413
386 log_merge_pull_request = ExtensionCallback(
414 log_merge_pull_request = ExtensionCallback(
387 hook_name='MERGE_PULL_REQUEST',
415 hook_name='MERGE_PULL_REQUEST',
388 kwargs_keys=(
416 kwargs_keys=(
389 'server_url', 'config', 'scm', 'username', 'ip', 'action',
417 'server_url', 'config', 'scm', 'username', 'ip', 'action',
390 'repository', 'pull_request_id', 'url', 'title', 'description',
418 'repository', 'pull_request_id', 'url', 'title', 'description',
391 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
419 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
392 'mergeable', 'source', 'target', 'author', 'reviewers'))
420 'mergeable', 'source', 'target', 'author', 'reviewers'))
393
421
394
422
395 log_close_pull_request = ExtensionCallback(
423 log_close_pull_request = ExtensionCallback(
396 hook_name='CLOSE_PULL_REQUEST',
424 hook_name='CLOSE_PULL_REQUEST',
397 kwargs_keys=(
425 kwargs_keys=(
398 'server_url', 'config', 'scm', 'username', 'ip', 'action',
426 'server_url', 'config', 'scm', 'username', 'ip', 'action',
399 'repository', 'pull_request_id', 'url', 'title', 'description',
427 'repository', 'pull_request_id', 'url', 'title', 'description',
400 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
428 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
401 'mergeable', 'source', 'target', 'author', 'reviewers'))
429 'mergeable', 'source', 'target', 'author', 'reviewers'))
402
430
403
431
404 log_review_pull_request = ExtensionCallback(
432 log_review_pull_request = ExtensionCallback(
405 hook_name='REVIEW_PULL_REQUEST',
433 hook_name='REVIEW_PULL_REQUEST',
406 kwargs_keys=(
434 kwargs_keys=(
407 'server_url', 'config', 'scm', 'username', 'ip', 'action',
435 'server_url', 'config', 'scm', 'username', 'ip', 'action',
408 'repository', 'pull_request_id', 'url', 'title', 'description',
436 'repository', 'pull_request_id', 'url', 'title', 'description',
409 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
437 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
410 'mergeable', 'source', 'target', 'author', 'reviewers'))
438 'mergeable', 'source', 'target', 'author', 'reviewers'))
411
439
412
440
413 log_update_pull_request = ExtensionCallback(
441 log_update_pull_request = ExtensionCallback(
414 hook_name='UPDATE_PULL_REQUEST',
442 hook_name='UPDATE_PULL_REQUEST',
415 kwargs_keys=(
443 kwargs_keys=(
416 'server_url', 'config', 'scm', 'username', 'ip', 'action',
444 'server_url', 'config', 'scm', 'username', 'ip', 'action',
417 'repository', 'pull_request_id', 'url', 'title', 'description',
445 'repository', 'pull_request_id', 'url', 'title', 'description',
418 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
446 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
419 'mergeable', 'source', 'target', 'author', 'reviewers'))
447 'mergeable', 'source', 'target', 'author', 'reviewers'))
420
448
421
449
422 log_create_user = ExtensionCallback(
450 log_create_user = ExtensionCallback(
423 hook_name='CREATE_USER_HOOK',
451 hook_name='CREATE_USER_HOOK',
424 kwargs_keys=(
452 kwargs_keys=(
425 'username', 'full_name_or_username', 'full_contact', 'user_id',
453 'username', 'full_name_or_username', 'full_contact', 'user_id',
426 'name', 'firstname', 'short_contact', 'admin', 'lastname',
454 'name', 'firstname', 'short_contact', 'admin', 'lastname',
427 'ip_addresses', 'extern_type', 'extern_name',
455 'ip_addresses', 'extern_type', 'extern_name',
428 'email', 'api_keys', 'last_login',
456 'email', 'api_keys', 'last_login',
429 'full_name', 'active', 'password', 'emails',
457 'full_name', 'active', 'password', 'emails',
430 'inherit_default_permissions', 'created_by', 'created_on'))
458 'inherit_default_permissions', 'created_by', 'created_on'))
431
459
432
460
433 log_delete_user = ExtensionCallback(
461 log_delete_user = ExtensionCallback(
434 hook_name='DELETE_USER_HOOK',
462 hook_name='DELETE_USER_HOOK',
435 kwargs_keys=(
463 kwargs_keys=(
436 'username', 'full_name_or_username', 'full_contact', 'user_id',
464 'username', 'full_name_or_username', 'full_contact', 'user_id',
437 'name', 'firstname', 'short_contact', 'admin', 'lastname',
465 'name', 'firstname', 'short_contact', 'admin', 'lastname',
438 'ip_addresses',
466 'ip_addresses',
439 'email', 'last_login',
467 'email', 'last_login',
440 'full_name', 'active', 'password', 'emails',
468 'full_name', 'active', 'password', 'emails',
441 'inherit_default_permissions', 'deleted_by'))
469 'inherit_default_permissions', 'deleted_by'))
442
470
443
471
444 log_create_repository = ExtensionCallback(
472 log_create_repository = ExtensionCallback(
445 hook_name='CREATE_REPO_HOOK',
473 hook_name='CREATE_REPO_HOOK',
446 kwargs_keys=(
474 kwargs_keys=(
447 'repo_name', 'repo_type', 'description', 'private', 'created_on',
475 'repo_name', 'repo_type', 'description', 'private', 'created_on',
448 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
476 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
449 'clone_uri', 'fork_id', 'group_id', 'created_by'))
477 'clone_uri', 'fork_id', 'group_id', 'created_by'))
450
478
451
479
452 log_delete_repository = ExtensionCallback(
480 log_delete_repository = ExtensionCallback(
453 hook_name='DELETE_REPO_HOOK',
481 hook_name='DELETE_REPO_HOOK',
454 kwargs_keys=(
482 kwargs_keys=(
455 'repo_name', 'repo_type', 'description', 'private', 'created_on',
483 'repo_name', 'repo_type', 'description', 'private', 'created_on',
456 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
484 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
457 'clone_uri', 'fork_id', 'group_id', 'deleted_by', 'deleted_on'))
485 'clone_uri', 'fork_id', 'group_id', 'deleted_by', 'deleted_on'))
458
486
459
487
460 log_create_repository_group = ExtensionCallback(
488 log_create_repository_group = ExtensionCallback(
461 hook_name='CREATE_REPO_GROUP_HOOK',
489 hook_name='CREATE_REPO_GROUP_HOOK',
462 kwargs_keys=(
490 kwargs_keys=(
463 'group_name', 'group_parent_id', 'group_description',
491 'group_name', 'group_parent_id', 'group_description',
464 'group_id', 'user_id', 'created_by', 'created_on',
492 'group_id', 'user_id', 'created_by', 'created_on',
465 'enable_locking'))
493 'enable_locking'))
@@ -1,162 +1,163 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import webob
21 import webob
22 from pyramid.threadlocal import get_current_request
22 from pyramid.threadlocal import get_current_request
23
23
24 from rhodecode import events
24 from rhodecode import events
25 from rhodecode.lib import hooks_base
25 from rhodecode.lib import hooks_base
26 from rhodecode.lib import utils2
26 from rhodecode.lib import utils2
27
27
28
28
29 def _get_rc_scm_extras(username, repo_name, repo_alias, action):
29 def _get_rc_scm_extras(username, repo_name, repo_alias, action):
30 # TODO: johbo: Replace by vcs_operation_context and remove fully
30 # TODO: johbo: Replace by vcs_operation_context and remove fully
31 from rhodecode.lib.base import vcs_operation_context
31 from rhodecode.lib.base import vcs_operation_context
32 check_locking = action in ('pull', 'push')
32 check_locking = action in ('pull', 'push')
33
33
34 request = get_current_request()
34 request = get_current_request()
35
35
36 # default
36 # default
37 dummy_environ = webob.Request.blank('').environ
37 dummy_environ = webob.Request.blank('').environ
38 try:
38 try:
39 environ = request.environ or dummy_environ
39 environ = request.environ or dummy_environ
40 except TypeError:
40 except TypeError:
41 # we might use this outside of request context
41 # we might use this outside of request context
42 environ = dummy_environ
42 environ = dummy_environ
43
43
44 extras = vcs_operation_context(
44 extras = vcs_operation_context(
45 environ, repo_name, username, action, repo_alias, check_locking)
45 environ, repo_name, username, action, repo_alias, check_locking)
46 return utils2.AttributeDict(extras)
46 return utils2.AttributeDict(extras)
47
47
48
48
49 def trigger_post_push_hook(
49 def trigger_post_push_hook(
50 username, action, repo_name, repo_alias, commit_ids):
50 username, action, hook_type, repo_name, repo_alias, commit_ids):
51 """
51 """
52 Triggers push action hooks
52 Triggers push action hooks
53
53
54 :param username: username who pushes
54 :param username: username who pushes
55 :param action: push/push_local/push_remote
55 :param action: push/push_local/push_remote
56 :param repo_name: name of repo
56 :param repo_name: name of repo
57 :param repo_alias: the type of SCM repo
57 :param repo_alias: the type of SCM repo
58 :param commit_ids: list of commit ids that we pushed
58 :param commit_ids: list of commit ids that we pushed
59 """
59 """
60 extras = _get_rc_scm_extras(username, repo_name, repo_alias, action)
60 extras = _get_rc_scm_extras(username, repo_name, repo_alias, action)
61 extras.commit_ids = commit_ids
61 extras.commit_ids = commit_ids
62 extras.hook_type = hook_type
62 hooks_base.post_push(extras)
63 hooks_base.post_push(extras)
63
64
64
65
65 def trigger_log_create_pull_request_hook(username, repo_name, repo_alias,
66 def trigger_log_create_pull_request_hook(username, repo_name, repo_alias,
66 pull_request):
67 pull_request):
67 """
68 """
68 Triggers create pull request action hooks
69 Triggers create pull request action hooks
69
70
70 :param username: username who creates the pull request
71 :param username: username who creates the pull request
71 :param repo_name: name of target repo
72 :param repo_name: name of target repo
72 :param repo_alias: the type of SCM target repo
73 :param repo_alias: the type of SCM target repo
73 :param pull_request: the pull request that was created
74 :param pull_request: the pull request that was created
74 """
75 """
75 if repo_alias not in ('hg', 'git'):
76 if repo_alias not in ('hg', 'git'):
76 return
77 return
77
78
78 extras = _get_rc_scm_extras(username, repo_name, repo_alias,
79 extras = _get_rc_scm_extras(username, repo_name, repo_alias,
79 'create_pull_request')
80 'create_pull_request')
80 events.trigger(events.PullRequestCreateEvent(pull_request))
81 events.trigger(events.PullRequestCreateEvent(pull_request))
81 extras.update(pull_request.get_api_data())
82 extras.update(pull_request.get_api_data())
82 hooks_base.log_create_pull_request(**extras)
83 hooks_base.log_create_pull_request(**extras)
83
84
84
85
85 def trigger_log_merge_pull_request_hook(username, repo_name, repo_alias,
86 def trigger_log_merge_pull_request_hook(username, repo_name, repo_alias,
86 pull_request):
87 pull_request):
87 """
88 """
88 Triggers merge pull request action hooks
89 Triggers merge pull request action hooks
89
90
90 :param username: username who creates the pull request
91 :param username: username who creates the pull request
91 :param repo_name: name of target repo
92 :param repo_name: name of target repo
92 :param repo_alias: the type of SCM target repo
93 :param repo_alias: the type of SCM target repo
93 :param pull_request: the pull request that was merged
94 :param pull_request: the pull request that was merged
94 """
95 """
95 if repo_alias not in ('hg', 'git'):
96 if repo_alias not in ('hg', 'git'):
96 return
97 return
97
98
98 extras = _get_rc_scm_extras(username, repo_name, repo_alias,
99 extras = _get_rc_scm_extras(username, repo_name, repo_alias,
99 'merge_pull_request')
100 'merge_pull_request')
100 events.trigger(events.PullRequestMergeEvent(pull_request))
101 events.trigger(events.PullRequestMergeEvent(pull_request))
101 extras.update(pull_request.get_api_data())
102 extras.update(pull_request.get_api_data())
102 hooks_base.log_merge_pull_request(**extras)
103 hooks_base.log_merge_pull_request(**extras)
103
104
104
105
105 def trigger_log_close_pull_request_hook(username, repo_name, repo_alias,
106 def trigger_log_close_pull_request_hook(username, repo_name, repo_alias,
106 pull_request):
107 pull_request):
107 """
108 """
108 Triggers close pull request action hooks
109 Triggers close pull request action hooks
109
110
110 :param username: username who creates the pull request
111 :param username: username who creates the pull request
111 :param repo_name: name of target repo
112 :param repo_name: name of target repo
112 :param repo_alias: the type of SCM target repo
113 :param repo_alias: the type of SCM target repo
113 :param pull_request: the pull request that was closed
114 :param pull_request: the pull request that was closed
114 """
115 """
115 if repo_alias not in ('hg', 'git'):
116 if repo_alias not in ('hg', 'git'):
116 return
117 return
117
118
118 extras = _get_rc_scm_extras(username, repo_name, repo_alias,
119 extras = _get_rc_scm_extras(username, repo_name, repo_alias,
119 'close_pull_request')
120 'close_pull_request')
120 events.trigger(events.PullRequestCloseEvent(pull_request))
121 events.trigger(events.PullRequestCloseEvent(pull_request))
121 extras.update(pull_request.get_api_data())
122 extras.update(pull_request.get_api_data())
122 hooks_base.log_close_pull_request(**extras)
123 hooks_base.log_close_pull_request(**extras)
123
124
124
125
125 def trigger_log_review_pull_request_hook(username, repo_name, repo_alias,
126 def trigger_log_review_pull_request_hook(username, repo_name, repo_alias,
126 pull_request):
127 pull_request):
127 """
128 """
128 Triggers review status change pull request action hooks
129 Triggers review status change pull request action hooks
129
130
130 :param username: username who creates the pull request
131 :param username: username who creates the pull request
131 :param repo_name: name of target repo
132 :param repo_name: name of target repo
132 :param repo_alias: the type of SCM target repo
133 :param repo_alias: the type of SCM target repo
133 :param pull_request: the pull request that review status changed
134 :param pull_request: the pull request that review status changed
134 """
135 """
135 if repo_alias not in ('hg', 'git'):
136 if repo_alias not in ('hg', 'git'):
136 return
137 return
137
138
138 extras = _get_rc_scm_extras(username, repo_name, repo_alias,
139 extras = _get_rc_scm_extras(username, repo_name, repo_alias,
139 'review_pull_request')
140 'review_pull_request')
140 events.trigger(events.PullRequestReviewEvent(pull_request))
141 events.trigger(events.PullRequestReviewEvent(pull_request))
141 extras.update(pull_request.get_api_data())
142 extras.update(pull_request.get_api_data())
142 hooks_base.log_review_pull_request(**extras)
143 hooks_base.log_review_pull_request(**extras)
143
144
144
145
145 def trigger_log_update_pull_request_hook(username, repo_name, repo_alias,
146 def trigger_log_update_pull_request_hook(username, repo_name, repo_alias,
146 pull_request):
147 pull_request):
147 """
148 """
148 Triggers update pull request action hooks
149 Triggers update pull request action hooks
149
150
150 :param username: username who creates the pull request
151 :param username: username who creates the pull request
151 :param repo_name: name of target repo
152 :param repo_name: name of target repo
152 :param repo_alias: the type of SCM target repo
153 :param repo_alias: the type of SCM target repo
153 :param pull_request: the pull request that was updated
154 :param pull_request: the pull request that was updated
154 """
155 """
155 if repo_alias not in ('hg', 'git'):
156 if repo_alias not in ('hg', 'git'):
156 return
157 return
157
158
158 extras = _get_rc_scm_extras(username, repo_name, repo_alias,
159 extras = _get_rc_scm_extras(username, repo_name, repo_alias,
159 'update_pull_request')
160 'update_pull_request')
160 events.trigger(events.PullRequestUpdateEvent(pull_request))
161 events.trigger(events.PullRequestUpdateEvent(pull_request))
161 extras.update(pull_request.get_api_data())
162 extras.update(pull_request.get_api_data())
162 hooks_base.log_update_pull_request(**extras)
163 hooks_base.log_update_pull_request(**extras)
@@ -1,781 +1,781 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Utilities library for RhodeCode
22 Utilities library for RhodeCode
23 """
23 """
24
24
25 import datetime
25 import datetime
26 import decorator
26 import decorator
27 import json
27 import json
28 import logging
28 import logging
29 import os
29 import os
30 import re
30 import re
31 import sys
31 import shutil
32 import shutil
32 import tempfile
33 import tempfile
33 import traceback
34 import traceback
34 import tarfile
35 import tarfile
35 import warnings
36 import warnings
36 import hashlib
37 import hashlib
37 from os.path import join as jn
38 from os.path import join as jn
38
39
39 import paste
40 import paste
40 import pkg_resources
41 import pkg_resources
41 from webhelpers.text import collapse, remove_formatting, strip_tags
42 from webhelpers.text import collapse, remove_formatting, strip_tags
42 from mako import exceptions
43 from mako import exceptions
43 from pyramid.threadlocal import get_current_registry
44 from pyramid.threadlocal import get_current_registry
44 from rhodecode.lib.request import Request
45 from rhodecode.lib.request import Request
45
46
46 from rhodecode.lib.fakemod import create_module
47 from rhodecode.lib.vcs.backends.base import Config
47 from rhodecode.lib.vcs.backends.base import Config
48 from rhodecode.lib.vcs.exceptions import VCSError
48 from rhodecode.lib.vcs.exceptions import VCSError
49 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
49 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
50 from rhodecode.lib.utils2 import (
50 from rhodecode.lib.utils2 import (
51 safe_str, safe_unicode, get_current_rhodecode_user, md5, sha1)
51 safe_str, safe_unicode, get_current_rhodecode_user, md5, sha1)
52 from rhodecode.model import meta
52 from rhodecode.model import meta
53 from rhodecode.model.db import (
53 from rhodecode.model.db import (
54 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
54 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
55 from rhodecode.model.meta import Session
55 from rhodecode.model.meta import Session
56
56
57
57
58 log = logging.getLogger(__name__)
58 log = logging.getLogger(__name__)
59
59
60 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
60 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
61
61
62 # String which contains characters that are not allowed in slug names for
62 # String which contains characters that are not allowed in slug names for
63 # repositories or repository groups. It is properly escaped to use it in
63 # repositories or repository groups. It is properly escaped to use it in
64 # regular expressions.
64 # regular expressions.
65 SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
65 SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
66
66
67 # Regex that matches forbidden characters in repo/group slugs.
67 # Regex that matches forbidden characters in repo/group slugs.
68 SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS))
68 SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS))
69
69
70 # Regex that matches allowed characters in repo/group slugs.
70 # Regex that matches allowed characters in repo/group slugs.
71 SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS))
71 SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS))
72
72
73 # Regex that matches whole repo/group slugs.
73 # Regex that matches whole repo/group slugs.
74 SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS))
74 SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS))
75
75
76 _license_cache = None
76 _license_cache = None
77
77
78
78
79 def repo_name_slug(value):
79 def repo_name_slug(value):
80 """
80 """
81 Return slug of name of repository
81 Return slug of name of repository
82 This function is called on each creation/modification
82 This function is called on each creation/modification
83 of repository to prevent bad names in repo
83 of repository to prevent bad names in repo
84 """
84 """
85 replacement_char = '-'
85 replacement_char = '-'
86
86
87 slug = remove_formatting(value)
87 slug = remove_formatting(value)
88 slug = SLUG_BAD_CHAR_RE.sub('', slug)
88 slug = SLUG_BAD_CHAR_RE.sub('', slug)
89 slug = re.sub('[\s]+', '-', slug)
89 slug = re.sub('[\s]+', '-', slug)
90 slug = collapse(slug, replacement_char)
90 slug = collapse(slug, replacement_char)
91 return slug
91 return slug
92
92
93
93
94 #==============================================================================
94 #==============================================================================
95 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
95 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
96 #==============================================================================
96 #==============================================================================
97 def get_repo_slug(request):
97 def get_repo_slug(request):
98 _repo = ''
98 _repo = ''
99
99
100 if hasattr(request, 'db_repo'):
100 if hasattr(request, 'db_repo'):
101 # if our requests has set db reference use it for name, this
101 # if our requests has set db reference use it for name, this
102 # translates the example.com/_<id> into proper repo names
102 # translates the example.com/_<id> into proper repo names
103 _repo = request.db_repo.repo_name
103 _repo = request.db_repo.repo_name
104 elif getattr(request, 'matchdict', None):
104 elif getattr(request, 'matchdict', None):
105 # pyramid
105 # pyramid
106 _repo = request.matchdict.get('repo_name')
106 _repo = request.matchdict.get('repo_name')
107
107
108 if _repo:
108 if _repo:
109 _repo = _repo.rstrip('/')
109 _repo = _repo.rstrip('/')
110 return _repo
110 return _repo
111
111
112
112
113 def get_repo_group_slug(request):
113 def get_repo_group_slug(request):
114 _group = ''
114 _group = ''
115 if hasattr(request, 'db_repo_group'):
115 if hasattr(request, 'db_repo_group'):
116 # if our requests has set db reference use it for name, this
116 # if our requests has set db reference use it for name, this
117 # translates the example.com/_<id> into proper repo group names
117 # translates the example.com/_<id> into proper repo group names
118 _group = request.db_repo_group.group_name
118 _group = request.db_repo_group.group_name
119 elif getattr(request, 'matchdict', None):
119 elif getattr(request, 'matchdict', None):
120 # pyramid
120 # pyramid
121 _group = request.matchdict.get('repo_group_name')
121 _group = request.matchdict.get('repo_group_name')
122
122
123 if _group:
123 if _group:
124 _group = _group.rstrip('/')
124 _group = _group.rstrip('/')
125 return _group
125 return _group
126
126
127
127
128 def get_user_group_slug(request):
128 def get_user_group_slug(request):
129 _user_group = ''
129 _user_group = ''
130
130
131 if hasattr(request, 'db_user_group'):
131 if hasattr(request, 'db_user_group'):
132 _user_group = request.db_user_group.users_group_name
132 _user_group = request.db_user_group.users_group_name
133 elif getattr(request, 'matchdict', None):
133 elif getattr(request, 'matchdict', None):
134 # pyramid
134 # pyramid
135 _user_group = request.matchdict.get('user_group_id')
135 _user_group = request.matchdict.get('user_group_id')
136 _user_group_name = request.matchdict.get('user_group_name')
136 _user_group_name = request.matchdict.get('user_group_name')
137 try:
137 try:
138 if _user_group:
138 if _user_group:
139 _user_group = UserGroup.get(_user_group)
139 _user_group = UserGroup.get(_user_group)
140 elif _user_group_name:
140 elif _user_group_name:
141 _user_group = UserGroup.get_by_group_name(_user_group_name)
141 _user_group = UserGroup.get_by_group_name(_user_group_name)
142
142
143 if _user_group:
143 if _user_group:
144 _user_group = _user_group.users_group_name
144 _user_group = _user_group.users_group_name
145 except Exception:
145 except Exception:
146 log.exception('Failed to get user group by id and name')
146 log.exception('Failed to get user group by id and name')
147 # catch all failures here
147 # catch all failures here
148 return None
148 return None
149
149
150 return _user_group
150 return _user_group
151
151
152
152
153 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
153 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
154 """
154 """
155 Scans given path for repos and return (name,(type,path)) tuple
155 Scans given path for repos and return (name,(type,path)) tuple
156
156
157 :param path: path to scan for repositories
157 :param path: path to scan for repositories
158 :param recursive: recursive search and return names with subdirs in front
158 :param recursive: recursive search and return names with subdirs in front
159 """
159 """
160
160
161 # remove ending slash for better results
161 # remove ending slash for better results
162 path = path.rstrip(os.sep)
162 path = path.rstrip(os.sep)
163 log.debug('now scanning in %s location recursive:%s...', path, recursive)
163 log.debug('now scanning in %s location recursive:%s...', path, recursive)
164
164
165 def _get_repos(p):
165 def _get_repos(p):
166 dirpaths = _get_dirpaths(p)
166 dirpaths = _get_dirpaths(p)
167 if not _is_dir_writable(p):
167 if not _is_dir_writable(p):
168 log.warning('repo path without write access: %s', p)
168 log.warning('repo path without write access: %s', p)
169
169
170 for dirpath in dirpaths:
170 for dirpath in dirpaths:
171 if os.path.isfile(os.path.join(p, dirpath)):
171 if os.path.isfile(os.path.join(p, dirpath)):
172 continue
172 continue
173 cur_path = os.path.join(p, dirpath)
173 cur_path = os.path.join(p, dirpath)
174
174
175 # skip removed repos
175 # skip removed repos
176 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
176 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
177 continue
177 continue
178
178
179 #skip .<somethin> dirs
179 #skip .<somethin> dirs
180 if dirpath.startswith('.'):
180 if dirpath.startswith('.'):
181 continue
181 continue
182
182
183 try:
183 try:
184 scm_info = get_scm(cur_path)
184 scm_info = get_scm(cur_path)
185 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
185 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
186 except VCSError:
186 except VCSError:
187 if not recursive:
187 if not recursive:
188 continue
188 continue
189 #check if this dir containts other repos for recursive scan
189 #check if this dir containts other repos for recursive scan
190 rec_path = os.path.join(p, dirpath)
190 rec_path = os.path.join(p, dirpath)
191 if os.path.isdir(rec_path):
191 if os.path.isdir(rec_path):
192 for inner_scm in _get_repos(rec_path):
192 for inner_scm in _get_repos(rec_path):
193 yield inner_scm
193 yield inner_scm
194
194
195 return _get_repos(path)
195 return _get_repos(path)
196
196
197
197
198 def _get_dirpaths(p):
198 def _get_dirpaths(p):
199 try:
199 try:
200 # OS-independable way of checking if we have at least read-only
200 # OS-independable way of checking if we have at least read-only
201 # access or not.
201 # access or not.
202 dirpaths = os.listdir(p)
202 dirpaths = os.listdir(p)
203 except OSError:
203 except OSError:
204 log.warning('ignoring repo path without read access: %s', p)
204 log.warning('ignoring repo path without read access: %s', p)
205 return []
205 return []
206
206
207 # os.listpath has a tweak: If a unicode is passed into it, then it tries to
207 # os.listpath has a tweak: If a unicode is passed into it, then it tries to
208 # decode paths and suddenly returns unicode objects itself. The items it
208 # decode paths and suddenly returns unicode objects itself. The items it
209 # cannot decode are returned as strings and cause issues.
209 # cannot decode are returned as strings and cause issues.
210 #
210 #
211 # Those paths are ignored here until a solid solution for path handling has
211 # Those paths are ignored here until a solid solution for path handling has
212 # been built.
212 # been built.
213 expected_type = type(p)
213 expected_type = type(p)
214
214
215 def _has_correct_type(item):
215 def _has_correct_type(item):
216 if type(item) is not expected_type:
216 if type(item) is not expected_type:
217 log.error(
217 log.error(
218 u"Ignoring path %s since it cannot be decoded into unicode.",
218 u"Ignoring path %s since it cannot be decoded into unicode.",
219 # Using "repr" to make sure that we see the byte value in case
219 # Using "repr" to make sure that we see the byte value in case
220 # of support.
220 # of support.
221 repr(item))
221 repr(item))
222 return False
222 return False
223 return True
223 return True
224
224
225 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
225 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
226
226
227 return dirpaths
227 return dirpaths
228
228
229
229
230 def _is_dir_writable(path):
230 def _is_dir_writable(path):
231 """
231 """
232 Probe if `path` is writable.
232 Probe if `path` is writable.
233
233
234 Due to trouble on Cygwin / Windows, this is actually probing if it is
234 Due to trouble on Cygwin / Windows, this is actually probing if it is
235 possible to create a file inside of `path`, stat does not produce reliable
235 possible to create a file inside of `path`, stat does not produce reliable
236 results in this case.
236 results in this case.
237 """
237 """
238 try:
238 try:
239 with tempfile.TemporaryFile(dir=path):
239 with tempfile.TemporaryFile(dir=path):
240 pass
240 pass
241 except OSError:
241 except OSError:
242 return False
242 return False
243 return True
243 return True
244
244
245
245
246 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
246 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
247 """
247 """
248 Returns True if given path is a valid repository False otherwise.
248 Returns True if given path is a valid repository False otherwise.
249 If expect_scm param is given also, compare if given scm is the same
249 If expect_scm param is given also, compare if given scm is the same
250 as expected from scm parameter. If explicit_scm is given don't try to
250 as expected from scm parameter. If explicit_scm is given don't try to
251 detect the scm, just use the given one to check if repo is valid
251 detect the scm, just use the given one to check if repo is valid
252
252
253 :param repo_name:
253 :param repo_name:
254 :param base_path:
254 :param base_path:
255 :param expect_scm:
255 :param expect_scm:
256 :param explicit_scm:
256 :param explicit_scm:
257 :param config:
257 :param config:
258
258
259 :return True: if given path is a valid repository
259 :return True: if given path is a valid repository
260 """
260 """
261 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
261 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
262 log.debug('Checking if `%s` is a valid path for repository. '
262 log.debug('Checking if `%s` is a valid path for repository. '
263 'Explicit type: %s', repo_name, explicit_scm)
263 'Explicit type: %s', repo_name, explicit_scm)
264
264
265 try:
265 try:
266 if explicit_scm:
266 if explicit_scm:
267 detected_scms = [get_scm_backend(explicit_scm)(
267 detected_scms = [get_scm_backend(explicit_scm)(
268 full_path, config=config).alias]
268 full_path, config=config).alias]
269 else:
269 else:
270 detected_scms = get_scm(full_path)
270 detected_scms = get_scm(full_path)
271
271
272 if expect_scm:
272 if expect_scm:
273 return detected_scms[0] == expect_scm
273 return detected_scms[0] == expect_scm
274 log.debug('path: %s is an vcs object:%s', full_path, detected_scms)
274 log.debug('path: %s is an vcs object:%s', full_path, detected_scms)
275 return True
275 return True
276 except VCSError:
276 except VCSError:
277 log.debug('path: %s is not a valid repo !', full_path)
277 log.debug('path: %s is not a valid repo !', full_path)
278 return False
278 return False
279
279
280
280
281 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
281 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
282 """
282 """
283 Returns True if given path is a repository group, False otherwise
283 Returns True if given path is a repository group, False otherwise
284
284
285 :param repo_name:
285 :param repo_name:
286 :param base_path:
286 :param base_path:
287 """
287 """
288 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
288 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
289 log.debug('Checking if `%s` is a valid path for repository group',
289 log.debug('Checking if `%s` is a valid path for repository group',
290 repo_group_name)
290 repo_group_name)
291
291
292 # check if it's not a repo
292 # check if it's not a repo
293 if is_valid_repo(repo_group_name, base_path):
293 if is_valid_repo(repo_group_name, base_path):
294 log.debug('Repo called %s exist, it is not a valid repo group', repo_group_name)
294 log.debug('Repo called %s exist, it is not a valid repo group', repo_group_name)
295 return False
295 return False
296
296
297 try:
297 try:
298 # we need to check bare git repos at higher level
298 # we need to check bare git repos at higher level
299 # since we might match branches/hooks/info/objects or possible
299 # since we might match branches/hooks/info/objects or possible
300 # other things inside bare git repo
300 # other things inside bare git repo
301 maybe_repo = os.path.dirname(full_path)
301 maybe_repo = os.path.dirname(full_path)
302 if maybe_repo == base_path:
302 if maybe_repo == base_path:
303 # skip root level repo check, we know root location CANNOT BE a repo group
303 # skip root level repo check, we know root location CANNOT BE a repo group
304 return False
304 return False
305
305
306 scm_ = get_scm(maybe_repo)
306 scm_ = get_scm(maybe_repo)
307 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
307 log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
308 return False
308 return False
309 except VCSError:
309 except VCSError:
310 pass
310 pass
311
311
312 # check if it's a valid path
312 # check if it's a valid path
313 if skip_path_check or os.path.isdir(full_path):
313 if skip_path_check or os.path.isdir(full_path):
314 log.debug('path: %s is a valid repo group !', full_path)
314 log.debug('path: %s is a valid repo group !', full_path)
315 return True
315 return True
316
316
317 log.debug('path: %s is not a valid repo group !', full_path)
317 log.debug('path: %s is not a valid repo group !', full_path)
318 return False
318 return False
319
319
320
320
def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
    """
    Ask a yes/no question on stdin and return the answer as a boolean.

    :param prompt: text shown to the user
    :param retries: number of unrecognized answers tolerated before giving up
    :param complaint: message printed after each unrecognized answer
    :raises IOError: when the user exhausts all retries
    """
    attempts_left = retries
    while True:
        answer = raw_input(prompt).lower()
        if answer in ('y', 'ye', 'yes'):
            return True
        if answer in ('n', 'no', 'nop', 'nope'):
            return False
        attempts_left -= 1
        if attempts_left < 0:
            raise IOError
        print(complaint)
332
332
# Known mercurial config sections, propagated from mercurial documentation.
ui_sections = [
    'alias',
    'auth',
    'decode/encode',
    'defaults',
    'diff',
    'email',
    'extensions',
    'format',
    'merge-patterns',
    'merge-tools',
    'hooks',
    'http_proxy',
    'smtp',
    'patch',
    'paths',
    'profiling',
    'server',
    'trusted',
    'ui',
    'web',
]
345
345
346
346
def config_data_from_db(clear_session=True, repo=None):
    """
    Read the configuration data from the database and return configuration
    tuples.

    :param clear_session: remove the scoped DB session when done
    :param repo: optional repository to scope the VCS settings to
    :return: list of ``(section, key, value)`` tuples
    """
    from rhodecode.model.settings import VcsSettingsModel

    sa = meta.Session()
    settings_model = VcsSettingsModel(repo=repo, sa=sa)
    ui_settings = settings_model.get_ui_settings()

    config = []
    ui_data = []
    for setting in ui_settings:
        if not setting.active:
            continue
        ui_data.append((setting.section, setting.key, setting.value))
        config.append((
            safe_str(setting.section), safe_str(setting.key),
            safe_str(setting.value)))
        if setting.key == 'push_ssl':
            # force set push_ssl requirement to False, rhodecode
            # handles that
            config.append((
                safe_str(setting.section), safe_str(setting.key), False))

    log.debug(
        'settings ui from db: %s',
        ','.join('[{}] {}={}'.format(*entry) for entry in ui_data))

    if clear_session:
        meta.Session.remove()

    # TODO: mikhail: probably it makes no sense to re-read hooks information.
    # It's already there and activated/deactivated
    skip_entries = []
    enabled_hook_classes = get_enabled_hook_classes(ui_settings)
    if 'pull' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
    if 'push' not in enabled_hook_classes:
        skip_entries.extend([
            ('hooks', RhodeCodeUi.HOOK_PRE_PUSH),
            ('hooks', RhodeCodeUi.HOOK_PRETX_PUSH),
            ('hooks', RhodeCodeUi.HOOK_PUSH_KEY),
        ])

    # drop entries belonging to disabled hooks
    return [entry for entry in config if entry[:2] not in skip_entries]
393
393
394
394
def make_db_config(clear_session=True, repo=None):
    """
    Create a :class:`Config` instance based on the values in the database.
    """
    db_config = Config()
    entries = config_data_from_db(clear_session=clear_session, repo=repo)
    for section, option, value in entries:
        db_config.set(section, option, value)
    return db_config
404
404
405
405
def get_enabled_hook_classes(ui_settings):
    """
    Return the enabled hook classes.

    :param ui_settings: List of ui_settings as returned
        by :meth:`VcsSettingsModel.get_ui_settings`

    :return: a list with the enabled hook classes. The order is not guaranteed.
    :rtype: list
    """
    hook_names = {
        RhodeCodeUi.HOOK_PUSH: 'push',
        RhodeCodeUi.HOOK_PULL: 'pull',
        RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size',
    }
    # keep only active 'hooks' entries that map to a known hook class
    return [
        hook_names[key]
        for section, key, value, active in ui_settings
        if section == 'hooks' and active and key in hook_names]
433
433
434
434
def set_rhodecode_config(config):
    """
    Updates pyramid config with new settings from database

    :param config: mutable mapping updated in place with all app settings
    """
    from rhodecode.model.settings import SettingsModel

    for key, value in SettingsModel().get_all_settings().items():
        config[key] = value
446
446
447
447
def get_rhodecode_realm():
    """
    Return the rhodecode realm from database.
    """
    from rhodecode.model.settings import SettingsModel

    realm_setting = SettingsModel().get_setting_by_name('realm')
    return safe_str(realm_setting.app_settings_value)
455
455
456
456
def get_rhodecode_base_path():
    """
    Returns the base path. The base path is the filesystem path which points
    to the repository store.
    """
    from rhodecode.model.settings import SettingsModel

    paths_entry = SettingsModel().get_ui_by_section_and_key('paths', '/')
    return safe_str(paths_entry.ui_value)
465
465
466
466
def map_groups(path):
    """
    Given a full path to a repository, create all nested groups that this
    repo is inside. This function creates parent-child relationships between
    groups and creates default perms for all new groups.

    :param path: full path to repository
    :return: the innermost :class:`RepoGroup` (direct parent of the repo),
        or ``None`` when the repo lives at the root level
    """
    from rhodecode.model.repo_group import RepoGroupModel
    sa = meta.Session()
    groups = path.split(Repository.NAME_SEP)
    parent = None
    group = None

    # last element is repo in nested groups structure
    groups = groups[:-1]
    rgm = RepoGroupModel(sa)
    owner = User.get_first_super_admin()
    for lvl, group_name in enumerate(groups):
        # build the full group name up to this nesting level
        group_name = '/'.join(groups[:lvl] + [group_name])
        group = RepoGroup.get_by_group_name(group_name)
        desc = '%s group' % group_name

        # skip folders that are now removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s',
                      lvl, group_name)
            group = RepoGroup(group_name, parent)
            group.group_description = desc
            group.user = owner
            sa.add(group)
            perm_obj = rgm._create_default_perms(group)
            sa.add(perm_obj)
            # flush so the new group gets an id before becoming a parent
            sa.flush()

        parent = group
    return group
507
507
508
508
def repo2db_mapper(initial_repo_list, remove_obsolete=False):
    """
    maps all repos given in initial_repo_list, non existing repositories
    are created, if remove_obsolete is True it also checks for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: dict of repositories found by scanning methods,
        mapping repo name to a scm repo object
    :param remove_obsolete: check for obsolete entries in database
    :return: tuple of (added repo names, removed repo/group names)
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.repo_group import RepoGroupModel
    from rhodecode.model.settings import SettingsModel

    sa = meta.Session()
    repo_model = RepoModel()
    user = User.get_first_super_admin()
    added = []

    # creation defaults
    defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in initial_repo_list.items():
        group = map_groups(name)
        unicode_name = safe_unicode(name)
        db_repo = repo_model.get_by_repo_name(unicode_name)
        # found repo that is on filesystem not in RhodeCode database
        if not db_repo:
            log.info('repository %s not found, creating now', name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)

            db_repo = repo_model._create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repo_group=getattr(group, 'group_id', None),
                owner=user,
                enable_locking=enable_locking,
                enable_downloads=enable_downloads,
                enable_statistics=enable_statistics,
                private=private,
                state=Repository.STATE_CREATED
            )
            sa.commit()
            # we added that repo just now, and make sure we updated server info
            if db_repo.repo_type == 'git':
                git_repo = db_repo.scm_instance()
                # update repository server-info
                log.debug('Running update server info')
                git_repo._update_server_info()

            db_repo.update_commit_cache()

            config = db_repo._config
            config.set('extensions', 'largefiles', '')
            repo = db_repo.scm_instance(config=config)
            repo.install_hooks()

    removed = []
    if remove_obsolete:
        # remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            # NOTE: membership test directly on the mapping instead of
            # building a fresh key list on every iteration (O(n^2) on py2)
            if repo.repo_name not in initial_repo_list:
                log.debug("Removing non-existing repository found in db `%s`",
                          repo.repo_name)
                try:
                    RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
                    sa.commit()
                    removed.append(repo.repo_name)
                except Exception:
                    # don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()

        def splitter(full_repo_name):
            # return the parent group portion of `group/sub/repo`, or None
            # for a root-level repository
            _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
            gr_name = None
            if len(_parts) == 2:
                gr_name = _parts[0]
            return gr_name

        # call splitter once per name and use a set for O(1) membership tests
        initial_repo_group_list = {
            gr_name for gr_name in map(splitter, initial_repo_list.keys())
            if gr_name}

        # remove from database those repository groups that are not in the
        # filesystem due to parent child relationships we need to delete them
        # in a specific order of most nested first
        all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
        nested_sort = lambda gr: len(gr.split('/'))
        for group_name in sorted(all_groups, key=nested_sort, reverse=True):
            if group_name not in initial_repo_group_list:
                repo_group = RepoGroup.get_by_group_name(group_name)
                if (repo_group.children.all() or
                        not RepoGroupModel().check_exist_filesystem(
                            group_name=group_name, exc_on_failure=False)):
                    # still has children or it does exist on disk; keep it
                    continue

                log.info(
                    'Removing non-existing repository group found in db `%s`',
                    group_name)
                try:
                    RepoGroupModel(sa).delete(group_name, fs_remove=False)
                    sa.commit()
                    removed.append(group_name)
                except Exception:
                    # group removal failures are fatal: a stale group would
                    # break the parent/child delete ordering
                    log.exception(
                        'Unable to remove repository group `%s`',
                        group_name)
                    sa.rollback()
                    raise

    return added, removed
628
628
629
629
def load_rcextensions(root_path):
    """
    Import the optional ``rcextensions`` package located under ``root_path``
    and register it as ``rhodecode.EXTENSIONS``. Missing rcextensions are
    tolerated (a warning is logged).

    :param root_path: directory that contains the ``rcextensions`` package
    """
    import rhodecode
    from rhodecode.config import conf

    path = root_path  # os.path.join on a single argument was a no-op
    # make `rcextensions` importable; guard so repeated calls don't keep
    # growing sys.path with duplicate entries
    if path not in sys.path:
        sys.path.append(path)
    try:
        rcextensions = __import__('rcextensions')
    except ImportError:
        log.warn('Unable to load rcextensions from %s', path)
        rcextensions = None

    if rcextensions:
        log.debug('Found rcextensions module loaded %s...', rcextensions)
        rhodecode.EXTENSIONS = rcextensions

    # Additional mappings that are not present in the pygments lexers
    conf.LANGUAGES_EXTENSIONS_MAP.update(
        getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
650
650
def get_custom_lexer(extension):
    """
    returns a custom lexer if it is defined in rcextensions module, or None
    if there's no custom lexer defined
    """
    import rhodecode
    from pygments import lexers

    # custom override made by RhodeCode
    if extension in ['mako']:
        return lexers.get_lexer_by_name('html+mako')

    # check if we didn't define this extension as other lexer
    extra_lexers = None
    if rhodecode.EXTENSIONS:
        extra_lexers = getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
    if extra_lexers and extension in extra_lexers:
        return lexers.get_lexer_by_name(extra_lexers[extension])
668
668
669
669
670 #==============================================================================
670 #==============================================================================
671 # TEST FUNCTIONS AND CREATORS
671 # TEST FUNCTIONS AND CREATORS
672 #==============================================================================
672 #==============================================================================
def create_test_index(repo_location, config):
    """
    Makes default test index.
    """
    import rc_testdata

    index_destination = os.path.dirname(config['search.location'])
    rc_testdata.extract_search_index('vcs_search_index', index_destination)
681
681
682
682
def create_test_directory(test_path):
    """
    Create test directory if it doesn't exist.
    """
    if os.path.isdir(test_path):
        return
    log.debug('Creating testdir %s', test_path)
    os.makedirs(test_path)
690
690
691
691
def create_test_database(test_path, config):
    """
    Makes a fresh database.
    """
    from rhodecode.lib.db_manage import DbManage

    # PART ONE create db
    db_url = config['sqlalchemy.db1.url']
    log.debug('making test db %s', db_url)

    dbmanage = DbManage(
        log_sql=False, dbconf=db_url, root=config['here'],
        tests=True, cli_args={'force_ask': True})
    dbmanage.create_tables(override=True)
    dbmanage.set_db_version()
    # for tests dynamically set new root paths based on generated content
    dbmanage.create_settings(dbmanage.config_prompt(test_path))
    dbmanage.create_default_user()
    dbmanage.create_test_admin_and_users()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()
713
713
714
714
def create_test_repositories(test_path, config):
    """
    Creates test repositories in the temporary directory. Repositories are
    extracted from archives within the rc_testdata package.
    """
    import rc_testdata
    from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO

    log.debug('making test vcs repositories')

    idx_path = config['search.location']
    data_path = config['cache_dir']

    # clean index and data
    for stale_path in (idx_path, data_path):
        if stale_path and os.path.exists(stale_path):
            log.debug('remove %s', stale_path)
            shutil.rmtree(stale_path)

    rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
    rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))

    # Note: Subversion is in the process of being integrated with the system,
    # until we have a properly packed version of the test svn repository, this
    # tries to copy over the repo from a package "rc_testdata"
    svn_repo_path = rc_testdata.get_svn_repo_archive()
    with tarfile.open(svn_repo_path) as tar:
        # NOTE(review): extractall on an untrusted archive can write outside
        # the target dir; rc_testdata is bundled/trusted here, so this is fine
        tar.extractall(jn(test_path, SVN_REPO))
746
746
747
747
def password_changed(auth_user, session):
    """
    Report whether the user's current password hash differs from the one
    recorded in the web session.
    """
    # Never report password change in case of default user or anonymous user.
    is_default_user = auth_user.username == User.DEFAULT_USER
    is_anonymous = auth_user.user_id is None
    if is_default_user or is_anonymous:
        return False

    current_hash = None
    if auth_user.password:
        current_hash = md5(auth_user.password)
    session_user = session.get('rhodecode_user', {})
    session_hash = session_user.get('password', '')
    return current_hash != session_hash
757
757
758
758
def read_opensource_licenses():
    """
    Return the bundled open-source license metadata, cached after first load.
    """
    global _license_cache

    if not _license_cache:
        raw_licenses = pkg_resources.resource_string(
            'rhodecode', 'config/licenses.json')
        _license_cache = json.loads(raw_licenses)

    return _license_cache
768
768
769
769
def generate_platform_uuid():
    """
    Generates platform UUID based on it's name
    """
    import platform

    try:
        # single-element list kept for join-compatibility with the format
        platform_parts = [platform.platform()]
        return hashlib.sha256(':'.join(platform_parts)).hexdigest()
    except Exception as e:
        log.error('Failed to generate host uuid: %s', e)
        return 'UNDEFINED'
@@ -1,1730 +1,1731 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2018 RhodeCode GmbH
3 # Copyright (C) 2012-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26
26
27 import json
27 import json
28 import logging
28 import logging
29 import datetime
29 import datetime
30 import urllib
30 import urllib
31 import collections
31 import collections
32
32
33 from pyramid.threadlocal import get_current_request
33 from pyramid.threadlocal import get_current_request
34
34
35 from rhodecode import events
35 from rhodecode import events
36 from rhodecode.translation import lazy_ugettext#, _
36 from rhodecode.translation import lazy_ugettext#, _
37 from rhodecode.lib import helpers as h, hooks_utils, diffs
37 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 from rhodecode.lib import audit_logger
38 from rhodecode.lib import audit_logger
39 from rhodecode.lib.compat import OrderedDict
39 from rhodecode.lib.compat import OrderedDict
40 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
40 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 from rhodecode.lib.markup_renderer import (
41 from rhodecode.lib.markup_renderer import (
42 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
42 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
43 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
43 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
44 from rhodecode.lib.vcs.backends.base import (
44 from rhodecode.lib.vcs.backends.base import (
45 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
45 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
46 from rhodecode.lib.vcs.conf import settings as vcs_settings
46 from rhodecode.lib.vcs.conf import settings as vcs_settings
47 from rhodecode.lib.vcs.exceptions import (
47 from rhodecode.lib.vcs.exceptions import (
48 CommitDoesNotExistError, EmptyRepositoryError)
48 CommitDoesNotExistError, EmptyRepositoryError)
49 from rhodecode.model import BaseModel
49 from rhodecode.model import BaseModel
50 from rhodecode.model.changeset_status import ChangesetStatusModel
50 from rhodecode.model.changeset_status import ChangesetStatusModel
51 from rhodecode.model.comment import CommentsModel
51 from rhodecode.model.comment import CommentsModel
52 from rhodecode.model.db import (
52 from rhodecode.model.db import (
53 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
53 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
54 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
54 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
55 from rhodecode.model.meta import Session
55 from rhodecode.model.meta import Session
56 from rhodecode.model.notification import NotificationModel, \
56 from rhodecode.model.notification import NotificationModel, \
57 EmailNotificationModel
57 EmailNotificationModel
58 from rhodecode.model.scm import ScmModel
58 from rhodecode.model.scm import ScmModel
59 from rhodecode.model.settings import VcsSettingsModel
59 from rhodecode.model.settings import VcsSettingsModel
60
60
61
61
# Module-level logger for this model.
log = logging.getLogger(__name__)


# Result of refreshing a pull request's commits during a PR update:
# whether the update executed, the reason, old/new references, the
# commit-level changes, and which side (source/target) moved.
UpdateResponse = collections.namedtuple('UpdateResponse', [
    'executed', 'reason', 'new', 'old', 'changes',
    'source_changed', 'target_changed'])
70
70
71
71
72 class PullRequestModel(BaseModel):
72 class PullRequestModel(BaseModel):
73
73
74 cls = PullRequest
74 cls = PullRequest
75
75
76 DIFF_CONTEXT = 3
76 DIFF_CONTEXT = 3
77
77
78 MERGE_STATUS_MESSAGES = {
78 MERGE_STATUS_MESSAGES = {
79 MergeFailureReason.NONE: lazy_ugettext(
79 MergeFailureReason.NONE: lazy_ugettext(
80 'This pull request can be automatically merged.'),
80 'This pull request can be automatically merged.'),
81 MergeFailureReason.UNKNOWN: lazy_ugettext(
81 MergeFailureReason.UNKNOWN: lazy_ugettext(
82 'This pull request cannot be merged because of an unhandled'
82 'This pull request cannot be merged because of an unhandled'
83 ' exception.'),
83 ' exception.'),
84 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
84 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
85 'This pull request cannot be merged because of merge conflicts.'),
85 'This pull request cannot be merged because of merge conflicts.'),
86 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
86 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
87 'This pull request could not be merged because push to target'
87 'This pull request could not be merged because push to target'
88 ' failed.'),
88 ' failed.'),
89 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
89 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
90 'This pull request cannot be merged because the target is not a'
90 'This pull request cannot be merged because the target is not a'
91 ' head.'),
91 ' head.'),
92 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
92 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
93 'This pull request cannot be merged because the source contains'
93 'This pull request cannot be merged because the source contains'
94 ' more branches than the target.'),
94 ' more branches than the target.'),
95 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
95 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
96 'This pull request cannot be merged because the target has'
96 'This pull request cannot be merged because the target has'
97 ' multiple heads.'),
97 ' multiple heads.'),
98 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
98 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
99 'This pull request cannot be merged because the target repository'
99 'This pull request cannot be merged because the target repository'
100 ' is locked.'),
100 ' is locked.'),
101 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
101 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
102 'This pull request cannot be merged because the target or the '
102 'This pull request cannot be merged because the target or the '
103 'source reference is missing.'),
103 'source reference is missing.'),
104 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
104 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
105 'This pull request cannot be merged because the target '
105 'This pull request cannot be merged because the target '
106 'reference is missing.'),
106 'reference is missing.'),
107 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
107 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
108 'This pull request cannot be merged because the source '
108 'This pull request cannot be merged because the source '
109 'reference is missing.'),
109 'reference is missing.'),
110 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
110 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
111 'This pull request cannot be merged because of conflicts related '
111 'This pull request cannot be merged because of conflicts related '
112 'to sub repositories.'),
112 'to sub repositories.'),
113 }
113 }
114
114
115 UPDATE_STATUS_MESSAGES = {
115 UPDATE_STATUS_MESSAGES = {
116 UpdateFailureReason.NONE: lazy_ugettext(
116 UpdateFailureReason.NONE: lazy_ugettext(
117 'Pull request update successful.'),
117 'Pull request update successful.'),
118 UpdateFailureReason.UNKNOWN: lazy_ugettext(
118 UpdateFailureReason.UNKNOWN: lazy_ugettext(
119 'Pull request update failed because of an unknown error.'),
119 'Pull request update failed because of an unknown error.'),
120 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
120 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
121 'No update needed because the source and target have not changed.'),
121 'No update needed because the source and target have not changed.'),
122 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
122 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
123 'Pull request cannot be updated because the reference type is '
123 'Pull request cannot be updated because the reference type is '
124 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
124 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
125 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
125 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
126 'This pull request cannot be updated because the target '
126 'This pull request cannot be updated because the target '
127 'reference is missing.'),
127 'reference is missing.'),
128 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
128 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
129 'This pull request cannot be updated because the source '
129 'This pull request cannot be updated because the source '
130 'reference is missing.'),
130 'reference is missing.'),
131 }
131 }
132
132
133 def __get_pull_request(self, pull_request):
133 def __get_pull_request(self, pull_request):
134 return self._get_instance((
134 return self._get_instance((
135 PullRequest, PullRequestVersion), pull_request)
135 PullRequest, PullRequestVersion), pull_request)
136
136
137 def _check_perms(self, perms, pull_request, user, api=False):
137 def _check_perms(self, perms, pull_request, user, api=False):
138 if not api:
138 if not api:
139 return h.HasRepoPermissionAny(*perms)(
139 return h.HasRepoPermissionAny(*perms)(
140 user=user, repo_name=pull_request.target_repo.repo_name)
140 user=user, repo_name=pull_request.target_repo.repo_name)
141 else:
141 else:
142 return h.HasRepoPermissionAnyApi(*perms)(
142 return h.HasRepoPermissionAnyApi(*perms)(
143 user=user, repo_name=pull_request.target_repo.repo_name)
143 user=user, repo_name=pull_request.target_repo.repo_name)
144
144
145 def check_user_read(self, pull_request, user, api=False):
145 def check_user_read(self, pull_request, user, api=False):
146 _perms = ('repository.admin', 'repository.write', 'repository.read',)
146 _perms = ('repository.admin', 'repository.write', 'repository.read',)
147 return self._check_perms(_perms, pull_request, user, api)
147 return self._check_perms(_perms, pull_request, user, api)
148
148
149 def check_user_merge(self, pull_request, user, api=False):
149 def check_user_merge(self, pull_request, user, api=False):
150 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
150 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
151 return self._check_perms(_perms, pull_request, user, api)
151 return self._check_perms(_perms, pull_request, user, api)
152
152
153 def check_user_update(self, pull_request, user, api=False):
153 def check_user_update(self, pull_request, user, api=False):
154 owner = user.user_id == pull_request.user_id
154 owner = user.user_id == pull_request.user_id
155 return self.check_user_merge(pull_request, user, api) or owner
155 return self.check_user_merge(pull_request, user, api) or owner
156
156
157 def check_user_delete(self, pull_request, user):
157 def check_user_delete(self, pull_request, user):
158 owner = user.user_id == pull_request.user_id
158 owner = user.user_id == pull_request.user_id
159 _perms = ('repository.admin',)
159 _perms = ('repository.admin',)
160 return self._check_perms(_perms, pull_request, user) or owner
160 return self._check_perms(_perms, pull_request, user) or owner
161
161
162 def check_user_change_status(self, pull_request, user, api=False):
162 def check_user_change_status(self, pull_request, user, api=False):
163 reviewer = user.user_id in [x.user_id for x in
163 reviewer = user.user_id in [x.user_id for x in
164 pull_request.reviewers]
164 pull_request.reviewers]
165 return self.check_user_update(pull_request, user, api) or reviewer
165 return self.check_user_update(pull_request, user, api) or reviewer
166
166
167 def check_user_comment(self, pull_request, user):
167 def check_user_comment(self, pull_request, user):
168 owner = user.user_id == pull_request.user_id
168 owner = user.user_id == pull_request.user_id
169 return self.check_user_read(pull_request, user) or owner
169 return self.check_user_read(pull_request, user) or owner
170
170
171 def get(self, pull_request):
171 def get(self, pull_request):
172 return self.__get_pull_request(pull_request)
172 return self.__get_pull_request(pull_request)
173
173
174 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
174 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
175 opened_by=None, order_by=None,
175 opened_by=None, order_by=None,
176 order_dir='desc'):
176 order_dir='desc'):
177 repo = None
177 repo = None
178 if repo_name:
178 if repo_name:
179 repo = self._get_repo(repo_name)
179 repo = self._get_repo(repo_name)
180
180
181 q = PullRequest.query()
181 q = PullRequest.query()
182
182
183 # source or target
183 # source or target
184 if repo and source:
184 if repo and source:
185 q = q.filter(PullRequest.source_repo == repo)
185 q = q.filter(PullRequest.source_repo == repo)
186 elif repo:
186 elif repo:
187 q = q.filter(PullRequest.target_repo == repo)
187 q = q.filter(PullRequest.target_repo == repo)
188
188
189 # closed,opened
189 # closed,opened
190 if statuses:
190 if statuses:
191 q = q.filter(PullRequest.status.in_(statuses))
191 q = q.filter(PullRequest.status.in_(statuses))
192
192
193 # opened by filter
193 # opened by filter
194 if opened_by:
194 if opened_by:
195 q = q.filter(PullRequest.user_id.in_(opened_by))
195 q = q.filter(PullRequest.user_id.in_(opened_by))
196
196
197 if order_by:
197 if order_by:
198 order_map = {
198 order_map = {
199 'name_raw': PullRequest.pull_request_id,
199 'name_raw': PullRequest.pull_request_id,
200 'title': PullRequest.title,
200 'title': PullRequest.title,
201 'updated_on_raw': PullRequest.updated_on,
201 'updated_on_raw': PullRequest.updated_on,
202 'target_repo': PullRequest.target_repo_id
202 'target_repo': PullRequest.target_repo_id
203 }
203 }
204 if order_dir == 'asc':
204 if order_dir == 'asc':
205 q = q.order_by(order_map[order_by].asc())
205 q = q.order_by(order_map[order_by].asc())
206 else:
206 else:
207 q = q.order_by(order_map[order_by].desc())
207 q = q.order_by(order_map[order_by].desc())
208
208
209 return q
209 return q
210
210
211 def count_all(self, repo_name, source=False, statuses=None,
211 def count_all(self, repo_name, source=False, statuses=None,
212 opened_by=None):
212 opened_by=None):
213 """
213 """
214 Count the number of pull requests for a specific repository.
214 Count the number of pull requests for a specific repository.
215
215
216 :param repo_name: target or source repo
216 :param repo_name: target or source repo
217 :param source: boolean flag to specify if repo_name refers to source
217 :param source: boolean flag to specify if repo_name refers to source
218 :param statuses: list of pull request statuses
218 :param statuses: list of pull request statuses
219 :param opened_by: author user of the pull request
219 :param opened_by: author user of the pull request
220 :returns: int number of pull requests
220 :returns: int number of pull requests
221 """
221 """
222 q = self._prepare_get_all_query(
222 q = self._prepare_get_all_query(
223 repo_name, source=source, statuses=statuses, opened_by=opened_by)
223 repo_name, source=source, statuses=statuses, opened_by=opened_by)
224
224
225 return q.count()
225 return q.count()
226
226
227 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
227 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
228 offset=0, length=None, order_by=None, order_dir='desc'):
228 offset=0, length=None, order_by=None, order_dir='desc'):
229 """
229 """
230 Get all pull requests for a specific repository.
230 Get all pull requests for a specific repository.
231
231
232 :param repo_name: target or source repo
232 :param repo_name: target or source repo
233 :param source: boolean flag to specify if repo_name refers to source
233 :param source: boolean flag to specify if repo_name refers to source
234 :param statuses: list of pull request statuses
234 :param statuses: list of pull request statuses
235 :param opened_by: author user of the pull request
235 :param opened_by: author user of the pull request
236 :param offset: pagination offset
236 :param offset: pagination offset
237 :param length: length of returned list
237 :param length: length of returned list
238 :param order_by: order of the returned list
238 :param order_by: order of the returned list
239 :param order_dir: 'asc' or 'desc' ordering direction
239 :param order_dir: 'asc' or 'desc' ordering direction
240 :returns: list of pull requests
240 :returns: list of pull requests
241 """
241 """
242 q = self._prepare_get_all_query(
242 q = self._prepare_get_all_query(
243 repo_name, source=source, statuses=statuses, opened_by=opened_by,
243 repo_name, source=source, statuses=statuses, opened_by=opened_by,
244 order_by=order_by, order_dir=order_dir)
244 order_by=order_by, order_dir=order_dir)
245
245
246 if length:
246 if length:
247 pull_requests = q.limit(length).offset(offset).all()
247 pull_requests = q.limit(length).offset(offset).all()
248 else:
248 else:
249 pull_requests = q.all()
249 pull_requests = q.all()
250
250
251 return pull_requests
251 return pull_requests
252
252
253 def count_awaiting_review(self, repo_name, source=False, statuses=None,
253 def count_awaiting_review(self, repo_name, source=False, statuses=None,
254 opened_by=None):
254 opened_by=None):
255 """
255 """
256 Count the number of pull requests for a specific repository that are
256 Count the number of pull requests for a specific repository that are
257 awaiting review.
257 awaiting review.
258
258
259 :param repo_name: target or source repo
259 :param repo_name: target or source repo
260 :param source: boolean flag to specify if repo_name refers to source
260 :param source: boolean flag to specify if repo_name refers to source
261 :param statuses: list of pull request statuses
261 :param statuses: list of pull request statuses
262 :param opened_by: author user of the pull request
262 :param opened_by: author user of the pull request
263 :returns: int number of pull requests
263 :returns: int number of pull requests
264 """
264 """
265 pull_requests = self.get_awaiting_review(
265 pull_requests = self.get_awaiting_review(
266 repo_name, source=source, statuses=statuses, opened_by=opened_by)
266 repo_name, source=source, statuses=statuses, opened_by=opened_by)
267
267
268 return len(pull_requests)
268 return len(pull_requests)
269
269
270 def get_awaiting_review(self, repo_name, source=False, statuses=None,
270 def get_awaiting_review(self, repo_name, source=False, statuses=None,
271 opened_by=None, offset=0, length=None,
271 opened_by=None, offset=0, length=None,
272 order_by=None, order_dir='desc'):
272 order_by=None, order_dir='desc'):
273 """
273 """
274 Get all pull requests for a specific repository that are awaiting
274 Get all pull requests for a specific repository that are awaiting
275 review.
275 review.
276
276
277 :param repo_name: target or source repo
277 :param repo_name: target or source repo
278 :param source: boolean flag to specify if repo_name refers to source
278 :param source: boolean flag to specify if repo_name refers to source
279 :param statuses: list of pull request statuses
279 :param statuses: list of pull request statuses
280 :param opened_by: author user of the pull request
280 :param opened_by: author user of the pull request
281 :param offset: pagination offset
281 :param offset: pagination offset
282 :param length: length of returned list
282 :param length: length of returned list
283 :param order_by: order of the returned list
283 :param order_by: order of the returned list
284 :param order_dir: 'asc' or 'desc' ordering direction
284 :param order_dir: 'asc' or 'desc' ordering direction
285 :returns: list of pull requests
285 :returns: list of pull requests
286 """
286 """
287 pull_requests = self.get_all(
287 pull_requests = self.get_all(
288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
289 order_by=order_by, order_dir=order_dir)
289 order_by=order_by, order_dir=order_dir)
290
290
291 _filtered_pull_requests = []
291 _filtered_pull_requests = []
292 for pr in pull_requests:
292 for pr in pull_requests:
293 status = pr.calculated_review_status()
293 status = pr.calculated_review_status()
294 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
294 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
295 ChangesetStatus.STATUS_UNDER_REVIEW]:
295 ChangesetStatus.STATUS_UNDER_REVIEW]:
296 _filtered_pull_requests.append(pr)
296 _filtered_pull_requests.append(pr)
297 if length:
297 if length:
298 return _filtered_pull_requests[offset:offset+length]
298 return _filtered_pull_requests[offset:offset+length]
299 else:
299 else:
300 return _filtered_pull_requests
300 return _filtered_pull_requests
301
301
302 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
302 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
303 opened_by=None, user_id=None):
303 opened_by=None, user_id=None):
304 """
304 """
305 Count the number of pull requests for a specific repository that are
305 Count the number of pull requests for a specific repository that are
306 awaiting review from a specific user.
306 awaiting review from a specific user.
307
307
308 :param repo_name: target or source repo
308 :param repo_name: target or source repo
309 :param source: boolean flag to specify if repo_name refers to source
309 :param source: boolean flag to specify if repo_name refers to source
310 :param statuses: list of pull request statuses
310 :param statuses: list of pull request statuses
311 :param opened_by: author user of the pull request
311 :param opened_by: author user of the pull request
312 :param user_id: reviewer user of the pull request
312 :param user_id: reviewer user of the pull request
313 :returns: int number of pull requests
313 :returns: int number of pull requests
314 """
314 """
315 pull_requests = self.get_awaiting_my_review(
315 pull_requests = self.get_awaiting_my_review(
316 repo_name, source=source, statuses=statuses, opened_by=opened_by,
316 repo_name, source=source, statuses=statuses, opened_by=opened_by,
317 user_id=user_id)
317 user_id=user_id)
318
318
319 return len(pull_requests)
319 return len(pull_requests)
320
320
321 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
321 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
322 opened_by=None, user_id=None, offset=0,
322 opened_by=None, user_id=None, offset=0,
323 length=None, order_by=None, order_dir='desc'):
323 length=None, order_by=None, order_dir='desc'):
324 """
324 """
325 Get all pull requests for a specific repository that are awaiting
325 Get all pull requests for a specific repository that are awaiting
326 review from a specific user.
326 review from a specific user.
327
327
328 :param repo_name: target or source repo
328 :param repo_name: target or source repo
329 :param source: boolean flag to specify if repo_name refers to source
329 :param source: boolean flag to specify if repo_name refers to source
330 :param statuses: list of pull request statuses
330 :param statuses: list of pull request statuses
331 :param opened_by: author user of the pull request
331 :param opened_by: author user of the pull request
332 :param user_id: reviewer user of the pull request
332 :param user_id: reviewer user of the pull request
333 :param offset: pagination offset
333 :param offset: pagination offset
334 :param length: length of returned list
334 :param length: length of returned list
335 :param order_by: order of the returned list
335 :param order_by: order of the returned list
336 :param order_dir: 'asc' or 'desc' ordering direction
336 :param order_dir: 'asc' or 'desc' ordering direction
337 :returns: list of pull requests
337 :returns: list of pull requests
338 """
338 """
339 pull_requests = self.get_all(
339 pull_requests = self.get_all(
340 repo_name, source=source, statuses=statuses, opened_by=opened_by,
340 repo_name, source=source, statuses=statuses, opened_by=opened_by,
341 order_by=order_by, order_dir=order_dir)
341 order_by=order_by, order_dir=order_dir)
342
342
343 _my = PullRequestModel().get_not_reviewed(user_id)
343 _my = PullRequestModel().get_not_reviewed(user_id)
344 my_participation = []
344 my_participation = []
345 for pr in pull_requests:
345 for pr in pull_requests:
346 if pr in _my:
346 if pr in _my:
347 my_participation.append(pr)
347 my_participation.append(pr)
348 _filtered_pull_requests = my_participation
348 _filtered_pull_requests = my_participation
349 if length:
349 if length:
350 return _filtered_pull_requests[offset:offset+length]
350 return _filtered_pull_requests[offset:offset+length]
351 else:
351 else:
352 return _filtered_pull_requests
352 return _filtered_pull_requests
353
353
354 def get_not_reviewed(self, user_id):
354 def get_not_reviewed(self, user_id):
355 return [
355 return [
356 x.pull_request for x in PullRequestReviewers.query().filter(
356 x.pull_request for x in PullRequestReviewers.query().filter(
357 PullRequestReviewers.user_id == user_id).all()
357 PullRequestReviewers.user_id == user_id).all()
358 ]
358 ]
359
359
360 def _prepare_participating_query(self, user_id=None, statuses=None,
360 def _prepare_participating_query(self, user_id=None, statuses=None,
361 order_by=None, order_dir='desc'):
361 order_by=None, order_dir='desc'):
362 q = PullRequest.query()
362 q = PullRequest.query()
363 if user_id:
363 if user_id:
364 reviewers_subquery = Session().query(
364 reviewers_subquery = Session().query(
365 PullRequestReviewers.pull_request_id).filter(
365 PullRequestReviewers.pull_request_id).filter(
366 PullRequestReviewers.user_id == user_id).subquery()
366 PullRequestReviewers.user_id == user_id).subquery()
367 user_filter = or_(
367 user_filter = or_(
368 PullRequest.user_id == user_id,
368 PullRequest.user_id == user_id,
369 PullRequest.pull_request_id.in_(reviewers_subquery)
369 PullRequest.pull_request_id.in_(reviewers_subquery)
370 )
370 )
371 q = PullRequest.query().filter(user_filter)
371 q = PullRequest.query().filter(user_filter)
372
372
373 # closed,opened
373 # closed,opened
374 if statuses:
374 if statuses:
375 q = q.filter(PullRequest.status.in_(statuses))
375 q = q.filter(PullRequest.status.in_(statuses))
376
376
377 if order_by:
377 if order_by:
378 order_map = {
378 order_map = {
379 'name_raw': PullRequest.pull_request_id,
379 'name_raw': PullRequest.pull_request_id,
380 'title': PullRequest.title,
380 'title': PullRequest.title,
381 'updated_on_raw': PullRequest.updated_on,
381 'updated_on_raw': PullRequest.updated_on,
382 'target_repo': PullRequest.target_repo_id
382 'target_repo': PullRequest.target_repo_id
383 }
383 }
384 if order_dir == 'asc':
384 if order_dir == 'asc':
385 q = q.order_by(order_map[order_by].asc())
385 q = q.order_by(order_map[order_by].asc())
386 else:
386 else:
387 q = q.order_by(order_map[order_by].desc())
387 q = q.order_by(order_map[order_by].desc())
388
388
389 return q
389 return q
390
390
391 def count_im_participating_in(self, user_id=None, statuses=None):
391 def count_im_participating_in(self, user_id=None, statuses=None):
392 q = self._prepare_participating_query(user_id, statuses=statuses)
392 q = self._prepare_participating_query(user_id, statuses=statuses)
393 return q.count()
393 return q.count()
394
394
395 def get_im_participating_in(
395 def get_im_participating_in(
396 self, user_id=None, statuses=None, offset=0,
396 self, user_id=None, statuses=None, offset=0,
397 length=None, order_by=None, order_dir='desc'):
397 length=None, order_by=None, order_dir='desc'):
398 """
398 """
399 Get all Pull requests that i'm participating in, or i have opened
399 Get all Pull requests that i'm participating in, or i have opened
400 """
400 """
401
401
402 q = self._prepare_participating_query(
402 q = self._prepare_participating_query(
403 user_id, statuses=statuses, order_by=order_by,
403 user_id, statuses=statuses, order_by=order_by,
404 order_dir=order_dir)
404 order_dir=order_dir)
405
405
406 if length:
406 if length:
407 pull_requests = q.limit(length).offset(offset).all()
407 pull_requests = q.limit(length).offset(offset).all()
408 else:
408 else:
409 pull_requests = q.all()
409 pull_requests = q.all()
410
410
411 return pull_requests
411 return pull_requests
412
412
413 def get_versions(self, pull_request):
413 def get_versions(self, pull_request):
414 """
414 """
415 returns version of pull request sorted by ID descending
415 returns version of pull request sorted by ID descending
416 """
416 """
417 return PullRequestVersion.query()\
417 return PullRequestVersion.query()\
418 .filter(PullRequestVersion.pull_request == pull_request)\
418 .filter(PullRequestVersion.pull_request == pull_request)\
419 .order_by(PullRequestVersion.pull_request_version_id.asc())\
419 .order_by(PullRequestVersion.pull_request_version_id.asc())\
420 .all()
420 .all()
421
421
422 def get_pr_version(self, pull_request_id, version=None):
422 def get_pr_version(self, pull_request_id, version=None):
423 at_version = None
423 at_version = None
424
424
425 if version and version == 'latest':
425 if version and version == 'latest':
426 pull_request_ver = PullRequest.get(pull_request_id)
426 pull_request_ver = PullRequest.get(pull_request_id)
427 pull_request_obj = pull_request_ver
427 pull_request_obj = pull_request_ver
428 _org_pull_request_obj = pull_request_obj
428 _org_pull_request_obj = pull_request_obj
429 at_version = 'latest'
429 at_version = 'latest'
430 elif version:
430 elif version:
431 pull_request_ver = PullRequestVersion.get_or_404(version)
431 pull_request_ver = PullRequestVersion.get_or_404(version)
432 pull_request_obj = pull_request_ver
432 pull_request_obj = pull_request_ver
433 _org_pull_request_obj = pull_request_ver.pull_request
433 _org_pull_request_obj = pull_request_ver.pull_request
434 at_version = pull_request_ver.pull_request_version_id
434 at_version = pull_request_ver.pull_request_version_id
435 else:
435 else:
436 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
436 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
437 pull_request_id)
437 pull_request_id)
438
438
439 pull_request_display_obj = PullRequest.get_pr_display_object(
439 pull_request_display_obj = PullRequest.get_pr_display_object(
440 pull_request_obj, _org_pull_request_obj)
440 pull_request_obj, _org_pull_request_obj)
441
441
442 return _org_pull_request_obj, pull_request_obj, \
442 return _org_pull_request_obj, pull_request_obj, \
443 pull_request_display_obj, at_version
443 pull_request_display_obj, at_version
444
444
445 def create(self, created_by, source_repo, source_ref, target_repo,
445 def create(self, created_by, source_repo, source_ref, target_repo,
446 target_ref, revisions, reviewers, title, description=None,
446 target_ref, revisions, reviewers, title, description=None,
447 description_renderer=None,
447 description_renderer=None,
448 reviewer_data=None, translator=None, auth_user=None):
448 reviewer_data=None, translator=None, auth_user=None):
449 translator = translator or get_current_request().translate
449 translator = translator or get_current_request().translate
450
450
451 created_by_user = self._get_user(created_by)
451 created_by_user = self._get_user(created_by)
452 auth_user = auth_user or created_by_user.AuthUser()
452 auth_user = auth_user or created_by_user.AuthUser()
453 source_repo = self._get_repo(source_repo)
453 source_repo = self._get_repo(source_repo)
454 target_repo = self._get_repo(target_repo)
454 target_repo = self._get_repo(target_repo)
455
455
456 pull_request = PullRequest()
456 pull_request = PullRequest()
457 pull_request.source_repo = source_repo
457 pull_request.source_repo = source_repo
458 pull_request.source_ref = source_ref
458 pull_request.source_ref = source_ref
459 pull_request.target_repo = target_repo
459 pull_request.target_repo = target_repo
460 pull_request.target_ref = target_ref
460 pull_request.target_ref = target_ref
461 pull_request.revisions = revisions
461 pull_request.revisions = revisions
462 pull_request.title = title
462 pull_request.title = title
463 pull_request.description = description
463 pull_request.description = description
464 pull_request.description_renderer = description_renderer
464 pull_request.description_renderer = description_renderer
465 pull_request.author = created_by_user
465 pull_request.author = created_by_user
466 pull_request.reviewer_data = reviewer_data
466 pull_request.reviewer_data = reviewer_data
467
467
468 Session().add(pull_request)
468 Session().add(pull_request)
469 Session().flush()
469 Session().flush()
470
470
471 reviewer_ids = set()
471 reviewer_ids = set()
472 # members / reviewers
472 # members / reviewers
473 for reviewer_object in reviewers:
473 for reviewer_object in reviewers:
474 user_id, reasons, mandatory, rules = reviewer_object
474 user_id, reasons, mandatory, rules = reviewer_object
475 user = self._get_user(user_id)
475 user = self._get_user(user_id)
476
476
477 # skip duplicates
477 # skip duplicates
478 if user.user_id in reviewer_ids:
478 if user.user_id in reviewer_ids:
479 continue
479 continue
480
480
481 reviewer_ids.add(user.user_id)
481 reviewer_ids.add(user.user_id)
482
482
483 reviewer = PullRequestReviewers()
483 reviewer = PullRequestReviewers()
484 reviewer.user = user
484 reviewer.user = user
485 reviewer.pull_request = pull_request
485 reviewer.pull_request = pull_request
486 reviewer.reasons = reasons
486 reviewer.reasons = reasons
487 reviewer.mandatory = mandatory
487 reviewer.mandatory = mandatory
488
488
489 # NOTE(marcink): pick only first rule for now
489 # NOTE(marcink): pick only first rule for now
490 rule_id = list(rules)[0] if rules else None
490 rule_id = list(rules)[0] if rules else None
491 rule = RepoReviewRule.get(rule_id) if rule_id else None
491 rule = RepoReviewRule.get(rule_id) if rule_id else None
492 if rule:
492 if rule:
493 review_group = rule.user_group_vote_rule(user_id)
493 review_group = rule.user_group_vote_rule(user_id)
494 # we check if this particular reviewer is member of a voting group
494 # we check if this particular reviewer is member of a voting group
495 if review_group:
495 if review_group:
496 # NOTE(marcink):
496 # NOTE(marcink):
497 # can be that user is member of more but we pick the first same,
497 # can be that user is member of more but we pick the first same,
498 # same as default reviewers algo
498 # same as default reviewers algo
499 review_group = review_group[0]
499 review_group = review_group[0]
500
500
501 rule_data = {
501 rule_data = {
502 'rule_name':
502 'rule_name':
503 rule.review_rule_name,
503 rule.review_rule_name,
504 'rule_user_group_entry_id':
504 'rule_user_group_entry_id':
505 review_group.repo_review_rule_users_group_id,
505 review_group.repo_review_rule_users_group_id,
506 'rule_user_group_name':
506 'rule_user_group_name':
507 review_group.users_group.users_group_name,
507 review_group.users_group.users_group_name,
508 'rule_user_group_members':
508 'rule_user_group_members':
509 [x.user.username for x in review_group.users_group.members],
509 [x.user.username for x in review_group.users_group.members],
510 'rule_user_group_members_id':
510 'rule_user_group_members_id':
511 [x.user.user_id for x in review_group.users_group.members],
511 [x.user.user_id for x in review_group.users_group.members],
512 }
512 }
513 # e.g {'vote_rule': -1, 'mandatory': True}
513 # e.g {'vote_rule': -1, 'mandatory': True}
514 rule_data.update(review_group.rule_data())
514 rule_data.update(review_group.rule_data())
515
515
516 reviewer.rule_data = rule_data
516 reviewer.rule_data = rule_data
517
517
518 Session().add(reviewer)
518 Session().add(reviewer)
519 Session().flush()
519 Session().flush()
520
520
521 # Set approval status to "Under Review" for all commits which are
521 # Set approval status to "Under Review" for all commits which are
522 # part of this pull request.
522 # part of this pull request.
523 ChangesetStatusModel().set_status(
523 ChangesetStatusModel().set_status(
524 repo=target_repo,
524 repo=target_repo,
525 status=ChangesetStatus.STATUS_UNDER_REVIEW,
525 status=ChangesetStatus.STATUS_UNDER_REVIEW,
526 user=created_by_user,
526 user=created_by_user,
527 pull_request=pull_request
527 pull_request=pull_request
528 )
528 )
529 # we commit early at this point. This has to do with a fact
529 # we commit early at this point. This has to do with a fact
530 # that before queries do some row-locking. And because of that
530 # that before queries do some row-locking. And because of that
531 # we need to commit and finish transation before below validate call
531 # we need to commit and finish transation before below validate call
532 # that for large repos could be long resulting in long row locks
532 # that for large repos could be long resulting in long row locks
533 Session().commit()
533 Session().commit()
534
534
535 # prepare workspace, and run initial merge simulation
535 # prepare workspace, and run initial merge simulation
536 MergeCheck.validate(
536 MergeCheck.validate(
537 pull_request, auth_user=auth_user, translator=translator)
537 pull_request, auth_user=auth_user, translator=translator)
538
538
539 self.notify_reviewers(pull_request, reviewer_ids)
539 self.notify_reviewers(pull_request, reviewer_ids)
540 self._trigger_pull_request_hook(
540 self._trigger_pull_request_hook(
541 pull_request, created_by_user, 'create')
541 pull_request, created_by_user, 'create')
542
542
543 creation_data = pull_request.get_api_data(with_merge_state=False)
543 creation_data = pull_request.get_api_data(with_merge_state=False)
544 self._log_audit_action(
544 self._log_audit_action(
545 'repo.pull_request.create', {'data': creation_data},
545 'repo.pull_request.create', {'data': creation_data},
546 auth_user, pull_request)
546 auth_user, pull_request)
547
547
548 return pull_request
548 return pull_request
549
549
550 def _trigger_pull_request_hook(self, pull_request, user, action):
550 def _trigger_pull_request_hook(self, pull_request, user, action):
551 pull_request = self.__get_pull_request(pull_request)
551 pull_request = self.__get_pull_request(pull_request)
552 target_scm = pull_request.target_repo.scm_instance()
552 target_scm = pull_request.target_repo.scm_instance()
553 if action == 'create':
553 if action == 'create':
554 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
554 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
555 elif action == 'merge':
555 elif action == 'merge':
556 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
556 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
557 elif action == 'close':
557 elif action == 'close':
558 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
558 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
559 elif action == 'review_status_change':
559 elif action == 'review_status_change':
560 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
560 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
561 elif action == 'update':
561 elif action == 'update':
562 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
562 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
563 else:
563 else:
564 return
564 return
565
565
566 trigger_hook(
566 trigger_hook(
567 username=user.username,
567 username=user.username,
568 repo_name=pull_request.target_repo.repo_name,
568 repo_name=pull_request.target_repo.repo_name,
569 repo_alias=target_scm.alias,
569 repo_alias=target_scm.alias,
570 pull_request=pull_request)
570 pull_request=pull_request)
571
571
572 def _get_commit_ids(self, pull_request):
572 def _get_commit_ids(self, pull_request):
573 """
573 """
574 Return the commit ids of the merged pull request.
574 Return the commit ids of the merged pull request.
575
575
576 This method is not dealing correctly yet with the lack of autoupdates
576 This method is not dealing correctly yet with the lack of autoupdates
577 nor with the implicit target updates.
577 nor with the implicit target updates.
578 For example: if a commit in the source repo is already in the target it
578 For example: if a commit in the source repo is already in the target it
579 will be reported anyways.
579 will be reported anyways.
580 """
580 """
581 merge_rev = pull_request.merge_rev
581 merge_rev = pull_request.merge_rev
582 if merge_rev is None:
582 if merge_rev is None:
583 raise ValueError('This pull request was not merged yet')
583 raise ValueError('This pull request was not merged yet')
584
584
585 commit_ids = list(pull_request.revisions)
585 commit_ids = list(pull_request.revisions)
586 if merge_rev not in commit_ids:
586 if merge_rev not in commit_ids:
587 commit_ids.append(merge_rev)
587 commit_ids.append(merge_rev)
588
588
589 return commit_ids
589 return commit_ids
590
590
591 def merge_repo(self, pull_request, user, extras):
591 def merge_repo(self, pull_request, user, extras):
592 log.debug("Merging pull request %s", pull_request.pull_request_id)
592 log.debug("Merging pull request %s", pull_request.pull_request_id)
593 extras['user_agent'] = 'internal-merge'
593 merge_state = self._merge_pull_request(pull_request, user, extras)
594 merge_state = self._merge_pull_request(pull_request, user, extras)
594 if merge_state.executed:
595 if merge_state.executed:
595 log.debug(
596 log.debug(
596 "Merge was successful, updating the pull request comments.")
597 "Merge was successful, updating the pull request comments.")
597 self._comment_and_close_pr(pull_request, user, merge_state)
598 self._comment_and_close_pr(pull_request, user, merge_state)
598
599
599 self._log_audit_action(
600 self._log_audit_action(
600 'repo.pull_request.merge',
601 'repo.pull_request.merge',
601 {'merge_state': merge_state.__dict__},
602 {'merge_state': merge_state.__dict__},
602 user, pull_request)
603 user, pull_request)
603
604
604 else:
605 else:
605 log.warn("Merge failed, not updating the pull request.")
606 log.warn("Merge failed, not updating the pull request.")
606 return merge_state
607 return merge_state
607
608
608 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
609 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
609 target_vcs = pull_request.target_repo.scm_instance()
610 target_vcs = pull_request.target_repo.scm_instance()
610 source_vcs = pull_request.source_repo.scm_instance()
611 source_vcs = pull_request.source_repo.scm_instance()
611
612
612 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
613 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
613 pr_id=pull_request.pull_request_id,
614 pr_id=pull_request.pull_request_id,
614 pr_title=pull_request.title,
615 pr_title=pull_request.title,
615 source_repo=source_vcs.name,
616 source_repo=source_vcs.name,
616 source_ref_name=pull_request.source_ref_parts.name,
617 source_ref_name=pull_request.source_ref_parts.name,
617 target_repo=target_vcs.name,
618 target_repo=target_vcs.name,
618 target_ref_name=pull_request.target_ref_parts.name,
619 target_ref_name=pull_request.target_ref_parts.name,
619 )
620 )
620
621
621 workspace_id = self._workspace_id(pull_request)
622 workspace_id = self._workspace_id(pull_request)
622 repo_id = pull_request.target_repo.repo_id
623 repo_id = pull_request.target_repo.repo_id
623 use_rebase = self._use_rebase_for_merging(pull_request)
624 use_rebase = self._use_rebase_for_merging(pull_request)
624 close_branch = self._close_branch_before_merging(pull_request)
625 close_branch = self._close_branch_before_merging(pull_request)
625
626
626 target_ref = self._refresh_reference(
627 target_ref = self._refresh_reference(
627 pull_request.target_ref_parts, target_vcs)
628 pull_request.target_ref_parts, target_vcs)
628
629
629 callback_daemon, extras = prepare_callback_daemon(
630 callback_daemon, extras = prepare_callback_daemon(
630 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
631 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
631 host=vcs_settings.HOOKS_HOST,
632 host=vcs_settings.HOOKS_HOST,
632 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
633 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
633
634
634 with callback_daemon:
635 with callback_daemon:
635 # TODO: johbo: Implement a clean way to run a config_override
636 # TODO: johbo: Implement a clean way to run a config_override
636 # for a single call.
637 # for a single call.
637 target_vcs.config.set(
638 target_vcs.config.set(
638 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
639 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
639
640
640 user_name = user.short_contact
641 user_name = user.short_contact
641 merge_state = target_vcs.merge(
642 merge_state = target_vcs.merge(
642 repo_id, workspace_id, target_ref, source_vcs,
643 repo_id, workspace_id, target_ref, source_vcs,
643 pull_request.source_ref_parts,
644 pull_request.source_ref_parts,
644 user_name=user_name, user_email=user.email,
645 user_name=user_name, user_email=user.email,
645 message=message, use_rebase=use_rebase,
646 message=message, use_rebase=use_rebase,
646 close_branch=close_branch)
647 close_branch=close_branch)
647 return merge_state
648 return merge_state
648
649
649 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
650 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
650 pull_request.merge_rev = merge_state.merge_ref.commit_id
651 pull_request.merge_rev = merge_state.merge_ref.commit_id
651 pull_request.updated_on = datetime.datetime.now()
652 pull_request.updated_on = datetime.datetime.now()
652 close_msg = close_msg or 'Pull request merged and closed'
653 close_msg = close_msg or 'Pull request merged and closed'
653
654
654 CommentsModel().create(
655 CommentsModel().create(
655 text=safe_unicode(close_msg),
656 text=safe_unicode(close_msg),
656 repo=pull_request.target_repo.repo_id,
657 repo=pull_request.target_repo.repo_id,
657 user=user.user_id,
658 user=user.user_id,
658 pull_request=pull_request.pull_request_id,
659 pull_request=pull_request.pull_request_id,
659 f_path=None,
660 f_path=None,
660 line_no=None,
661 line_no=None,
661 closing_pr=True
662 closing_pr=True
662 )
663 )
663
664
664 Session().add(pull_request)
665 Session().add(pull_request)
665 Session().flush()
666 Session().flush()
666 # TODO: paris: replace invalidation with less radical solution
667 # TODO: paris: replace invalidation with less radical solution
667 ScmModel().mark_for_invalidation(
668 ScmModel().mark_for_invalidation(
668 pull_request.target_repo.repo_name)
669 pull_request.target_repo.repo_name)
669 self._trigger_pull_request_hook(pull_request, user, 'merge')
670 self._trigger_pull_request_hook(pull_request, user, 'merge')
670
671
671 def has_valid_update_type(self, pull_request):
672 def has_valid_update_type(self, pull_request):
672 source_ref_type = pull_request.source_ref_parts.type
673 source_ref_type = pull_request.source_ref_parts.type
673 return source_ref_type in ['book', 'branch', 'tag']
674 return source_ref_type in ['book', 'branch', 'tag']
674
675
675 def update_commits(self, pull_request):
676 def update_commits(self, pull_request):
676 """
677 """
677 Get the updated list of commits for the pull request
678 Get the updated list of commits for the pull request
678 and return the new pull request version and the list
679 and return the new pull request version and the list
679 of commits processed by this update action
680 of commits processed by this update action
680 """
681 """
681 pull_request = self.__get_pull_request(pull_request)
682 pull_request = self.__get_pull_request(pull_request)
682 source_ref_type = pull_request.source_ref_parts.type
683 source_ref_type = pull_request.source_ref_parts.type
683 source_ref_name = pull_request.source_ref_parts.name
684 source_ref_name = pull_request.source_ref_parts.name
684 source_ref_id = pull_request.source_ref_parts.commit_id
685 source_ref_id = pull_request.source_ref_parts.commit_id
685
686
686 target_ref_type = pull_request.target_ref_parts.type
687 target_ref_type = pull_request.target_ref_parts.type
687 target_ref_name = pull_request.target_ref_parts.name
688 target_ref_name = pull_request.target_ref_parts.name
688 target_ref_id = pull_request.target_ref_parts.commit_id
689 target_ref_id = pull_request.target_ref_parts.commit_id
689
690
690 if not self.has_valid_update_type(pull_request):
691 if not self.has_valid_update_type(pull_request):
691 log.debug(
692 log.debug(
692 "Skipping update of pull request %s due to ref type: %s",
693 "Skipping update of pull request %s due to ref type: %s",
693 pull_request, source_ref_type)
694 pull_request, source_ref_type)
694 return UpdateResponse(
695 return UpdateResponse(
695 executed=False,
696 executed=False,
696 reason=UpdateFailureReason.WRONG_REF_TYPE,
697 reason=UpdateFailureReason.WRONG_REF_TYPE,
697 old=pull_request, new=None, changes=None,
698 old=pull_request, new=None, changes=None,
698 source_changed=False, target_changed=False)
699 source_changed=False, target_changed=False)
699
700
700 # source repo
701 # source repo
701 source_repo = pull_request.source_repo.scm_instance()
702 source_repo = pull_request.source_repo.scm_instance()
702 try:
703 try:
703 source_commit = source_repo.get_commit(commit_id=source_ref_name)
704 source_commit = source_repo.get_commit(commit_id=source_ref_name)
704 except CommitDoesNotExistError:
705 except CommitDoesNotExistError:
705 return UpdateResponse(
706 return UpdateResponse(
706 executed=False,
707 executed=False,
707 reason=UpdateFailureReason.MISSING_SOURCE_REF,
708 reason=UpdateFailureReason.MISSING_SOURCE_REF,
708 old=pull_request, new=None, changes=None,
709 old=pull_request, new=None, changes=None,
709 source_changed=False, target_changed=False)
710 source_changed=False, target_changed=False)
710
711
711 source_changed = source_ref_id != source_commit.raw_id
712 source_changed = source_ref_id != source_commit.raw_id
712
713
713 # target repo
714 # target repo
714 target_repo = pull_request.target_repo.scm_instance()
715 target_repo = pull_request.target_repo.scm_instance()
715 try:
716 try:
716 target_commit = target_repo.get_commit(commit_id=target_ref_name)
717 target_commit = target_repo.get_commit(commit_id=target_ref_name)
717 except CommitDoesNotExistError:
718 except CommitDoesNotExistError:
718 return UpdateResponse(
719 return UpdateResponse(
719 executed=False,
720 executed=False,
720 reason=UpdateFailureReason.MISSING_TARGET_REF,
721 reason=UpdateFailureReason.MISSING_TARGET_REF,
721 old=pull_request, new=None, changes=None,
722 old=pull_request, new=None, changes=None,
722 source_changed=False, target_changed=False)
723 source_changed=False, target_changed=False)
723 target_changed = target_ref_id != target_commit.raw_id
724 target_changed = target_ref_id != target_commit.raw_id
724
725
725 if not (source_changed or target_changed):
726 if not (source_changed or target_changed):
726 log.debug("Nothing changed in pull request %s", pull_request)
727 log.debug("Nothing changed in pull request %s", pull_request)
727 return UpdateResponse(
728 return UpdateResponse(
728 executed=False,
729 executed=False,
729 reason=UpdateFailureReason.NO_CHANGE,
730 reason=UpdateFailureReason.NO_CHANGE,
730 old=pull_request, new=None, changes=None,
731 old=pull_request, new=None, changes=None,
731 source_changed=target_changed, target_changed=source_changed)
732 source_changed=target_changed, target_changed=source_changed)
732
733
733 change_in_found = 'target repo' if target_changed else 'source repo'
734 change_in_found = 'target repo' if target_changed else 'source repo'
734 log.debug('Updating pull request because of change in %s detected',
735 log.debug('Updating pull request because of change in %s detected',
735 change_in_found)
736 change_in_found)
736
737
737 # Finally there is a need for an update, in case of source change
738 # Finally there is a need for an update, in case of source change
738 # we create a new version, else just an update
739 # we create a new version, else just an update
739 if source_changed:
740 if source_changed:
740 pull_request_version = self._create_version_from_snapshot(pull_request)
741 pull_request_version = self._create_version_from_snapshot(pull_request)
741 self._link_comments_to_version(pull_request_version)
742 self._link_comments_to_version(pull_request_version)
742 else:
743 else:
743 try:
744 try:
744 ver = pull_request.versions[-1]
745 ver = pull_request.versions[-1]
745 except IndexError:
746 except IndexError:
746 ver = None
747 ver = None
747
748
748 pull_request.pull_request_version_id = \
749 pull_request.pull_request_version_id = \
749 ver.pull_request_version_id if ver else None
750 ver.pull_request_version_id if ver else None
750 pull_request_version = pull_request
751 pull_request_version = pull_request
751
752
752 try:
753 try:
753 if target_ref_type in ('tag', 'branch', 'book'):
754 if target_ref_type in ('tag', 'branch', 'book'):
754 target_commit = target_repo.get_commit(target_ref_name)
755 target_commit = target_repo.get_commit(target_ref_name)
755 else:
756 else:
756 target_commit = target_repo.get_commit(target_ref_id)
757 target_commit = target_repo.get_commit(target_ref_id)
757 except CommitDoesNotExistError:
758 except CommitDoesNotExistError:
758 return UpdateResponse(
759 return UpdateResponse(
759 executed=False,
760 executed=False,
760 reason=UpdateFailureReason.MISSING_TARGET_REF,
761 reason=UpdateFailureReason.MISSING_TARGET_REF,
761 old=pull_request, new=None, changes=None,
762 old=pull_request, new=None, changes=None,
762 source_changed=source_changed, target_changed=target_changed)
763 source_changed=source_changed, target_changed=target_changed)
763
764
764 # re-compute commit ids
765 # re-compute commit ids
765 old_commit_ids = pull_request.revisions
766 old_commit_ids = pull_request.revisions
766 pre_load = ["author", "branch", "date", "message"]
767 pre_load = ["author", "branch", "date", "message"]
767 commit_ranges = target_repo.compare(
768 commit_ranges = target_repo.compare(
768 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
769 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
769 pre_load=pre_load)
770 pre_load=pre_load)
770
771
771 ancestor = target_repo.get_common_ancestor(
772 ancestor = target_repo.get_common_ancestor(
772 target_commit.raw_id, source_commit.raw_id, source_repo)
773 target_commit.raw_id, source_commit.raw_id, source_repo)
773
774
774 pull_request.source_ref = '%s:%s:%s' % (
775 pull_request.source_ref = '%s:%s:%s' % (
775 source_ref_type, source_ref_name, source_commit.raw_id)
776 source_ref_type, source_ref_name, source_commit.raw_id)
776 pull_request.target_ref = '%s:%s:%s' % (
777 pull_request.target_ref = '%s:%s:%s' % (
777 target_ref_type, target_ref_name, ancestor)
778 target_ref_type, target_ref_name, ancestor)
778
779
779 pull_request.revisions = [
780 pull_request.revisions = [
780 commit.raw_id for commit in reversed(commit_ranges)]
781 commit.raw_id for commit in reversed(commit_ranges)]
781 pull_request.updated_on = datetime.datetime.now()
782 pull_request.updated_on = datetime.datetime.now()
782 Session().add(pull_request)
783 Session().add(pull_request)
783 new_commit_ids = pull_request.revisions
784 new_commit_ids = pull_request.revisions
784
785
785 old_diff_data, new_diff_data = self._generate_update_diffs(
786 old_diff_data, new_diff_data = self._generate_update_diffs(
786 pull_request, pull_request_version)
787 pull_request, pull_request_version)
787
788
788 # calculate commit and file changes
789 # calculate commit and file changes
789 changes = self._calculate_commit_id_changes(
790 changes = self._calculate_commit_id_changes(
790 old_commit_ids, new_commit_ids)
791 old_commit_ids, new_commit_ids)
791 file_changes = self._calculate_file_changes(
792 file_changes = self._calculate_file_changes(
792 old_diff_data, new_diff_data)
793 old_diff_data, new_diff_data)
793
794
794 # set comments as outdated if DIFFS changed
795 # set comments as outdated if DIFFS changed
795 CommentsModel().outdate_comments(
796 CommentsModel().outdate_comments(
796 pull_request, old_diff_data=old_diff_data,
797 pull_request, old_diff_data=old_diff_data,
797 new_diff_data=new_diff_data)
798 new_diff_data=new_diff_data)
798
799
799 commit_changes = (changes.added or changes.removed)
800 commit_changes = (changes.added or changes.removed)
800 file_node_changes = (
801 file_node_changes = (
801 file_changes.added or file_changes.modified or file_changes.removed)
802 file_changes.added or file_changes.modified or file_changes.removed)
802 pr_has_changes = commit_changes or file_node_changes
803 pr_has_changes = commit_changes or file_node_changes
803
804
804 # Add an automatic comment to the pull request, in case
805 # Add an automatic comment to the pull request, in case
805 # anything has changed
806 # anything has changed
806 if pr_has_changes:
807 if pr_has_changes:
807 update_comment = CommentsModel().create(
808 update_comment = CommentsModel().create(
808 text=self._render_update_message(changes, file_changes),
809 text=self._render_update_message(changes, file_changes),
809 repo=pull_request.target_repo,
810 repo=pull_request.target_repo,
810 user=pull_request.author,
811 user=pull_request.author,
811 pull_request=pull_request,
812 pull_request=pull_request,
812 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
813 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
813
814
814 # Update status to "Under Review" for added commits
815 # Update status to "Under Review" for added commits
815 for commit_id in changes.added:
816 for commit_id in changes.added:
816 ChangesetStatusModel().set_status(
817 ChangesetStatusModel().set_status(
817 repo=pull_request.source_repo,
818 repo=pull_request.source_repo,
818 status=ChangesetStatus.STATUS_UNDER_REVIEW,
819 status=ChangesetStatus.STATUS_UNDER_REVIEW,
819 comment=update_comment,
820 comment=update_comment,
820 user=pull_request.author,
821 user=pull_request.author,
821 pull_request=pull_request,
822 pull_request=pull_request,
822 revision=commit_id)
823 revision=commit_id)
823
824
824 log.debug(
825 log.debug(
825 'Updated pull request %s, added_ids: %s, common_ids: %s, '
826 'Updated pull request %s, added_ids: %s, common_ids: %s, '
826 'removed_ids: %s', pull_request.pull_request_id,
827 'removed_ids: %s', pull_request.pull_request_id,
827 changes.added, changes.common, changes.removed)
828 changes.added, changes.common, changes.removed)
828 log.debug(
829 log.debug(
829 'Updated pull request with the following file changes: %s',
830 'Updated pull request with the following file changes: %s',
830 file_changes)
831 file_changes)
831
832
832 log.info(
833 log.info(
833 "Updated pull request %s from commit %s to commit %s, "
834 "Updated pull request %s from commit %s to commit %s, "
834 "stored new version %s of this pull request.",
835 "stored new version %s of this pull request.",
835 pull_request.pull_request_id, source_ref_id,
836 pull_request.pull_request_id, source_ref_id,
836 pull_request.source_ref_parts.commit_id,
837 pull_request.source_ref_parts.commit_id,
837 pull_request_version.pull_request_version_id)
838 pull_request_version.pull_request_version_id)
838 Session().commit()
839 Session().commit()
839 self._trigger_pull_request_hook(
840 self._trigger_pull_request_hook(
840 pull_request, pull_request.author, 'update')
841 pull_request, pull_request.author, 'update')
841
842
842 return UpdateResponse(
843 return UpdateResponse(
843 executed=True, reason=UpdateFailureReason.NONE,
844 executed=True, reason=UpdateFailureReason.NONE,
844 old=pull_request, new=pull_request_version, changes=changes,
845 old=pull_request, new=pull_request_version, changes=changes,
845 source_changed=source_changed, target_changed=target_changed)
846 source_changed=source_changed, target_changed=target_changed)
846
847
847 def _create_version_from_snapshot(self, pull_request):
848 def _create_version_from_snapshot(self, pull_request):
848 version = PullRequestVersion()
849 version = PullRequestVersion()
849 version.title = pull_request.title
850 version.title = pull_request.title
850 version.description = pull_request.description
851 version.description = pull_request.description
851 version.status = pull_request.status
852 version.status = pull_request.status
852 version.created_on = datetime.datetime.now()
853 version.created_on = datetime.datetime.now()
853 version.updated_on = pull_request.updated_on
854 version.updated_on = pull_request.updated_on
854 version.user_id = pull_request.user_id
855 version.user_id = pull_request.user_id
855 version.source_repo = pull_request.source_repo
856 version.source_repo = pull_request.source_repo
856 version.source_ref = pull_request.source_ref
857 version.source_ref = pull_request.source_ref
857 version.target_repo = pull_request.target_repo
858 version.target_repo = pull_request.target_repo
858 version.target_ref = pull_request.target_ref
859 version.target_ref = pull_request.target_ref
859
860
860 version._last_merge_source_rev = pull_request._last_merge_source_rev
861 version._last_merge_source_rev = pull_request._last_merge_source_rev
861 version._last_merge_target_rev = pull_request._last_merge_target_rev
862 version._last_merge_target_rev = pull_request._last_merge_target_rev
862 version.last_merge_status = pull_request.last_merge_status
863 version.last_merge_status = pull_request.last_merge_status
863 version.shadow_merge_ref = pull_request.shadow_merge_ref
864 version.shadow_merge_ref = pull_request.shadow_merge_ref
864 version.merge_rev = pull_request.merge_rev
865 version.merge_rev = pull_request.merge_rev
865 version.reviewer_data = pull_request.reviewer_data
866 version.reviewer_data = pull_request.reviewer_data
866
867
867 version.revisions = pull_request.revisions
868 version.revisions = pull_request.revisions
868 version.pull_request = pull_request
869 version.pull_request = pull_request
869 Session().add(version)
870 Session().add(version)
870 Session().flush()
871 Session().flush()
871
872
872 return version
873 return version
873
874
874 def _generate_update_diffs(self, pull_request, pull_request_version):
875 def _generate_update_diffs(self, pull_request, pull_request_version):
875
876
876 diff_context = (
877 diff_context = (
877 self.DIFF_CONTEXT +
878 self.DIFF_CONTEXT +
878 CommentsModel.needed_extra_diff_context())
879 CommentsModel.needed_extra_diff_context())
879
880
880 source_repo = pull_request_version.source_repo
881 source_repo = pull_request_version.source_repo
881 source_ref_id = pull_request_version.source_ref_parts.commit_id
882 source_ref_id = pull_request_version.source_ref_parts.commit_id
882 target_ref_id = pull_request_version.target_ref_parts.commit_id
883 target_ref_id = pull_request_version.target_ref_parts.commit_id
883 old_diff = self._get_diff_from_pr_or_version(
884 old_diff = self._get_diff_from_pr_or_version(
884 source_repo, source_ref_id, target_ref_id, context=diff_context)
885 source_repo, source_ref_id, target_ref_id, context=diff_context)
885
886
886 source_repo = pull_request.source_repo
887 source_repo = pull_request.source_repo
887 source_ref_id = pull_request.source_ref_parts.commit_id
888 source_ref_id = pull_request.source_ref_parts.commit_id
888 target_ref_id = pull_request.target_ref_parts.commit_id
889 target_ref_id = pull_request.target_ref_parts.commit_id
889
890
890 new_diff = self._get_diff_from_pr_or_version(
891 new_diff = self._get_diff_from_pr_or_version(
891 source_repo, source_ref_id, target_ref_id, context=diff_context)
892 source_repo, source_ref_id, target_ref_id, context=diff_context)
892
893
893 old_diff_data = diffs.DiffProcessor(old_diff)
894 old_diff_data = diffs.DiffProcessor(old_diff)
894 old_diff_data.prepare()
895 old_diff_data.prepare()
895 new_diff_data = diffs.DiffProcessor(new_diff)
896 new_diff_data = diffs.DiffProcessor(new_diff)
896 new_diff_data.prepare()
897 new_diff_data.prepare()
897
898
898 return old_diff_data, new_diff_data
899 return old_diff_data, new_diff_data
899
900
900 def _link_comments_to_version(self, pull_request_version):
901 def _link_comments_to_version(self, pull_request_version):
901 """
902 """
902 Link all unlinked comments of this pull request to the given version.
903 Link all unlinked comments of this pull request to the given version.
903
904
904 :param pull_request_version: The `PullRequestVersion` to which
905 :param pull_request_version: The `PullRequestVersion` to which
905 the comments shall be linked.
906 the comments shall be linked.
906
907
907 """
908 """
908 pull_request = pull_request_version.pull_request
909 pull_request = pull_request_version.pull_request
909 comments = ChangesetComment.query()\
910 comments = ChangesetComment.query()\
910 .filter(
911 .filter(
911 # TODO: johbo: Should we query for the repo at all here?
912 # TODO: johbo: Should we query for the repo at all here?
912 # Pending decision on how comments of PRs are to be related
913 # Pending decision on how comments of PRs are to be related
913 # to either the source repo, the target repo or no repo at all.
914 # to either the source repo, the target repo or no repo at all.
914 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
915 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
915 ChangesetComment.pull_request == pull_request,
916 ChangesetComment.pull_request == pull_request,
916 ChangesetComment.pull_request_version == None)\
917 ChangesetComment.pull_request_version == None)\
917 .order_by(ChangesetComment.comment_id.asc())
918 .order_by(ChangesetComment.comment_id.asc())
918
919
919 # TODO: johbo: Find out why this breaks if it is done in a bulk
920 # TODO: johbo: Find out why this breaks if it is done in a bulk
920 # operation.
921 # operation.
921 for comment in comments:
922 for comment in comments:
922 comment.pull_request_version_id = (
923 comment.pull_request_version_id = (
923 pull_request_version.pull_request_version_id)
924 pull_request_version.pull_request_version_id)
924 Session().add(comment)
925 Session().add(comment)
925
926
926 def _calculate_commit_id_changes(self, old_ids, new_ids):
927 def _calculate_commit_id_changes(self, old_ids, new_ids):
927 added = [x for x in new_ids if x not in old_ids]
928 added = [x for x in new_ids if x not in old_ids]
928 common = [x for x in new_ids if x in old_ids]
929 common = [x for x in new_ids if x in old_ids]
929 removed = [x for x in old_ids if x not in new_ids]
930 removed = [x for x in old_ids if x not in new_ids]
930 total = new_ids
931 total = new_ids
931 return ChangeTuple(added, common, removed, total)
932 return ChangeTuple(added, common, removed, total)
932
933
933 def _calculate_file_changes(self, old_diff_data, new_diff_data):
934 def _calculate_file_changes(self, old_diff_data, new_diff_data):
934
935
935 old_files = OrderedDict()
936 old_files = OrderedDict()
936 for diff_data in old_diff_data.parsed_diff:
937 for diff_data in old_diff_data.parsed_diff:
937 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
938 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
938
939
939 added_files = []
940 added_files = []
940 modified_files = []
941 modified_files = []
941 removed_files = []
942 removed_files = []
942 for diff_data in new_diff_data.parsed_diff:
943 for diff_data in new_diff_data.parsed_diff:
943 new_filename = diff_data['filename']
944 new_filename = diff_data['filename']
944 new_hash = md5_safe(diff_data['raw_diff'])
945 new_hash = md5_safe(diff_data['raw_diff'])
945
946
946 old_hash = old_files.get(new_filename)
947 old_hash = old_files.get(new_filename)
947 if not old_hash:
948 if not old_hash:
948 # file is not present in old diff, means it's added
949 # file is not present in old diff, means it's added
949 added_files.append(new_filename)
950 added_files.append(new_filename)
950 else:
951 else:
951 if new_hash != old_hash:
952 if new_hash != old_hash:
952 modified_files.append(new_filename)
953 modified_files.append(new_filename)
953 # now remove a file from old, since we have seen it already
954 # now remove a file from old, since we have seen it already
954 del old_files[new_filename]
955 del old_files[new_filename]
955
956
956 # removed files is when there are present in old, but not in NEW,
957 # removed files is when there are present in old, but not in NEW,
957 # since we remove old files that are present in new diff, left-overs
958 # since we remove old files that are present in new diff, left-overs
958 # if any should be the removed files
959 # if any should be the removed files
959 removed_files.extend(old_files.keys())
960 removed_files.extend(old_files.keys())
960
961
961 return FileChangeTuple(added_files, modified_files, removed_files)
962 return FileChangeTuple(added_files, modified_files, removed_files)
962
963
963 def _render_update_message(self, changes, file_changes):
964 def _render_update_message(self, changes, file_changes):
964 """
965 """
965 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
966 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
966 so it's always looking the same disregarding on which default
967 so it's always looking the same disregarding on which default
967 renderer system is using.
968 renderer system is using.
968
969
969 :param changes: changes named tuple
970 :param changes: changes named tuple
970 :param file_changes: file changes named tuple
971 :param file_changes: file changes named tuple
971
972
972 """
973 """
973 new_status = ChangesetStatus.get_status_lbl(
974 new_status = ChangesetStatus.get_status_lbl(
974 ChangesetStatus.STATUS_UNDER_REVIEW)
975 ChangesetStatus.STATUS_UNDER_REVIEW)
975
976
976 changed_files = (
977 changed_files = (
977 file_changes.added + file_changes.modified + file_changes.removed)
978 file_changes.added + file_changes.modified + file_changes.removed)
978
979
979 params = {
980 params = {
980 'under_review_label': new_status,
981 'under_review_label': new_status,
981 'added_commits': changes.added,
982 'added_commits': changes.added,
982 'removed_commits': changes.removed,
983 'removed_commits': changes.removed,
983 'changed_files': changed_files,
984 'changed_files': changed_files,
984 'added_files': file_changes.added,
985 'added_files': file_changes.added,
985 'modified_files': file_changes.modified,
986 'modified_files': file_changes.modified,
986 'removed_files': file_changes.removed,
987 'removed_files': file_changes.removed,
987 }
988 }
988 renderer = RstTemplateRenderer()
989 renderer = RstTemplateRenderer()
989 return renderer.render('pull_request_update.mako', **params)
990 return renderer.render('pull_request_update.mako', **params)
990
991
991 def edit(self, pull_request, title, description, description_renderer, user):
992 def edit(self, pull_request, title, description, description_renderer, user):
992 pull_request = self.__get_pull_request(pull_request)
993 pull_request = self.__get_pull_request(pull_request)
993 old_data = pull_request.get_api_data(with_merge_state=False)
994 old_data = pull_request.get_api_data(with_merge_state=False)
994 if pull_request.is_closed():
995 if pull_request.is_closed():
995 raise ValueError('This pull request is closed')
996 raise ValueError('This pull request is closed')
996 if title:
997 if title:
997 pull_request.title = title
998 pull_request.title = title
998 pull_request.description = description
999 pull_request.description = description
999 pull_request.updated_on = datetime.datetime.now()
1000 pull_request.updated_on = datetime.datetime.now()
1000 pull_request.description_renderer = description_renderer
1001 pull_request.description_renderer = description_renderer
1001 Session().add(pull_request)
1002 Session().add(pull_request)
1002 self._log_audit_action(
1003 self._log_audit_action(
1003 'repo.pull_request.edit', {'old_data': old_data},
1004 'repo.pull_request.edit', {'old_data': old_data},
1004 user, pull_request)
1005 user, pull_request)
1005
1006
1006 def update_reviewers(self, pull_request, reviewer_data, user):
1007 def update_reviewers(self, pull_request, reviewer_data, user):
1007 """
1008 """
1008 Update the reviewers in the pull request
1009 Update the reviewers in the pull request
1009
1010
1010 :param pull_request: the pr to update
1011 :param pull_request: the pr to update
1011 :param reviewer_data: list of tuples
1012 :param reviewer_data: list of tuples
1012 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1013 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1013 """
1014 """
1014 pull_request = self.__get_pull_request(pull_request)
1015 pull_request = self.__get_pull_request(pull_request)
1015 if pull_request.is_closed():
1016 if pull_request.is_closed():
1016 raise ValueError('This pull request is closed')
1017 raise ValueError('This pull request is closed')
1017
1018
1018 reviewers = {}
1019 reviewers = {}
1019 for user_id, reasons, mandatory, rules in reviewer_data:
1020 for user_id, reasons, mandatory, rules in reviewer_data:
1020 if isinstance(user_id, (int, basestring)):
1021 if isinstance(user_id, (int, basestring)):
1021 user_id = self._get_user(user_id).user_id
1022 user_id = self._get_user(user_id).user_id
1022 reviewers[user_id] = {
1023 reviewers[user_id] = {
1023 'reasons': reasons, 'mandatory': mandatory}
1024 'reasons': reasons, 'mandatory': mandatory}
1024
1025
1025 reviewers_ids = set(reviewers.keys())
1026 reviewers_ids = set(reviewers.keys())
1026 current_reviewers = PullRequestReviewers.query()\
1027 current_reviewers = PullRequestReviewers.query()\
1027 .filter(PullRequestReviewers.pull_request ==
1028 .filter(PullRequestReviewers.pull_request ==
1028 pull_request).all()
1029 pull_request).all()
1029 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1030 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1030
1031
1031 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1032 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1032 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1033 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1033
1034
1034 log.debug("Adding %s reviewers", ids_to_add)
1035 log.debug("Adding %s reviewers", ids_to_add)
1035 log.debug("Removing %s reviewers", ids_to_remove)
1036 log.debug("Removing %s reviewers", ids_to_remove)
1036 changed = False
1037 changed = False
1037 for uid in ids_to_add:
1038 for uid in ids_to_add:
1038 changed = True
1039 changed = True
1039 _usr = self._get_user(uid)
1040 _usr = self._get_user(uid)
1040 reviewer = PullRequestReviewers()
1041 reviewer = PullRequestReviewers()
1041 reviewer.user = _usr
1042 reviewer.user = _usr
1042 reviewer.pull_request = pull_request
1043 reviewer.pull_request = pull_request
1043 reviewer.reasons = reviewers[uid]['reasons']
1044 reviewer.reasons = reviewers[uid]['reasons']
1044 # NOTE(marcink): mandatory shouldn't be changed now
1045 # NOTE(marcink): mandatory shouldn't be changed now
1045 # reviewer.mandatory = reviewers[uid]['reasons']
1046 # reviewer.mandatory = reviewers[uid]['reasons']
1046 Session().add(reviewer)
1047 Session().add(reviewer)
1047 self._log_audit_action(
1048 self._log_audit_action(
1048 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
1049 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
1049 user, pull_request)
1050 user, pull_request)
1050
1051
1051 for uid in ids_to_remove:
1052 for uid in ids_to_remove:
1052 changed = True
1053 changed = True
1053 reviewers = PullRequestReviewers.query()\
1054 reviewers = PullRequestReviewers.query()\
1054 .filter(PullRequestReviewers.user_id == uid,
1055 .filter(PullRequestReviewers.user_id == uid,
1055 PullRequestReviewers.pull_request == pull_request)\
1056 PullRequestReviewers.pull_request == pull_request)\
1056 .all()
1057 .all()
1057 # use .all() in case we accidentally added the same person twice
1058 # use .all() in case we accidentally added the same person twice
1058 # this CAN happen due to the lack of DB checks
1059 # this CAN happen due to the lack of DB checks
1059 for obj in reviewers:
1060 for obj in reviewers:
1060 old_data = obj.get_dict()
1061 old_data = obj.get_dict()
1061 Session().delete(obj)
1062 Session().delete(obj)
1062 self._log_audit_action(
1063 self._log_audit_action(
1063 'repo.pull_request.reviewer.delete',
1064 'repo.pull_request.reviewer.delete',
1064 {'old_data': old_data}, user, pull_request)
1065 {'old_data': old_data}, user, pull_request)
1065
1066
1066 if changed:
1067 if changed:
1067 pull_request.updated_on = datetime.datetime.now()
1068 pull_request.updated_on = datetime.datetime.now()
1068 Session().add(pull_request)
1069 Session().add(pull_request)
1069
1070
1070 self.notify_reviewers(pull_request, ids_to_add)
1071 self.notify_reviewers(pull_request, ids_to_add)
1071 return ids_to_add, ids_to_remove
1072 return ids_to_add, ids_to_remove
1072
1073
1073 def get_url(self, pull_request, request=None, permalink=False):
1074 def get_url(self, pull_request, request=None, permalink=False):
1074 if not request:
1075 if not request:
1075 request = get_current_request()
1076 request = get_current_request()
1076
1077
1077 if permalink:
1078 if permalink:
1078 return request.route_url(
1079 return request.route_url(
1079 'pull_requests_global',
1080 'pull_requests_global',
1080 pull_request_id=pull_request.pull_request_id,)
1081 pull_request_id=pull_request.pull_request_id,)
1081 else:
1082 else:
1082 return request.route_url('pullrequest_show',
1083 return request.route_url('pullrequest_show',
1083 repo_name=safe_str(pull_request.target_repo.repo_name),
1084 repo_name=safe_str(pull_request.target_repo.repo_name),
1084 pull_request_id=pull_request.pull_request_id,)
1085 pull_request_id=pull_request.pull_request_id,)
1085
1086
1086 def get_shadow_clone_url(self, pull_request, request=None):
1087 def get_shadow_clone_url(self, pull_request, request=None):
1087 """
1088 """
1088 Returns qualified url pointing to the shadow repository. If this pull
1089 Returns qualified url pointing to the shadow repository. If this pull
1089 request is closed there is no shadow repository and ``None`` will be
1090 request is closed there is no shadow repository and ``None`` will be
1090 returned.
1091 returned.
1091 """
1092 """
1092 if pull_request.is_closed():
1093 if pull_request.is_closed():
1093 return None
1094 return None
1094 else:
1095 else:
1095 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1096 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1096 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1097 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1097
1098
    def notify_reviewers(self, pull_request, reviewers_ids):
        """
        Create in-app notifications and emails for newly added reviewers.

        No-op when `reviewers_ids` is empty.

        :param pull_request: the `PullRequest` the notification is about
        :param reviewers_ids: iterable of user ids to notify
        """
        # notification to reviewers
        if not reviewers_ids:
            return

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = reviewers_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
            repo_name=pr_target_repo.repo_name,
            pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        # template/email context shared by notification and email rendering
        kwargs = {
            'user': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
        }

        # pre-generate the subject for notification itself
        (subject,
         _h, _e,  # we don't care about those
         body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=pull_request.author,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
1156
1157
    def delete(self, pull_request, user):
        """
        Permanently delete a pull request and its merge workspace.

        The API data is captured before deletion so the audit log keeps a
        record of what was removed.

        :param pull_request: pull request (or its id) to delete
        :param user: user performing the deletion, for the audit log
        """
        pull_request = self.__get_pull_request(pull_request)
        old_data = pull_request.get_api_data(with_merge_state=False)
        self._cleanup_merge_workspace(pull_request)
        self._log_audit_action(
            'repo.pull_request.delete', {'old_data': old_data},
            user, pull_request)
        Session().delete(pull_request)
1165
1166
1166 def close_pull_request(self, pull_request, user):
1167 def close_pull_request(self, pull_request, user):
1167 pull_request = self.__get_pull_request(pull_request)
1168 pull_request = self.__get_pull_request(pull_request)
1168 self._cleanup_merge_workspace(pull_request)
1169 self._cleanup_merge_workspace(pull_request)
1169 pull_request.status = PullRequest.STATUS_CLOSED
1170 pull_request.status = PullRequest.STATUS_CLOSED
1170 pull_request.updated_on = datetime.datetime.now()
1171 pull_request.updated_on = datetime.datetime.now()
1171 Session().add(pull_request)
1172 Session().add(pull_request)
1172 self._trigger_pull_request_hook(
1173 self._trigger_pull_request_hook(
1173 pull_request, pull_request.author, 'close')
1174 pull_request, pull_request.author, 'close')
1174
1175
1175 pr_data = pull_request.get_api_data(with_merge_state=False)
1176 pr_data = pull_request.get_api_data(with_merge_state=False)
1176 self._log_audit_action(
1177 self._log_audit_action(
1177 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1178 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1178
1179
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):
        """
        Close a pull request, attaching a status-changing comment.

        The final changeset status is APPROVED only when the calculated
        review status already is approved; any other state closes as
        REJECTED. The ordering below (comment -> status -> hooks -> close)
        is deliberate.

        :param message: optional comment text; a default close message is
            used when not given
        :return: tuple of (comment, status)
        """
        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        Session().flush()
        events.trigger(events.PullRequestCommentEvent(pull_request, comment))
        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self._trigger_pull_request_hook(
                pull_request, user, 'review_status_change')

        # finally close the PR
        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, user)

        return comment, status
1234
1235
1235 def merge_status(self, pull_request, translator=None,
1236 def merge_status(self, pull_request, translator=None,
1236 force_shadow_repo_refresh=False):
1237 force_shadow_repo_refresh=False):
1237 _ = translator or get_current_request().translate
1238 _ = translator or get_current_request().translate
1238
1239
1239 if not self._is_merge_enabled(pull_request):
1240 if not self._is_merge_enabled(pull_request):
1240 return False, _('Server-side pull request merging is disabled.')
1241 return False, _('Server-side pull request merging is disabled.')
1241 if pull_request.is_closed():
1242 if pull_request.is_closed():
1242 return False, _('This pull request is closed.')
1243 return False, _('This pull request is closed.')
1243 merge_possible, msg = self._check_repo_requirements(
1244 merge_possible, msg = self._check_repo_requirements(
1244 target=pull_request.target_repo, source=pull_request.source_repo,
1245 target=pull_request.target_repo, source=pull_request.source_repo,
1245 translator=_)
1246 translator=_)
1246 if not merge_possible:
1247 if not merge_possible:
1247 return merge_possible, msg
1248 return merge_possible, msg
1248
1249
1249 try:
1250 try:
1250 resp = self._try_merge(
1251 resp = self._try_merge(
1251 pull_request,
1252 pull_request,
1252 force_shadow_repo_refresh=force_shadow_repo_refresh)
1253 force_shadow_repo_refresh=force_shadow_repo_refresh)
1253 log.debug("Merge response: %s", resp)
1254 log.debug("Merge response: %s", resp)
1254 status = resp.possible, self.merge_status_message(
1255 status = resp.possible, self.merge_status_message(
1255 resp.failure_reason)
1256 resp.failure_reason)
1256 except NotImplementedError:
1257 except NotImplementedError:
1257 status = False, _('Pull request merging is not supported.')
1258 status = False, _('Pull request merging is not supported.')
1258
1259
1259 return status
1260 return status
1260
1261
1261 def _check_repo_requirements(self, target, source, translator):
1262 def _check_repo_requirements(self, target, source, translator):
1262 """
1263 """
1263 Check if `target` and `source` have compatible requirements.
1264 Check if `target` and `source` have compatible requirements.
1264
1265
1265 Currently this is just checking for largefiles.
1266 Currently this is just checking for largefiles.
1266 """
1267 """
1267 _ = translator
1268 _ = translator
1268 target_has_largefiles = self._has_largefiles(target)
1269 target_has_largefiles = self._has_largefiles(target)
1269 source_has_largefiles = self._has_largefiles(source)
1270 source_has_largefiles = self._has_largefiles(source)
1270 merge_possible = True
1271 merge_possible = True
1271 message = u''
1272 message = u''
1272
1273
1273 if target_has_largefiles != source_has_largefiles:
1274 if target_has_largefiles != source_has_largefiles:
1274 merge_possible = False
1275 merge_possible = False
1275 if source_has_largefiles:
1276 if source_has_largefiles:
1276 message = _(
1277 message = _(
1277 'Target repository large files support is disabled.')
1278 'Target repository large files support is disabled.')
1278 else:
1279 else:
1279 message = _(
1280 message = _(
1280 'Source repository large files support is disabled.')
1281 'Source repository large files support is disabled.')
1281
1282
1282 return merge_possible, message
1283 return merge_possible, message
1283
1284
1284 def _has_largefiles(self, repo):
1285 def _has_largefiles(self, repo):
1285 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1286 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1286 'extensions', 'largefiles')
1287 'extensions', 'largefiles')
1287 return largefiles_ui and largefiles_ui[0].active
1288 return largefiles_ui and largefiles_ui[0].active
1288
1289
    def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
        """
        Try to merge the pull request and return the merge status.

        :param force_shadow_repo_refresh: skip the cached merge state and
            re-evaluate against a refreshed shadow repository
        :return: a `MergeResponse`
        """
        log.debug(
            "Trying out if the pull request %s can be merged. Force_refresh=%s",
            pull_request.pull_request_id, force_shadow_repo_refresh)
        target_vcs = pull_request.target_repo.scm_instance()

        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            # target branch/commit vanished -> merge is impossible
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF)
            return merge_state

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            # a locked target repository refuses merges outright
            log.debug("The target repository is locked.")
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
        elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
                pull_request, target_ref):
            # cached state is stale (or refresh forced): recompute it
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # cached state is still valid: reuse the last known merge status
            possible = pull_request.\
                last_merge_status == MergeFailureReason.NONE
            merge_state = MergeResponse(
                possible, False, None, pull_request.last_merge_status)

        return merge_state
1324
1325
1325 def _refresh_reference(self, reference, vcs_repository):
1326 def _refresh_reference(self, reference, vcs_repository):
1326 if reference.type in ('branch', 'book'):
1327 if reference.type in ('branch', 'book'):
1327 name_or_id = reference.name
1328 name_or_id = reference.name
1328 else:
1329 else:
1329 name_or_id = reference.commit_id
1330 name_or_id = reference.commit_id
1330 refreshed_commit = vcs_repository.get_commit(name_or_id)
1331 refreshed_commit = vcs_repository.get_commit(name_or_id)
1331 refreshed_reference = Reference(
1332 refreshed_reference = Reference(
1332 reference.type, reference.name, refreshed_commit.raw_id)
1333 reference.type, reference.name, refreshed_commit.raw_id)
1333 return refreshed_reference
1334 return refreshed_reference
1334
1335
1335 def _needs_merge_state_refresh(self, pull_request, target_reference):
1336 def _needs_merge_state_refresh(self, pull_request, target_reference):
1336 return not(
1337 return not(
1337 pull_request.revisions and
1338 pull_request.revisions and
1338 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1339 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1339 target_reference.commit_id == pull_request._last_merge_target_rev)
1340 target_reference.commit_id == pull_request._last_merge_target_rev)
1340
1341
1341 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1342 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1342 workspace_id = self._workspace_id(pull_request)
1343 workspace_id = self._workspace_id(pull_request)
1343 source_vcs = pull_request.source_repo.scm_instance()
1344 source_vcs = pull_request.source_repo.scm_instance()
1344 repo_id = pull_request.target_repo.repo_id
1345 repo_id = pull_request.target_repo.repo_id
1345 use_rebase = self._use_rebase_for_merging(pull_request)
1346 use_rebase = self._use_rebase_for_merging(pull_request)
1346 close_branch = self._close_branch_before_merging(pull_request)
1347 close_branch = self._close_branch_before_merging(pull_request)
1347 merge_state = target_vcs.merge(
1348 merge_state = target_vcs.merge(
1348 repo_id, workspace_id,
1349 repo_id, workspace_id,
1349 target_reference, source_vcs, pull_request.source_ref_parts,
1350 target_reference, source_vcs, pull_request.source_ref_parts,
1350 dry_run=True, use_rebase=use_rebase,
1351 dry_run=True, use_rebase=use_rebase,
1351 close_branch=close_branch)
1352 close_branch=close_branch)
1352
1353
1353 # Do not store the response if there was an unknown error.
1354 # Do not store the response if there was an unknown error.
1354 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1355 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1355 pull_request._last_merge_source_rev = \
1356 pull_request._last_merge_source_rev = \
1356 pull_request.source_ref_parts.commit_id
1357 pull_request.source_ref_parts.commit_id
1357 pull_request._last_merge_target_rev = target_reference.commit_id
1358 pull_request._last_merge_target_rev = target_reference.commit_id
1358 pull_request.last_merge_status = merge_state.failure_reason
1359 pull_request.last_merge_status = merge_state.failure_reason
1359 pull_request.shadow_merge_ref = merge_state.merge_ref
1360 pull_request.shadow_merge_ref = merge_state.merge_ref
1360 Session().add(pull_request)
1361 Session().add(pull_request)
1361 Session().commit()
1362 Session().commit()
1362
1363
1363 return merge_state
1364 return merge_state
1364
1365
1365 def _workspace_id(self, pull_request):
1366 def _workspace_id(self, pull_request):
1366 workspace_id = 'pr-%s' % pull_request.pull_request_id
1367 workspace_id = 'pr-%s' % pull_request.pull_request_id
1367 return workspace_id
1368 return workspace_id
1368
1369
1369 def merge_status_message(self, status_code):
1370 def merge_status_message(self, status_code):
1370 """
1371 """
1371 Return a human friendly error message for the given merge status code.
1372 Return a human friendly error message for the given merge status code.
1372 """
1373 """
1373 return self.MERGE_STATUS_MESSAGES[status_code]
1374 return self.MERGE_STATUS_MESSAGES[status_code]
1374
1375
1375 def generate_repo_data(self, repo, commit_id=None, branch=None,
1376 def generate_repo_data(self, repo, commit_id=None, branch=None,
1376 bookmark=None, translator=None):
1377 bookmark=None, translator=None):
1377 from rhodecode.model.repo import RepoModel
1378 from rhodecode.model.repo import RepoModel
1378
1379
1379 all_refs, selected_ref = \
1380 all_refs, selected_ref = \
1380 self._get_repo_pullrequest_sources(
1381 self._get_repo_pullrequest_sources(
1381 repo.scm_instance(), commit_id=commit_id,
1382 repo.scm_instance(), commit_id=commit_id,
1382 branch=branch, bookmark=bookmark, translator=translator)
1383 branch=branch, bookmark=bookmark, translator=translator)
1383
1384
1384 refs_select2 = []
1385 refs_select2 = []
1385 for element in all_refs:
1386 for element in all_refs:
1386 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1387 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1387 refs_select2.append({'text': element[1], 'children': children})
1388 refs_select2.append({'text': element[1], 'children': children})
1388
1389
1389 return {
1390 return {
1390 'user': {
1391 'user': {
1391 'user_id': repo.user.user_id,
1392 'user_id': repo.user.user_id,
1392 'username': repo.user.username,
1393 'username': repo.user.username,
1393 'firstname': repo.user.first_name,
1394 'firstname': repo.user.first_name,
1394 'lastname': repo.user.last_name,
1395 'lastname': repo.user.last_name,
1395 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1396 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1396 },
1397 },
1397 'name': repo.repo_name,
1398 'name': repo.repo_name,
1398 'link': RepoModel().get_url(repo),
1399 'link': RepoModel().get_url(repo),
1399 'description': h.chop_at_smart(repo.description_safe, '\n'),
1400 'description': h.chop_at_smart(repo.description_safe, '\n'),
1400 'refs': {
1401 'refs': {
1401 'all_refs': all_refs,
1402 'all_refs': all_refs,
1402 'selected_ref': selected_ref,
1403 'selected_ref': selected_ref,
1403 'select2_refs': refs_select2
1404 'select2_refs': refs_select2
1404 }
1405 }
1405 }
1406 }
1406
1407
1407 def generate_pullrequest_title(self, source, source_ref, target):
1408 def generate_pullrequest_title(self, source, source_ref, target):
1408 return u'{source}#{at_ref} to {target}'.format(
1409 return u'{source}#{at_ref} to {target}'.format(
1409 source=source,
1410 source=source,
1410 at_ref=source_ref,
1411 at_ref=source_ref,
1411 target=target,
1412 target=target,
1412 )
1413 )
1413
1414
1414 def _cleanup_merge_workspace(self, pull_request):
1415 def _cleanup_merge_workspace(self, pull_request):
1415 # Merging related cleanup
1416 # Merging related cleanup
1416 repo_id = pull_request.target_repo.repo_id
1417 repo_id = pull_request.target_repo.repo_id
1417 target_scm = pull_request.target_repo.scm_instance()
1418 target_scm = pull_request.target_repo.scm_instance()
1418 workspace_id = self._workspace_id(pull_request)
1419 workspace_id = self._workspace_id(pull_request)
1419
1420
1420 try:
1421 try:
1421 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1422 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1422 except NotImplementedError:
1423 except NotImplementedError:
1423 pass
1424 pass
1424
1425
1425 def _get_repo_pullrequest_sources(
1426 def _get_repo_pullrequest_sources(
1426 self, repo, commit_id=None, branch=None, bookmark=None,
1427 self, repo, commit_id=None, branch=None, bookmark=None,
1427 translator=None):
1428 translator=None):
1428 """
1429 """
1429 Return a structure with repo's interesting commits, suitable for
1430 Return a structure with repo's interesting commits, suitable for
1430 the selectors in pullrequest controller
1431 the selectors in pullrequest controller
1431
1432
1432 :param commit_id: a commit that must be in the list somehow
1433 :param commit_id: a commit that must be in the list somehow
1433 and selected by default
1434 and selected by default
1434 :param branch: a branch that must be in the list and selected
1435 :param branch: a branch that must be in the list and selected
1435 by default - even if closed
1436 by default - even if closed
1436 :param bookmark: a bookmark that must be in the list and selected
1437 :param bookmark: a bookmark that must be in the list and selected
1437 """
1438 """
1438 _ = translator or get_current_request().translate
1439 _ = translator or get_current_request().translate
1439
1440
1440 commit_id = safe_str(commit_id) if commit_id else None
1441 commit_id = safe_str(commit_id) if commit_id else None
1441 branch = safe_str(branch) if branch else None
1442 branch = safe_str(branch) if branch else None
1442 bookmark = safe_str(bookmark) if bookmark else None
1443 bookmark = safe_str(bookmark) if bookmark else None
1443
1444
1444 selected = None
1445 selected = None
1445
1446
1446 # order matters: first source that has commit_id in it will be selected
1447 # order matters: first source that has commit_id in it will be selected
1447 sources = []
1448 sources = []
1448 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1449 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1449 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1450 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1450
1451
1451 if commit_id:
1452 if commit_id:
1452 ref_commit = (h.short_id(commit_id), commit_id)
1453 ref_commit = (h.short_id(commit_id), commit_id)
1453 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1454 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1454
1455
1455 sources.append(
1456 sources.append(
1456 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1457 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1457 )
1458 )
1458
1459
1459 groups = []
1460 groups = []
1460 for group_key, ref_list, group_name, match in sources:
1461 for group_key, ref_list, group_name, match in sources:
1461 group_refs = []
1462 group_refs = []
1462 for ref_name, ref_id in ref_list:
1463 for ref_name, ref_id in ref_list:
1463 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1464 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1464 group_refs.append((ref_key, ref_name))
1465 group_refs.append((ref_key, ref_name))
1465
1466
1466 if not selected:
1467 if not selected:
1467 if set([commit_id, match]) & set([ref_id, ref_name]):
1468 if set([commit_id, match]) & set([ref_id, ref_name]):
1468 selected = ref_key
1469 selected = ref_key
1469
1470
1470 if group_refs:
1471 if group_refs:
1471 groups.append((group_refs, group_name))
1472 groups.append((group_refs, group_name))
1472
1473
1473 if not selected:
1474 if not selected:
1474 ref = commit_id or branch or bookmark
1475 ref = commit_id or branch or bookmark
1475 if ref:
1476 if ref:
1476 raise CommitDoesNotExistError(
1477 raise CommitDoesNotExistError(
1477 'No commit refs could be found matching: %s' % ref)
1478 'No commit refs could be found matching: %s' % ref)
1478 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1479 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1479 selected = 'branch:%s:%s' % (
1480 selected = 'branch:%s:%s' % (
1480 repo.DEFAULT_BRANCH_NAME,
1481 repo.DEFAULT_BRANCH_NAME,
1481 repo.branches[repo.DEFAULT_BRANCH_NAME]
1482 repo.branches[repo.DEFAULT_BRANCH_NAME]
1482 )
1483 )
1483 elif repo.commit_ids:
1484 elif repo.commit_ids:
1484 # make the user select in this case
1485 # make the user select in this case
1485 selected = None
1486 selected = None
1486 else:
1487 else:
1487 raise EmptyRepositoryError()
1488 raise EmptyRepositoryError()
1488 return groups, selected
1489 return groups, selected
1489
1490
1490 def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
1491 def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
1491 return self._get_diff_from_pr_or_version(
1492 return self._get_diff_from_pr_or_version(
1492 source_repo, source_ref_id, target_ref_id, context=context)
1493 source_repo, source_ref_id, target_ref_id, context=context)
1493
1494
1494 def _get_diff_from_pr_or_version(
1495 def _get_diff_from_pr_or_version(
1495 self, source_repo, source_ref_id, target_ref_id, context):
1496 self, source_repo, source_ref_id, target_ref_id, context):
1496 target_commit = source_repo.get_commit(
1497 target_commit = source_repo.get_commit(
1497 commit_id=safe_str(target_ref_id))
1498 commit_id=safe_str(target_ref_id))
1498 source_commit = source_repo.get_commit(
1499 source_commit = source_repo.get_commit(
1499 commit_id=safe_str(source_ref_id))
1500 commit_id=safe_str(source_ref_id))
1500 if isinstance(source_repo, Repository):
1501 if isinstance(source_repo, Repository):
1501 vcs_repo = source_repo.scm_instance()
1502 vcs_repo = source_repo.scm_instance()
1502 else:
1503 else:
1503 vcs_repo = source_repo
1504 vcs_repo = source_repo
1504
1505
1505 # TODO: johbo: In the context of an update, we cannot reach
1506 # TODO: johbo: In the context of an update, we cannot reach
1506 # the old commit anymore with our normal mechanisms. It needs
1507 # the old commit anymore with our normal mechanisms. It needs
1507 # some sort of special support in the vcs layer to avoid this
1508 # some sort of special support in the vcs layer to avoid this
1508 # workaround.
1509 # workaround.
1509 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1510 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1510 vcs_repo.alias == 'git'):
1511 vcs_repo.alias == 'git'):
1511 source_commit.raw_id = safe_str(source_ref_id)
1512 source_commit.raw_id = safe_str(source_ref_id)
1512
1513
1513 log.debug('calculating diff between '
1514 log.debug('calculating diff between '
1514 'source_ref:%s and target_ref:%s for repo `%s`',
1515 'source_ref:%s and target_ref:%s for repo `%s`',
1515 target_ref_id, source_ref_id,
1516 target_ref_id, source_ref_id,
1516 safe_unicode(vcs_repo.path))
1517 safe_unicode(vcs_repo.path))
1517
1518
1518 vcs_diff = vcs_repo.get_diff(
1519 vcs_diff = vcs_repo.get_diff(
1519 commit1=target_commit, commit2=source_commit, context=context)
1520 commit1=target_commit, commit2=source_commit, context=context)
1520 return vcs_diff
1521 return vcs_diff
1521
1522
1522 def _is_merge_enabled(self, pull_request):
1523 def _is_merge_enabled(self, pull_request):
1523 return self._get_general_setting(
1524 return self._get_general_setting(
1524 pull_request, 'rhodecode_pr_merge_enabled')
1525 pull_request, 'rhodecode_pr_merge_enabled')
1525
1526
1526 def _use_rebase_for_merging(self, pull_request):
1527 def _use_rebase_for_merging(self, pull_request):
1527 repo_type = pull_request.target_repo.repo_type
1528 repo_type = pull_request.target_repo.repo_type
1528 if repo_type == 'hg':
1529 if repo_type == 'hg':
1529 return self._get_general_setting(
1530 return self._get_general_setting(
1530 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1531 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1531 elif repo_type == 'git':
1532 elif repo_type == 'git':
1532 return self._get_general_setting(
1533 return self._get_general_setting(
1533 pull_request, 'rhodecode_git_use_rebase_for_merging')
1534 pull_request, 'rhodecode_git_use_rebase_for_merging')
1534
1535
1535 return False
1536 return False
1536
1537
1537 def _close_branch_before_merging(self, pull_request):
1538 def _close_branch_before_merging(self, pull_request):
1538 repo_type = pull_request.target_repo.repo_type
1539 repo_type = pull_request.target_repo.repo_type
1539 if repo_type == 'hg':
1540 if repo_type == 'hg':
1540 return self._get_general_setting(
1541 return self._get_general_setting(
1541 pull_request, 'rhodecode_hg_close_branch_before_merging')
1542 pull_request, 'rhodecode_hg_close_branch_before_merging')
1542 elif repo_type == 'git':
1543 elif repo_type == 'git':
1543 return self._get_general_setting(
1544 return self._get_general_setting(
1544 pull_request, 'rhodecode_git_close_branch_before_merging')
1545 pull_request, 'rhodecode_git_close_branch_before_merging')
1545
1546
1546 return False
1547 return False
1547
1548
1548 def _get_general_setting(self, pull_request, settings_key, default=False):
1549 def _get_general_setting(self, pull_request, settings_key, default=False):
1549 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1550 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1550 settings = settings_model.get_general_settings()
1551 settings = settings_model.get_general_settings()
1551 return settings.get(settings_key, default)
1552 return settings.get(settings_key, default)
1552
1553
1553 def _log_audit_action(self, action, action_data, user, pull_request):
1554 def _log_audit_action(self, action, action_data, user, pull_request):
1554 audit_logger.store(
1555 audit_logger.store(
1555 action=action,
1556 action=action,
1556 action_data=action_data,
1557 action_data=action_data,
1557 user=user,
1558 user=user,
1558 repo=pull_request.target_repo)
1559 repo=pull_request.target_repo)
1559
1560
1560 def get_reviewer_functions(self):
1561 def get_reviewer_functions(self):
1561 """
1562 """
1562 Fetches functions for validation and fetching default reviewers.
1563 Fetches functions for validation and fetching default reviewers.
1563 If available we use the EE package, else we fallback to CE
1564 If available we use the EE package, else we fallback to CE
1564 package functions
1565 package functions
1565 """
1566 """
1566 try:
1567 try:
1567 from rc_reviewers.utils import get_default_reviewers_data
1568 from rc_reviewers.utils import get_default_reviewers_data
1568 from rc_reviewers.utils import validate_default_reviewers
1569 from rc_reviewers.utils import validate_default_reviewers
1569 except ImportError:
1570 except ImportError:
1570 from rhodecode.apps.repository.utils import \
1571 from rhodecode.apps.repository.utils import \
1571 get_default_reviewers_data
1572 get_default_reviewers_data
1572 from rhodecode.apps.repository.utils import \
1573 from rhodecode.apps.repository.utils import \
1573 validate_default_reviewers
1574 validate_default_reviewers
1574
1575
1575 return get_default_reviewers_data, validate_default_reviewers
1576 return get_default_reviewers_data, validate_default_reviewers
1576
1577
1577
1578
1578 class MergeCheck(object):
1579 class MergeCheck(object):
1579 """
1580 """
1580 Perform Merge Checks and returns a check object which stores information
1581 Perform Merge Checks and returns a check object which stores information
1581 about merge errors, and merge conditions
1582 about merge errors, and merge conditions
1582 """
1583 """
1583 TODO_CHECK = 'todo'
1584 TODO_CHECK = 'todo'
1584 PERM_CHECK = 'perm'
1585 PERM_CHECK = 'perm'
1585 REVIEW_CHECK = 'review'
1586 REVIEW_CHECK = 'review'
1586 MERGE_CHECK = 'merge'
1587 MERGE_CHECK = 'merge'
1587
1588
1588 def __init__(self):
1589 def __init__(self):
1589 self.review_status = None
1590 self.review_status = None
1590 self.merge_possible = None
1591 self.merge_possible = None
1591 self.merge_msg = ''
1592 self.merge_msg = ''
1592 self.failed = None
1593 self.failed = None
1593 self.errors = []
1594 self.errors = []
1594 self.error_details = OrderedDict()
1595 self.error_details = OrderedDict()
1595
1596
1596 def push_error(self, error_type, message, error_key, details):
1597 def push_error(self, error_type, message, error_key, details):
1597 self.failed = True
1598 self.failed = True
1598 self.errors.append([error_type, message])
1599 self.errors.append([error_type, message])
1599 self.error_details[error_key] = dict(
1600 self.error_details[error_key] = dict(
1600 details=details,
1601 details=details,
1601 error_type=error_type,
1602 error_type=error_type,
1602 message=message
1603 message=message
1603 )
1604 )
1604
1605
1605 @classmethod
1606 @classmethod
1606 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1607 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1607 force_shadow_repo_refresh=False):
1608 force_shadow_repo_refresh=False):
1608 _ = translator
1609 _ = translator
1609 merge_check = cls()
1610 merge_check = cls()
1610
1611
1611 # permissions to merge
1612 # permissions to merge
1612 user_allowed_to_merge = PullRequestModel().check_user_merge(
1613 user_allowed_to_merge = PullRequestModel().check_user_merge(
1613 pull_request, auth_user)
1614 pull_request, auth_user)
1614 if not user_allowed_to_merge:
1615 if not user_allowed_to_merge:
1615 log.debug("MergeCheck: cannot merge, approval is pending.")
1616 log.debug("MergeCheck: cannot merge, approval is pending.")
1616
1617
1617 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1618 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1618 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1619 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1619 if fail_early:
1620 if fail_early:
1620 return merge_check
1621 return merge_check
1621
1622
1622 # permission to merge into the target branch
1623 # permission to merge into the target branch
1623 target_commit_id = pull_request.target_ref_parts.commit_id
1624 target_commit_id = pull_request.target_ref_parts.commit_id
1624 if pull_request.target_ref_parts.type == 'branch':
1625 if pull_request.target_ref_parts.type == 'branch':
1625 branch_name = pull_request.target_ref_parts.name
1626 branch_name = pull_request.target_ref_parts.name
1626 else:
1627 else:
1627 # for mercurial we can always figure out the branch from the commit
1628 # for mercurial we can always figure out the branch from the commit
1628 # in case of bookmark
1629 # in case of bookmark
1629 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1630 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1630 branch_name = target_commit.branch
1631 branch_name = target_commit.branch
1631
1632
1632 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1633 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1633 pull_request.target_repo.repo_name, branch_name)
1634 pull_request.target_repo.repo_name, branch_name)
1634 if branch_perm and branch_perm == 'branch.none':
1635 if branch_perm and branch_perm == 'branch.none':
1635 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1636 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1636 branch_name, rule)
1637 branch_name, rule)
1637 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1638 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1638 if fail_early:
1639 if fail_early:
1639 return merge_check
1640 return merge_check
1640
1641
1641 # review status, must be always present
1642 # review status, must be always present
1642 review_status = pull_request.calculated_review_status()
1643 review_status = pull_request.calculated_review_status()
1643 merge_check.review_status = review_status
1644 merge_check.review_status = review_status
1644
1645
1645 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1646 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1646 if not status_approved:
1647 if not status_approved:
1647 log.debug("MergeCheck: cannot merge, approval is pending.")
1648 log.debug("MergeCheck: cannot merge, approval is pending.")
1648
1649
1649 msg = _('Pull request reviewer approval is pending.')
1650 msg = _('Pull request reviewer approval is pending.')
1650
1651
1651 merge_check.push_error(
1652 merge_check.push_error(
1652 'warning', msg, cls.REVIEW_CHECK, review_status)
1653 'warning', msg, cls.REVIEW_CHECK, review_status)
1653
1654
1654 if fail_early:
1655 if fail_early:
1655 return merge_check
1656 return merge_check
1656
1657
1657 # left over TODOs
1658 # left over TODOs
1658 todos = CommentsModel().get_unresolved_todos(pull_request)
1659 todos = CommentsModel().get_unresolved_todos(pull_request)
1659 if todos:
1660 if todos:
1660 log.debug("MergeCheck: cannot merge, {} "
1661 log.debug("MergeCheck: cannot merge, {} "
1661 "unresolved todos left.".format(len(todos)))
1662 "unresolved todos left.".format(len(todos)))
1662
1663
1663 if len(todos) == 1:
1664 if len(todos) == 1:
1664 msg = _('Cannot merge, {} TODO still not resolved.').format(
1665 msg = _('Cannot merge, {} TODO still not resolved.').format(
1665 len(todos))
1666 len(todos))
1666 else:
1667 else:
1667 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1668 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1668 len(todos))
1669 len(todos))
1669
1670
1670 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1671 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1671
1672
1672 if fail_early:
1673 if fail_early:
1673 return merge_check
1674 return merge_check
1674
1675
1675 # merge possible, here is the filesystem simulation + shadow repo
1676 # merge possible, here is the filesystem simulation + shadow repo
1676 merge_status, msg = PullRequestModel().merge_status(
1677 merge_status, msg = PullRequestModel().merge_status(
1677 pull_request, translator=translator,
1678 pull_request, translator=translator,
1678 force_shadow_repo_refresh=force_shadow_repo_refresh)
1679 force_shadow_repo_refresh=force_shadow_repo_refresh)
1679 merge_check.merge_possible = merge_status
1680 merge_check.merge_possible = merge_status
1680 merge_check.merge_msg = msg
1681 merge_check.merge_msg = msg
1681 if not merge_status:
1682 if not merge_status:
1682 log.debug(
1683 log.debug(
1683 "MergeCheck: cannot merge, pull request merge not possible.")
1684 "MergeCheck: cannot merge, pull request merge not possible.")
1684 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1685 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1685
1686
1686 if fail_early:
1687 if fail_early:
1687 return merge_check
1688 return merge_check
1688
1689
1689 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1690 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1690 return merge_check
1691 return merge_check
1691
1692
1692 @classmethod
1693 @classmethod
1693 def get_merge_conditions(cls, pull_request, translator):
1694 def get_merge_conditions(cls, pull_request, translator):
1694 _ = translator
1695 _ = translator
1695 merge_details = {}
1696 merge_details = {}
1696
1697
1697 model = PullRequestModel()
1698 model = PullRequestModel()
1698 use_rebase = model._use_rebase_for_merging(pull_request)
1699 use_rebase = model._use_rebase_for_merging(pull_request)
1699
1700
1700 if use_rebase:
1701 if use_rebase:
1701 merge_details['merge_strategy'] = dict(
1702 merge_details['merge_strategy'] = dict(
1702 details={},
1703 details={},
1703 message=_('Merge strategy: rebase')
1704 message=_('Merge strategy: rebase')
1704 )
1705 )
1705 else:
1706 else:
1706 merge_details['merge_strategy'] = dict(
1707 merge_details['merge_strategy'] = dict(
1707 details={},
1708 details={},
1708 message=_('Merge strategy: explicit merge commit')
1709 message=_('Merge strategy: explicit merge commit')
1709 )
1710 )
1710
1711
1711 close_branch = model._close_branch_before_merging(pull_request)
1712 close_branch = model._close_branch_before_merging(pull_request)
1712 if close_branch:
1713 if close_branch:
1713 repo_type = pull_request.target_repo.repo_type
1714 repo_type = pull_request.target_repo.repo_type
1714 if repo_type == 'hg':
1715 if repo_type == 'hg':
1715 close_msg = _('Source branch will be closed after merge.')
1716 close_msg = _('Source branch will be closed after merge.')
1716 elif repo_type == 'git':
1717 elif repo_type == 'git':
1717 close_msg = _('Source branch will be deleted after merge.')
1718 close_msg = _('Source branch will be deleted after merge.')
1718
1719
1719 merge_details['close_branch'] = dict(
1720 merge_details['close_branch'] = dict(
1720 details={},
1721 details={},
1721 message=close_msg
1722 message=close_msg
1722 )
1723 )
1723
1724
1724 return merge_details
1725 return merge_details
1725
1726
1726 ChangeTuple = collections.namedtuple(
1727 ChangeTuple = collections.namedtuple(
1727 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1728 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1728
1729
1729 FileChangeTuple = collections.namedtuple(
1730 FileChangeTuple = collections.namedtuple(
1730 'FileChangeTuple', ['added', 'modified', 'removed'])
1731 'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,833 +1,834 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Scm model for RhodeCode
22 Scm model for RhodeCode
23 """
23 """
24
24
25 import os.path
25 import os.path
26 import traceback
26 import traceback
27 import logging
27 import logging
28 import cStringIO
28 import cStringIO
29
29
30 from sqlalchemy import func
30 from sqlalchemy import func
31 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from zope.cachedescriptors.property import Lazy as LazyProperty
32
32
33 import rhodecode
33 import rhodecode
34 from rhodecode.lib.vcs import get_backend
34 from rhodecode.lib.vcs import get_backend
35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
36 from rhodecode.lib.vcs.nodes import FileNode
36 from rhodecode.lib.vcs.nodes import FileNode
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 from rhodecode.lib import helpers as h, rc_cache
38 from rhodecode.lib import helpers as h, rc_cache
39 from rhodecode.lib.auth import (
39 from rhodecode.lib.auth import (
40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
41 HasUserGroupPermissionAny)
41 HasUserGroupPermissionAny)
42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
43 from rhodecode.lib import hooks_utils
43 from rhodecode.lib import hooks_utils
44 from rhodecode.lib.utils import (
44 from rhodecode.lib.utils import (
45 get_filesystem_repos, make_db_config)
45 get_filesystem_repos, make_db_config)
46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
47 from rhodecode.lib.system_info import get_system_info
47 from rhodecode.lib.system_info import get_system_info
48 from rhodecode.model import BaseModel
48 from rhodecode.model import BaseModel
49 from rhodecode.model.db import (
49 from rhodecode.model.db import (
50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 PullRequest)
51 PullRequest)
52 from rhodecode.model.settings import VcsSettingsModel
52 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 class UserTemp(object):
58 class UserTemp(object):
59 def __init__(self, user_id):
59 def __init__(self, user_id):
60 self.user_id = user_id
60 self.user_id = user_id
61
61
62 def __repr__(self):
62 def __repr__(self):
63 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
63 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
64
64
65
65
66 class RepoTemp(object):
66 class RepoTemp(object):
67 def __init__(self, repo_id):
67 def __init__(self, repo_id):
68 self.repo_id = repo_id
68 self.repo_id = repo_id
69
69
70 def __repr__(self):
70 def __repr__(self):
71 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
71 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
72
72
73
73
74 class SimpleCachedRepoList(object):
74 class SimpleCachedRepoList(object):
75 """
75 """
76 Lighter version of of iteration of repos without the scm initialisation,
76 Lighter version of of iteration of repos without the scm initialisation,
77 and with cache usage
77 and with cache usage
78 """
78 """
79 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
79 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
80 self.db_repo_list = db_repo_list
80 self.db_repo_list = db_repo_list
81 self.repos_path = repos_path
81 self.repos_path = repos_path
82 self.order_by = order_by
82 self.order_by = order_by
83 self.reversed = (order_by or '').startswith('-')
83 self.reversed = (order_by or '').startswith('-')
84 if not perm_set:
84 if not perm_set:
85 perm_set = ['repository.read', 'repository.write',
85 perm_set = ['repository.read', 'repository.write',
86 'repository.admin']
86 'repository.admin']
87 self.perm_set = perm_set
87 self.perm_set = perm_set
88
88
89 def __len__(self):
89 def __len__(self):
90 return len(self.db_repo_list)
90 return len(self.db_repo_list)
91
91
92 def __repr__(self):
92 def __repr__(self):
93 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
93 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
94
94
95 def __iter__(self):
95 def __iter__(self):
96 for dbr in self.db_repo_list:
96 for dbr in self.db_repo_list:
97 # check permission at this level
97 # check permission at this level
98 has_perm = HasRepoPermissionAny(*self.perm_set)(
98 has_perm = HasRepoPermissionAny(*self.perm_set)(
99 dbr.repo_name, 'SimpleCachedRepoList check')
99 dbr.repo_name, 'SimpleCachedRepoList check')
100 if not has_perm:
100 if not has_perm:
101 continue
101 continue
102
102
103 tmp_d = {
103 tmp_d = {
104 'name': dbr.repo_name,
104 'name': dbr.repo_name,
105 'dbrepo': dbr.get_dict(),
105 'dbrepo': dbr.get_dict(),
106 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
106 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
107 }
107 }
108 yield tmp_d
108 yield tmp_d
109
109
110
110
111 class _PermCheckIterator(object):
111 class _PermCheckIterator(object):
112
112
113 def __init__(
113 def __init__(
114 self, obj_list, obj_attr, perm_set, perm_checker,
114 self, obj_list, obj_attr, perm_set, perm_checker,
115 extra_kwargs=None):
115 extra_kwargs=None):
116 """
116 """
117 Creates iterator from given list of objects, additionally
117 Creates iterator from given list of objects, additionally
118 checking permission for them from perm_set var
118 checking permission for them from perm_set var
119
119
120 :param obj_list: list of db objects
120 :param obj_list: list of db objects
121 :param obj_attr: attribute of object to pass into perm_checker
121 :param obj_attr: attribute of object to pass into perm_checker
122 :param perm_set: list of permissions to check
122 :param perm_set: list of permissions to check
123 :param perm_checker: callable to check permissions against
123 :param perm_checker: callable to check permissions against
124 """
124 """
125 self.obj_list = obj_list
125 self.obj_list = obj_list
126 self.obj_attr = obj_attr
126 self.obj_attr = obj_attr
127 self.perm_set = perm_set
127 self.perm_set = perm_set
128 self.perm_checker = perm_checker
128 self.perm_checker = perm_checker
129 self.extra_kwargs = extra_kwargs or {}
129 self.extra_kwargs = extra_kwargs or {}
130
130
131 def __len__(self):
131 def __len__(self):
132 return len(self.obj_list)
132 return len(self.obj_list)
133
133
134 def __repr__(self):
134 def __repr__(self):
135 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
135 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
136
136
137 def __iter__(self):
137 def __iter__(self):
138 checker = self.perm_checker(*self.perm_set)
138 checker = self.perm_checker(*self.perm_set)
139 for db_obj in self.obj_list:
139 for db_obj in self.obj_list:
140 # check permission at this level
140 # check permission at this level
141 name = getattr(db_obj, self.obj_attr, None)
141 name = getattr(db_obj, self.obj_attr, None)
142 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
142 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
143 continue
143 continue
144
144
145 yield db_obj
145 yield db_obj
146
146
147
147
148 class RepoList(_PermCheckIterator):
148 class RepoList(_PermCheckIterator):
149
149
150 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
150 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
151 if not perm_set:
151 if not perm_set:
152 perm_set = [
152 perm_set = [
153 'repository.read', 'repository.write', 'repository.admin']
153 'repository.read', 'repository.write', 'repository.admin']
154
154
155 super(RepoList, self).__init__(
155 super(RepoList, self).__init__(
156 obj_list=db_repo_list,
156 obj_list=db_repo_list,
157 obj_attr='repo_name', perm_set=perm_set,
157 obj_attr='repo_name', perm_set=perm_set,
158 perm_checker=HasRepoPermissionAny,
158 perm_checker=HasRepoPermissionAny,
159 extra_kwargs=extra_kwargs)
159 extra_kwargs=extra_kwargs)
160
160
161
161
162 class RepoGroupList(_PermCheckIterator):
162 class RepoGroupList(_PermCheckIterator):
163
163
164 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
164 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
165 if not perm_set:
165 if not perm_set:
166 perm_set = ['group.read', 'group.write', 'group.admin']
166 perm_set = ['group.read', 'group.write', 'group.admin']
167
167
168 super(RepoGroupList, self).__init__(
168 super(RepoGroupList, self).__init__(
169 obj_list=db_repo_group_list,
169 obj_list=db_repo_group_list,
170 obj_attr='group_name', perm_set=perm_set,
170 obj_attr='group_name', perm_set=perm_set,
171 perm_checker=HasRepoGroupPermissionAny,
171 perm_checker=HasRepoGroupPermissionAny,
172 extra_kwargs=extra_kwargs)
172 extra_kwargs=extra_kwargs)
173
173
174
174
175 class UserGroupList(_PermCheckIterator):
175 class UserGroupList(_PermCheckIterator):
176
176
177 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
177 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
178 if not perm_set:
178 if not perm_set:
179 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
179 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
180
180
181 super(UserGroupList, self).__init__(
181 super(UserGroupList, self).__init__(
182 obj_list=db_user_group_list,
182 obj_list=db_user_group_list,
183 obj_attr='users_group_name', perm_set=perm_set,
183 obj_attr='users_group_name', perm_set=perm_set,
184 perm_checker=HasUserGroupPermissionAny,
184 perm_checker=HasUserGroupPermissionAny,
185 extra_kwargs=extra_kwargs)
185 extra_kwargs=extra_kwargs)
186
186
187
187
188 class ScmModel(BaseModel):
188 class ScmModel(BaseModel):
189 """
189 """
190 Generic Scm Model
190 Generic Scm Model
191 """
191 """
192
192
193 @LazyProperty
193 @LazyProperty
194 def repos_path(self):
194 def repos_path(self):
195 """
195 """
196 Gets the repositories root path from database
196 Gets the repositories root path from database
197 """
197 """
198
198
199 settings_model = VcsSettingsModel(sa=self.sa)
199 settings_model = VcsSettingsModel(sa=self.sa)
200 return settings_model.get_repos_location()
200 return settings_model.get_repos_location()
201
201
202 def repo_scan(self, repos_path=None):
202 def repo_scan(self, repos_path=None):
203 """
203 """
204 Listing of repositories in given path. This path should not be a
204 Listing of repositories in given path. This path should not be a
205 repository itself. Return a dictionary of repository objects
205 repository itself. Return a dictionary of repository objects
206
206
207 :param repos_path: path to directory containing repositories
207 :param repos_path: path to directory containing repositories
208 """
208 """
209
209
210 if repos_path is None:
210 if repos_path is None:
211 repos_path = self.repos_path
211 repos_path = self.repos_path
212
212
213 log.info('scanning for repositories in %s', repos_path)
213 log.info('scanning for repositories in %s', repos_path)
214
214
215 config = make_db_config()
215 config = make_db_config()
216 config.set('extensions', 'largefiles', '')
216 config.set('extensions', 'largefiles', '')
217 repos = {}
217 repos = {}
218
218
219 for name, path in get_filesystem_repos(repos_path, recursive=True):
219 for name, path in get_filesystem_repos(repos_path, recursive=True):
220 # name need to be decomposed and put back together using the /
220 # name need to be decomposed and put back together using the /
221 # since this is internal storage separator for rhodecode
221 # since this is internal storage separator for rhodecode
222 name = Repository.normalize_repo_name(name)
222 name = Repository.normalize_repo_name(name)
223
223
224 try:
224 try:
225 if name in repos:
225 if name in repos:
226 raise RepositoryError('Duplicate repository name %s '
226 raise RepositoryError('Duplicate repository name %s '
227 'found in %s' % (name, path))
227 'found in %s' % (name, path))
228 elif path[0] in rhodecode.BACKENDS:
228 elif path[0] in rhodecode.BACKENDS:
229 klass = get_backend(path[0])
229 klass = get_backend(path[0])
230 repos[name] = klass(path[1], config=config)
230 repos[name] = klass(path[1], config=config)
231 except OSError:
231 except OSError:
232 continue
232 continue
233 log.debug('found %s paths with repositories', len(repos))
233 log.debug('found %s paths with repositories', len(repos))
234 return repos
234 return repos
235
235
236 def get_repos(self, all_repos=None, sort_key=None):
236 def get_repos(self, all_repos=None, sort_key=None):
237 """
237 """
238 Get all repositories from db and for each repo create it's
238 Get all repositories from db and for each repo create it's
239 backend instance and fill that backed with information from database
239 backend instance and fill that backed with information from database
240
240
241 :param all_repos: list of repository names as strings
241 :param all_repos: list of repository names as strings
242 give specific repositories list, good for filtering
242 give specific repositories list, good for filtering
243
243
244 :param sort_key: initial sorting of repositories
244 :param sort_key: initial sorting of repositories
245 """
245 """
246 if all_repos is None:
246 if all_repos is None:
247 all_repos = self.sa.query(Repository)\
247 all_repos = self.sa.query(Repository)\
248 .filter(Repository.group_id == None)\
248 .filter(Repository.group_id == None)\
249 .order_by(func.lower(Repository.repo_name)).all()
249 .order_by(func.lower(Repository.repo_name)).all()
250 repo_iter = SimpleCachedRepoList(
250 repo_iter = SimpleCachedRepoList(
251 all_repos, repos_path=self.repos_path, order_by=sort_key)
251 all_repos, repos_path=self.repos_path, order_by=sort_key)
252 return repo_iter
252 return repo_iter
253
253
254 def get_repo_groups(self, all_groups=None):
254 def get_repo_groups(self, all_groups=None):
255 if all_groups is None:
255 if all_groups is None:
256 all_groups = RepoGroup.query()\
256 all_groups = RepoGroup.query()\
257 .filter(RepoGroup.group_parent_id == None).all()
257 .filter(RepoGroup.group_parent_id == None).all()
258 return [x for x in RepoGroupList(all_groups)]
258 return [x for x in RepoGroupList(all_groups)]
259
259
260 def mark_for_invalidation(self, repo_name, delete=False):
260 def mark_for_invalidation(self, repo_name, delete=False):
261 """
261 """
262 Mark caches of this repo invalid in the database. `delete` flag
262 Mark caches of this repo invalid in the database. `delete` flag
263 removes the cache entries
263 removes the cache entries
264
264
265 :param repo_name: the repo_name for which caches should be marked
265 :param repo_name: the repo_name for which caches should be marked
266 invalid, or deleted
266 invalid, or deleted
267 :param delete: delete the entry keys instead of setting bool
267 :param delete: delete the entry keys instead of setting bool
268 flag on them, and also purge caches used by the dogpile
268 flag on them, and also purge caches used by the dogpile
269 """
269 """
270 repo = Repository.get_by_repo_name(repo_name)
270 repo = Repository.get_by_repo_name(repo_name)
271
271
272 if repo:
272 if repo:
273 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
273 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
274 repo_id=repo.repo_id)
274 repo_id=repo.repo_id)
275 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
275 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
276
276
277 repo_id = repo.repo_id
277 repo_id = repo.repo_id
278 config = repo._config
278 config = repo._config
279 config.set('extensions', 'largefiles', '')
279 config.set('extensions', 'largefiles', '')
280 repo.update_commit_cache(config=config, cs_cache=None)
280 repo.update_commit_cache(config=config, cs_cache=None)
281 if delete:
281 if delete:
282 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
282 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
283 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
283 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
284
284
285 def toggle_following_repo(self, follow_repo_id, user_id):
285 def toggle_following_repo(self, follow_repo_id, user_id):
286
286
287 f = self.sa.query(UserFollowing)\
287 f = self.sa.query(UserFollowing)\
288 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
288 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
289 .filter(UserFollowing.user_id == user_id).scalar()
289 .filter(UserFollowing.user_id == user_id).scalar()
290
290
291 if f is not None:
291 if f is not None:
292 try:
292 try:
293 self.sa.delete(f)
293 self.sa.delete(f)
294 return
294 return
295 except Exception:
295 except Exception:
296 log.error(traceback.format_exc())
296 log.error(traceback.format_exc())
297 raise
297 raise
298
298
299 try:
299 try:
300 f = UserFollowing()
300 f = UserFollowing()
301 f.user_id = user_id
301 f.user_id = user_id
302 f.follows_repo_id = follow_repo_id
302 f.follows_repo_id = follow_repo_id
303 self.sa.add(f)
303 self.sa.add(f)
304 except Exception:
304 except Exception:
305 log.error(traceback.format_exc())
305 log.error(traceback.format_exc())
306 raise
306 raise
307
307
308 def toggle_following_user(self, follow_user_id, user_id):
308 def toggle_following_user(self, follow_user_id, user_id):
309 f = self.sa.query(UserFollowing)\
309 f = self.sa.query(UserFollowing)\
310 .filter(UserFollowing.follows_user_id == follow_user_id)\
310 .filter(UserFollowing.follows_user_id == follow_user_id)\
311 .filter(UserFollowing.user_id == user_id).scalar()
311 .filter(UserFollowing.user_id == user_id).scalar()
312
312
313 if f is not None:
313 if f is not None:
314 try:
314 try:
315 self.sa.delete(f)
315 self.sa.delete(f)
316 return
316 return
317 except Exception:
317 except Exception:
318 log.error(traceback.format_exc())
318 log.error(traceback.format_exc())
319 raise
319 raise
320
320
321 try:
321 try:
322 f = UserFollowing()
322 f = UserFollowing()
323 f.user_id = user_id
323 f.user_id = user_id
324 f.follows_user_id = follow_user_id
324 f.follows_user_id = follow_user_id
325 self.sa.add(f)
325 self.sa.add(f)
326 except Exception:
326 except Exception:
327 log.error(traceback.format_exc())
327 log.error(traceback.format_exc())
328 raise
328 raise
329
329
330 def is_following_repo(self, repo_name, user_id, cache=False):
330 def is_following_repo(self, repo_name, user_id, cache=False):
331 r = self.sa.query(Repository)\
331 r = self.sa.query(Repository)\
332 .filter(Repository.repo_name == repo_name).scalar()
332 .filter(Repository.repo_name == repo_name).scalar()
333
333
334 f = self.sa.query(UserFollowing)\
334 f = self.sa.query(UserFollowing)\
335 .filter(UserFollowing.follows_repository == r)\
335 .filter(UserFollowing.follows_repository == r)\
336 .filter(UserFollowing.user_id == user_id).scalar()
336 .filter(UserFollowing.user_id == user_id).scalar()
337
337
338 return f is not None
338 return f is not None
339
339
340 def is_following_user(self, username, user_id, cache=False):
340 def is_following_user(self, username, user_id, cache=False):
341 u = User.get_by_username(username)
341 u = User.get_by_username(username)
342
342
343 f = self.sa.query(UserFollowing)\
343 f = self.sa.query(UserFollowing)\
344 .filter(UserFollowing.follows_user == u)\
344 .filter(UserFollowing.follows_user == u)\
345 .filter(UserFollowing.user_id == user_id).scalar()
345 .filter(UserFollowing.user_id == user_id).scalar()
346
346
347 return f is not None
347 return f is not None
348
348
349 def get_followers(self, repo):
349 def get_followers(self, repo):
350 repo = self._get_repo(repo)
350 repo = self._get_repo(repo)
351
351
352 return self.sa.query(UserFollowing)\
352 return self.sa.query(UserFollowing)\
353 .filter(UserFollowing.follows_repository == repo).count()
353 .filter(UserFollowing.follows_repository == repo).count()
354
354
355 def get_forks(self, repo):
355 def get_forks(self, repo):
356 repo = self._get_repo(repo)
356 repo = self._get_repo(repo)
357 return self.sa.query(Repository)\
357 return self.sa.query(Repository)\
358 .filter(Repository.fork == repo).count()
358 .filter(Repository.fork == repo).count()
359
359
360 def get_pull_requests(self, repo):
360 def get_pull_requests(self, repo):
361 repo = self._get_repo(repo)
361 repo = self._get_repo(repo)
362 return self.sa.query(PullRequest)\
362 return self.sa.query(PullRequest)\
363 .filter(PullRequest.target_repo == repo)\
363 .filter(PullRequest.target_repo == repo)\
364 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
364 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
365
365
366 def mark_as_fork(self, repo, fork, user):
366 def mark_as_fork(self, repo, fork, user):
367 repo = self._get_repo(repo)
367 repo = self._get_repo(repo)
368 fork = self._get_repo(fork)
368 fork = self._get_repo(fork)
369 if fork and repo.repo_id == fork.repo_id:
369 if fork and repo.repo_id == fork.repo_id:
370 raise Exception("Cannot set repository as fork of itself")
370 raise Exception("Cannot set repository as fork of itself")
371
371
372 if fork and repo.repo_type != fork.repo_type:
372 if fork and repo.repo_type != fork.repo_type:
373 raise RepositoryError(
373 raise RepositoryError(
374 "Cannot set repository as fork of repository with other type")
374 "Cannot set repository as fork of repository with other type")
375
375
376 repo.fork = fork
376 repo.fork = fork
377 self.sa.add(repo)
377 self.sa.add(repo)
378 return repo
378 return repo
379
379
380 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
380 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
381 dbrepo = self._get_repo(repo)
381 dbrepo = self._get_repo(repo)
382 remote_uri = remote_uri or dbrepo.clone_uri
382 remote_uri = remote_uri or dbrepo.clone_uri
383 if not remote_uri:
383 if not remote_uri:
384 raise Exception("This repository doesn't have a clone uri")
384 raise Exception("This repository doesn't have a clone uri")
385
385
386 repo = dbrepo.scm_instance(cache=False)
386 repo = dbrepo.scm_instance(cache=False)
387 repo.config.clear_section('hooks')
387 repo.config.clear_section('hooks')
388
388
389 try:
389 try:
390 # NOTE(marcink): add extra validation so we skip invalid urls
390 # NOTE(marcink): add extra validation so we skip invalid urls
391 # this is due this tasks can be executed via scheduler without
391 # this is due this tasks can be executed via scheduler without
392 # proper validation of remote_uri
392 # proper validation of remote_uri
393 if validate_uri:
393 if validate_uri:
394 config = make_db_config(clear_session=False)
394 config = make_db_config(clear_session=False)
395 url_validator(remote_uri, dbrepo.repo_type, config)
395 url_validator(remote_uri, dbrepo.repo_type, config)
396 except InvalidCloneUrl:
396 except InvalidCloneUrl:
397 raise
397 raise
398
398
399 repo_name = dbrepo.repo_name
399 repo_name = dbrepo.repo_name
400 try:
400 try:
401 # TODO: we need to make sure those operations call proper hooks !
401 # TODO: we need to make sure those operations call proper hooks !
402 repo.fetch(remote_uri)
402 repo.fetch(remote_uri)
403
403
404 self.mark_for_invalidation(repo_name)
404 self.mark_for_invalidation(repo_name)
405 except Exception:
405 except Exception:
406 log.error(traceback.format_exc())
406 log.error(traceback.format_exc())
407 raise
407 raise
408
408
409 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
409 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
410 dbrepo = self._get_repo(repo)
410 dbrepo = self._get_repo(repo)
411 remote_uri = remote_uri or dbrepo.push_uri
411 remote_uri = remote_uri or dbrepo.push_uri
412 if not remote_uri:
412 if not remote_uri:
413 raise Exception("This repository doesn't have a clone uri")
413 raise Exception("This repository doesn't have a clone uri")
414
414
415 repo = dbrepo.scm_instance(cache=False)
415 repo = dbrepo.scm_instance(cache=False)
416 repo.config.clear_section('hooks')
416 repo.config.clear_section('hooks')
417
417
418 try:
418 try:
419 # NOTE(marcink): add extra validation so we skip invalid urls
419 # NOTE(marcink): add extra validation so we skip invalid urls
420 # this is due this tasks can be executed via scheduler without
420 # this is due this tasks can be executed via scheduler without
421 # proper validation of remote_uri
421 # proper validation of remote_uri
422 if validate_uri:
422 if validate_uri:
423 config = make_db_config(clear_session=False)
423 config = make_db_config(clear_session=False)
424 url_validator(remote_uri, dbrepo.repo_type, config)
424 url_validator(remote_uri, dbrepo.repo_type, config)
425 except InvalidCloneUrl:
425 except InvalidCloneUrl:
426 raise
426 raise
427
427
428 try:
428 try:
429 repo.push(remote_uri)
429 repo.push(remote_uri)
430 except Exception:
430 except Exception:
431 log.error(traceback.format_exc())
431 log.error(traceback.format_exc())
432 raise
432 raise
433
433
434 def commit_change(self, repo, repo_name, commit, user, author, message,
434 def commit_change(self, repo, repo_name, commit, user, author, message,
435 content, f_path):
435 content, f_path):
436 """
436 """
437 Commits changes
437 Commits changes
438
438
439 :param repo: SCM instance
439 :param repo: SCM instance
440
440
441 """
441 """
442 user = self._get_user(user)
442 user = self._get_user(user)
443
443
444 # decoding here will force that we have proper encoded values
444 # decoding here will force that we have proper encoded values
445 # in any other case this will throw exceptions and deny commit
445 # in any other case this will throw exceptions and deny commit
446 content = safe_str(content)
446 content = safe_str(content)
447 path = safe_str(f_path)
447 path = safe_str(f_path)
448 # message and author needs to be unicode
448 # message and author needs to be unicode
449 # proper backend should then translate that into required type
449 # proper backend should then translate that into required type
450 message = safe_unicode(message)
450 message = safe_unicode(message)
451 author = safe_unicode(author)
451 author = safe_unicode(author)
452 imc = repo.in_memory_commit
452 imc = repo.in_memory_commit
453 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
453 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
454 try:
454 try:
455 # TODO: handle pre-push action !
455 # TODO: handle pre-push action !
456 tip = imc.commit(
456 tip = imc.commit(
457 message=message, author=author, parents=[commit],
457 message=message, author=author, parents=[commit],
458 branch=commit.branch)
458 branch=commit.branch)
459 except Exception as e:
459 except Exception as e:
460 log.error(traceback.format_exc())
460 log.error(traceback.format_exc())
461 raise IMCCommitError(str(e))
461 raise IMCCommitError(str(e))
462 finally:
462 finally:
463 # always clear caches, if commit fails we want fresh object also
463 # always clear caches, if commit fails we want fresh object also
464 self.mark_for_invalidation(repo_name)
464 self.mark_for_invalidation(repo_name)
465
465
466 # We trigger the post-push action
466 # We trigger the post-push action
467 hooks_utils.trigger_post_push_hook(
467 hooks_utils.trigger_post_push_hook(
468 username=user.username, action='push_local', repo_name=repo_name,
468 username=user.username, action='push_local', hook_type='post_push',
469 repo_alias=repo.alias, commit_ids=[tip.raw_id])
469 repo_name=repo_name, repo_alias=repo.alias, commit_ids=[tip.raw_id])
470 return tip
470 return tip
471
471
472 def _sanitize_path(self, f_path):
472 def _sanitize_path(self, f_path):
473 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
473 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
474 raise NonRelativePathError('%s is not an relative path' % f_path)
474 raise NonRelativePathError('%s is not an relative path' % f_path)
475 if f_path:
475 if f_path:
476 f_path = os.path.normpath(f_path)
476 f_path = os.path.normpath(f_path)
477 return f_path
477 return f_path
478
478
479 def get_dirnode_metadata(self, request, commit, dir_node):
479 def get_dirnode_metadata(self, request, commit, dir_node):
480 if not dir_node.is_dir():
480 if not dir_node.is_dir():
481 return []
481 return []
482
482
483 data = []
483 data = []
484 for node in dir_node:
484 for node in dir_node:
485 if not node.is_file():
485 if not node.is_file():
486 # we skip file-nodes
486 # we skip file-nodes
487 continue
487 continue
488
488
489 last_commit = node.last_commit
489 last_commit = node.last_commit
490 last_commit_date = last_commit.date
490 last_commit_date = last_commit.date
491 data.append({
491 data.append({
492 'name': node.name,
492 'name': node.name,
493 'size': h.format_byte_size_binary(node.size),
493 'size': h.format_byte_size_binary(node.size),
494 'modified_at': h.format_date(last_commit_date),
494 'modified_at': h.format_date(last_commit_date),
495 'modified_ts': last_commit_date.isoformat(),
495 'modified_ts': last_commit_date.isoformat(),
496 'revision': last_commit.revision,
496 'revision': last_commit.revision,
497 'short_id': last_commit.short_id,
497 'short_id': last_commit.short_id,
498 'message': h.escape(last_commit.message),
498 'message': h.escape(last_commit.message),
499 'author': h.escape(last_commit.author),
499 'author': h.escape(last_commit.author),
500 'user_profile': h.gravatar_with_user(
500 'user_profile': h.gravatar_with_user(
501 request, last_commit.author),
501 request, last_commit.author),
502 })
502 })
503
503
504 return data
504 return data
505
505
506 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
506 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
507 extended_info=False, content=False, max_file_bytes=None):
507 extended_info=False, content=False, max_file_bytes=None):
508 """
508 """
509 recursive walk in root dir and return a set of all path in that dir
509 recursive walk in root dir and return a set of all path in that dir
510 based on repository walk function
510 based on repository walk function
511
511
512 :param repo_name: name of repository
512 :param repo_name: name of repository
513 :param commit_id: commit id for which to list nodes
513 :param commit_id: commit id for which to list nodes
514 :param root_path: root path to list
514 :param root_path: root path to list
515 :param flat: return as a list, if False returns a dict with description
515 :param flat: return as a list, if False returns a dict with description
516 :param max_file_bytes: will not return file contents over this limit
516 :param max_file_bytes: will not return file contents over this limit
517
517
518 """
518 """
519 _files = list()
519 _files = list()
520 _dirs = list()
520 _dirs = list()
521 try:
521 try:
522 _repo = self._get_repo(repo_name)
522 _repo = self._get_repo(repo_name)
523 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
523 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
524 root_path = root_path.lstrip('/')
524 root_path = root_path.lstrip('/')
525 for __, dirs, files in commit.walk(root_path):
525 for __, dirs, files in commit.walk(root_path):
526 for f in files:
526 for f in files:
527 _content = None
527 _content = None
528 _data = f.unicode_path
528 _data = f.unicode_path
529 over_size_limit = (max_file_bytes is not None
529 over_size_limit = (max_file_bytes is not None
530 and f.size > max_file_bytes)
530 and f.size > max_file_bytes)
531
531
532 if not flat:
532 if not flat:
533 _data = {
533 _data = {
534 "name": h.escape(f.unicode_path),
534 "name": h.escape(f.unicode_path),
535 "type": "file",
535 "type": "file",
536 }
536 }
537 if extended_info:
537 if extended_info:
538 _data.update({
538 _data.update({
539 "md5": f.md5,
539 "md5": f.md5,
540 "binary": f.is_binary,
540 "binary": f.is_binary,
541 "size": f.size,
541 "size": f.size,
542 "extension": f.extension,
542 "extension": f.extension,
543 "mimetype": f.mimetype,
543 "mimetype": f.mimetype,
544 "lines": f.lines()[0]
544 "lines": f.lines()[0]
545 })
545 })
546
546
547 if content:
547 if content:
548 full_content = None
548 full_content = None
549 if not f.is_binary and not over_size_limit:
549 if not f.is_binary and not over_size_limit:
550 full_content = safe_str(f.content)
550 full_content = safe_str(f.content)
551
551
552 _data.update({
552 _data.update({
553 "content": full_content,
553 "content": full_content,
554 })
554 })
555 _files.append(_data)
555 _files.append(_data)
556 for d in dirs:
556 for d in dirs:
557 _data = d.unicode_path
557 _data = d.unicode_path
558 if not flat:
558 if not flat:
559 _data = {
559 _data = {
560 "name": h.escape(d.unicode_path),
560 "name": h.escape(d.unicode_path),
561 "type": "dir",
561 "type": "dir",
562 }
562 }
563 if extended_info:
563 if extended_info:
564 _data.update({
564 _data.update({
565 "md5": None,
565 "md5": None,
566 "binary": None,
566 "binary": None,
567 "size": None,
567 "size": None,
568 "extension": None,
568 "extension": None,
569 })
569 })
570 if content:
570 if content:
571 _data.update({
571 _data.update({
572 "content": None
572 "content": None
573 })
573 })
574 _dirs.append(_data)
574 _dirs.append(_data)
575 except RepositoryError:
575 except RepositoryError:
576 log.debug("Exception in get_nodes", exc_info=True)
576 log.debug("Exception in get_nodes", exc_info=True)
577 raise
577 raise
578
578
579 return _dirs, _files
579 return _dirs, _files
580
580
581 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
581 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
582 author=None, trigger_push_hook=True):
582 author=None, trigger_push_hook=True):
583 """
583 """
584 Commits given multiple nodes into repo
584 Commits given multiple nodes into repo
585
585
586 :param user: RhodeCode User object or user_id, the commiter
586 :param user: RhodeCode User object or user_id, the commiter
587 :param repo: RhodeCode Repository object
587 :param repo: RhodeCode Repository object
588 :param message: commit message
588 :param message: commit message
589 :param nodes: mapping {filename:{'content':content},...}
589 :param nodes: mapping {filename:{'content':content},...}
590 :param parent_commit: parent commit, can be empty than it's
590 :param parent_commit: parent commit, can be empty than it's
591 initial commit
591 initial commit
592 :param author: author of commit, cna be different that commiter
592 :param author: author of commit, cna be different that commiter
593 only for git
593 only for git
594 :param trigger_push_hook: trigger push hooks
594 :param trigger_push_hook: trigger push hooks
595
595
596 :returns: new commited commit
596 :returns: new commited commit
597 """
597 """
598
598
599 user = self._get_user(user)
599 user = self._get_user(user)
600 scm_instance = repo.scm_instance(cache=False)
600 scm_instance = repo.scm_instance(cache=False)
601
601
602 processed_nodes = []
602 processed_nodes = []
603 for f_path in nodes:
603 for f_path in nodes:
604 f_path = self._sanitize_path(f_path)
604 f_path = self._sanitize_path(f_path)
605 content = nodes[f_path]['content']
605 content = nodes[f_path]['content']
606 f_path = safe_str(f_path)
606 f_path = safe_str(f_path)
607 # decoding here will force that we have proper encoded values
607 # decoding here will force that we have proper encoded values
608 # in any other case this will throw exceptions and deny commit
608 # in any other case this will throw exceptions and deny commit
609 if isinstance(content, (basestring,)):
609 if isinstance(content, (basestring,)):
610 content = safe_str(content)
610 content = safe_str(content)
611 elif isinstance(content, (file, cStringIO.OutputType,)):
611 elif isinstance(content, (file, cStringIO.OutputType,)):
612 content = content.read()
612 content = content.read()
613 else:
613 else:
614 raise Exception('Content is of unrecognized type %s' % (
614 raise Exception('Content is of unrecognized type %s' % (
615 type(content)
615 type(content)
616 ))
616 ))
617 processed_nodes.append((f_path, content))
617 processed_nodes.append((f_path, content))
618
618
619 message = safe_unicode(message)
619 message = safe_unicode(message)
620 commiter = user.full_contact
620 commiter = user.full_contact
621 author = safe_unicode(author) if author else commiter
621 author = safe_unicode(author) if author else commiter
622
622
623 imc = scm_instance.in_memory_commit
623 imc = scm_instance.in_memory_commit
624
624
625 if not parent_commit:
625 if not parent_commit:
626 parent_commit = EmptyCommit(alias=scm_instance.alias)
626 parent_commit = EmptyCommit(alias=scm_instance.alias)
627
627
628 if isinstance(parent_commit, EmptyCommit):
628 if isinstance(parent_commit, EmptyCommit):
629 # EmptyCommit means we we're editing empty repository
629 # EmptyCommit means we we're editing empty repository
630 parents = None
630 parents = None
631 else:
631 else:
632 parents = [parent_commit]
632 parents = [parent_commit]
633 # add multiple nodes
633 # add multiple nodes
634 for path, content in processed_nodes:
634 for path, content in processed_nodes:
635 imc.add(FileNode(path, content=content))
635 imc.add(FileNode(path, content=content))
636 # TODO: handle pre push scenario
636 # TODO: handle pre push scenario
637 tip = imc.commit(message=message,
637 tip = imc.commit(message=message,
638 author=author,
638 author=author,
639 parents=parents,
639 parents=parents,
640 branch=parent_commit.branch)
640 branch=parent_commit.branch)
641
641
642 self.mark_for_invalidation(repo.repo_name)
642 self.mark_for_invalidation(repo.repo_name)
643 if trigger_push_hook:
643 if trigger_push_hook:
644 hooks_utils.trigger_post_push_hook(
644 hooks_utils.trigger_post_push_hook(
645 username=user.username, action='push_local',
645 username=user.username, action='push_local',
646 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
646 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
647 hook_type='post_push',
647 commit_ids=[tip.raw_id])
648 commit_ids=[tip.raw_id])
648 return tip
649 return tip
649
650
650 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
651 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
651 author=None, trigger_push_hook=True):
652 author=None, trigger_push_hook=True):
652 user = self._get_user(user)
653 user = self._get_user(user)
653 scm_instance = repo.scm_instance(cache=False)
654 scm_instance = repo.scm_instance(cache=False)
654
655
655 message = safe_unicode(message)
656 message = safe_unicode(message)
656 commiter = user.full_contact
657 commiter = user.full_contact
657 author = safe_unicode(author) if author else commiter
658 author = safe_unicode(author) if author else commiter
658
659
659 imc = scm_instance.in_memory_commit
660 imc = scm_instance.in_memory_commit
660
661
661 if not parent_commit:
662 if not parent_commit:
662 parent_commit = EmptyCommit(alias=scm_instance.alias)
663 parent_commit = EmptyCommit(alias=scm_instance.alias)
663
664
664 if isinstance(parent_commit, EmptyCommit):
665 if isinstance(parent_commit, EmptyCommit):
665 # EmptyCommit means we we're editing empty repository
666 # EmptyCommit means we we're editing empty repository
666 parents = None
667 parents = None
667 else:
668 else:
668 parents = [parent_commit]
669 parents = [parent_commit]
669
670
670 # add multiple nodes
671 # add multiple nodes
671 for _filename, data in nodes.items():
672 for _filename, data in nodes.items():
672 # new filename, can be renamed from the old one, also sanitaze
673 # new filename, can be renamed from the old one, also sanitaze
673 # the path for any hack around relative paths like ../../ etc.
674 # the path for any hack around relative paths like ../../ etc.
674 filename = self._sanitize_path(data['filename'])
675 filename = self._sanitize_path(data['filename'])
675 old_filename = self._sanitize_path(_filename)
676 old_filename = self._sanitize_path(_filename)
676 content = data['content']
677 content = data['content']
677
678
678 filenode = FileNode(old_filename, content=content)
679 filenode = FileNode(old_filename, content=content)
679 op = data['op']
680 op = data['op']
680 if op == 'add':
681 if op == 'add':
681 imc.add(filenode)
682 imc.add(filenode)
682 elif op == 'del':
683 elif op == 'del':
683 imc.remove(filenode)
684 imc.remove(filenode)
684 elif op == 'mod':
685 elif op == 'mod':
685 if filename != old_filename:
686 if filename != old_filename:
686 # TODO: handle renames more efficient, needs vcs lib
687 # TODO: handle renames more efficient, needs vcs lib
687 # changes
688 # changes
688 imc.remove(filenode)
689 imc.remove(filenode)
689 imc.add(FileNode(filename, content=content))
690 imc.add(FileNode(filename, content=content))
690 else:
691 else:
691 imc.change(filenode)
692 imc.change(filenode)
692
693
693 try:
694 try:
694 # TODO: handle pre push scenario
695 # TODO: handle pre push scenario
695 # commit changes
696 # commit changes
696 tip = imc.commit(message=message,
697 tip = imc.commit(message=message,
697 author=author,
698 author=author,
698 parents=parents,
699 parents=parents,
699 branch=parent_commit.branch)
700 branch=parent_commit.branch)
700 except NodeNotChangedError:
701 except NodeNotChangedError:
701 raise
702 raise
702 except Exception as e:
703 except Exception as e:
703 log.exception("Unexpected exception during call to imc.commit")
704 log.exception("Unexpected exception during call to imc.commit")
704 raise IMCCommitError(str(e))
705 raise IMCCommitError(str(e))
705 finally:
706 finally:
706 # always clear caches, if commit fails we want fresh object also
707 # always clear caches, if commit fails we want fresh object also
707 self.mark_for_invalidation(repo.repo_name)
708 self.mark_for_invalidation(repo.repo_name)
708
709
709 if trigger_push_hook:
710 if trigger_push_hook:
710 hooks_utils.trigger_post_push_hook(
711 hooks_utils.trigger_post_push_hook(
711 username=user.username, action='push_local',
712 username=user.username, action='push_local', hook_type='post_push',
712 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
713 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
713 commit_ids=[tip.raw_id])
714 commit_ids=[tip.raw_id])
714
715
715 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
716 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
716 author=None, trigger_push_hook=True):
717 author=None, trigger_push_hook=True):
717 """
718 """
718 Deletes given multiple nodes into `repo`
719 Deletes given multiple nodes into `repo`
719
720
720 :param user: RhodeCode User object or user_id, the committer
721 :param user: RhodeCode User object or user_id, the committer
721 :param repo: RhodeCode Repository object
722 :param repo: RhodeCode Repository object
722 :param message: commit message
723 :param message: commit message
723 :param nodes: mapping {filename:{'content':content},...}
724 :param nodes: mapping {filename:{'content':content},...}
724 :param parent_commit: parent commit, can be empty than it's initial
725 :param parent_commit: parent commit, can be empty than it's initial
725 commit
726 commit
726 :param author: author of commit, cna be different that commiter only
727 :param author: author of commit, cna be different that commiter only
727 for git
728 for git
728 :param trigger_push_hook: trigger push hooks
729 :param trigger_push_hook: trigger push hooks
729
730
730 :returns: new commit after deletion
731 :returns: new commit after deletion
731 """
732 """
732
733
733 user = self._get_user(user)
734 user = self._get_user(user)
734 scm_instance = repo.scm_instance(cache=False)
735 scm_instance = repo.scm_instance(cache=False)
735
736
736 processed_nodes = []
737 processed_nodes = []
737 for f_path in nodes:
738 for f_path in nodes:
738 f_path = self._sanitize_path(f_path)
739 f_path = self._sanitize_path(f_path)
739 # content can be empty but for compatabilty it allows same dicts
740 # content can be empty but for compatabilty it allows same dicts
740 # structure as add_nodes
741 # structure as add_nodes
741 content = nodes[f_path].get('content')
742 content = nodes[f_path].get('content')
742 processed_nodes.append((f_path, content))
743 processed_nodes.append((f_path, content))
743
744
744 message = safe_unicode(message)
745 message = safe_unicode(message)
745 commiter = user.full_contact
746 commiter = user.full_contact
746 author = safe_unicode(author) if author else commiter
747 author = safe_unicode(author) if author else commiter
747
748
748 imc = scm_instance.in_memory_commit
749 imc = scm_instance.in_memory_commit
749
750
750 if not parent_commit:
751 if not parent_commit:
751 parent_commit = EmptyCommit(alias=scm_instance.alias)
752 parent_commit = EmptyCommit(alias=scm_instance.alias)
752
753
753 if isinstance(parent_commit, EmptyCommit):
754 if isinstance(parent_commit, EmptyCommit):
754 # EmptyCommit means we we're editing empty repository
755 # EmptyCommit means we we're editing empty repository
755 parents = None
756 parents = None
756 else:
757 else:
757 parents = [parent_commit]
758 parents = [parent_commit]
758 # add multiple nodes
759 # add multiple nodes
759 for path, content in processed_nodes:
760 for path, content in processed_nodes:
760 imc.remove(FileNode(path, content=content))
761 imc.remove(FileNode(path, content=content))
761
762
762 # TODO: handle pre push scenario
763 # TODO: handle pre push scenario
763 tip = imc.commit(message=message,
764 tip = imc.commit(message=message,
764 author=author,
765 author=author,
765 parents=parents,
766 parents=parents,
766 branch=parent_commit.branch)
767 branch=parent_commit.branch)
767
768
768 self.mark_for_invalidation(repo.repo_name)
769 self.mark_for_invalidation(repo.repo_name)
769 if trigger_push_hook:
770 if trigger_push_hook:
770 hooks_utils.trigger_post_push_hook(
771 hooks_utils.trigger_post_push_hook(
771 username=user.username, action='push_local',
772 username=user.username, action='push_local', hook_type='post_push',
772 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
773 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
773 commit_ids=[tip.raw_id])
774 commit_ids=[tip.raw_id])
774 return tip
775 return tip
775
776
776 def strip(self, repo, commit_id, branch):
777 def strip(self, repo, commit_id, branch):
777 scm_instance = repo.scm_instance(cache=False)
778 scm_instance = repo.scm_instance(cache=False)
778 scm_instance.config.clear_section('hooks')
779 scm_instance.config.clear_section('hooks')
779 scm_instance.strip(commit_id, branch)
780 scm_instance.strip(commit_id, branch)
780 self.mark_for_invalidation(repo.repo_name)
781 self.mark_for_invalidation(repo.repo_name)
781
782
782 def get_unread_journal(self):
783 def get_unread_journal(self):
783 return self.sa.query(UserLog).count()
784 return self.sa.query(UserLog).count()
784
785
785 def get_repo_landing_revs(self, translator, repo=None):
786 def get_repo_landing_revs(self, translator, repo=None):
786 """
787 """
787 Generates select option with tags branches and bookmarks (for hg only)
788 Generates select option with tags branches and bookmarks (for hg only)
788 grouped by type
789 grouped by type
789
790
790 :param repo:
791 :param repo:
791 """
792 """
792 _ = translator
793 _ = translator
793 repo = self._get_repo(repo)
794 repo = self._get_repo(repo)
794
795
795 hist_l = [
796 hist_l = [
796 ['rev:tip', _('latest tip')]
797 ['rev:tip', _('latest tip')]
797 ]
798 ]
798 choices = [
799 choices = [
799 'rev:tip'
800 'rev:tip'
800 ]
801 ]
801
802
802 if not repo:
803 if not repo:
803 return choices, hist_l
804 return choices, hist_l
804
805
805 repo = repo.scm_instance()
806 repo = repo.scm_instance()
806
807
807 branches_group = (
808 branches_group = (
808 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
809 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
809 for b in repo.branches],
810 for b in repo.branches],
810 _("Branches"))
811 _("Branches"))
811 hist_l.append(branches_group)
812 hist_l.append(branches_group)
812 choices.extend([x[0] for x in branches_group[0]])
813 choices.extend([x[0] for x in branches_group[0]])
813
814
814 if repo.alias == 'hg':
815 if repo.alias == 'hg':
815 bookmarks_group = (
816 bookmarks_group = (
816 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
817 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
817 for b in repo.bookmarks],
818 for b in repo.bookmarks],
818 _("Bookmarks"))
819 _("Bookmarks"))
819 hist_l.append(bookmarks_group)
820 hist_l.append(bookmarks_group)
820 choices.extend([x[0] for x in bookmarks_group[0]])
821 choices.extend([x[0] for x in bookmarks_group[0]])
821
822
822 tags_group = (
823 tags_group = (
823 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
824 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
824 for t in repo.tags],
825 for t in repo.tags],
825 _("Tags"))
826 _("Tags"))
826 hist_l.append(tags_group)
827 hist_l.append(tags_group)
827 choices.extend([x[0] for x in tags_group[0]])
828 choices.extend([x[0] for x in tags_group[0]])
828
829
829 return choices, hist_l
830 return choices, hist_l
830
831
831 def get_server_info(self, environ=None):
832 def get_server_info(self, environ=None):
832 server_info = get_system_info(environ)
833 server_info = get_system_info(environ)
833 return server_info
834 return server_info
@@ -1,122 +1,123 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from rhodecode.tests.events.conftest import EventCatcher
23 from rhodecode.tests.events.conftest import EventCatcher
24
24
25 from rhodecode.lib import hooks_base, utils2
25 from rhodecode.lib import hooks_base, utils2
26 from rhodecode.model.repo import RepoModel
26 from rhodecode.model.repo import RepoModel
27 from rhodecode.events.repo import (
27 from rhodecode.events.repo import (
28 RepoPrePullEvent, RepoPullEvent,
28 RepoPrePullEvent, RepoPullEvent,
29 RepoPrePushEvent, RepoPushEvent,
29 RepoPrePushEvent, RepoPushEvent,
30 RepoPreCreateEvent, RepoCreateEvent,
30 RepoPreCreateEvent, RepoCreateEvent,
31 RepoPreDeleteEvent, RepoDeleteEvent,
31 RepoPreDeleteEvent, RepoDeleteEvent,
32 )
32 )
33
33
34
34
35 @pytest.fixture
35 @pytest.fixture
36 def scm_extras(user_regular, repo_stub):
36 def scm_extras(user_regular, repo_stub):
37 extras = utils2.AttributeDict({
37 extras = utils2.AttributeDict({
38 'ip': '127.0.0.1',
38 'ip': '127.0.0.1',
39 'username': user_regular.username,
39 'username': user_regular.username,
40 'user_id': user_regular.user_id,
40 'user_id': user_regular.user_id,
41 'action': '',
41 'action': '',
42 'repository': repo_stub.repo_name,
42 'repository': repo_stub.repo_name,
43 'scm': repo_stub.scm_instance().alias,
43 'scm': repo_stub.scm_instance().alias,
44 'config': '',
44 'config': '',
45 'repo_store': '',
45 'repo_store': '',
46 'server_url': 'http://example.com',
46 'server_url': 'http://example.com',
47 'make_lock': None,
47 'make_lock': None,
48 'user-agent': 'some-client',
48 'user_agent': 'some-client',
49 'locked_by': [None],
49 'locked_by': [None],
50 'commit_ids': ['a' * 40] * 3,
50 'commit_ids': ['a' * 40] * 3,
51 'hook_type': 'scm_extras_test',
51 'is_shadow_repo': False,
52 'is_shadow_repo': False,
52 })
53 })
53 return extras
54 return extras
54
55
55
56
56 # TODO: dan: make the serialization tests complete json comparisons
57 # TODO: dan: make the serialization tests complete json comparisons
57 @pytest.mark.parametrize('EventClass', [
58 @pytest.mark.parametrize('EventClass', [
58 RepoPreCreateEvent, RepoCreateEvent,
59 RepoPreCreateEvent, RepoCreateEvent,
59 RepoPreDeleteEvent, RepoDeleteEvent,
60 RepoPreDeleteEvent, RepoDeleteEvent,
60 ])
61 ])
61 def test_repo_events_serialized(config_stub, repo_stub, EventClass):
62 def test_repo_events_serialized(config_stub, repo_stub, EventClass):
62 event = EventClass(repo_stub)
63 event = EventClass(repo_stub)
63 data = event.as_dict()
64 data = event.as_dict()
64 assert data['name'] == EventClass.name
65 assert data['name'] == EventClass.name
65 assert data['repo']['repo_name'] == repo_stub.repo_name
66 assert data['repo']['repo_name'] == repo_stub.repo_name
66 assert data['repo']['url']
67 assert data['repo']['url']
67 assert data['repo']['permalink_url']
68 assert data['repo']['permalink_url']
68
69
69
70
70 @pytest.mark.parametrize('EventClass', [
71 @pytest.mark.parametrize('EventClass', [
71 RepoPrePullEvent, RepoPullEvent, RepoPrePushEvent
72 RepoPrePullEvent, RepoPullEvent, RepoPrePushEvent
72 ])
73 ])
73 def test_vcs_repo_events_serialize(config_stub, repo_stub, scm_extras, EventClass):
74 def test_vcs_repo_events_serialize(config_stub, repo_stub, scm_extras, EventClass):
74 event = EventClass(repo_name=repo_stub.repo_name, extras=scm_extras)
75 event = EventClass(repo_name=repo_stub.repo_name, extras=scm_extras)
75 data = event.as_dict()
76 data = event.as_dict()
76 assert data['name'] == EventClass.name
77 assert data['name'] == EventClass.name
77 assert data['repo']['repo_name'] == repo_stub.repo_name
78 assert data['repo']['repo_name'] == repo_stub.repo_name
78 assert data['repo']['url']
79 assert data['repo']['url']
79 assert data['repo']['permalink_url']
80 assert data['repo']['permalink_url']
80
81
81
82
82 @pytest.mark.parametrize('EventClass', [RepoPushEvent])
83 @pytest.mark.parametrize('EventClass', [RepoPushEvent])
83 def test_vcs_repo_push_event_serialize(config_stub, repo_stub, scm_extras, EventClass):
84 def test_vcs_repo_push_event_serialize(config_stub, repo_stub, scm_extras, EventClass):
84 event = EventClass(repo_name=repo_stub.repo_name,
85 event = EventClass(repo_name=repo_stub.repo_name,
85 pushed_commit_ids=scm_extras['commit_ids'],
86 pushed_commit_ids=scm_extras['commit_ids'],
86 extras=scm_extras)
87 extras=scm_extras)
87 data = event.as_dict()
88 data = event.as_dict()
88 assert data['name'] == EventClass.name
89 assert data['name'] == EventClass.name
89 assert data['repo']['repo_name'] == repo_stub.repo_name
90 assert data['repo']['repo_name'] == repo_stub.repo_name
90 assert data['repo']['url']
91 assert data['repo']['url']
91 assert data['repo']['permalink_url']
92 assert data['repo']['permalink_url']
92
93
93
94
94 def test_create_delete_repo_fires_events(backend):
95 def test_create_delete_repo_fires_events(backend):
95 with EventCatcher() as event_catcher:
96 with EventCatcher() as event_catcher:
96 repo = backend.create_repo()
97 repo = backend.create_repo()
97 assert event_catcher.events_types == [RepoPreCreateEvent, RepoCreateEvent]
98 assert event_catcher.events_types == [RepoPreCreateEvent, RepoCreateEvent]
98
99
99 with EventCatcher() as event_catcher:
100 with EventCatcher() as event_catcher:
100 RepoModel().delete(repo)
101 RepoModel().delete(repo)
101 assert event_catcher.events_types == [RepoPreDeleteEvent, RepoDeleteEvent]
102 assert event_catcher.events_types == [RepoPreDeleteEvent, RepoDeleteEvent]
102
103
103
104
104 def test_pull_fires_events(scm_extras):
105 def test_pull_fires_events(scm_extras):
105 with EventCatcher() as event_catcher:
106 with EventCatcher() as event_catcher:
106 hooks_base.pre_push(scm_extras)
107 hooks_base.pre_push(scm_extras)
107 assert event_catcher.events_types == [RepoPrePushEvent]
108 assert event_catcher.events_types == [RepoPrePushEvent]
108
109
109 with EventCatcher() as event_catcher:
110 with EventCatcher() as event_catcher:
110 hooks_base.post_push(scm_extras)
111 hooks_base.post_push(scm_extras)
111 assert event_catcher.events_types == [RepoPushEvent]
112 assert event_catcher.events_types == [RepoPushEvent]
112
113
113
114
114 def test_push_fires_events(scm_extras):
115 def test_push_fires_events(scm_extras):
115 with EventCatcher() as event_catcher:
116 with EventCatcher() as event_catcher:
116 hooks_base.pre_pull(scm_extras)
117 hooks_base.pre_pull(scm_extras)
117 assert event_catcher.events_types == [RepoPrePullEvent]
118 assert event_catcher.events_types == [RepoPrePullEvent]
118
119
119 with EventCatcher() as event_catcher:
120 with EventCatcher() as event_catcher:
120 hooks_base.post_pull(scm_extras)
121 hooks_base.post_pull(scm_extras)
121 assert event_catcher.events_types == [RepoPullEvent]
122 assert event_catcher.events_types == [RepoPullEvent]
122
123
@@ -1,144 +1,151 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23 from rhodecode.model.db import Session, UserLog
23 from rhodecode.model.db import Session, UserLog
24 from rhodecode.lib import hooks_base, utils2
24 from rhodecode.lib import hooks_base, utils2
25
25
26
26
27 def test_post_push_truncates_commits(user_regular, repo_stub):
27 def test_post_push_truncates_commits(user_regular, repo_stub):
28 extras = {
28 extras = {
29 'ip': '127.0.0.1',
29 'ip': '127.0.0.1',
30 'username': user_regular.username,
30 'username': user_regular.username,
31 'user_id': user_regular.user_id,
31 'user_id': user_regular.user_id,
32 'action': 'push_local',
32 'action': 'push_local',
33 'repository': repo_stub.repo_name,
33 'repository': repo_stub.repo_name,
34 'scm': 'git',
34 'scm': 'git',
35 'config': '',
35 'config': '',
36 'server_url': 'http://example.com',
36 'server_url': 'http://example.com',
37 'make_lock': None,
37 'make_lock': None,
38 'user_agent': 'some-client',
38 'user_agent': 'some-client',
39 'locked_by': [None],
39 'locked_by': [None],
40 'commit_ids': ['abcde12345' * 4] * 30000,
40 'commit_ids': ['abcde12345' * 4] * 30000,
41 'hook_type': 'large_push_test_type',
41 'is_shadow_repo': False,
42 'is_shadow_repo': False,
42 }
43 }
43 extras = utils2.AttributeDict(extras)
44 extras = utils2.AttributeDict(extras)
44
45
45 hooks_base.post_push(extras)
46 hooks_base.post_push(extras)
46
47
47 # Calculate appropriate action string here
48 # Calculate appropriate action string here
48 commit_ids = extras.commit_ids[:400]
49 commit_ids = extras.commit_ids[:400]
49
50
50 entry = UserLog.query().order_by('-user_log_id').first()
51 entry = UserLog.query().order_by('-user_log_id').first()
51 assert entry.action == 'user.push'
52 assert entry.action == 'user.push'
52 assert entry.action_data['commit_ids'] == commit_ids
53 assert entry.action_data['commit_ids'] == commit_ids
53 Session().delete(entry)
54 Session().delete(entry)
54 Session().commit()
55 Session().commit()
55
56
56
57
57 def assert_called_with_mock(callable_, expected_mock_name):
58 def assert_called_with_mock(callable_, expected_mock_name):
58 mock_obj = callable_.call_args[0][0]
59 mock_obj = callable_.call_args[0][0]
59 mock_name = mock_obj._mock_new_parent._mock_new_name
60 mock_name = mock_obj._mock_new_parent._mock_new_name
60 assert mock_name == expected_mock_name
61 assert mock_name == expected_mock_name
61
62
62
63
63 @pytest.fixture
64 @pytest.fixture
64 def hook_extras(user_regular, repo_stub):
65 def hook_extras(user_regular, repo_stub):
65 extras = utils2.AttributeDict({
66 extras = utils2.AttributeDict({
66 'ip': '127.0.0.1',
67 'ip': '127.0.0.1',
67 'username': user_regular.username,
68 'username': user_regular.username,
68 'user_id': user_regular.user_id,
69 'user_id': user_regular.user_id,
69 'action': 'push',
70 'action': 'push',
70 'repository': repo_stub.repo_name,
71 'repository': repo_stub.repo_name,
71 'scm': '',
72 'scm': '',
72 'config': '',
73 'config': '',
73 'repo_store': '',
74 'repo_store': '',
74 'server_url': 'http://example.com',
75 'server_url': 'http://example.com',
75 'make_lock': None,
76 'make_lock': None,
76 'user_agent': 'some-client',
77 'user_agent': 'some-client',
77 'locked_by': [None],
78 'locked_by': [None],
78 'commit_ids': [],
79 'commit_ids': [],
80 'hook_type': 'test_type',
79 'is_shadow_repo': False,
81 'is_shadow_repo': False,
80 })
82 })
81 return extras
83 return extras
82
84
83
85
84 @pytest.mark.parametrize('func, extension, event', [
86 @pytest.mark.parametrize('func, extension, event', [
85 (hooks_base.pre_push, 'pre_push_extension', 'RepoPrePushEvent'),
87 (hooks_base.pre_push, 'pre_push_extension', 'RepoPrePushEvent'),
86 (hooks_base.post_push, 'post_pull_extension', 'RepoPushEvent'),
88 (hooks_base.post_push, 'post_pull_extension', 'RepoPushEvent'),
87 (hooks_base.pre_pull, 'pre_pull_extension', 'RepoPrePullEvent'),
89 (hooks_base.pre_pull, 'pre_pull_extension', 'RepoPrePullEvent'),
88 (hooks_base.post_pull, 'post_push_extension', 'RepoPullEvent'),
90 (hooks_base.post_pull, 'post_push_extension', 'RepoPullEvent'),
89 ])
91 ])
90 def test_hooks_propagate(func, extension, event, hook_extras):
92 def test_hooks_propagate(func, extension, event, hook_extras):
91 """
93 """
92 Tests that our hook code propagates to rhodecode extensions and triggers
94 Tests that our hook code propagates to rhodecode extensions and triggers
93 the appropriate event.
95 the appropriate event.
94 """
96 """
95 extension_mock = mock.Mock()
97 class ExtensionMock(mock.Mock):
98 @property
99 def output(self):
100 return 'MOCK'
101
102 extension_mock = ExtensionMock()
96 events_mock = mock.Mock()
103 events_mock = mock.Mock()
97 patches = {
104 patches = {
98 'Repository': mock.Mock(),
105 'Repository': mock.Mock(),
99 'events': events_mock,
106 'events': events_mock,
100 extension: extension_mock,
107 extension: extension_mock,
101 }
108 }
102
109
103 # Clear shadow repo flag.
110 # Clear shadow repo flag.
104 hook_extras.is_shadow_repo = False
111 hook_extras.is_shadow_repo = False
105
112
106 # Execute hook function.
113 # Execute hook function.
107 with mock.patch.multiple(hooks_base, **patches):
114 with mock.patch.multiple(hooks_base, **patches):
108 func(hook_extras)
115 func(hook_extras)
109
116
110 # Assert that extensions are called and event was fired.
117 # Assert that extensions are called and event was fired.
111 extension_mock.called_once()
118 extension_mock.called_once()
112 assert_called_with_mock(events_mock.trigger, event)
119 assert_called_with_mock(events_mock.trigger, event)
113
120
114
121
115 @pytest.mark.parametrize('func, extension, event', [
122 @pytest.mark.parametrize('func, extension, event', [
116 (hooks_base.pre_push, 'pre_push_extension', 'RepoPrePushEvent'),
123 (hooks_base.pre_push, 'pre_push_extension', 'RepoPrePushEvent'),
117 (hooks_base.post_push, 'post_pull_extension', 'RepoPushEvent'),
124 (hooks_base.post_push, 'post_pull_extension', 'RepoPushEvent'),
118 (hooks_base.pre_pull, 'pre_pull_extension', 'RepoPrePullEvent'),
125 (hooks_base.pre_pull, 'pre_pull_extension', 'RepoPrePullEvent'),
119 (hooks_base.post_pull, 'post_push_extension', 'RepoPullEvent'),
126 (hooks_base.post_pull, 'post_push_extension', 'RepoPullEvent'),
120 ])
127 ])
121 def test_hooks_propagates_not_on_shadow(func, extension, event, hook_extras):
128 def test_hooks_propagates_not_on_shadow(func, extension, event, hook_extras):
122 """
129 """
123 If hooks are called by a request to a shadow repo we only want to run our
130 If hooks are called by a request to a shadow repo we only want to run our
124 internal hooks code but not external ones like rhodecode extensions or
131 internal hooks code but not external ones like rhodecode extensions or
125 trigger an event.
132 trigger an event.
126 """
133 """
127 extension_mock = mock.Mock()
134 extension_mock = mock.Mock()
128 events_mock = mock.Mock()
135 events_mock = mock.Mock()
129 patches = {
136 patches = {
130 'Repository': mock.Mock(),
137 'Repository': mock.Mock(),
131 'events': events_mock,
138 'events': events_mock,
132 extension: extension_mock,
139 extension: extension_mock,
133 }
140 }
134
141
135 # Set shadow repo flag.
142 # Set shadow repo flag.
136 hook_extras.is_shadow_repo = True
143 hook_extras.is_shadow_repo = True
137
144
138 # Execute hook function.
145 # Execute hook function.
139 with mock.patch.multiple(hooks_base, **patches):
146 with mock.patch.multiple(hooks_base, **patches):
140 func(hook_extras)
147 func(hook_extras)
141
148
142 # Assert that extensions are *not* called and event was *not* fired.
149 # Assert that extensions are *not* called and event was *not* fired.
143 assert not extension_mock.called
150 assert not extension_mock.called
144 assert not events_mock.trigger.called
151 assert not events_mock.trigger.called
@@ -1,869 +1,870 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23 import textwrap
23 import textwrap
24
24
25 import rhodecode
25 import rhodecode
26 from rhodecode.lib.utils2 import safe_unicode
26 from rhodecode.lib.utils2 import safe_unicode
27 from rhodecode.lib.vcs.backends import get_backend
27 from rhodecode.lib.vcs.backends import get_backend
28 from rhodecode.lib.vcs.backends.base import (
28 from rhodecode.lib.vcs.backends.base import (
29 MergeResponse, MergeFailureReason, Reference)
29 MergeResponse, MergeFailureReason, Reference)
30 from rhodecode.lib.vcs.exceptions import RepositoryError
30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 from rhodecode.lib.vcs.nodes import FileNode
31 from rhodecode.lib.vcs.nodes import FileNode
32 from rhodecode.model.comment import CommentsModel
32 from rhodecode.model.comment import CommentsModel
33 from rhodecode.model.db import PullRequest, Session
33 from rhodecode.model.db import PullRequest, Session
34 from rhodecode.model.pull_request import PullRequestModel
34 from rhodecode.model.pull_request import PullRequestModel
35 from rhodecode.model.user import UserModel
35 from rhodecode.model.user import UserModel
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37
37
38
38
39 pytestmark = [
39 pytestmark = [
40 pytest.mark.backends("git", "hg"),
40 pytest.mark.backends("git", "hg"),
41 ]
41 ]
42
42
43
43
44 @pytest.mark.usefixtures('config_stub')
44 @pytest.mark.usefixtures('config_stub')
45 class TestPullRequestModel(object):
45 class TestPullRequestModel(object):
46
46
47 @pytest.fixture
47 @pytest.fixture
48 def pull_request(self, request, backend, pr_util):
48 def pull_request(self, request, backend, pr_util):
49 """
49 """
50 A pull request combined with multiples patches.
50 A pull request combined with multiples patches.
51 """
51 """
52 BackendClass = get_backend(backend.alias)
52 BackendClass = get_backend(backend.alias)
53 self.merge_patcher = mock.patch.object(
53 self.merge_patcher = mock.patch.object(
54 BackendClass, 'merge', return_value=MergeResponse(
54 BackendClass, 'merge', return_value=MergeResponse(
55 False, False, None, MergeFailureReason.UNKNOWN))
55 False, False, None, MergeFailureReason.UNKNOWN))
56 self.workspace_remove_patcher = mock.patch.object(
56 self.workspace_remove_patcher = mock.patch.object(
57 BackendClass, 'cleanup_merge_workspace')
57 BackendClass, 'cleanup_merge_workspace')
58
58
59 self.workspace_remove_mock = self.workspace_remove_patcher.start()
59 self.workspace_remove_mock = self.workspace_remove_patcher.start()
60 self.merge_mock = self.merge_patcher.start()
60 self.merge_mock = self.merge_patcher.start()
61 self.comment_patcher = mock.patch(
61 self.comment_patcher = mock.patch(
62 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
62 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
63 self.comment_patcher.start()
63 self.comment_patcher.start()
64 self.notification_patcher = mock.patch(
64 self.notification_patcher = mock.patch(
65 'rhodecode.model.notification.NotificationModel.create')
65 'rhodecode.model.notification.NotificationModel.create')
66 self.notification_patcher.start()
66 self.notification_patcher.start()
67 self.helper_patcher = mock.patch(
67 self.helper_patcher = mock.patch(
68 'rhodecode.lib.helpers.route_path')
68 'rhodecode.lib.helpers.route_path')
69 self.helper_patcher.start()
69 self.helper_patcher.start()
70
70
71 self.hook_patcher = mock.patch.object(PullRequestModel,
71 self.hook_patcher = mock.patch.object(PullRequestModel,
72 '_trigger_pull_request_hook')
72 '_trigger_pull_request_hook')
73 self.hook_mock = self.hook_patcher.start()
73 self.hook_mock = self.hook_patcher.start()
74
74
75 self.invalidation_patcher = mock.patch(
75 self.invalidation_patcher = mock.patch(
76 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
76 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
77 self.invalidation_mock = self.invalidation_patcher.start()
77 self.invalidation_mock = self.invalidation_patcher.start()
78
78
79 self.pull_request = pr_util.create_pull_request(
79 self.pull_request = pr_util.create_pull_request(
80 mergeable=True, name_suffix=u'ąć')
80 mergeable=True, name_suffix=u'ąć')
81 self.source_commit = self.pull_request.source_ref_parts.commit_id
81 self.source_commit = self.pull_request.source_ref_parts.commit_id
82 self.target_commit = self.pull_request.target_ref_parts.commit_id
82 self.target_commit = self.pull_request.target_ref_parts.commit_id
83 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
83 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
84 self.repo_id = self.pull_request.target_repo.repo_id
84 self.repo_id = self.pull_request.target_repo.repo_id
85
85
86 @request.addfinalizer
86 @request.addfinalizer
87 def cleanup_pull_request():
87 def cleanup_pull_request():
88 calls = [mock.call(
88 calls = [mock.call(
89 self.pull_request, self.pull_request.author, 'create')]
89 self.pull_request, self.pull_request.author, 'create')]
90 self.hook_mock.assert_has_calls(calls)
90 self.hook_mock.assert_has_calls(calls)
91
91
92 self.workspace_remove_patcher.stop()
92 self.workspace_remove_patcher.stop()
93 self.merge_patcher.stop()
93 self.merge_patcher.stop()
94 self.comment_patcher.stop()
94 self.comment_patcher.stop()
95 self.notification_patcher.stop()
95 self.notification_patcher.stop()
96 self.helper_patcher.stop()
96 self.helper_patcher.stop()
97 self.hook_patcher.stop()
97 self.hook_patcher.stop()
98 self.invalidation_patcher.stop()
98 self.invalidation_patcher.stop()
99
99
100 return self.pull_request
100 return self.pull_request
101
101
102 def test_get_all(self, pull_request):
102 def test_get_all(self, pull_request):
103 prs = PullRequestModel().get_all(pull_request.target_repo)
103 prs = PullRequestModel().get_all(pull_request.target_repo)
104 assert isinstance(prs, list)
104 assert isinstance(prs, list)
105 assert len(prs) == 1
105 assert len(prs) == 1
106
106
107 def test_count_all(self, pull_request):
107 def test_count_all(self, pull_request):
108 pr_count = PullRequestModel().count_all(pull_request.target_repo)
108 pr_count = PullRequestModel().count_all(pull_request.target_repo)
109 assert pr_count == 1
109 assert pr_count == 1
110
110
111 def test_get_awaiting_review(self, pull_request):
111 def test_get_awaiting_review(self, pull_request):
112 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
112 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
113 assert isinstance(prs, list)
113 assert isinstance(prs, list)
114 assert len(prs) == 1
114 assert len(prs) == 1
115
115
116 def test_count_awaiting_review(self, pull_request):
116 def test_count_awaiting_review(self, pull_request):
117 pr_count = PullRequestModel().count_awaiting_review(
117 pr_count = PullRequestModel().count_awaiting_review(
118 pull_request.target_repo)
118 pull_request.target_repo)
119 assert pr_count == 1
119 assert pr_count == 1
120
120
121 def test_get_awaiting_my_review(self, pull_request):
121 def test_get_awaiting_my_review(self, pull_request):
122 PullRequestModel().update_reviewers(
122 PullRequestModel().update_reviewers(
123 pull_request, [(pull_request.author, ['author'], False, [])],
123 pull_request, [(pull_request.author, ['author'], False, [])],
124 pull_request.author)
124 pull_request.author)
125 prs = PullRequestModel().get_awaiting_my_review(
125 prs = PullRequestModel().get_awaiting_my_review(
126 pull_request.target_repo, user_id=pull_request.author.user_id)
126 pull_request.target_repo, user_id=pull_request.author.user_id)
127 assert isinstance(prs, list)
127 assert isinstance(prs, list)
128 assert len(prs) == 1
128 assert len(prs) == 1
129
129
130 def test_count_awaiting_my_review(self, pull_request):
130 def test_count_awaiting_my_review(self, pull_request):
131 PullRequestModel().update_reviewers(
131 PullRequestModel().update_reviewers(
132 pull_request, [(pull_request.author, ['author'], False, [])],
132 pull_request, [(pull_request.author, ['author'], False, [])],
133 pull_request.author)
133 pull_request.author)
134 pr_count = PullRequestModel().count_awaiting_my_review(
134 pr_count = PullRequestModel().count_awaiting_my_review(
135 pull_request.target_repo, user_id=pull_request.author.user_id)
135 pull_request.target_repo, user_id=pull_request.author.user_id)
136 assert pr_count == 1
136 assert pr_count == 1
137
137
138 def test_delete_calls_cleanup_merge(self, pull_request):
138 def test_delete_calls_cleanup_merge(self, pull_request):
139 repo_id = pull_request.target_repo.repo_id
139 repo_id = pull_request.target_repo.repo_id
140 PullRequestModel().delete(pull_request, pull_request.author)
140 PullRequestModel().delete(pull_request, pull_request.author)
141
141
142 self.workspace_remove_mock.assert_called_once_with(
142 self.workspace_remove_mock.assert_called_once_with(
143 repo_id, self.workspace_id)
143 repo_id, self.workspace_id)
144
144
145 def test_close_calls_cleanup_and_hook(self, pull_request):
145 def test_close_calls_cleanup_and_hook(self, pull_request):
146 PullRequestModel().close_pull_request(
146 PullRequestModel().close_pull_request(
147 pull_request, pull_request.author)
147 pull_request, pull_request.author)
148 repo_id = pull_request.target_repo.repo_id
148 repo_id = pull_request.target_repo.repo_id
149
149
150 self.workspace_remove_mock.assert_called_once_with(
150 self.workspace_remove_mock.assert_called_once_with(
151 repo_id, self.workspace_id)
151 repo_id, self.workspace_id)
152 self.hook_mock.assert_called_with(
152 self.hook_mock.assert_called_with(
153 self.pull_request, self.pull_request.author, 'close')
153 self.pull_request, self.pull_request.author, 'close')
154
154
155 def test_merge_status(self, pull_request):
155 def test_merge_status(self, pull_request):
156 self.merge_mock.return_value = MergeResponse(
156 self.merge_mock.return_value = MergeResponse(
157 True, False, None, MergeFailureReason.NONE)
157 True, False, None, MergeFailureReason.NONE)
158
158
159 assert pull_request._last_merge_source_rev is None
159 assert pull_request._last_merge_source_rev is None
160 assert pull_request._last_merge_target_rev is None
160 assert pull_request._last_merge_target_rev is None
161 assert pull_request.last_merge_status is None
161 assert pull_request.last_merge_status is None
162
162
163 status, msg = PullRequestModel().merge_status(pull_request)
163 status, msg = PullRequestModel().merge_status(pull_request)
164 assert status is True
164 assert status is True
165 assert msg.eval() == 'This pull request can be automatically merged.'
165 assert msg.eval() == 'This pull request can be automatically merged.'
166 self.merge_mock.assert_called_with(
166 self.merge_mock.assert_called_with(
167 self.repo_id, self.workspace_id,
167 self.repo_id, self.workspace_id,
168 pull_request.target_ref_parts,
168 pull_request.target_ref_parts,
169 pull_request.source_repo.scm_instance(),
169 pull_request.source_repo.scm_instance(),
170 pull_request.source_ref_parts, dry_run=True,
170 pull_request.source_ref_parts, dry_run=True,
171 use_rebase=False, close_branch=False)
171 use_rebase=False, close_branch=False)
172
172
173 assert pull_request._last_merge_source_rev == self.source_commit
173 assert pull_request._last_merge_source_rev == self.source_commit
174 assert pull_request._last_merge_target_rev == self.target_commit
174 assert pull_request._last_merge_target_rev == self.target_commit
175 assert pull_request.last_merge_status is MergeFailureReason.NONE
175 assert pull_request.last_merge_status is MergeFailureReason.NONE
176
176
177 self.merge_mock.reset_mock()
177 self.merge_mock.reset_mock()
178 status, msg = PullRequestModel().merge_status(pull_request)
178 status, msg = PullRequestModel().merge_status(pull_request)
179 assert status is True
179 assert status is True
180 assert msg.eval() == 'This pull request can be automatically merged.'
180 assert msg.eval() == 'This pull request can be automatically merged.'
181 assert self.merge_mock.called is False
181 assert self.merge_mock.called is False
182
182
183 def test_merge_status_known_failure(self, pull_request):
183 def test_merge_status_known_failure(self, pull_request):
184 self.merge_mock.return_value = MergeResponse(
184 self.merge_mock.return_value = MergeResponse(
185 False, False, None, MergeFailureReason.MERGE_FAILED)
185 False, False, None, MergeFailureReason.MERGE_FAILED)
186
186
187 assert pull_request._last_merge_source_rev is None
187 assert pull_request._last_merge_source_rev is None
188 assert pull_request._last_merge_target_rev is None
188 assert pull_request._last_merge_target_rev is None
189 assert pull_request.last_merge_status is None
189 assert pull_request.last_merge_status is None
190
190
191 status, msg = PullRequestModel().merge_status(pull_request)
191 status, msg = PullRequestModel().merge_status(pull_request)
192 assert status is False
192 assert status is False
193 assert (
193 assert (
194 msg.eval() ==
194 msg.eval() ==
195 'This pull request cannot be merged because of merge conflicts.')
195 'This pull request cannot be merged because of merge conflicts.')
196 self.merge_mock.assert_called_with(
196 self.merge_mock.assert_called_with(
197 self.repo_id, self.workspace_id,
197 self.repo_id, self.workspace_id,
198 pull_request.target_ref_parts,
198 pull_request.target_ref_parts,
199 pull_request.source_repo.scm_instance(),
199 pull_request.source_repo.scm_instance(),
200 pull_request.source_ref_parts, dry_run=True,
200 pull_request.source_ref_parts, dry_run=True,
201 use_rebase=False, close_branch=False)
201 use_rebase=False, close_branch=False)
202
202
203 assert pull_request._last_merge_source_rev == self.source_commit
203 assert pull_request._last_merge_source_rev == self.source_commit
204 assert pull_request._last_merge_target_rev == self.target_commit
204 assert pull_request._last_merge_target_rev == self.target_commit
205 assert (
205 assert (
206 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
206 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
207
207
208 self.merge_mock.reset_mock()
208 self.merge_mock.reset_mock()
209 status, msg = PullRequestModel().merge_status(pull_request)
209 status, msg = PullRequestModel().merge_status(pull_request)
210 assert status is False
210 assert status is False
211 assert (
211 assert (
212 msg.eval() ==
212 msg.eval() ==
213 'This pull request cannot be merged because of merge conflicts.')
213 'This pull request cannot be merged because of merge conflicts.')
214 assert self.merge_mock.called is False
214 assert self.merge_mock.called is False
215
215
216 def test_merge_status_unknown_failure(self, pull_request):
216 def test_merge_status_unknown_failure(self, pull_request):
217 self.merge_mock.return_value = MergeResponse(
217 self.merge_mock.return_value = MergeResponse(
218 False, False, None, MergeFailureReason.UNKNOWN)
218 False, False, None, MergeFailureReason.UNKNOWN)
219
219
220 assert pull_request._last_merge_source_rev is None
220 assert pull_request._last_merge_source_rev is None
221 assert pull_request._last_merge_target_rev is None
221 assert pull_request._last_merge_target_rev is None
222 assert pull_request.last_merge_status is None
222 assert pull_request.last_merge_status is None
223
223
224 status, msg = PullRequestModel().merge_status(pull_request)
224 status, msg = PullRequestModel().merge_status(pull_request)
225 assert status is False
225 assert status is False
226 assert msg.eval() == (
226 assert msg.eval() == (
227 'This pull request cannot be merged because of an unhandled'
227 'This pull request cannot be merged because of an unhandled'
228 ' exception.')
228 ' exception.')
229 self.merge_mock.assert_called_with(
229 self.merge_mock.assert_called_with(
230 self.repo_id, self.workspace_id,
230 self.repo_id, self.workspace_id,
231 pull_request.target_ref_parts,
231 pull_request.target_ref_parts,
232 pull_request.source_repo.scm_instance(),
232 pull_request.source_repo.scm_instance(),
233 pull_request.source_ref_parts, dry_run=True,
233 pull_request.source_ref_parts, dry_run=True,
234 use_rebase=False, close_branch=False)
234 use_rebase=False, close_branch=False)
235
235
236 assert pull_request._last_merge_source_rev is None
236 assert pull_request._last_merge_source_rev is None
237 assert pull_request._last_merge_target_rev is None
237 assert pull_request._last_merge_target_rev is None
238 assert pull_request.last_merge_status is None
238 assert pull_request.last_merge_status is None
239
239
240 self.merge_mock.reset_mock()
240 self.merge_mock.reset_mock()
241 status, msg = PullRequestModel().merge_status(pull_request)
241 status, msg = PullRequestModel().merge_status(pull_request)
242 assert status is False
242 assert status is False
243 assert msg.eval() == (
243 assert msg.eval() == (
244 'This pull request cannot be merged because of an unhandled'
244 'This pull request cannot be merged because of an unhandled'
245 ' exception.')
245 ' exception.')
246 assert self.merge_mock.called is True
246 assert self.merge_mock.called is True
247
247
248 def test_merge_status_when_target_is_locked(self, pull_request):
248 def test_merge_status_when_target_is_locked(self, pull_request):
249 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
249 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
250 status, msg = PullRequestModel().merge_status(pull_request)
250 status, msg = PullRequestModel().merge_status(pull_request)
251 assert status is False
251 assert status is False
252 assert msg.eval() == (
252 assert msg.eval() == (
253 'This pull request cannot be merged because the target repository'
253 'This pull request cannot be merged because the target repository'
254 ' is locked.')
254 ' is locked.')
255
255
256 def test_merge_status_requirements_check_target(self, pull_request):
256 def test_merge_status_requirements_check_target(self, pull_request):
257
257
258 def has_largefiles(self, repo):
258 def has_largefiles(self, repo):
259 return repo == pull_request.source_repo
259 return repo == pull_request.source_repo
260
260
261 patcher = mock.patch.object(
261 patcher = mock.patch.object(
262 PullRequestModel, '_has_largefiles', has_largefiles)
262 PullRequestModel, '_has_largefiles', has_largefiles)
263 with patcher:
263 with patcher:
264 status, msg = PullRequestModel().merge_status(pull_request)
264 status, msg = PullRequestModel().merge_status(pull_request)
265
265
266 assert status is False
266 assert status is False
267 assert msg == 'Target repository large files support is disabled.'
267 assert msg == 'Target repository large files support is disabled.'
268
268
269 def test_merge_status_requirements_check_source(self, pull_request):
269 def test_merge_status_requirements_check_source(self, pull_request):
270
270
271 def has_largefiles(self, repo):
271 def has_largefiles(self, repo):
272 return repo == pull_request.target_repo
272 return repo == pull_request.target_repo
273
273
274 patcher = mock.patch.object(
274 patcher = mock.patch.object(
275 PullRequestModel, '_has_largefiles', has_largefiles)
275 PullRequestModel, '_has_largefiles', has_largefiles)
276 with patcher:
276 with patcher:
277 status, msg = PullRequestModel().merge_status(pull_request)
277 status, msg = PullRequestModel().merge_status(pull_request)
278
278
279 assert status is False
279 assert status is False
280 assert msg == 'Source repository large files support is disabled.'
280 assert msg == 'Source repository large files support is disabled.'
281
281
282 def test_merge(self, pull_request, merge_extras):
282 def test_merge(self, pull_request, merge_extras):
283 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
283 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
284 merge_ref = Reference(
284 merge_ref = Reference(
285 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
285 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
286 self.merge_mock.return_value = MergeResponse(
286 self.merge_mock.return_value = MergeResponse(
287 True, True, merge_ref, MergeFailureReason.NONE)
287 True, True, merge_ref, MergeFailureReason.NONE)
288
288
289 merge_extras['repository'] = pull_request.target_repo.repo_name
289 merge_extras['repository'] = pull_request.target_repo.repo_name
290 PullRequestModel().merge_repo(
290 PullRequestModel().merge_repo(
291 pull_request, pull_request.author, extras=merge_extras)
291 pull_request, pull_request.author, extras=merge_extras)
292
292
293 message = (
293 message = (
294 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
294 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
295 u'\n\n {pr_title}'.format(
295 u'\n\n {pr_title}'.format(
296 pr_id=pull_request.pull_request_id,
296 pr_id=pull_request.pull_request_id,
297 source_repo=safe_unicode(
297 source_repo=safe_unicode(
298 pull_request.source_repo.scm_instance().name),
298 pull_request.source_repo.scm_instance().name),
299 source_ref_name=pull_request.source_ref_parts.name,
299 source_ref_name=pull_request.source_ref_parts.name,
300 pr_title=safe_unicode(pull_request.title)
300 pr_title=safe_unicode(pull_request.title)
301 )
301 )
302 )
302 )
303 self.merge_mock.assert_called_with(
303 self.merge_mock.assert_called_with(
304 self.repo_id, self.workspace_id,
304 self.repo_id, self.workspace_id,
305 pull_request.target_ref_parts,
305 pull_request.target_ref_parts,
306 pull_request.source_repo.scm_instance(),
306 pull_request.source_repo.scm_instance(),
307 pull_request.source_ref_parts,
307 pull_request.source_ref_parts,
308 user_name=user.short_contact, user_email=user.email, message=message,
308 user_name=user.short_contact, user_email=user.email, message=message,
309 use_rebase=False, close_branch=False
309 use_rebase=False, close_branch=False
310 )
310 )
311 self.invalidation_mock.assert_called_once_with(
311 self.invalidation_mock.assert_called_once_with(
312 pull_request.target_repo.repo_name)
312 pull_request.target_repo.repo_name)
313
313
314 self.hook_mock.assert_called_with(
314 self.hook_mock.assert_called_with(
315 self.pull_request, self.pull_request.author, 'merge')
315 self.pull_request, self.pull_request.author, 'merge')
316
316
317 pull_request = PullRequest.get(pull_request.pull_request_id)
317 pull_request = PullRequest.get(pull_request.pull_request_id)
318 assert (
318 assert (
319 pull_request.merge_rev ==
319 pull_request.merge_rev ==
320 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
320 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
321
321
322 def test_merge_failed(self, pull_request, merge_extras):
322 def test_merge_failed(self, pull_request, merge_extras):
323 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
323 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
324 merge_ref = Reference(
324 merge_ref = Reference(
325 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
325 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
326 self.merge_mock.return_value = MergeResponse(
326 self.merge_mock.return_value = MergeResponse(
327 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
327 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
328
328
329 merge_extras['repository'] = pull_request.target_repo.repo_name
329 merge_extras['repository'] = pull_request.target_repo.repo_name
330 PullRequestModel().merge_repo(
330 PullRequestModel().merge_repo(
331 pull_request, pull_request.author, extras=merge_extras)
331 pull_request, pull_request.author, extras=merge_extras)
332
332
333 message = (
333 message = (
334 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
334 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
335 u'\n\n {pr_title}'.format(
335 u'\n\n {pr_title}'.format(
336 pr_id=pull_request.pull_request_id,
336 pr_id=pull_request.pull_request_id,
337 source_repo=safe_unicode(
337 source_repo=safe_unicode(
338 pull_request.source_repo.scm_instance().name),
338 pull_request.source_repo.scm_instance().name),
339 source_ref_name=pull_request.source_ref_parts.name,
339 source_ref_name=pull_request.source_ref_parts.name,
340 pr_title=safe_unicode(pull_request.title)
340 pr_title=safe_unicode(pull_request.title)
341 )
341 )
342 )
342 )
343 self.merge_mock.assert_called_with(
343 self.merge_mock.assert_called_with(
344 self.repo_id, self.workspace_id,
344 self.repo_id, self.workspace_id,
345 pull_request.target_ref_parts,
345 pull_request.target_ref_parts,
346 pull_request.source_repo.scm_instance(),
346 pull_request.source_repo.scm_instance(),
347 pull_request.source_ref_parts,
347 pull_request.source_ref_parts,
348 user_name=user.short_contact, user_email=user.email, message=message,
348 user_name=user.short_contact, user_email=user.email, message=message,
349 use_rebase=False, close_branch=False
349 use_rebase=False, close_branch=False
350 )
350 )
351
351
352 pull_request = PullRequest.get(pull_request.pull_request_id)
352 pull_request = PullRequest.get(pull_request.pull_request_id)
353 assert self.invalidation_mock.called is False
353 assert self.invalidation_mock.called is False
354 assert pull_request.merge_rev is None
354 assert pull_request.merge_rev is None
355
355
356 def test_get_commit_ids(self, pull_request):
356 def test_get_commit_ids(self, pull_request):
357 # The PR has been not merget yet, so expect an exception
357 # The PR has been not merget yet, so expect an exception
358 with pytest.raises(ValueError):
358 with pytest.raises(ValueError):
359 PullRequestModel()._get_commit_ids(pull_request)
359 PullRequestModel()._get_commit_ids(pull_request)
360
360
361 # Merge revision is in the revisions list
361 # Merge revision is in the revisions list
362 pull_request.merge_rev = pull_request.revisions[0]
362 pull_request.merge_rev = pull_request.revisions[0]
363 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
363 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
364 assert commit_ids == pull_request.revisions
364 assert commit_ids == pull_request.revisions
365
365
366 # Merge revision is not in the revisions list
366 # Merge revision is not in the revisions list
367 pull_request.merge_rev = 'f000' * 10
367 pull_request.merge_rev = 'f000' * 10
368 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
368 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
369 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
369 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
370
370
371 def test_get_diff_from_pr_version(self, pull_request):
371 def test_get_diff_from_pr_version(self, pull_request):
372 source_repo = pull_request.source_repo
372 source_repo = pull_request.source_repo
373 source_ref_id = pull_request.source_ref_parts.commit_id
373 source_ref_id = pull_request.source_ref_parts.commit_id
374 target_ref_id = pull_request.target_ref_parts.commit_id
374 target_ref_id = pull_request.target_ref_parts.commit_id
375 diff = PullRequestModel()._get_diff_from_pr_or_version(
375 diff = PullRequestModel()._get_diff_from_pr_or_version(
376 source_repo, source_ref_id, target_ref_id, context=6)
376 source_repo, source_ref_id, target_ref_id, context=6)
377 assert 'file_1' in diff.raw
377 assert 'file_1' in diff.raw
378
378
379 def test_generate_title_returns_unicode(self):
379 def test_generate_title_returns_unicode(self):
380 title = PullRequestModel().generate_pullrequest_title(
380 title = PullRequestModel().generate_pullrequest_title(
381 source='source-dummy',
381 source='source-dummy',
382 source_ref='source-ref-dummy',
382 source_ref='source-ref-dummy',
383 target='target-dummy',
383 target='target-dummy',
384 )
384 )
385 assert type(title) == unicode
385 assert type(title) == unicode
386
386
387
387
388 @pytest.mark.usefixtures('config_stub')
388 @pytest.mark.usefixtures('config_stub')
389 class TestIntegrationMerge(object):
389 class TestIntegrationMerge(object):
390 @pytest.mark.parametrize('extra_config', (
390 @pytest.mark.parametrize('extra_config', (
391 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
391 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
392 ))
392 ))
393 def test_merge_triggers_push_hooks(
393 def test_merge_triggers_push_hooks(
394 self, pr_util, user_admin, capture_rcextensions, merge_extras,
394 self, pr_util, user_admin, capture_rcextensions, merge_extras,
395 extra_config):
395 extra_config):
396
396 pull_request = pr_util.create_pull_request(
397 pull_request = pr_util.create_pull_request(
397 approved=True, mergeable=True)
398 approved=True, mergeable=True)
398 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
399 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
399 merge_extras['repository'] = pull_request.target_repo.repo_name
400 merge_extras['repository'] = pull_request.target_repo.repo_name
400 Session().commit()
401 Session().commit()
401
402
402 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
403 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
403 merge_state = PullRequestModel().merge_repo(
404 merge_state = PullRequestModel().merge_repo(
404 pull_request, user_admin, extras=merge_extras)
405 pull_request, user_admin, extras=merge_extras)
405
406
406 assert merge_state.executed
407 assert merge_state.executed
407 assert 'pre_push' in capture_rcextensions
408 assert '_pre_push_hook' in capture_rcextensions
408 assert 'post_push' in capture_rcextensions
409 assert '_push_hook' in capture_rcextensions
409
410
410 def test_merge_can_be_rejected_by_pre_push_hook(
411 def test_merge_can_be_rejected_by_pre_push_hook(
411 self, pr_util, user_admin, capture_rcextensions, merge_extras):
412 self, pr_util, user_admin, capture_rcextensions, merge_extras):
412 pull_request = pr_util.create_pull_request(
413 pull_request = pr_util.create_pull_request(
413 approved=True, mergeable=True)
414 approved=True, mergeable=True)
414 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
415 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
415 merge_extras['repository'] = pull_request.target_repo.repo_name
416 merge_extras['repository'] = pull_request.target_repo.repo_name
416 Session().commit()
417 Session().commit()
417
418
418 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
419 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
419 pre_pull.side_effect = RepositoryError("Disallow push!")
420 pre_pull.side_effect = RepositoryError("Disallow push!")
420 merge_status = PullRequestModel().merge_repo(
421 merge_status = PullRequestModel().merge_repo(
421 pull_request, user_admin, extras=merge_extras)
422 pull_request, user_admin, extras=merge_extras)
422
423
423 assert not merge_status.executed
424 assert not merge_status.executed
424 assert 'pre_push' not in capture_rcextensions
425 assert 'pre_push' not in capture_rcextensions
425 assert 'post_push' not in capture_rcextensions
426 assert 'post_push' not in capture_rcextensions
426
427
427 def test_merge_fails_if_target_is_locked(
428 def test_merge_fails_if_target_is_locked(
428 self, pr_util, user_regular, merge_extras):
429 self, pr_util, user_regular, merge_extras):
429 pull_request = pr_util.create_pull_request(
430 pull_request = pr_util.create_pull_request(
430 approved=True, mergeable=True)
431 approved=True, mergeable=True)
431 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
432 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
432 pull_request.target_repo.locked = locked_by
433 pull_request.target_repo.locked = locked_by
433 # TODO: johbo: Check if this can work based on the database, currently
434 # TODO: johbo: Check if this can work based on the database, currently
434 # all data is pre-computed, that's why just updating the DB is not
435 # all data is pre-computed, that's why just updating the DB is not
435 # enough.
436 # enough.
436 merge_extras['locked_by'] = locked_by
437 merge_extras['locked_by'] = locked_by
437 merge_extras['repository'] = pull_request.target_repo.repo_name
438 merge_extras['repository'] = pull_request.target_repo.repo_name
438 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
439 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
439 Session().commit()
440 Session().commit()
440 merge_status = PullRequestModel().merge_repo(
441 merge_status = PullRequestModel().merge_repo(
441 pull_request, user_regular, extras=merge_extras)
442 pull_request, user_regular, extras=merge_extras)
442 assert not merge_status.executed
443 assert not merge_status.executed
443
444
444
445
445 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
446 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
446 (False, 1, 0),
447 (False, 1, 0),
447 (True, 0, 1),
448 (True, 0, 1),
448 ])
449 ])
449 def test_outdated_comments(
450 def test_outdated_comments(
450 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
451 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
451 pull_request = pr_util.create_pull_request()
452 pull_request = pr_util.create_pull_request()
452 pr_util.create_inline_comment(file_path='not_in_updated_diff')
453 pr_util.create_inline_comment(file_path='not_in_updated_diff')
453
454
454 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
455 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
455 pr_util.add_one_commit()
456 pr_util.add_one_commit()
456 assert_inline_comments(
457 assert_inline_comments(
457 pull_request, visible=inlines_count, outdated=outdated_count)
458 pull_request, visible=inlines_count, outdated=outdated_count)
458 outdated_comment_mock.assert_called_with(pull_request)
459 outdated_comment_mock.assert_called_with(pull_request)
459
460
460
461
461 @pytest.fixture
462 @pytest.fixture
462 def merge_extras(user_regular):
463 def merge_extras(user_regular):
463 """
464 """
464 Context for the vcs operation when running a merge.
465 Context for the vcs operation when running a merge.
465 """
466 """
466 extras = {
467 extras = {
467 'ip': '127.0.0.1',
468 'ip': '127.0.0.1',
468 'username': user_regular.username,
469 'username': user_regular.username,
469 'user_id': user_regular.user_id,
470 'user_id': user_regular.user_id,
470 'action': 'push',
471 'action': 'push',
471 'repository': 'fake_target_repo_name',
472 'repository': 'fake_target_repo_name',
472 'scm': 'git',
473 'scm': 'git',
473 'config': 'fake_config_ini_path',
474 'config': 'fake_config_ini_path',
474 'repo_store': '',
475 'repo_store': '',
475 'make_lock': None,
476 'make_lock': None,
476 'locked_by': [None, None, None],
477 'locked_by': [None, None, None],
477 'server_url': 'http://test.example.com:5000',
478 'server_url': 'http://test.example.com:5000',
478 'hooks': ['push', 'pull'],
479 'hooks': ['push', 'pull'],
479 'is_shadow_repo': False,
480 'is_shadow_repo': False,
480 }
481 }
481 return extras
482 return extras
482
483
483
484
484 @pytest.mark.usefixtures('config_stub')
485 @pytest.mark.usefixtures('config_stub')
485 class TestUpdateCommentHandling(object):
486 class TestUpdateCommentHandling(object):
486
487
487 @pytest.fixture(autouse=True, scope='class')
488 @pytest.fixture(autouse=True, scope='class')
488 def enable_outdated_comments(self, request, baseapp):
489 def enable_outdated_comments(self, request, baseapp):
489 config_patch = mock.patch.dict(
490 config_patch = mock.patch.dict(
490 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
491 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
491 config_patch.start()
492 config_patch.start()
492
493
493 @request.addfinalizer
494 @request.addfinalizer
494 def cleanup():
495 def cleanup():
495 config_patch.stop()
496 config_patch.stop()
496
497
497 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
498 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
498 commits = [
499 commits = [
499 {'message': 'a'},
500 {'message': 'a'},
500 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
501 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
501 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
502 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
502 ]
503 ]
503 pull_request = pr_util.create_pull_request(
504 pull_request = pr_util.create_pull_request(
504 commits=commits, target_head='a', source_head='b', revisions=['b'])
505 commits=commits, target_head='a', source_head='b', revisions=['b'])
505 pr_util.create_inline_comment(file_path='file_b')
506 pr_util.create_inline_comment(file_path='file_b')
506 pr_util.add_one_commit(head='c')
507 pr_util.add_one_commit(head='c')
507
508
508 assert_inline_comments(pull_request, visible=1, outdated=0)
509 assert_inline_comments(pull_request, visible=1, outdated=0)
509
510
510 def test_comment_stays_unflagged_on_change_above(self, pr_util):
511 def test_comment_stays_unflagged_on_change_above(self, pr_util):
511 original_content = ''.join(
512 original_content = ''.join(
512 ['line {}\n'.format(x) for x in range(1, 11)])
513 ['line {}\n'.format(x) for x in range(1, 11)])
513 updated_content = 'new_line_at_top\n' + original_content
514 updated_content = 'new_line_at_top\n' + original_content
514 commits = [
515 commits = [
515 {'message': 'a'},
516 {'message': 'a'},
516 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
517 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
517 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
518 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
518 ]
519 ]
519 pull_request = pr_util.create_pull_request(
520 pull_request = pr_util.create_pull_request(
520 commits=commits, target_head='a', source_head='b', revisions=['b'])
521 commits=commits, target_head='a', source_head='b', revisions=['b'])
521
522
522 with outdated_comments_patcher():
523 with outdated_comments_patcher():
523 comment = pr_util.create_inline_comment(
524 comment = pr_util.create_inline_comment(
524 line_no=u'n8', file_path='file_b')
525 line_no=u'n8', file_path='file_b')
525 pr_util.add_one_commit(head='c')
526 pr_util.add_one_commit(head='c')
526
527
527 assert_inline_comments(pull_request, visible=1, outdated=0)
528 assert_inline_comments(pull_request, visible=1, outdated=0)
528 assert comment.line_no == u'n9'
529 assert comment.line_no == u'n9'
529
530
530 def test_comment_stays_unflagged_on_change_below(self, pr_util):
531 def test_comment_stays_unflagged_on_change_below(self, pr_util):
531 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
532 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
532 updated_content = original_content + 'new_line_at_end\n'
533 updated_content = original_content + 'new_line_at_end\n'
533 commits = [
534 commits = [
534 {'message': 'a'},
535 {'message': 'a'},
535 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
536 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
536 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
537 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
537 ]
538 ]
538 pull_request = pr_util.create_pull_request(
539 pull_request = pr_util.create_pull_request(
539 commits=commits, target_head='a', source_head='b', revisions=['b'])
540 commits=commits, target_head='a', source_head='b', revisions=['b'])
540 pr_util.create_inline_comment(file_path='file_b')
541 pr_util.create_inline_comment(file_path='file_b')
541 pr_util.add_one_commit(head='c')
542 pr_util.add_one_commit(head='c')
542
543
543 assert_inline_comments(pull_request, visible=1, outdated=0)
544 assert_inline_comments(pull_request, visible=1, outdated=0)
544
545
545 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
546 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
546 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
547 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
547 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
548 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
548 change_lines = list(base_lines)
549 change_lines = list(base_lines)
549 change_lines.insert(6, 'line 6a added\n')
550 change_lines.insert(6, 'line 6a added\n')
550
551
551 # Changes on the last line of sight
552 # Changes on the last line of sight
552 update_lines = list(change_lines)
553 update_lines = list(change_lines)
553 update_lines[0] = 'line 1 changed\n'
554 update_lines[0] = 'line 1 changed\n'
554 update_lines[-1] = 'line 12 changed\n'
555 update_lines[-1] = 'line 12 changed\n'
555
556
556 def file_b(lines):
557 def file_b(lines):
557 return FileNode('file_b', ''.join(lines))
558 return FileNode('file_b', ''.join(lines))
558
559
559 commits = [
560 commits = [
560 {'message': 'a', 'added': [file_b(base_lines)]},
561 {'message': 'a', 'added': [file_b(base_lines)]},
561 {'message': 'b', 'changed': [file_b(change_lines)]},
562 {'message': 'b', 'changed': [file_b(change_lines)]},
562 {'message': 'c', 'changed': [file_b(update_lines)]},
563 {'message': 'c', 'changed': [file_b(update_lines)]},
563 ]
564 ]
564
565
565 pull_request = pr_util.create_pull_request(
566 pull_request = pr_util.create_pull_request(
566 commits=commits, target_head='a', source_head='b', revisions=['b'])
567 commits=commits, target_head='a', source_head='b', revisions=['b'])
567 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
568 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
568
569
569 with outdated_comments_patcher():
570 with outdated_comments_patcher():
570 pr_util.add_one_commit(head='c')
571 pr_util.add_one_commit(head='c')
571 assert_inline_comments(pull_request, visible=0, outdated=1)
572 assert_inline_comments(pull_request, visible=0, outdated=1)
572
573
573 @pytest.mark.parametrize("change, content", [
574 @pytest.mark.parametrize("change, content", [
574 ('changed', 'changed\n'),
575 ('changed', 'changed\n'),
575 ('removed', ''),
576 ('removed', ''),
576 ], ids=['changed', 'removed'])
577 ], ids=['changed', 'removed'])
577 def test_comment_flagged_on_change(self, pr_util, change, content):
578 def test_comment_flagged_on_change(self, pr_util, change, content):
578 commits = [
579 commits = [
579 {'message': 'a'},
580 {'message': 'a'},
580 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
581 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
581 {'message': 'c', change: [FileNode('file_b', content)]},
582 {'message': 'c', change: [FileNode('file_b', content)]},
582 ]
583 ]
583 pull_request = pr_util.create_pull_request(
584 pull_request = pr_util.create_pull_request(
584 commits=commits, target_head='a', source_head='b', revisions=['b'])
585 commits=commits, target_head='a', source_head='b', revisions=['b'])
585 pr_util.create_inline_comment(file_path='file_b')
586 pr_util.create_inline_comment(file_path='file_b')
586
587
587 with outdated_comments_patcher():
588 with outdated_comments_patcher():
588 pr_util.add_one_commit(head='c')
589 pr_util.add_one_commit(head='c')
589 assert_inline_comments(pull_request, visible=0, outdated=1)
590 assert_inline_comments(pull_request, visible=0, outdated=1)
590
591
591
592
592 @pytest.mark.usefixtures('config_stub')
593 @pytest.mark.usefixtures('config_stub')
593 class TestUpdateChangedFiles(object):
594 class TestUpdateChangedFiles(object):
594
595
595 def test_no_changes_on_unchanged_diff(self, pr_util):
596 def test_no_changes_on_unchanged_diff(self, pr_util):
596 commits = [
597 commits = [
597 {'message': 'a'},
598 {'message': 'a'},
598 {'message': 'b',
599 {'message': 'b',
599 'added': [FileNode('file_b', 'test_content b\n')]},
600 'added': [FileNode('file_b', 'test_content b\n')]},
600 {'message': 'c',
601 {'message': 'c',
601 'added': [FileNode('file_c', 'test_content c\n')]},
602 'added': [FileNode('file_c', 'test_content c\n')]},
602 ]
603 ]
603 # open a PR from a to b, adding file_b
604 # open a PR from a to b, adding file_b
604 pull_request = pr_util.create_pull_request(
605 pull_request = pr_util.create_pull_request(
605 commits=commits, target_head='a', source_head='b', revisions=['b'],
606 commits=commits, target_head='a', source_head='b', revisions=['b'],
606 name_suffix='per-file-review')
607 name_suffix='per-file-review')
607
608
608 # modify PR adding new file file_c
609 # modify PR adding new file file_c
609 pr_util.add_one_commit(head='c')
610 pr_util.add_one_commit(head='c')
610
611
611 assert_pr_file_changes(
612 assert_pr_file_changes(
612 pull_request,
613 pull_request,
613 added=['file_c'],
614 added=['file_c'],
614 modified=[],
615 modified=[],
615 removed=[])
616 removed=[])
616
617
617 def test_modify_and_undo_modification_diff(self, pr_util):
618 def test_modify_and_undo_modification_diff(self, pr_util):
618 commits = [
619 commits = [
619 {'message': 'a'},
620 {'message': 'a'},
620 {'message': 'b',
621 {'message': 'b',
621 'added': [FileNode('file_b', 'test_content b\n')]},
622 'added': [FileNode('file_b', 'test_content b\n')]},
622 {'message': 'c',
623 {'message': 'c',
623 'changed': [FileNode('file_b', 'test_content b modified\n')]},
624 'changed': [FileNode('file_b', 'test_content b modified\n')]},
624 {'message': 'd',
625 {'message': 'd',
625 'changed': [FileNode('file_b', 'test_content b\n')]},
626 'changed': [FileNode('file_b', 'test_content b\n')]},
626 ]
627 ]
627 # open a PR from a to b, adding file_b
628 # open a PR from a to b, adding file_b
628 pull_request = pr_util.create_pull_request(
629 pull_request = pr_util.create_pull_request(
629 commits=commits, target_head='a', source_head='b', revisions=['b'],
630 commits=commits, target_head='a', source_head='b', revisions=['b'],
630 name_suffix='per-file-review')
631 name_suffix='per-file-review')
631
632
632 # modify PR modifying file file_b
633 # modify PR modifying file file_b
633 pr_util.add_one_commit(head='c')
634 pr_util.add_one_commit(head='c')
634
635
635 assert_pr_file_changes(
636 assert_pr_file_changes(
636 pull_request,
637 pull_request,
637 added=[],
638 added=[],
638 modified=['file_b'],
639 modified=['file_b'],
639 removed=[])
640 removed=[])
640
641
641 # move the head again to d, which rollbacks change,
642 # move the head again to d, which rollbacks change,
642 # meaning we should indicate no changes
643 # meaning we should indicate no changes
643 pr_util.add_one_commit(head='d')
644 pr_util.add_one_commit(head='d')
644
645
645 assert_pr_file_changes(
646 assert_pr_file_changes(
646 pull_request,
647 pull_request,
647 added=[],
648 added=[],
648 modified=[],
649 modified=[],
649 removed=[])
650 removed=[])
650
651
651 def test_updated_all_files_in_pr(self, pr_util):
652 def test_updated_all_files_in_pr(self, pr_util):
652 commits = [
653 commits = [
653 {'message': 'a'},
654 {'message': 'a'},
654 {'message': 'b', 'added': [
655 {'message': 'b', 'added': [
655 FileNode('file_a', 'test_content a\n'),
656 FileNode('file_a', 'test_content a\n'),
656 FileNode('file_b', 'test_content b\n'),
657 FileNode('file_b', 'test_content b\n'),
657 FileNode('file_c', 'test_content c\n')]},
658 FileNode('file_c', 'test_content c\n')]},
658 {'message': 'c', 'changed': [
659 {'message': 'c', 'changed': [
659 FileNode('file_a', 'test_content a changed\n'),
660 FileNode('file_a', 'test_content a changed\n'),
660 FileNode('file_b', 'test_content b changed\n'),
661 FileNode('file_b', 'test_content b changed\n'),
661 FileNode('file_c', 'test_content c changed\n')]},
662 FileNode('file_c', 'test_content c changed\n')]},
662 ]
663 ]
663 # open a PR from a to b, changing 3 files
664 # open a PR from a to b, changing 3 files
664 pull_request = pr_util.create_pull_request(
665 pull_request = pr_util.create_pull_request(
665 commits=commits, target_head='a', source_head='b', revisions=['b'],
666 commits=commits, target_head='a', source_head='b', revisions=['b'],
666 name_suffix='per-file-review')
667 name_suffix='per-file-review')
667
668
668 pr_util.add_one_commit(head='c')
669 pr_util.add_one_commit(head='c')
669
670
670 assert_pr_file_changes(
671 assert_pr_file_changes(
671 pull_request,
672 pull_request,
672 added=[],
673 added=[],
673 modified=['file_a', 'file_b', 'file_c'],
674 modified=['file_a', 'file_b', 'file_c'],
674 removed=[])
675 removed=[])
675
676
676 def test_updated_and_removed_all_files_in_pr(self, pr_util):
677 def test_updated_and_removed_all_files_in_pr(self, pr_util):
677 commits = [
678 commits = [
678 {'message': 'a'},
679 {'message': 'a'},
679 {'message': 'b', 'added': [
680 {'message': 'b', 'added': [
680 FileNode('file_a', 'test_content a\n'),
681 FileNode('file_a', 'test_content a\n'),
681 FileNode('file_b', 'test_content b\n'),
682 FileNode('file_b', 'test_content b\n'),
682 FileNode('file_c', 'test_content c\n')]},
683 FileNode('file_c', 'test_content c\n')]},
683 {'message': 'c', 'removed': [
684 {'message': 'c', 'removed': [
684 FileNode('file_a', 'test_content a changed\n'),
685 FileNode('file_a', 'test_content a changed\n'),
685 FileNode('file_b', 'test_content b changed\n'),
686 FileNode('file_b', 'test_content b changed\n'),
686 FileNode('file_c', 'test_content c changed\n')]},
687 FileNode('file_c', 'test_content c changed\n')]},
687 ]
688 ]
688 # open a PR from a to b, removing 3 files
689 # open a PR from a to b, removing 3 files
689 pull_request = pr_util.create_pull_request(
690 pull_request = pr_util.create_pull_request(
690 commits=commits, target_head='a', source_head='b', revisions=['b'],
691 commits=commits, target_head='a', source_head='b', revisions=['b'],
691 name_suffix='per-file-review')
692 name_suffix='per-file-review')
692
693
693 pr_util.add_one_commit(head='c')
694 pr_util.add_one_commit(head='c')
694
695
695 assert_pr_file_changes(
696 assert_pr_file_changes(
696 pull_request,
697 pull_request,
697 added=[],
698 added=[],
698 modified=[],
699 modified=[],
699 removed=['file_a', 'file_b', 'file_c'])
700 removed=['file_a', 'file_b', 'file_c'])
700
701
701
702
702 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
703 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
703 model = PullRequestModel()
704 model = PullRequestModel()
704 pull_request = pr_util.create_pull_request()
705 pull_request = pr_util.create_pull_request()
705 pr_util.update_source_repository()
706 pr_util.update_source_repository()
706
707
707 model.update_commits(pull_request)
708 model.update_commits(pull_request)
708
709
709 # Expect that it has a version entry now
710 # Expect that it has a version entry now
710 assert len(model.get_versions(pull_request)) == 1
711 assert len(model.get_versions(pull_request)) == 1
711
712
712
713
713 def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
714 def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
714 pull_request = pr_util.create_pull_request()
715 pull_request = pr_util.create_pull_request()
715 model = PullRequestModel()
716 model = PullRequestModel()
716 model.update_commits(pull_request)
717 model.update_commits(pull_request)
717
718
718 # Expect that it still has no versions
719 # Expect that it still has no versions
719 assert len(model.get_versions(pull_request)) == 0
720 assert len(model.get_versions(pull_request)) == 0
720
721
721
722
722 def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
723 def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
723 model = PullRequestModel()
724 model = PullRequestModel()
724 pull_request = pr_util.create_pull_request()
725 pull_request = pr_util.create_pull_request()
725 comment = pr_util.create_comment()
726 comment = pr_util.create_comment()
726 pr_util.update_source_repository()
727 pr_util.update_source_repository()
727
728
728 model.update_commits(pull_request)
729 model.update_commits(pull_request)
729
730
730 # Expect that the comment is linked to the pr version now
731 # Expect that the comment is linked to the pr version now
731 assert comment.pull_request_version == model.get_versions(pull_request)[0]
732 assert comment.pull_request_version == model.get_versions(pull_request)[0]
732
733
733
734
734 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
735 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
735 model = PullRequestModel()
736 model = PullRequestModel()
736 pull_request = pr_util.create_pull_request()
737 pull_request = pr_util.create_pull_request()
737 pr_util.update_source_repository()
738 pr_util.update_source_repository()
738 pr_util.update_source_repository()
739 pr_util.update_source_repository()
739
740
740 model.update_commits(pull_request)
741 model.update_commits(pull_request)
741
742
742 # Expect to find a new comment about the change
743 # Expect to find a new comment about the change
743 expected_message = textwrap.dedent(
744 expected_message = textwrap.dedent(
744 """\
745 """\
745 Pull request updated. Auto status change to |under_review|
746 Pull request updated. Auto status change to |under_review|
746
747
747 .. role:: added
748 .. role:: added
748 .. role:: removed
749 .. role:: removed
749 .. parsed-literal::
750 .. parsed-literal::
750
751
751 Changed commits:
752 Changed commits:
752 * :added:`1 added`
753 * :added:`1 added`
753 * :removed:`0 removed`
754 * :removed:`0 removed`
754
755
755 Changed files:
756 Changed files:
756 * `A file_2 <#a_c--92ed3b5f07b4>`_
757 * `A file_2 <#a_c--92ed3b5f07b4>`_
757
758
758 .. |under_review| replace:: *"Under Review"*"""
759 .. |under_review| replace:: *"Under Review"*"""
759 )
760 )
760 pull_request_comments = sorted(
761 pull_request_comments = sorted(
761 pull_request.comments, key=lambda c: c.modified_at)
762 pull_request.comments, key=lambda c: c.modified_at)
762 update_comment = pull_request_comments[-1]
763 update_comment = pull_request_comments[-1]
763 assert update_comment.text == expected_message
764 assert update_comment.text == expected_message
764
765
765
766
766 def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
767 def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
767 pull_request = pr_util.create_pull_request()
768 pull_request = pr_util.create_pull_request()
768
769
769 # Avoiding default values
770 # Avoiding default values
770 pull_request.status = PullRequest.STATUS_CLOSED
771 pull_request.status = PullRequest.STATUS_CLOSED
771 pull_request._last_merge_source_rev = "0" * 40
772 pull_request._last_merge_source_rev = "0" * 40
772 pull_request._last_merge_target_rev = "1" * 40
773 pull_request._last_merge_target_rev = "1" * 40
773 pull_request.last_merge_status = 1
774 pull_request.last_merge_status = 1
774 pull_request.merge_rev = "2" * 40
775 pull_request.merge_rev = "2" * 40
775
776
776 # Remember automatic values
777 # Remember automatic values
777 created_on = pull_request.created_on
778 created_on = pull_request.created_on
778 updated_on = pull_request.updated_on
779 updated_on = pull_request.updated_on
779
780
780 # Create a new version of the pull request
781 # Create a new version of the pull request
781 version = PullRequestModel()._create_version_from_snapshot(pull_request)
782 version = PullRequestModel()._create_version_from_snapshot(pull_request)
782
783
783 # Check attributes
784 # Check attributes
784 assert version.title == pr_util.create_parameters['title']
785 assert version.title == pr_util.create_parameters['title']
785 assert version.description == pr_util.create_parameters['description']
786 assert version.description == pr_util.create_parameters['description']
786 assert version.status == PullRequest.STATUS_CLOSED
787 assert version.status == PullRequest.STATUS_CLOSED
787
788
788 # versions get updated created_on
789 # versions get updated created_on
789 assert version.created_on != created_on
790 assert version.created_on != created_on
790
791
791 assert version.updated_on == updated_on
792 assert version.updated_on == updated_on
792 assert version.user_id == pull_request.user_id
793 assert version.user_id == pull_request.user_id
793 assert version.revisions == pr_util.create_parameters['revisions']
794 assert version.revisions == pr_util.create_parameters['revisions']
794 assert version.source_repo == pr_util.source_repository
795 assert version.source_repo == pr_util.source_repository
795 assert version.source_ref == pr_util.create_parameters['source_ref']
796 assert version.source_ref == pr_util.create_parameters['source_ref']
796 assert version.target_repo == pr_util.target_repository
797 assert version.target_repo == pr_util.target_repository
797 assert version.target_ref == pr_util.create_parameters['target_ref']
798 assert version.target_ref == pr_util.create_parameters['target_ref']
798 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
799 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
799 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
800 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
800 assert version.last_merge_status == pull_request.last_merge_status
801 assert version.last_merge_status == pull_request.last_merge_status
801 assert version.merge_rev == pull_request.merge_rev
802 assert version.merge_rev == pull_request.merge_rev
802 assert version.pull_request == pull_request
803 assert version.pull_request == pull_request
803
804
804
805
805 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
806 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
806 version1 = pr_util.create_version_of_pull_request()
807 version1 = pr_util.create_version_of_pull_request()
807 comment_linked = pr_util.create_comment(linked_to=version1)
808 comment_linked = pr_util.create_comment(linked_to=version1)
808 comment_unlinked = pr_util.create_comment()
809 comment_unlinked = pr_util.create_comment()
809 version2 = pr_util.create_version_of_pull_request()
810 version2 = pr_util.create_version_of_pull_request()
810
811
811 PullRequestModel()._link_comments_to_version(version2)
812 PullRequestModel()._link_comments_to_version(version2)
812
813
813 # Expect that only the new comment is linked to version2
814 # Expect that only the new comment is linked to version2
814 assert (
815 assert (
815 comment_unlinked.pull_request_version_id ==
816 comment_unlinked.pull_request_version_id ==
816 version2.pull_request_version_id)
817 version2.pull_request_version_id)
817 assert (
818 assert (
818 comment_linked.pull_request_version_id ==
819 comment_linked.pull_request_version_id ==
819 version1.pull_request_version_id)
820 version1.pull_request_version_id)
820 assert (
821 assert (
821 comment_unlinked.pull_request_version_id !=
822 comment_unlinked.pull_request_version_id !=
822 comment_linked.pull_request_version_id)
823 comment_linked.pull_request_version_id)
823
824
824
825
825 def test_calculate_commits():
826 def test_calculate_commits():
826 old_ids = [1, 2, 3]
827 old_ids = [1, 2, 3]
827 new_ids = [1, 3, 4, 5]
828 new_ids = [1, 3, 4, 5]
828 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
829 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
829 assert change.added == [4, 5]
830 assert change.added == [4, 5]
830 assert change.common == [1, 3]
831 assert change.common == [1, 3]
831 assert change.removed == [2]
832 assert change.removed == [2]
832 assert change.total == [1, 3, 4, 5]
833 assert change.total == [1, 3, 4, 5]
833
834
834
835
835 def assert_inline_comments(pull_request, visible=None, outdated=None):
836 def assert_inline_comments(pull_request, visible=None, outdated=None):
836 if visible is not None:
837 if visible is not None:
837 inline_comments = CommentsModel().get_inline_comments(
838 inline_comments = CommentsModel().get_inline_comments(
838 pull_request.target_repo.repo_id, pull_request=pull_request)
839 pull_request.target_repo.repo_id, pull_request=pull_request)
839 inline_cnt = CommentsModel().get_inline_comments_count(
840 inline_cnt = CommentsModel().get_inline_comments_count(
840 inline_comments)
841 inline_comments)
841 assert inline_cnt == visible
842 assert inline_cnt == visible
842 if outdated is not None:
843 if outdated is not None:
843 outdated_comments = CommentsModel().get_outdated_comments(
844 outdated_comments = CommentsModel().get_outdated_comments(
844 pull_request.target_repo.repo_id, pull_request)
845 pull_request.target_repo.repo_id, pull_request)
845 assert len(outdated_comments) == outdated
846 assert len(outdated_comments) == outdated
846
847
847
848
848 def assert_pr_file_changes(
849 def assert_pr_file_changes(
849 pull_request, added=None, modified=None, removed=None):
850 pull_request, added=None, modified=None, removed=None):
850 pr_versions = PullRequestModel().get_versions(pull_request)
851 pr_versions = PullRequestModel().get_versions(pull_request)
851 # always use first version, ie original PR to calculate changes
852 # always use first version, ie original PR to calculate changes
852 pull_request_version = pr_versions[0]
853 pull_request_version = pr_versions[0]
853 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
854 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
854 pull_request, pull_request_version)
855 pull_request, pull_request_version)
855 file_changes = PullRequestModel()._calculate_file_changes(
856 file_changes = PullRequestModel()._calculate_file_changes(
856 old_diff_data, new_diff_data)
857 old_diff_data, new_diff_data)
857
858
858 assert added == file_changes.added, \
859 assert added == file_changes.added, \
859 'expected added:%s vs value:%s' % (added, file_changes.added)
860 'expected added:%s vs value:%s' % (added, file_changes.added)
860 assert modified == file_changes.modified, \
861 assert modified == file_changes.modified, \
861 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
862 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
862 assert removed == file_changes.removed, \
863 assert removed == file_changes.removed, \
863 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
864 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
864
865
865
866
866 def outdated_comments_patcher(use_outdated=True):
867 def outdated_comments_patcher(use_outdated=True):
867 return mock.patch.object(
868 return mock.patch.object(
868 CommentsModel, 'use_outdated_comments',
869 CommentsModel, 'use_outdated_comments',
869 return_value=use_outdated)
870 return_value=use_outdated)
@@ -1,1886 +1,1887 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import collections
21 import collections
22 import datetime
22 import datetime
23 import hashlib
23 import hashlib
24 import os
24 import os
25 import re
25 import re
26 import pprint
26 import pprint
27 import shutil
27 import shutil
28 import socket
28 import socket
29 import subprocess32
29 import subprocess32
30 import time
30 import time
31 import uuid
31 import uuid
32 import dateutil.tz
32 import dateutil.tz
33 import functools
33 import functools
34
34
35 import mock
35 import mock
36 import pyramid.testing
36 import pyramid.testing
37 import pytest
37 import pytest
38 import colander
38 import colander
39 import requests
39 import requests
40 import pyramid.paster
40 import pyramid.paster
41
41
42 import rhodecode
42 import rhodecode
43 from rhodecode.lib.utils2 import AttributeDict
43 from rhodecode.lib.utils2 import AttributeDict
44 from rhodecode.model.changeset_status import ChangesetStatusModel
44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 from rhodecode.model.comment import CommentsModel
45 from rhodecode.model.comment import CommentsModel
46 from rhodecode.model.db import (
46 from rhodecode.model.db import (
47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 from rhodecode.model.meta import Session
49 from rhodecode.model.meta import Session
50 from rhodecode.model.pull_request import PullRequestModel
50 from rhodecode.model.pull_request import PullRequestModel
51 from rhodecode.model.repo import RepoModel
51 from rhodecode.model.repo import RepoModel
52 from rhodecode.model.repo_group import RepoGroupModel
52 from rhodecode.model.repo_group import RepoGroupModel
53 from rhodecode.model.user import UserModel
53 from rhodecode.model.user import UserModel
54 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.settings import VcsSettingsModel
55 from rhodecode.model.user_group import UserGroupModel
55 from rhodecode.model.user_group import UserGroupModel
56 from rhodecode.model.integration import IntegrationModel
56 from rhodecode.model.integration import IntegrationModel
57 from rhodecode.integrations import integration_type_registry
57 from rhodecode.integrations import integration_type_registry
58 from rhodecode.integrations.types.base import IntegrationTypeBase
58 from rhodecode.integrations.types.base import IntegrationTypeBase
59 from rhodecode.lib.utils import repo2db_mapper
59 from rhodecode.lib.utils import repo2db_mapper
60 from rhodecode.lib.vcs import create_vcsserver_proxy
60 from rhodecode.lib.vcs import create_vcsserver_proxy
61 from rhodecode.lib.vcs.backends import get_backend
61 from rhodecode.lib.vcs.backends import get_backend
62 from rhodecode.lib.vcs.nodes import FileNode
62 from rhodecode.lib.vcs.nodes import FileNode
63 from rhodecode.tests import (
63 from rhodecode.tests import (
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
66 TEST_USER_REGULAR_PASS)
66 TEST_USER_REGULAR_PASS)
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
68 from rhodecode.tests.fixture import Fixture
68 from rhodecode.tests.fixture import Fixture
69 from rhodecode.config import utils as config_utils
69 from rhodecode.config import utils as config_utils
70
70
71 def _split_comma(value):
71 def _split_comma(value):
72 return value.split(',')
72 return value.split(',')
73
73
74
74
75 def pytest_addoption(parser):
75 def pytest_addoption(parser):
76 parser.addoption(
76 parser.addoption(
77 '--keep-tmp-path', action='store_true',
77 '--keep-tmp-path', action='store_true',
78 help="Keep the test temporary directories")
78 help="Keep the test temporary directories")
79 parser.addoption(
79 parser.addoption(
80 '--backends', action='store', type=_split_comma,
80 '--backends', action='store', type=_split_comma,
81 default=['git', 'hg', 'svn'],
81 default=['git', 'hg', 'svn'],
82 help="Select which backends to test for backend specific tests.")
82 help="Select which backends to test for backend specific tests.")
83 parser.addoption(
83 parser.addoption(
84 '--dbs', action='store', type=_split_comma,
84 '--dbs', action='store', type=_split_comma,
85 default=['sqlite'],
85 default=['sqlite'],
86 help="Select which database to test for database specific tests. "
86 help="Select which database to test for database specific tests. "
87 "Possible options are sqlite,postgres,mysql")
87 "Possible options are sqlite,postgres,mysql")
88 parser.addoption(
88 parser.addoption(
89 '--appenlight', '--ae', action='store_true',
89 '--appenlight', '--ae', action='store_true',
90 help="Track statistics in appenlight.")
90 help="Track statistics in appenlight.")
91 parser.addoption(
91 parser.addoption(
92 '--appenlight-api-key', '--ae-key',
92 '--appenlight-api-key', '--ae-key',
93 help="API key for Appenlight.")
93 help="API key for Appenlight.")
94 parser.addoption(
94 parser.addoption(
95 '--appenlight-url', '--ae-url',
95 '--appenlight-url', '--ae-url',
96 default="https://ae.rhodecode.com",
96 default="https://ae.rhodecode.com",
97 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
97 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
98 parser.addoption(
98 parser.addoption(
99 '--sqlite-connection-string', action='store',
99 '--sqlite-connection-string', action='store',
100 default='', help="Connection string for the dbs tests with SQLite")
100 default='', help="Connection string for the dbs tests with SQLite")
101 parser.addoption(
101 parser.addoption(
102 '--postgres-connection-string', action='store',
102 '--postgres-connection-string', action='store',
103 default='', help="Connection string for the dbs tests with Postgres")
103 default='', help="Connection string for the dbs tests with Postgres")
104 parser.addoption(
104 parser.addoption(
105 '--mysql-connection-string', action='store',
105 '--mysql-connection-string', action='store',
106 default='', help="Connection string for the dbs tests with MySQL")
106 default='', help="Connection string for the dbs tests with MySQL")
107 parser.addoption(
107 parser.addoption(
108 '--repeat', type=int, default=100,
108 '--repeat', type=int, default=100,
109 help="Number of repetitions in performance tests.")
109 help="Number of repetitions in performance tests.")
110
110
111
111
112 def pytest_configure(config):
112 def pytest_configure(config):
113 from rhodecode.config import patches
113 from rhodecode.config import patches
114
114
115
115
116 def pytest_collection_modifyitems(session, config, items):
116 def pytest_collection_modifyitems(session, config, items):
117 # nottest marked, compare nose, used for transition from nose to pytest
117 # nottest marked, compare nose, used for transition from nose to pytest
118 remaining = [
118 remaining = [
119 i for i in items if getattr(i.obj, '__test__', True)]
119 i for i in items if getattr(i.obj, '__test__', True)]
120 items[:] = remaining
120 items[:] = remaining
121
121
122
122
123 def pytest_generate_tests(metafunc):
123 def pytest_generate_tests(metafunc):
124 # Support test generation based on --backend parameter
124 # Support test generation based on --backend parameter
125 if 'backend_alias' in metafunc.fixturenames:
125 if 'backend_alias' in metafunc.fixturenames:
126 backends = get_backends_from_metafunc(metafunc)
126 backends = get_backends_from_metafunc(metafunc)
127 scope = None
127 scope = None
128 if not backends:
128 if not backends:
129 pytest.skip("Not enabled for any of selected backends")
129 pytest.skip("Not enabled for any of selected backends")
130 metafunc.parametrize('backend_alias', backends, scope=scope)
130 metafunc.parametrize('backend_alias', backends, scope=scope)
131 elif hasattr(metafunc.function, 'backends'):
131 elif hasattr(metafunc.function, 'backends'):
132 backends = get_backends_from_metafunc(metafunc)
132 backends = get_backends_from_metafunc(metafunc)
133 if not backends:
133 if not backends:
134 pytest.skip("Not enabled for any of selected backends")
134 pytest.skip("Not enabled for any of selected backends")
135
135
136
136
137 def get_backends_from_metafunc(metafunc):
137 def get_backends_from_metafunc(metafunc):
138 requested_backends = set(metafunc.config.getoption('--backends'))
138 requested_backends = set(metafunc.config.getoption('--backends'))
139 if hasattr(metafunc.function, 'backends'):
139 if hasattr(metafunc.function, 'backends'):
140 # Supported backends by this test function, created from
140 # Supported backends by this test function, created from
141 # pytest.mark.backends
141 # pytest.mark.backends
142 backends = metafunc.definition.get_closest_marker('backends').args
142 backends = metafunc.definition.get_closest_marker('backends').args
143 elif hasattr(metafunc.cls, 'backend_alias'):
143 elif hasattr(metafunc.cls, 'backend_alias'):
144 # Support class attribute "backend_alias", this is mainly
144 # Support class attribute "backend_alias", this is mainly
145 # for legacy reasons for tests not yet using pytest.mark.backends
145 # for legacy reasons for tests not yet using pytest.mark.backends
146 backends = [metafunc.cls.backend_alias]
146 backends = [metafunc.cls.backend_alias]
147 else:
147 else:
148 backends = metafunc.config.getoption('--backends')
148 backends = metafunc.config.getoption('--backends')
149 return requested_backends.intersection(backends)
149 return requested_backends.intersection(backends)
150
150
151
151
152 @pytest.fixture(scope='session', autouse=True)
152 @pytest.fixture(scope='session', autouse=True)
153 def activate_example_rcextensions(request):
153 def activate_example_rcextensions(request):
154 """
154 """
155 Patch in an example rcextensions module which verifies passed in kwargs.
155 Patch in an example rcextensions module which verifies passed in kwargs.
156 """
156 """
157 from rhodecode.tests.other import example_rcextensions
157 from rhodecode.config import rcextensions
158
158
159 old_extensions = rhodecode.EXTENSIONS
159 old_extensions = rhodecode.EXTENSIONS
160 rhodecode.EXTENSIONS = example_rcextensions
160 rhodecode.EXTENSIONS = rcextensions
161 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
161
162
162 @request.addfinalizer
163 @request.addfinalizer
163 def cleanup():
164 def cleanup():
164 rhodecode.EXTENSIONS = old_extensions
165 rhodecode.EXTENSIONS = old_extensions
165
166
166
167
167 @pytest.fixture
168 @pytest.fixture
168 def capture_rcextensions():
169 def capture_rcextensions():
169 """
170 """
170 Returns the recorded calls to entry points in rcextensions.
171 Returns the recorded calls to entry points in rcextensions.
171 """
172 """
172 calls = rhodecode.EXTENSIONS.calls
173 calls = rhodecode.EXTENSIONS.calls
173 calls.clear()
174 calls.clear()
174 # Note: At this moment, it is still the empty dict, but that will
175 # Note: At this moment, it is still the empty dict, but that will
175 # be filled during the test run and since it is a reference this
176 # be filled during the test run and since it is a reference this
176 # is enough to make it work.
177 # is enough to make it work.
177 return calls
178 return calls
178
179
179
180
180 @pytest.fixture(scope='session')
181 @pytest.fixture(scope='session')
181 def http_environ_session():
182 def http_environ_session():
182 """
183 """
183 Allow to use "http_environ" in session scope.
184 Allow to use "http_environ" in session scope.
184 """
185 """
185 return plain_http_environ()
186 return plain_http_environ()
186
187
187
188
188 def plain_http_host_stub():
189 def plain_http_host_stub():
189 """
190 """
190 Value of HTTP_HOST in the test run.
191 Value of HTTP_HOST in the test run.
191 """
192 """
192 return 'example.com:80'
193 return 'example.com:80'
193
194
194
195
195 @pytest.fixture
196 @pytest.fixture
196 def http_host_stub():
197 def http_host_stub():
197 """
198 """
198 Value of HTTP_HOST in the test run.
199 Value of HTTP_HOST in the test run.
199 """
200 """
200 return plain_http_host_stub()
201 return plain_http_host_stub()
201
202
202
203
203 def plain_http_host_only_stub():
204 def plain_http_host_only_stub():
204 """
205 """
205 Value of HTTP_HOST in the test run.
206 Value of HTTP_HOST in the test run.
206 """
207 """
207 return plain_http_host_stub().split(':')[0]
208 return plain_http_host_stub().split(':')[0]
208
209
209
210
210 @pytest.fixture
211 @pytest.fixture
211 def http_host_only_stub():
212 def http_host_only_stub():
212 """
213 """
213 Value of HTTP_HOST in the test run.
214 Value of HTTP_HOST in the test run.
214 """
215 """
215 return plain_http_host_only_stub()
216 return plain_http_host_only_stub()
216
217
217
218
218 def plain_http_environ():
219 def plain_http_environ():
219 """
220 """
220 HTTP extra environ keys.
221 HTTP extra environ keys.
221
222
222 User by the test application and as well for setting up the pylons
223 User by the test application and as well for setting up the pylons
223 environment. In the case of the fixture "app" it should be possible
224 environment. In the case of the fixture "app" it should be possible
224 to override this for a specific test case.
225 to override this for a specific test case.
225 """
226 """
226 return {
227 return {
227 'SERVER_NAME': plain_http_host_only_stub(),
228 'SERVER_NAME': plain_http_host_only_stub(),
228 'SERVER_PORT': plain_http_host_stub().split(':')[1],
229 'SERVER_PORT': plain_http_host_stub().split(':')[1],
229 'HTTP_HOST': plain_http_host_stub(),
230 'HTTP_HOST': plain_http_host_stub(),
230 'HTTP_USER_AGENT': 'rc-test-agent',
231 'HTTP_USER_AGENT': 'rc-test-agent',
231 'REQUEST_METHOD': 'GET'
232 'REQUEST_METHOD': 'GET'
232 }
233 }
233
234
234
235
235 @pytest.fixture
236 @pytest.fixture
236 def http_environ():
237 def http_environ():
237 """
238 """
238 HTTP extra environ keys.
239 HTTP extra environ keys.
239
240
240 User by the test application and as well for setting up the pylons
241 User by the test application and as well for setting up the pylons
241 environment. In the case of the fixture "app" it should be possible
242 environment. In the case of the fixture "app" it should be possible
242 to override this for a specific test case.
243 to override this for a specific test case.
243 """
244 """
244 return plain_http_environ()
245 return plain_http_environ()
245
246
246
247
247 @pytest.fixture(scope='session')
248 @pytest.fixture(scope='session')
248 def baseapp(ini_config, vcsserver, http_environ_session):
249 def baseapp(ini_config, vcsserver, http_environ_session):
249 from rhodecode.lib.pyramid_utils import get_app_config
250 from rhodecode.lib.pyramid_utils import get_app_config
250 from rhodecode.config.middleware import make_pyramid_app
251 from rhodecode.config.middleware import make_pyramid_app
251
252
252 print("Using the RhodeCode configuration:{}".format(ini_config))
253 print("Using the RhodeCode configuration:{}".format(ini_config))
253 pyramid.paster.setup_logging(ini_config)
254 pyramid.paster.setup_logging(ini_config)
254
255
255 settings = get_app_config(ini_config)
256 settings = get_app_config(ini_config)
256 app = make_pyramid_app({'__file__': ini_config}, **settings)
257 app = make_pyramid_app({'__file__': ini_config}, **settings)
257
258
258 return app
259 return app
259
260
260
261
261 @pytest.fixture(scope='function')
262 @pytest.fixture(scope='function')
262 def app(request, config_stub, baseapp, http_environ):
263 def app(request, config_stub, baseapp, http_environ):
263 app = CustomTestApp(
264 app = CustomTestApp(
264 baseapp,
265 baseapp,
265 extra_environ=http_environ)
266 extra_environ=http_environ)
266 if request.cls:
267 if request.cls:
267 request.cls.app = app
268 request.cls.app = app
268 return app
269 return app
269
270
270
271
271 @pytest.fixture(scope='session')
272 @pytest.fixture(scope='session')
272 def app_settings(baseapp, ini_config):
273 def app_settings(baseapp, ini_config):
273 """
274 """
274 Settings dictionary used to create the app.
275 Settings dictionary used to create the app.
275
276
276 Parses the ini file and passes the result through the sanitize and apply
277 Parses the ini file and passes the result through the sanitize and apply
277 defaults mechanism in `rhodecode.config.middleware`.
278 defaults mechanism in `rhodecode.config.middleware`.
278 """
279 """
279 return baseapp.config.get_settings()
280 return baseapp.config.get_settings()
280
281
281
282
282 @pytest.fixture(scope='session')
283 @pytest.fixture(scope='session')
283 def db_connection(ini_settings):
284 def db_connection(ini_settings):
284 # Initialize the database connection.
285 # Initialize the database connection.
285 config_utils.initialize_database(ini_settings)
286 config_utils.initialize_database(ini_settings)
286
287
287
288
288 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
289 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
289
290
290
291
291 def _autologin_user(app, *args):
292 def _autologin_user(app, *args):
292 session = login_user_session(app, *args)
293 session = login_user_session(app, *args)
293 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
294 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
294 return LoginData(csrf_token, session['rhodecode_user'])
295 return LoginData(csrf_token, session['rhodecode_user'])
295
296
296
297
297 @pytest.fixture
298 @pytest.fixture
298 def autologin_user(app):
299 def autologin_user(app):
299 """
300 """
300 Utility fixture which makes sure that the admin user is logged in
301 Utility fixture which makes sure that the admin user is logged in
301 """
302 """
302 return _autologin_user(app)
303 return _autologin_user(app)
303
304
304
305
305 @pytest.fixture
306 @pytest.fixture
306 def autologin_regular_user(app):
307 def autologin_regular_user(app):
307 """
308 """
308 Utility fixture which makes sure that the regular user is logged in
309 Utility fixture which makes sure that the regular user is logged in
309 """
310 """
310 return _autologin_user(
311 return _autologin_user(
311 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
312 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
312
313
313
314
314 @pytest.fixture(scope='function')
315 @pytest.fixture(scope='function')
315 def csrf_token(request, autologin_user):
316 def csrf_token(request, autologin_user):
316 return autologin_user.csrf_token
317 return autologin_user.csrf_token
317
318
318
319
319 @pytest.fixture(scope='function')
320 @pytest.fixture(scope='function')
320 def xhr_header(request):
321 def xhr_header(request):
321 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
322 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
322
323
323
324
@pytest.fixture
def real_crypto_backend(monkeypatch):
    """
    Switch the production crypto backend on for this test.

    During the test run the crypto backend is replaced with a faster
    implementation based on the MD5 algorithm.
    """
    # Flipping `rhodecode.is_test` off makes the application pick the real
    # (production) password hashing backend; monkeypatch restores it after
    # the test.
    monkeypatch.setattr(rhodecode, 'is_test', False)
333
334
334
335
@pytest.fixture(scope='class')
def index_location(request, baseapp):
    """
    Expose the configured full-text search index location.

    When requested from a test class, the location is also stored on the
    class as ``index_location``.
    """
    location = baseapp.config.get_settings()['search.location']
    if request.cls:
        request.cls.index_location = location
    return location
341
342
342
343
@pytest.fixture(scope='session', autouse=True)
def tests_tmp_path(request):
    """
    Ensure the session-wide temporary directory exists and return its path.

    Unless ``--keep-tmp-path`` was given on the command line, the directory
    is removed again when the test session finishes.
    """
    if not os.path.exists(TESTS_TMP_PATH):
        os.makedirs(TESTS_TMP_PATH)

    keep_tmp = request.config.getoption('--keep-tmp-path')
    if not keep_tmp:
        def remove_tmp_path():
            shutil.rmtree(TESTS_TMP_PATH)

        request.addfinalizer(remove_tmp_path)

    return TESTS_TMP_PATH
357
358
358
359
@pytest.fixture
def test_repo_group(request):
    """
    Create a temporary repository group and destroy it automatically
    once the requesting test is done.
    """
    fixture = Fixture()
    # Timestamp-derived id keeps concurrent/repeated runs from colliding.
    group_id = 'test_repo_group_%s' % str(time.time()).replace('.', '')
    repo_group = fixture.create_repo_group(group_id)

    def _cleanup():
        fixture.destroy_repo_group(group_id)

    request.addfinalizer(_cleanup)
    return repo_group
374
375
375
376
@pytest.fixture
def test_user_group(request):
    """
    Create a temporary user group and destroy it automatically once the
    requesting test is done.
    """
    fixture = Fixture()
    # Timestamp-derived id keeps concurrent/repeated runs from colliding.
    group_id = 'test_user_group_%s' % str(time.time()).replace('.', '')
    user_group = fixture.create_user_group(group_id)

    def _cleanup():
        # NOTE: destruction takes the group object (not the id), matching
        # the original behavior.
        fixture.destroy_user_group(user_group)

    request.addfinalizer(_cleanup)
    return user_group
391
392
392
393
@pytest.fixture(scope='session')
def test_repo(request):
    """
    Session-wide container of read-only test repositories.

    Repositories created through the container are destroyed at the end
    of the test session.
    """
    container = TestRepoContainer()
    request.addfinalizer(container._cleanup)
    return container
398
399
399
400
class TestRepoContainer(object):
    """
    Container for test repositories which are used read only.

    Repositories will be created on demand and re-used during the lifetime
    of this object.

    Usage to get the svn test repository "minimal"::

        test_repo = TestContainer()
        repo = test_repo('minimal', 'svn')

    """

    # Maps a backend alias to the helper that extracts the matching
    # repository dump into a fresh on-disk repository.
    dump_extractors = {
        'git': utils.extract_git_repo_from_dump,
        'hg': utils.extract_hg_repo_from_dump,
        'svn': utils.extract_svn_repo_from_dump,
    }

    def __init__(self):
        # Names of repositories to destroy in `_cleanup` (creation order).
        self._cleanup_repos = []
        self._fixture = Fixture()
        # Cache: (dump_name, backend_alias) -> repo_id of the created repo.
        self._repos = {}

    def __call__(self, dump_name, backend_alias, config=None):
        """
        Return the repository for ``(dump_name, backend_alias)``, creating
        it on first use and reusing the cached id afterwards.
        """
        key = (dump_name, backend_alias)
        if key not in self._repos:
            repo = self._create_repo(dump_name, backend_alias, config)
            self._repos[key] = repo.repo_id
        return Repository.get(self._repos[key])

    def _create_repo(self, dump_name, backend_alias, config):
        # Extract the dump into a working repository and register it in
        # the database so model-level code can see it.
        repo_name = '%s-%s' % (backend_alias, dump_name)
        backend_class = get_backend(backend_alias)
        dump_extractor = self.dump_extractors[backend_alias]
        repo_path = dump_extractor(dump_name, repo_name)

        vcs_repo = backend_class(repo_path, config=config)
        repo2db_mapper({repo_name: vcs_repo})

        repo = RepoModel().get_by_repo_name(repo_name)
        self._cleanup_repos.append(repo_name)
        return repo

    def _cleanup(self):
        # Destroy repositories in reverse creation order.
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)
448
449
449
450
def backend_base(request, backend_alias, baseapp, test_repo):
    """
    Build a `Backend` helper for `backend_alias`.

    Skips the requesting test when the backend is not among the ones
    selected via ``--backends``, and honors the ``xfail_backends`` /
    ``skip_backends`` markers.
    """
    selected_backends = request.config.getoption('--backends')
    if backend_alias not in selected_backends:
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    backend = Backend(
        alias=backend_alias,
        repo_name='vcs_test_%s' % (backend_alias, ),
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend
465
466
466
467
@pytest.fixture
def backend(request, backend_alias, baseapp, test_repo):
    """
    Parametrized fixture giving one `Backend` helper per backend type.

    Respects the ``--backends`` option to focus the test run on specific
    backend implementations, and supports `pytest.mark.xfail_backends`
    to mark tests as failing for specific backends — a utility for the
    incremental development of a new backend implementation.
    """
    return backend_base(request, backend_alias, baseapp, test_repo)
480
481
481
482
@pytest.fixture
def backend_git(request, baseapp, test_repo):
    """Git-only variant of the `backend` fixture."""
    return backend_base(request, 'git', baseapp, test_repo)
485
486
486
487
@pytest.fixture
def backend_hg(request, baseapp, test_repo):
    """Mercurial-only variant of the `backend` fixture."""
    return backend_base(request, 'hg', baseapp, test_repo)
490
491
491
492
@pytest.fixture
def backend_svn(request, baseapp, test_repo):
    """Subversion-only variant of the `backend` fixture."""
    return backend_base(request, 'svn', baseapp, test_repo)
495
496
496
497
@pytest.fixture
def backend_random(backend_git):
    """
    Use this to express that your tests need "a backend.

    A few of our tests need a backend, so that we can run the code. This
    fixture is intended to be used for such cases. It will pick one of the
    backends and run the tests.

    The fixture `backend` would run the test multiple times for each
    available backend which is a pure waste of time if the test is
    independent of the backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return backend_git
512
513
513
514
@pytest.fixture
def backend_stub(backend_git):
    """
    Use this to express that your tests need a backend stub

    TODO: mikhail: Implement a real stub logic instead of returning
    a git backend
    """
    return backend_git
523
524
524
525
@pytest.fixture
def repo_stub(backend_stub):
    """
    Use this to express that your tests need a repository stub
    """
    # A fresh, empty repository created through the (currently git-backed)
    # backend stub; cleaned up by the stub's finalizer.
    return backend_stub.create_repo()
531
532
532
533
class Backend(object):
    """
    Represents the test configuration for one supported backend

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    # Characters not allowed in repository names; runs of them are
    # replaced with "_" when deriving repo names from test ids.
    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
    # NOTE(review): class-level mutable defaults. `_add_commits_to_repo`
    # rebinds `_commit_ids` on the instance, but until then instances share
    # these class attributes.
    _master_repo = None
    _commit_ids = {}

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        self.alias = alias
        self.repo_name = repo_name
        # Names of repositories created by this helper, destroyed in
        # `cleanup` in reverse order.
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        # Delegate read-only repository access to the shared container.
        return self._test_repo_container(key, self.alias)

    def create_test_repo(self, key, config=None):
        # Like `__getitem__` but allows passing a custom vcs config.
        return self._test_repo_container(key, self.alias, config)

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository
        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        # Default branch name as declared by the vcs backend class.
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name in case the backend does have a
        default branch. In the other cases it will point to a valid head
        which can serve as the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        head_id = (
            vcsrepo.DEFAULT_BRANCH_NAME or
            vcsrepo.commit_ids[-1])
        return head_id

    @property
    def commit_ids(self):
        """
        Returns the list of commits for the last created repository
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This allows to easily create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        return self._commit_ids

    def create_repo(
            self, commits=None, number_of_commits=0, heads=None,
            name_suffix=u'', bare=False, **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
            Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
            commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of of commit
            names which shall be pulled in from the master repository.
        :param name_suffix: adds special suffix to generated repo name
        :param bare: set a repo as bare (no checkout)
        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(
            self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        # Fall back to synthetic commit messages when no explicit commits
        # were requested.
        commits = commits or [
            {'message': 'Commit %s of %s' % (x, self.repo_name)}
            for x in range(number_of_commits)]
        self._add_commits_to_repo(repo.scm_instance(), commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads):
        """
        Make sure that repo contains all commits mentioned in `heads`
        """
        vcsmaster = self._master_repo.scm_instance()
        vcsrepo = repo.scm_instance()
        # Disable hooks so the pull does not trigger side effects.
        vcsrepo.config.clear_section('hooks')
        commit_ids = [self._commit_ids[h] for h in heads]
        vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)

    def create_fork(self):
        # Fork the "current" repository and make the fork current.
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return repo

    def new_repo_name(self, suffix=u''):
        # Reserve a fresh repository name and schedule it for cleanup.
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        # Derive a unique repo name from the sanitized test name plus the
        # count of repositories created so far.
        return u"%s_%s" % (
            self.invalid_repo_name.sub(u'_', self._test_name),
            len(self._cleanup_repos))

    def ensure_file(self, filename, content='Test content\n'):
        # Guard: only write into repositories this helper created itself.
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {'added': [
                FileNode(filename, content=content),
            ]},
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        # Persist the `enable_downloads` flag on the current repository.
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        # Destroy repositories in reverse creation order (forks first).
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        # Add `commits` to `repo` and remember the message -> raw_id map.
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == 'git':
            refs = {}
            for message in self._commit_ids:
                # TODO: mikhail: do more special chars replacements
                ref_name = 'refs/test-refs/{}'.format(
                    message.replace(' ', ''))
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        # Write each ref name -> commit id pair into the repository.
        for ref_name in refs:
            repo.set_refs(ref_name, refs[ref_name])
704
705
705
706
def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Build a `VcsBackend` helper for `backend_alias`.

    Skips the requesting test when the backend is not among the ones
    selected via ``--backends``, and honors the ``xfail_backends`` /
    ``skip_backends`` markers.
    """
    selected_backends = request.config.getoption('--backends')
    if backend_alias not in selected_backends:
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    backend = VcsBackend(
        alias=backend_alias,
        repo_path=os.path.join(tests_tmp_path, repo_name),
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend
722
723
723
724
@pytest.fixture
def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Parametrized fixture giving one vcs-level backend per backend type.

    Counterpart of the `backend` fixture on the plain vcs level, so it
    does not provide model instances. Parameters are generated
    dynamically, see :func:`pytest_generate_tests` for how this works.
    """
    return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
736
737
737
738
@pytest.fixture
def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
    """Git-only variant of the `vcsbackend` fixture."""
    return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
741
742
742
743
@pytest.fixture
def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
    """Mercurial-only variant of the `vcsbackend` fixture."""
    return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
746
747
747
748
@pytest.fixture
def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
    """Subversion-only variant of the `vcsbackend` fixture."""
    return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
751
752
752
753
@pytest.fixture
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git
761
762
762
763
class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.
    """

    # Characters not allowed in repository names; runs of them are
    # replaced with "_" when deriving repo names from test ids.
    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        # Repository objects created by this helper, removed from disk in
        # `cleanup`.
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        # Return the vcs-level repository from the shared container.
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo of the last
        repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
                    bare=False):
        # Create a fresh vcs repository (optionally cloned from
        # `_clone_repo`), populate it with commits, and register it for
        # cleanup.
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
        self._cleanup_repos.append(repo)

        # Fall back to synthetic commit messages when no explicit commits
        # were requested.  NOTE: `xrange` — this module is Python 2 code.
        commits = commits or [
            {'message': 'Commit %s of %s' % (x, repo_name)}
            for x in xrange(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        # Convenience wrapper: create a new repository cloned from `repo`.
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        # Remove every repository created by this helper from disk.
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        # Reserve a fresh directory and make it the "current" repo path.
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        # Derive a unique repo name from the sanitized test name plus the
        # count of repositories created so far.
        return "%s_%s" % (
            self.invalid_repo_name.sub('_', self._test_name),
            len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        # Commit a single file to `repo` via its in-memory commit API.
        imc = repo.in_memory_commit
        imc.add(FileNode(filename, content=content))
        imc.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic')

    def ensure_file(self, filename, content='Test content\n'):
        # Guard: only write into repositories this helper created itself.
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)
840
841
841
842
def _add_commits_to_repo(vcs_repo, commits):
    """Create the given *commits* in *vcs_repo* via its in-memory commit API.

    Each entry of *commits* is a dict that may carry ``message``, ``author``,
    ``date``, ``branch``, ``parents`` (messages of earlier commits in this
    same list) and node lists under ``added``/``changed``/``removed``.

    Returns a dict mapping commit message -> raw commit id (empty when
    *commits* is falsy).
    """
    commit_ids = {}
    if not commits:
        return commit_ids

    imc = vcs_repo.in_memory_commit
    commit = None

    for idx, commit in enumerate(commits):
        message = unicode(commit.get('message', 'Commit %s' % idx))

        for node in commit.get('added', []):
            imc.add(FileNode(node.path, content=node.content))
        for node in commit.get('changed', []):
            imc.change(FileNode(node.path, content=node.content))
        for node in commit.get('removed', []):
            imc.remove(FileNode(node.path))

        # parents are named by the message of an earlier commit in this
        # list and resolved to commit objects through commit_ids
        parents = [
            vcs_repo.get_commit(commit_id=commit_ids[p])
            for p in commit.get('parents', [])]

        # if the spec touches no files, add a synthetic one — presumably
        # so every backend produces a non-empty commit (TODO confirm)
        operations = ('added', 'changed', 'removed')
        if not any((commit.get(o) for o in operations)):
            imc.add(FileNode('file_%s' % idx, content=message))

        # NOTE: rebinds the loop variable `commit` from the input dict to
        # the created commit object; only the new binding is used below
        commit = imc.commit(
            message=message,
            author=unicode(commit.get('author', 'Automatic')),
            date=commit.get('date'),
            branch=commit.get('branch'),
            parents=parents)

        commit_ids[commit.message] = commit.raw_id

    return commit_ids
878
879
879
880
@pytest.fixture
def reposerver(request):
    """
    Serves a backend repository for the duration of a test, shutting the
    server down again during fixture finalization.
    """
    server = RepoServer()
    request.addfinalizer(server.cleanup)
    return server
889
890
890
891
class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    # public URL of the served repository; set by serve()
    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        """Spawn ``svnserve`` for *vcsrepo*; raises TypeError for any
        backend other than Subversion."""
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        server_proc = subprocess32.Popen(
            ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
             '--root', vcsrepo.path])
        self._cleanup_servers.append(server_proc)
        self.url = 'svn://localhost'

    def cleanup(self):
        """Terminate every server process started via serve()."""
        for server_proc in self._cleanup_servers:
            server_proc.terminate()
916
917
917
918
@pytest.fixture
def pr_util(backend, request, config_stub):
    """
    Utility for tests of models and for functional tests around pull requests.

    It gives an instance of :class:`PRTestUtility` which provides various
    utility methods around one pull request.

    This fixture uses `backend` and inherits its parameterization.
    """
    utility = PRTestUtility(backend)
    request.addfinalizer(utility.cleanup)
    return utility
933
934
934
935
class PRTestUtility(object):
    """Helper around exactly one pull request between two freshly created
    repositories; see the `pr_util` fixture for the usual entry point."""

    # created PR and its id; populated by create_pull_request()
    pull_request = None
    pull_request_id = None
    # patcher/mock pair forcing the "merge enabled" vcs setting
    mergeable_patcher = None
    mergeable_mock = None
    # patcher silencing notification creation when notifications are disabled
    notification_patcher = None

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None,
            title=u"Test", description=u"Description"):
        """Create (once) and return the pull request managed by this utility.

        On repeated calls the existing pull request is reused; only the
        mergeable/notification mocks, approval votes and the session commit
        are applied again.
        """
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                # default three-commit history: target at c1, source at c2,
                # so the PR carries exactly commit c2
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            # a freshly created PR must not have versions yet
            assert model.get_versions(self.pull_request) == []

        self.pull_request_id = self.pull_request.pull_request_id

        if approved:
            self.approve()

        Session().add(self.pull_request)
        Session().commit()

        return self.pull_request

    def approve(self):
        """Cast an 'approved' status vote for every reviewer of the PR."""
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        """Close the managed pull request as its author."""
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        """Build a 'branch:<name>:<commit_id>' ref string for the commit
        identified by *commit_message*."""
        reference = '%s:%s:%s' % (
            'branch',
            self.backend.default_branch_name,
            self.commit_ids[commit_message])
        return reference

    def _get_reviewers(self):
        # default reviewer tuples: (username, reasons, mandatory, rules)
        # — TODO confirm tuple field meaning against PullRequestModel.create
        return [
            (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
            (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
        ]

    def update_source_repository(self, head=None):
        """Pull *head* (default 'c3') from the master repo into the PR's
        source repository."""
        heads = [head or 'c3']
        self.backend.pull_heads(self.source_repository, heads=heads)

    def add_one_commit(self, head=None):
        """Advance the source repo by one head and update the PR; returns
        the id of the single commit that was added."""
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        """Strip the tip commit from the source repo and update the PR;
        returns the removed commit id. Requires the PR to hold 2 commits."""
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        """Create a general PR comment; optionally link existing comments
        to the PR version *linked_to*."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        """Create an inline PR comment on *file_path* at *line_no*;
        optionally link existing comments to PR version *linked_to*."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        """Create the PR (if needed) and snapshot it into a new version."""
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(
            pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        """Set *status* on the PR for each of the given reviewers."""
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        """Patch the vcs settings lookup so 'pr merge enabled' reports
        *value*; the patch is installed lazily on first use."""
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        """Delete the PR (if still present) and stop all active patchers."""
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request, pull_request.author)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()
1117
1118
1118
1119
@pytest.fixture
def user_admin(baseapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1126
1127
1127
1128
@pytest.fixture
def user_regular(baseapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1135
1136
1136
1137
@pytest.fixture
def user_util(request, db_connection):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    helper = UserUtility(test_name=request.node.name)
    request.addfinalizer(helper.cleanup)
    return helper
1145
1146
1146
1147
1147 # TODO: johbo: Split this up into utilities per domain or something similar
1148 # TODO: johbo: Split this up into utilities per domain or something similar
1148 class UserUtility(object):
1149 class UserUtility(object):
1149
1150
    def __init__(self, test_name="test"):
        # name prefix used for all generated entity names
        self._test_name = self._sanitize_name(test_name)
        self.fixture = Fixture()
        # ids of entities created through this utility, destroyed in cleanup()
        self.repo_group_ids = []
        self.repos_ids = []
        self.user_ids = []
        self.user_group_ids = []
        # (target_id, subject_id) pairs of granted permissions, revoked in
        # _cleanup_permissions()
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        # (user_name, permission_name) pairs of global permissions
        self.user_permissions = []
1164
1165
1165 def _sanitize_name(self, name):
1166 def _sanitize_name(self, name):
1166 for char in ['[', ']']:
1167 for char in ['[', ']']:
1167 name = name.replace(char, '_')
1168 name = name.replace(char, '_')
1168 return name
1169 return name
1169
1170
1170 def create_repo_group(
1171 def create_repo_group(
1171 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1172 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1172 group_name = "{prefix}_repogroup_{count}".format(
1173 group_name = "{prefix}_repogroup_{count}".format(
1173 prefix=self._test_name,
1174 prefix=self._test_name,
1174 count=len(self.repo_group_ids))
1175 count=len(self.repo_group_ids))
1175 repo_group = self.fixture.create_repo_group(
1176 repo_group = self.fixture.create_repo_group(
1176 group_name, cur_user=owner)
1177 group_name, cur_user=owner)
1177 if auto_cleanup:
1178 if auto_cleanup:
1178 self.repo_group_ids.append(repo_group.group_id)
1179 self.repo_group_ids.append(repo_group.group_id)
1179 return repo_group
1180 return repo_group
1180
1181
1181 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1182 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1182 auto_cleanup=True, repo_type='hg', bare=False):
1183 auto_cleanup=True, repo_type='hg', bare=False):
1183 repo_name = "{prefix}_repository_{count}".format(
1184 repo_name = "{prefix}_repository_{count}".format(
1184 prefix=self._test_name,
1185 prefix=self._test_name,
1185 count=len(self.repos_ids))
1186 count=len(self.repos_ids))
1186
1187
1187 repository = self.fixture.create_repo(
1188 repository = self.fixture.create_repo(
1188 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1189 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1189 if auto_cleanup:
1190 if auto_cleanup:
1190 self.repos_ids.append(repository.repo_id)
1191 self.repos_ids.append(repository.repo_id)
1191 return repository
1192 return repository
1192
1193
1193 def create_user(self, auto_cleanup=True, **kwargs):
1194 def create_user(self, auto_cleanup=True, **kwargs):
1194 user_name = "{prefix}_user_{count}".format(
1195 user_name = "{prefix}_user_{count}".format(
1195 prefix=self._test_name,
1196 prefix=self._test_name,
1196 count=len(self.user_ids))
1197 count=len(self.user_ids))
1197 user = self.fixture.create_user(user_name, **kwargs)
1198 user = self.fixture.create_user(user_name, **kwargs)
1198 if auto_cleanup:
1199 if auto_cleanup:
1199 self.user_ids.append(user.user_id)
1200 self.user_ids.append(user.user_id)
1200 return user
1201 return user
1201
1202
1202 def create_additional_user_email(self, user, email):
1203 def create_additional_user_email(self, user, email):
1203 uem = self.fixture.create_additional_user_email(user=user, email=email)
1204 uem = self.fixture.create_additional_user_email(user=user, email=email)
1204 return uem
1205 return uem
1205
1206
1206 def create_user_with_group(self):
1207 def create_user_with_group(self):
1207 user = self.create_user()
1208 user = self.create_user()
1208 user_group = self.create_user_group(members=[user])
1209 user_group = self.create_user_group(members=[user])
1209 return user, user_group
1210 return user, user_group
1210
1211
1211 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1212 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1212 auto_cleanup=True, **kwargs):
1213 auto_cleanup=True, **kwargs):
1213 group_name = "{prefix}_usergroup_{count}".format(
1214 group_name = "{prefix}_usergroup_{count}".format(
1214 prefix=self._test_name,
1215 prefix=self._test_name,
1215 count=len(self.user_group_ids))
1216 count=len(self.user_group_ids))
1216 user_group = self.fixture.create_user_group(
1217 user_group = self.fixture.create_user_group(
1217 group_name, cur_user=owner, **kwargs)
1218 group_name, cur_user=owner, **kwargs)
1218
1219
1219 if auto_cleanup:
1220 if auto_cleanup:
1220 self.user_group_ids.append(user_group.users_group_id)
1221 self.user_group_ids.append(user_group.users_group_id)
1221 if members:
1222 if members:
1222 for user in members:
1223 for user in members:
1223 UserGroupModel().add_user_to_group(user_group, user)
1224 UserGroupModel().add_user_to_group(user_group, user)
1224 return user_group
1225 return user_group
1225
1226
1226 def grant_user_permission(self, user_name, permission_name):
1227 def grant_user_permission(self, user_name, permission_name):
1227 self._inherit_default_user_permissions(user_name, False)
1228 self._inherit_default_user_permissions(user_name, False)
1228 self.user_permissions.append((user_name, permission_name))
1229 self.user_permissions.append((user_name, permission_name))
1229
1230
1230 def grant_user_permission_to_repo_group(
1231 def grant_user_permission_to_repo_group(
1231 self, repo_group, user, permission_name):
1232 self, repo_group, user, permission_name):
1232 permission = RepoGroupModel().grant_user_permission(
1233 permission = RepoGroupModel().grant_user_permission(
1233 repo_group, user, permission_name)
1234 repo_group, user, permission_name)
1234 self.user_repo_group_permission_ids.append(
1235 self.user_repo_group_permission_ids.append(
1235 (repo_group.group_id, user.user_id))
1236 (repo_group.group_id, user.user_id))
1236 return permission
1237 return permission
1237
1238
1238 def grant_user_group_permission_to_repo_group(
1239 def grant_user_group_permission_to_repo_group(
1239 self, repo_group, user_group, permission_name):
1240 self, repo_group, user_group, permission_name):
1240 permission = RepoGroupModel().grant_user_group_permission(
1241 permission = RepoGroupModel().grant_user_group_permission(
1241 repo_group, user_group, permission_name)
1242 repo_group, user_group, permission_name)
1242 self.user_group_repo_group_permission_ids.append(
1243 self.user_group_repo_group_permission_ids.append(
1243 (repo_group.group_id, user_group.users_group_id))
1244 (repo_group.group_id, user_group.users_group_id))
1244 return permission
1245 return permission
1245
1246
1246 def grant_user_permission_to_repo(
1247 def grant_user_permission_to_repo(
1247 self, repo, user, permission_name):
1248 self, repo, user, permission_name):
1248 permission = RepoModel().grant_user_permission(
1249 permission = RepoModel().grant_user_permission(
1249 repo, user, permission_name)
1250 repo, user, permission_name)
1250 self.user_repo_permission_ids.append(
1251 self.user_repo_permission_ids.append(
1251 (repo.repo_id, user.user_id))
1252 (repo.repo_id, user.user_id))
1252 return permission
1253 return permission
1253
1254
1254 def grant_user_group_permission_to_repo(
1255 def grant_user_group_permission_to_repo(
1255 self, repo, user_group, permission_name):
1256 self, repo, user_group, permission_name):
1256 permission = RepoModel().grant_user_group_permission(
1257 permission = RepoModel().grant_user_group_permission(
1257 repo, user_group, permission_name)
1258 repo, user_group, permission_name)
1258 self.user_group_repo_permission_ids.append(
1259 self.user_group_repo_permission_ids.append(
1259 (repo.repo_id, user_group.users_group_id))
1260 (repo.repo_id, user_group.users_group_id))
1260 return permission
1261 return permission
1261
1262
1262 def grant_user_permission_to_user_group(
1263 def grant_user_permission_to_user_group(
1263 self, target_user_group, user, permission_name):
1264 self, target_user_group, user, permission_name):
1264 permission = UserGroupModel().grant_user_permission(
1265 permission = UserGroupModel().grant_user_permission(
1265 target_user_group, user, permission_name)
1266 target_user_group, user, permission_name)
1266 self.user_user_group_permission_ids.append(
1267 self.user_user_group_permission_ids.append(
1267 (target_user_group.users_group_id, user.user_id))
1268 (target_user_group.users_group_id, user.user_id))
1268 return permission
1269 return permission
1269
1270
1270 def grant_user_group_permission_to_user_group(
1271 def grant_user_group_permission_to_user_group(
1271 self, target_user_group, user_group, permission_name):
1272 self, target_user_group, user_group, permission_name):
1272 permission = UserGroupModel().grant_user_group_permission(
1273 permission = UserGroupModel().grant_user_group_permission(
1273 target_user_group, user_group, permission_name)
1274 target_user_group, user_group, permission_name)
1274 self.user_group_user_group_permission_ids.append(
1275 self.user_group_user_group_permission_ids.append(
1275 (target_user_group.users_group_id, user_group.users_group_id))
1276 (target_user_group.users_group_id, user_group.users_group_id))
1276 return permission
1277 return permission
1277
1278
1278 def revoke_user_permission(self, user_name, permission_name):
1279 def revoke_user_permission(self, user_name, permission_name):
1279 self._inherit_default_user_permissions(user_name, True)
1280 self._inherit_default_user_permissions(user_name, True)
1280 UserModel().revoke_perm(user_name, permission_name)
1281 UserModel().revoke_perm(user_name, permission_name)
1281
1282
1282 def _inherit_default_user_permissions(self, user_name, value):
1283 def _inherit_default_user_permissions(self, user_name, value):
1283 user = UserModel().get_by_username(user_name)
1284 user = UserModel().get_by_username(user_name)
1284 user.inherit_default_permissions = value
1285 user.inherit_default_permissions = value
1285 Session().add(user)
1286 Session().add(user)
1286 Session().commit()
1287 Session().commit()
1287
1288
1288 def cleanup(self):
1289 def cleanup(self):
1289 self._cleanup_permissions()
1290 self._cleanup_permissions()
1290 self._cleanup_repos()
1291 self._cleanup_repos()
1291 self._cleanup_repo_groups()
1292 self._cleanup_repo_groups()
1292 self._cleanup_user_groups()
1293 self._cleanup_user_groups()
1293 self._cleanup_users()
1294 self._cleanup_users()
1294
1295
1295 def _cleanup_permissions(self):
1296 def _cleanup_permissions(self):
1296 if self.user_permissions:
1297 if self.user_permissions:
1297 for user_name, permission_name in self.user_permissions:
1298 for user_name, permission_name in self.user_permissions:
1298 self.revoke_user_permission(user_name, permission_name)
1299 self.revoke_user_permission(user_name, permission_name)
1299
1300
1300 for permission in self.user_repo_permission_ids:
1301 for permission in self.user_repo_permission_ids:
1301 RepoModel().revoke_user_permission(*permission)
1302 RepoModel().revoke_user_permission(*permission)
1302
1303
1303 for permission in self.user_group_repo_permission_ids:
1304 for permission in self.user_group_repo_permission_ids:
1304 RepoModel().revoke_user_group_permission(*permission)
1305 RepoModel().revoke_user_group_permission(*permission)
1305
1306
1306 for permission in self.user_repo_group_permission_ids:
1307 for permission in self.user_repo_group_permission_ids:
1307 RepoGroupModel().revoke_user_permission(*permission)
1308 RepoGroupModel().revoke_user_permission(*permission)
1308
1309
1309 for permission in self.user_group_repo_group_permission_ids:
1310 for permission in self.user_group_repo_group_permission_ids:
1310 RepoGroupModel().revoke_user_group_permission(*permission)
1311 RepoGroupModel().revoke_user_group_permission(*permission)
1311
1312
1312 for permission in self.user_user_group_permission_ids:
1313 for permission in self.user_user_group_permission_ids:
1313 UserGroupModel().revoke_user_permission(*permission)
1314 UserGroupModel().revoke_user_permission(*permission)
1314
1315
1315 for permission in self.user_group_user_group_permission_ids:
1316 for permission in self.user_group_user_group_permission_ids:
1316 UserGroupModel().revoke_user_group_permission(*permission)
1317 UserGroupModel().revoke_user_group_permission(*permission)
1317
1318
1318 def _cleanup_repo_groups(self):
1319 def _cleanup_repo_groups(self):
1319 def _repo_group_compare(first_group_id, second_group_id):
1320 def _repo_group_compare(first_group_id, second_group_id):
1320 """
1321 """
1321 Gives higher priority to the groups with the most complex paths
1322 Gives higher priority to the groups with the most complex paths
1322 """
1323 """
1323 first_group = RepoGroup.get(first_group_id)
1324 first_group = RepoGroup.get(first_group_id)
1324 second_group = RepoGroup.get(second_group_id)
1325 second_group = RepoGroup.get(second_group_id)
1325 first_group_parts = (
1326 first_group_parts = (
1326 len(first_group.group_name.split('/')) if first_group else 0)
1327 len(first_group.group_name.split('/')) if first_group else 0)
1327 second_group_parts = (
1328 second_group_parts = (
1328 len(second_group.group_name.split('/')) if second_group else 0)
1329 len(second_group.group_name.split('/')) if second_group else 0)
1329 return cmp(second_group_parts, first_group_parts)
1330 return cmp(second_group_parts, first_group_parts)
1330
1331
1331 sorted_repo_group_ids = sorted(
1332 sorted_repo_group_ids = sorted(
1332 self.repo_group_ids, cmp=_repo_group_compare)
1333 self.repo_group_ids, cmp=_repo_group_compare)
1333 for repo_group_id in sorted_repo_group_ids:
1334 for repo_group_id in sorted_repo_group_ids:
1334 self.fixture.destroy_repo_group(repo_group_id)
1335 self.fixture.destroy_repo_group(repo_group_id)
1335
1336
1336 def _cleanup_repos(self):
1337 def _cleanup_repos(self):
1337 sorted_repos_ids = sorted(self.repos_ids)
1338 sorted_repos_ids = sorted(self.repos_ids)
1338 for repo_id in sorted_repos_ids:
1339 for repo_id in sorted_repos_ids:
1339 self.fixture.destroy_repo(repo_id)
1340 self.fixture.destroy_repo(repo_id)
1340
1341
1341 def _cleanup_user_groups(self):
1342 def _cleanup_user_groups(self):
1342 def _user_group_compare(first_group_id, second_group_id):
1343 def _user_group_compare(first_group_id, second_group_id):
1343 """
1344 """
1344 Gives higher priority to the groups with the most complex paths
1345 Gives higher priority to the groups with the most complex paths
1345 """
1346 """
1346 first_group = UserGroup.get(first_group_id)
1347 first_group = UserGroup.get(first_group_id)
1347 second_group = UserGroup.get(second_group_id)
1348 second_group = UserGroup.get(second_group_id)
1348 first_group_parts = (
1349 first_group_parts = (
1349 len(first_group.users_group_name.split('/'))
1350 len(first_group.users_group_name.split('/'))
1350 if first_group else 0)
1351 if first_group else 0)
1351 second_group_parts = (
1352 second_group_parts = (
1352 len(second_group.users_group_name.split('/'))
1353 len(second_group.users_group_name.split('/'))
1353 if second_group else 0)
1354 if second_group else 0)
1354 return cmp(second_group_parts, first_group_parts)
1355 return cmp(second_group_parts, first_group_parts)
1355
1356
1356 sorted_user_group_ids = sorted(
1357 sorted_user_group_ids = sorted(
1357 self.user_group_ids, cmp=_user_group_compare)
1358 self.user_group_ids, cmp=_user_group_compare)
1358 for user_group_id in sorted_user_group_ids:
1359 for user_group_id in sorted_user_group_ids:
1359 self.fixture.destroy_user_group(user_group_id)
1360 self.fixture.destroy_user_group(user_group_id)
1360
1361
1361 def _cleanup_users(self):
1362 def _cleanup_users(self):
1362 for user_id in self.user_ids:
1363 for user_id in self.user_ids:
1363 self.fixture.destroy_user(user_id)
1364 self.fixture.destroy_user(user_id)
1364
1365
1365
1366
1366 # TODO: Think about moving this into a pytest-pyro package and make it a
1367 # TODO: Think about moving this into a pytest-pyro package and make it a
1367 # pytest plugin
1368 # pytest plugin
1368 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1369 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1369 def pytest_runtest_makereport(item, call):
1370 def pytest_runtest_makereport(item, call):
1370 """
1371 """
1371 Adding the remote traceback if the exception has this information.
1372 Adding the remote traceback if the exception has this information.
1372
1373
1373 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1374 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1374 to the exception instance.
1375 to the exception instance.
1375 """
1376 """
1376 outcome = yield
1377 outcome = yield
1377 report = outcome.get_result()
1378 report = outcome.get_result()
1378 if call.excinfo:
1379 if call.excinfo:
1379 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1380 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1380
1381
1381
1382
1382 def _add_vcsserver_remote_traceback(report, exc):
1383 def _add_vcsserver_remote_traceback(report, exc):
1383 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1384 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1384
1385
1385 if vcsserver_traceback:
1386 if vcsserver_traceback:
1386 section = 'VCSServer remote traceback ' + report.when
1387 section = 'VCSServer remote traceback ' + report.when
1387 report.sections.append((section, vcsserver_traceback))
1388 report.sections.append((section, vcsserver_traceback))
1388
1389
1389
1390
1390 @pytest.fixture(scope='session')
1391 @pytest.fixture(scope='session')
1391 def testrun():
1392 def testrun():
1392 return {
1393 return {
1393 'uuid': uuid.uuid4(),
1394 'uuid': uuid.uuid4(),
1394 'start': datetime.datetime.utcnow().isoformat(),
1395 'start': datetime.datetime.utcnow().isoformat(),
1395 'timestamp': int(time.time()),
1396 'timestamp': int(time.time()),
1396 }
1397 }
1397
1398
1398
1399
1399 @pytest.fixture(autouse=True)
1400 @pytest.fixture(autouse=True)
1400 def collect_appenlight_stats(request, testrun):
1401 def collect_appenlight_stats(request, testrun):
1401 """
1402 """
1402 This fixture reports memory consumtion of single tests.
1403 This fixture reports memory consumtion of single tests.
1403
1404
1404 It gathers data based on `psutil` and sends them to Appenlight. The option
1405 It gathers data based on `psutil` and sends them to Appenlight. The option
1405 ``--ae`` has te be used to enable this fixture and the API key for your
1406 ``--ae`` has te be used to enable this fixture and the API key for your
1406 application has to be provided in ``--ae-key``.
1407 application has to be provided in ``--ae-key``.
1407 """
1408 """
1408 try:
1409 try:
1409 # cygwin cannot have yet psutil support.
1410 # cygwin cannot have yet psutil support.
1410 import psutil
1411 import psutil
1411 except ImportError:
1412 except ImportError:
1412 return
1413 return
1413
1414
1414 if not request.config.getoption('--appenlight'):
1415 if not request.config.getoption('--appenlight'):
1415 return
1416 return
1416 else:
1417 else:
1417 # Only request the baseapp fixture if appenlight tracking is
1418 # Only request the baseapp fixture if appenlight tracking is
1418 # enabled. This will speed up a test run of unit tests by 2 to 3
1419 # enabled. This will speed up a test run of unit tests by 2 to 3
1419 # seconds if appenlight is not enabled.
1420 # seconds if appenlight is not enabled.
1420 baseapp = request.getfuncargvalue("baseapp")
1421 baseapp = request.getfuncargvalue("baseapp")
1421 url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
1422 url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
1422 client = AppenlightClient(
1423 client = AppenlightClient(
1423 url=url,
1424 url=url,
1424 api_key=request.config.getoption('--appenlight-api-key'),
1425 api_key=request.config.getoption('--appenlight-api-key'),
1425 namespace=request.node.nodeid,
1426 namespace=request.node.nodeid,
1426 request=str(testrun['uuid']),
1427 request=str(testrun['uuid']),
1427 testrun=testrun)
1428 testrun=testrun)
1428
1429
1429 client.collect({
1430 client.collect({
1430 'message': "Starting",
1431 'message': "Starting",
1431 })
1432 })
1432
1433
1433 server_and_port = baseapp.config.get_settings()['vcs.server']
1434 server_and_port = baseapp.config.get_settings()['vcs.server']
1434 protocol = baseapp.config.get_settings()['vcs.server.protocol']
1435 protocol = baseapp.config.get_settings()['vcs.server.protocol']
1435 server = create_vcsserver_proxy(server_and_port, protocol)
1436 server = create_vcsserver_proxy(server_and_port, protocol)
1436 with server:
1437 with server:
1437 vcs_pid = server.get_pid()
1438 vcs_pid = server.get_pid()
1438 server.run_gc()
1439 server.run_gc()
1439 vcs_process = psutil.Process(vcs_pid)
1440 vcs_process = psutil.Process(vcs_pid)
1440 mem = vcs_process.memory_info()
1441 mem = vcs_process.memory_info()
1441 client.tag_before('vcsserver.rss', mem.rss)
1442 client.tag_before('vcsserver.rss', mem.rss)
1442 client.tag_before('vcsserver.vms', mem.vms)
1443 client.tag_before('vcsserver.vms', mem.vms)
1443
1444
1444 test_process = psutil.Process()
1445 test_process = psutil.Process()
1445 mem = test_process.memory_info()
1446 mem = test_process.memory_info()
1446 client.tag_before('test.rss', mem.rss)
1447 client.tag_before('test.rss', mem.rss)
1447 client.tag_before('test.vms', mem.vms)
1448 client.tag_before('test.vms', mem.vms)
1448
1449
1449 client.tag_before('time', time.time())
1450 client.tag_before('time', time.time())
1450
1451
1451 @request.addfinalizer
1452 @request.addfinalizer
1452 def send_stats():
1453 def send_stats():
1453 client.tag_after('time', time.time())
1454 client.tag_after('time', time.time())
1454 with server:
1455 with server:
1455 gc_stats = server.run_gc()
1456 gc_stats = server.run_gc()
1456 for tag, value in gc_stats.items():
1457 for tag, value in gc_stats.items():
1457 client.tag_after(tag, value)
1458 client.tag_after(tag, value)
1458 mem = vcs_process.memory_info()
1459 mem = vcs_process.memory_info()
1459 client.tag_after('vcsserver.rss', mem.rss)
1460 client.tag_after('vcsserver.rss', mem.rss)
1460 client.tag_after('vcsserver.vms', mem.vms)
1461 client.tag_after('vcsserver.vms', mem.vms)
1461
1462
1462 mem = test_process.memory_info()
1463 mem = test_process.memory_info()
1463 client.tag_after('test.rss', mem.rss)
1464 client.tag_after('test.rss', mem.rss)
1464 client.tag_after('test.vms', mem.vms)
1465 client.tag_after('test.vms', mem.vms)
1465
1466
1466 client.collect({
1467 client.collect({
1467 'message': "Finished",
1468 'message': "Finished",
1468 })
1469 })
1469 client.send_stats()
1470 client.send_stats()
1470
1471
1471 return client
1472 return client
1472
1473
1473
1474
1474 class AppenlightClient():
1475 class AppenlightClient():
1475
1476
1476 url_template = '{url}?protocol_version=0.5'
1477 url_template = '{url}?protocol_version=0.5'
1477
1478
1478 def __init__(
1479 def __init__(
1479 self, url, api_key, add_server=True, add_timestamp=True,
1480 self, url, api_key, add_server=True, add_timestamp=True,
1480 namespace=None, request=None, testrun=None):
1481 namespace=None, request=None, testrun=None):
1481 self.url = self.url_template.format(url=url)
1482 self.url = self.url_template.format(url=url)
1482 self.api_key = api_key
1483 self.api_key = api_key
1483 self.add_server = add_server
1484 self.add_server = add_server
1484 self.add_timestamp = add_timestamp
1485 self.add_timestamp = add_timestamp
1485 self.namespace = namespace
1486 self.namespace = namespace
1486 self.request = request
1487 self.request = request
1487 self.server = socket.getfqdn(socket.gethostname())
1488 self.server = socket.getfqdn(socket.gethostname())
1488 self.tags_before = {}
1489 self.tags_before = {}
1489 self.tags_after = {}
1490 self.tags_after = {}
1490 self.stats = []
1491 self.stats = []
1491 self.testrun = testrun or {}
1492 self.testrun = testrun or {}
1492
1493
1493 def tag_before(self, tag, value):
1494 def tag_before(self, tag, value):
1494 self.tags_before[tag] = value
1495 self.tags_before[tag] = value
1495
1496
1496 def tag_after(self, tag, value):
1497 def tag_after(self, tag, value):
1497 self.tags_after[tag] = value
1498 self.tags_after[tag] = value
1498
1499
1499 def collect(self, data):
1500 def collect(self, data):
1500 if self.add_server:
1501 if self.add_server:
1501 data.setdefault('server', self.server)
1502 data.setdefault('server', self.server)
1502 if self.add_timestamp:
1503 if self.add_timestamp:
1503 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1504 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1504 if self.namespace:
1505 if self.namespace:
1505 data.setdefault('namespace', self.namespace)
1506 data.setdefault('namespace', self.namespace)
1506 if self.request:
1507 if self.request:
1507 data.setdefault('request', self.request)
1508 data.setdefault('request', self.request)
1508 self.stats.append(data)
1509 self.stats.append(data)
1509
1510
1510 def send_stats(self):
1511 def send_stats(self):
1511 tags = [
1512 tags = [
1512 ('testrun', self.request),
1513 ('testrun', self.request),
1513 ('testrun.start', self.testrun['start']),
1514 ('testrun.start', self.testrun['start']),
1514 ('testrun.timestamp', self.testrun['timestamp']),
1515 ('testrun.timestamp', self.testrun['timestamp']),
1515 ('test', self.namespace),
1516 ('test', self.namespace),
1516 ]
1517 ]
1517 for key, value in self.tags_before.items():
1518 for key, value in self.tags_before.items():
1518 tags.append((key + '.before', value))
1519 tags.append((key + '.before', value))
1519 try:
1520 try:
1520 delta = self.tags_after[key] - value
1521 delta = self.tags_after[key] - value
1521 tags.append((key + '.delta', delta))
1522 tags.append((key + '.delta', delta))
1522 except Exception:
1523 except Exception:
1523 pass
1524 pass
1524 for key, value in self.tags_after.items():
1525 for key, value in self.tags_after.items():
1525 tags.append((key + '.after', value))
1526 tags.append((key + '.after', value))
1526 self.collect({
1527 self.collect({
1527 'message': "Collected tags",
1528 'message': "Collected tags",
1528 'tags': tags,
1529 'tags': tags,
1529 })
1530 })
1530
1531
1531 response = requests.post(
1532 response = requests.post(
1532 self.url,
1533 self.url,
1533 headers={
1534 headers={
1534 'X-appenlight-api-key': self.api_key},
1535 'X-appenlight-api-key': self.api_key},
1535 json=self.stats,
1536 json=self.stats,
1536 )
1537 )
1537
1538
1538 if not response.status_code == 200:
1539 if not response.status_code == 200:
1539 pprint.pprint(self.stats)
1540 pprint.pprint(self.stats)
1540 print(response.headers)
1541 print(response.headers)
1541 print(response.text)
1542 print(response.text)
1542 raise Exception('Sending to appenlight failed')
1543 raise Exception('Sending to appenlight failed')
1543
1544
1544
1545
1545 @pytest.fixture
1546 @pytest.fixture
1546 def gist_util(request, db_connection):
1547 def gist_util(request, db_connection):
1547 """
1548 """
1548 Provides a wired instance of `GistUtility` with integrated cleanup.
1549 Provides a wired instance of `GistUtility` with integrated cleanup.
1549 """
1550 """
1550 utility = GistUtility()
1551 utility = GistUtility()
1551 request.addfinalizer(utility.cleanup)
1552 request.addfinalizer(utility.cleanup)
1552 return utility
1553 return utility
1553
1554
1554
1555
1555 class GistUtility(object):
1556 class GistUtility(object):
1556 def __init__(self):
1557 def __init__(self):
1557 self.fixture = Fixture()
1558 self.fixture = Fixture()
1558 self.gist_ids = []
1559 self.gist_ids = []
1559
1560
1560 def create_gist(self, **kwargs):
1561 def create_gist(self, **kwargs):
1561 gist = self.fixture.create_gist(**kwargs)
1562 gist = self.fixture.create_gist(**kwargs)
1562 self.gist_ids.append(gist.gist_id)
1563 self.gist_ids.append(gist.gist_id)
1563 return gist
1564 return gist
1564
1565
1565 def cleanup(self):
1566 def cleanup(self):
1566 for id_ in self.gist_ids:
1567 for id_ in self.gist_ids:
1567 self.fixture.destroy_gists(str(id_))
1568 self.fixture.destroy_gists(str(id_))
1568
1569
1569
1570
1570 @pytest.fixture
1571 @pytest.fixture
1571 def enabled_backends(request):
1572 def enabled_backends(request):
1572 backends = request.config.option.backends
1573 backends = request.config.option.backends
1573 return backends[:]
1574 return backends[:]
1574
1575
1575
1576
1576 @pytest.fixture
1577 @pytest.fixture
1577 def settings_util(request, db_connection):
1578 def settings_util(request, db_connection):
1578 """
1579 """
1579 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1580 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1580 """
1581 """
1581 utility = SettingsUtility()
1582 utility = SettingsUtility()
1582 request.addfinalizer(utility.cleanup)
1583 request.addfinalizer(utility.cleanup)
1583 return utility
1584 return utility
1584
1585
1585
1586
1586 class SettingsUtility(object):
1587 class SettingsUtility(object):
1587 def __init__(self):
1588 def __init__(self):
1588 self.rhodecode_ui_ids = []
1589 self.rhodecode_ui_ids = []
1589 self.rhodecode_setting_ids = []
1590 self.rhodecode_setting_ids = []
1590 self.repo_rhodecode_ui_ids = []
1591 self.repo_rhodecode_ui_ids = []
1591 self.repo_rhodecode_setting_ids = []
1592 self.repo_rhodecode_setting_ids = []
1592
1593
1593 def create_repo_rhodecode_ui(
1594 def create_repo_rhodecode_ui(
1594 self, repo, section, value, key=None, active=True, cleanup=True):
1595 self, repo, section, value, key=None, active=True, cleanup=True):
1595 key = key or hashlib.sha1(
1596 key = key or hashlib.sha1(
1596 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1597 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1597
1598
1598 setting = RepoRhodeCodeUi()
1599 setting = RepoRhodeCodeUi()
1599 setting.repository_id = repo.repo_id
1600 setting.repository_id = repo.repo_id
1600 setting.ui_section = section
1601 setting.ui_section = section
1601 setting.ui_value = value
1602 setting.ui_value = value
1602 setting.ui_key = key
1603 setting.ui_key = key
1603 setting.ui_active = active
1604 setting.ui_active = active
1604 Session().add(setting)
1605 Session().add(setting)
1605 Session().commit()
1606 Session().commit()
1606
1607
1607 if cleanup:
1608 if cleanup:
1608 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1609 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1609 return setting
1610 return setting
1610
1611
1611 def create_rhodecode_ui(
1612 def create_rhodecode_ui(
1612 self, section, value, key=None, active=True, cleanup=True):
1613 self, section, value, key=None, active=True, cleanup=True):
1613 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1614 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1614
1615
1615 setting = RhodeCodeUi()
1616 setting = RhodeCodeUi()
1616 setting.ui_section = section
1617 setting.ui_section = section
1617 setting.ui_value = value
1618 setting.ui_value = value
1618 setting.ui_key = key
1619 setting.ui_key = key
1619 setting.ui_active = active
1620 setting.ui_active = active
1620 Session().add(setting)
1621 Session().add(setting)
1621 Session().commit()
1622 Session().commit()
1622
1623
1623 if cleanup:
1624 if cleanup:
1624 self.rhodecode_ui_ids.append(setting.ui_id)
1625 self.rhodecode_ui_ids.append(setting.ui_id)
1625 return setting
1626 return setting
1626
1627
1627 def create_repo_rhodecode_setting(
1628 def create_repo_rhodecode_setting(
1628 self, repo, name, value, type_, cleanup=True):
1629 self, repo, name, value, type_, cleanup=True):
1629 setting = RepoRhodeCodeSetting(
1630 setting = RepoRhodeCodeSetting(
1630 repo.repo_id, key=name, val=value, type=type_)
1631 repo.repo_id, key=name, val=value, type=type_)
1631 Session().add(setting)
1632 Session().add(setting)
1632 Session().commit()
1633 Session().commit()
1633
1634
1634 if cleanup:
1635 if cleanup:
1635 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1636 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1636 return setting
1637 return setting
1637
1638
1638 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1639 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1639 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1640 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1640 Session().add(setting)
1641 Session().add(setting)
1641 Session().commit()
1642 Session().commit()
1642
1643
1643 if cleanup:
1644 if cleanup:
1644 self.rhodecode_setting_ids.append(setting.app_settings_id)
1645 self.rhodecode_setting_ids.append(setting.app_settings_id)
1645
1646
1646 return setting
1647 return setting
1647
1648
1648 def cleanup(self):
1649 def cleanup(self):
1649 for id_ in self.rhodecode_ui_ids:
1650 for id_ in self.rhodecode_ui_ids:
1650 setting = RhodeCodeUi.get(id_)
1651 setting = RhodeCodeUi.get(id_)
1651 Session().delete(setting)
1652 Session().delete(setting)
1652
1653
1653 for id_ in self.rhodecode_setting_ids:
1654 for id_ in self.rhodecode_setting_ids:
1654 setting = RhodeCodeSetting.get(id_)
1655 setting = RhodeCodeSetting.get(id_)
1655 Session().delete(setting)
1656 Session().delete(setting)
1656
1657
1657 for id_ in self.repo_rhodecode_ui_ids:
1658 for id_ in self.repo_rhodecode_ui_ids:
1658 setting = RepoRhodeCodeUi.get(id_)
1659 setting = RepoRhodeCodeUi.get(id_)
1659 Session().delete(setting)
1660 Session().delete(setting)
1660
1661
1661 for id_ in self.repo_rhodecode_setting_ids:
1662 for id_ in self.repo_rhodecode_setting_ids:
1662 setting = RepoRhodeCodeSetting.get(id_)
1663 setting = RepoRhodeCodeSetting.get(id_)
1663 Session().delete(setting)
1664 Session().delete(setting)
1664
1665
1665 Session().commit()
1666 Session().commit()
1666
1667
1667
1668
1668 @pytest.fixture
1669 @pytest.fixture
1669 def no_notifications(request):
1670 def no_notifications(request):
1670 notification_patcher = mock.patch(
1671 notification_patcher = mock.patch(
1671 'rhodecode.model.notification.NotificationModel.create')
1672 'rhodecode.model.notification.NotificationModel.create')
1672 notification_patcher.start()
1673 notification_patcher.start()
1673 request.addfinalizer(notification_patcher.stop)
1674 request.addfinalizer(notification_patcher.stop)
1674
1675
1675
1676
1676 @pytest.fixture(scope='session')
1677 @pytest.fixture(scope='session')
1677 def repeat(request):
1678 def repeat(request):
1678 """
1679 """
1679 The number of repetitions is based on this fixture.
1680 The number of repetitions is based on this fixture.
1680
1681
1681 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
1682 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
1682 tests are not too slow in our default test suite.
1683 tests are not too slow in our default test suite.
1683 """
1684 """
1684 return request.config.getoption('--repeat')
1685 return request.config.getoption('--repeat')
1685
1686
1686
1687
1687 @pytest.fixture
1688 @pytest.fixture
1688 def rhodecode_fixtures():
1689 def rhodecode_fixtures():
1689 return Fixture()
1690 return Fixture()
1690
1691
1691
1692
1692 @pytest.fixture
1693 @pytest.fixture
1693 def context_stub():
1694 def context_stub():
1694 """
1695 """
1695 Stub context object.
1696 Stub context object.
1696 """
1697 """
1697 context = pyramid.testing.DummyResource()
1698 context = pyramid.testing.DummyResource()
1698 return context
1699 return context
1699
1700
1700
1701
1701 @pytest.fixture
1702 @pytest.fixture
1702 def request_stub():
1703 def request_stub():
1703 """
1704 """
1704 Stub request object.
1705 Stub request object.
1705 """
1706 """
1706 from rhodecode.lib.base import bootstrap_request
1707 from rhodecode.lib.base import bootstrap_request
1707 request = bootstrap_request(scheme='https')
1708 request = bootstrap_request(scheme='https')
1708 return request
1709 return request
1709
1710
1710
1711
1711 @pytest.fixture
1712 @pytest.fixture
1712 def config_stub(request, request_stub):
1713 def config_stub(request, request_stub):
1713 """
1714 """
1714 Set up pyramid.testing and return the Configurator.
1715 Set up pyramid.testing and return the Configurator.
1715 """
1716 """
1716 from rhodecode.lib.base import bootstrap_config
1717 from rhodecode.lib.base import bootstrap_config
1717 config = bootstrap_config(request=request_stub)
1718 config = bootstrap_config(request=request_stub)
1718
1719
1719 @request.addfinalizer
1720 @request.addfinalizer
1720 def cleanup():
1721 def cleanup():
1721 pyramid.testing.tearDown()
1722 pyramid.testing.tearDown()
1722
1723
1723 return config
1724 return config
1724
1725
1725
1726
1726 @pytest.fixture
1727 @pytest.fixture
1727 def StubIntegrationType():
1728 def StubIntegrationType():
1728 class _StubIntegrationType(IntegrationTypeBase):
1729 class _StubIntegrationType(IntegrationTypeBase):
1729 """ Test integration type class """
1730 """ Test integration type class """
1730
1731
1731 key = 'test'
1732 key = 'test'
1732 display_name = 'Test integration type'
1733 display_name = 'Test integration type'
1733 description = 'A test integration type for testing'
1734 description = 'A test integration type for testing'
1734
1735
1735 @classmethod
1736 @classmethod
1736 def icon(cls):
1737 def icon(cls):
1737 return 'test_icon_html_image'
1738 return 'test_icon_html_image'
1738
1739
1739 def __init__(self, settings):
1740 def __init__(self, settings):
1740 super(_StubIntegrationType, self).__init__(settings)
1741 super(_StubIntegrationType, self).__init__(settings)
1741 self.sent_events = [] # for testing
1742 self.sent_events = [] # for testing
1742
1743
1743 def send_event(self, event):
1744 def send_event(self, event):
1744 self.sent_events.append(event)
1745 self.sent_events.append(event)
1745
1746
1746 def settings_schema(self):
1747 def settings_schema(self):
1747 class SettingsSchema(colander.Schema):
1748 class SettingsSchema(colander.Schema):
1748 test_string_field = colander.SchemaNode(
1749 test_string_field = colander.SchemaNode(
1749 colander.String(),
1750 colander.String(),
1750 missing=colander.required,
1751 missing=colander.required,
1751 title='test string field',
1752 title='test string field',
1752 )
1753 )
1753 test_int_field = colander.SchemaNode(
1754 test_int_field = colander.SchemaNode(
1754 colander.Int(),
1755 colander.Int(),
1755 title='some integer setting',
1756 title='some integer setting',
1756 )
1757 )
1757 return SettingsSchema()
1758 return SettingsSchema()
1758
1759
1759
1760
1760 integration_type_registry.register_integration_type(_StubIntegrationType)
1761 integration_type_registry.register_integration_type(_StubIntegrationType)
1761 return _StubIntegrationType
1762 return _StubIntegrationType
1762
1763
1763 @pytest.fixture
1764 @pytest.fixture
1764 def stub_integration_settings():
1765 def stub_integration_settings():
1765 return {
1766 return {
1766 'test_string_field': 'some data',
1767 'test_string_field': 'some data',
1767 'test_int_field': 100,
1768 'test_int_field': 100,
1768 }
1769 }
1769
1770
1770
1771
1771 @pytest.fixture
1772 @pytest.fixture
1772 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1773 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1773 stub_integration_settings):
1774 stub_integration_settings):
1774 integration = IntegrationModel().create(
1775 integration = IntegrationModel().create(
1775 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1776 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1776 name='test repo integration',
1777 name='test repo integration',
1777 repo=repo_stub, repo_group=None, child_repos_only=None)
1778 repo=repo_stub, repo_group=None, child_repos_only=None)
1778
1779
1779 @request.addfinalizer
1780 @request.addfinalizer
1780 def cleanup():
1781 def cleanup():
1781 IntegrationModel().delete(integration)
1782 IntegrationModel().delete(integration)
1782
1783
1783 return integration
1784 return integration
1784
1785
1785
1786
1786 @pytest.fixture
1787 @pytest.fixture
1787 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1788 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1788 stub_integration_settings):
1789 stub_integration_settings):
1789 integration = IntegrationModel().create(
1790 integration = IntegrationModel().create(
1790 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1791 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1791 name='test repogroup integration',
1792 name='test repogroup integration',
1792 repo=None, repo_group=test_repo_group, child_repos_only=True)
1793 repo=None, repo_group=test_repo_group, child_repos_only=True)
1793
1794
1794 @request.addfinalizer
1795 @request.addfinalizer
1795 def cleanup():
1796 def cleanup():
1796 IntegrationModel().delete(integration)
1797 IntegrationModel().delete(integration)
1797
1798
1798 return integration
1799 return integration
1799
1800
1800
1801
1801 @pytest.fixture
1802 @pytest.fixture
1802 def repogroup_recursive_integration_stub(request, test_repo_group,
1803 def repogroup_recursive_integration_stub(request, test_repo_group,
1803 StubIntegrationType, stub_integration_settings):
1804 StubIntegrationType, stub_integration_settings):
1804 integration = IntegrationModel().create(
1805 integration = IntegrationModel().create(
1805 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1806 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1806 name='test recursive repogroup integration',
1807 name='test recursive repogroup integration',
1807 repo=None, repo_group=test_repo_group, child_repos_only=False)
1808 repo=None, repo_group=test_repo_group, child_repos_only=False)
1808
1809
1809 @request.addfinalizer
1810 @request.addfinalizer
1810 def cleanup():
1811 def cleanup():
1811 IntegrationModel().delete(integration)
1812 IntegrationModel().delete(integration)
1812
1813
1813 return integration
1814 return integration
1814
1815
1815
1816
1816 @pytest.fixture
1817 @pytest.fixture
1817 def global_integration_stub(request, StubIntegrationType,
1818 def global_integration_stub(request, StubIntegrationType,
1818 stub_integration_settings):
1819 stub_integration_settings):
1819 integration = IntegrationModel().create(
1820 integration = IntegrationModel().create(
1820 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1821 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1821 name='test global integration',
1822 name='test global integration',
1822 repo=None, repo_group=None, child_repos_only=None)
1823 repo=None, repo_group=None, child_repos_only=None)
1823
1824
1824 @request.addfinalizer
1825 @request.addfinalizer
1825 def cleanup():
1826 def cleanup():
1826 IntegrationModel().delete(integration)
1827 IntegrationModel().delete(integration)
1827
1828
1828 return integration
1829 return integration
1829
1830
1830
1831
1831 @pytest.fixture
1832 @pytest.fixture
1832 def root_repos_integration_stub(request, StubIntegrationType,
1833 def root_repos_integration_stub(request, StubIntegrationType,
1833 stub_integration_settings):
1834 stub_integration_settings):
1834 integration = IntegrationModel().create(
1835 integration = IntegrationModel().create(
1835 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1836 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1836 name='test global integration',
1837 name='test global integration',
1837 repo=None, repo_group=None, child_repos_only=True)
1838 repo=None, repo_group=None, child_repos_only=True)
1838
1839
1839 @request.addfinalizer
1840 @request.addfinalizer
1840 def cleanup():
1841 def cleanup():
1841 IntegrationModel().delete(integration)
1842 IntegrationModel().delete(integration)
1842
1843
1843 return integration
1844 return integration
1844
1845
1845
1846
1846 @pytest.fixture
1847 @pytest.fixture
1847 def local_dt_to_utc():
1848 def local_dt_to_utc():
1848 def _factory(dt):
1849 def _factory(dt):
1849 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1850 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1850 dateutil.tz.tzutc()).replace(tzinfo=None)
1851 dateutil.tz.tzutc()).replace(tzinfo=None)
1851 return _factory
1852 return _factory
1852
1853
1853
1854
@pytest.fixture
def disable_anonymous_user(request, baseapp):
    """Turn anonymous access off for the duration of a single test.

    Anonymous access is re-enabled in a finalizer so other tests keep the
    default behaviour.
    """
    set_anonymous_access(False)
    request.addfinalizer(lambda: set_anonymous_access(True))
1861
1862
1862
1863
@pytest.fixture(scope='module')
def rc_fixture(request):
    """Module-scoped helper: one shared ``Fixture`` instance per test module."""
    helper = Fixture()
    return helper
1866
1867
1867
1868
@pytest.fixture
def repo_groups(request):
    """Create a small repo-group hierarchy for a test.

    Builds three groups — a standalone ``zombie`` group plus a
    ``parent``/``parent/child`` pair — sanity-checks the database state,
    and destroys all of them after the test.

    :return: tuple of (zombie_group, parent_group, child_group)
    """
    fixture = Fixture()

    session = Session()
    zombie = fixture.create_repo_group('zombie')
    parent = fixture.create_repo_group('parent')
    child = fixture.create_repo_group('parent/child')

    # sanity check: exactly the three groups above exist and the child is
    # correctly linked to its parent
    assert len(session.query(RepoGroup).all()) == 3
    assert child.group_parent_id == parent.group_id

    @request.addfinalizer
    def cleanup():
        # same destroy order as creation-agnostic teardown: zombie first,
        # then child before its parent so the hierarchy unwinds cleanly
        for group in (zombie, child, parent):
            fixture.destroy_repo_group(group)

    return zombie, parent, child
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
This diff has been collapsed as it changes many lines, (823 lines changed) Show them Hide them
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
This diff has been collapsed as it changes many lines, (521 lines changed) Show them Hide them
General Comments 0
You need to be logged in to leave comments. Login now