##// END OF EJS Templates
rcextensions: new builtin rcextensions....
marcink -
r3133:6afdd8e7 default
parent child Browse files
Show More
@@ -0,0 +1,44 b''
1 .. _integrations-rcextensions:
2
3
4 rcextensions integrations
5 =========================
6
7
8 Since RhodeCode 4.14 release rcextensions aren't part of rhodecode-tools, and instead
9 they are shipped with the new or upgraded installations.
10
11 The rcextensions template `rcextensions.tmpl` is created in the `etc/` directory
12 of enterprise or community installation. It's always re-created and updated on upgrades.
13
14
15 Activating rcextensions
16 +++++++++++++++++++++++
17
18 To activate rcextensions simply copy or rename the created template rcextensions
19 into the path where the rhodecode.ini file is located::
20
21 pushd ~/rccontrol/enterprise-1/
22 or
23 pushd ~/rccontrol/community-1/
24
25 mv etc/rcextensions.tmpl rcextensions
26
27
28 rcextensions are loaded when |RCE| starts. So a restart is required after activation or
29 change of code in rcextensions.
30
31 Simply restart only the enterprise/community instance::
32
33 rccontrol restart enterprise-1
34 or
35 rccontrol restart community-1
36
37
38 Example usage
39 +++++++++++++
40
41
42 To see examples of usage please check the examples directory under:
43
44 https://code.rhodecode.com/rhodecode-enterprise-ce/files/stable/rhodecode/config/rcextensions/examples
@@ -0,0 +1,56 b''
1 # Copyright (C) 2016-2018 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
"""
rcextensions module, please edit `hooks.py` to overwrite hooks logic
"""

from .hooks import (
    _create_repo_hook,
    _create_repo_group_hook,
    _pre_create_user_hook,
    _create_user_hook,
    _delete_repo_hook,
    _delete_user_hook,
    _pre_push_hook,
    _push_hook,
    _pre_pull_hook,
    _pull_hook,
    _create_pull_request_hook,
    _review_pull_request_hook,
    _update_pull_request_hook,
    _merge_pull_request_hook,
    _close_pull_request_hook,
)

# set as module attributes, we use those to call hooks. *do not change this*
CREATE_REPO_HOOK = _create_repo_hook
CREATE_REPO_GROUP_HOOK = _create_repo_group_hook
PRE_CREATE_USER_HOOK = _pre_create_user_hook
CREATE_USER_HOOK = _create_user_hook
DELETE_REPO_HOOK = _delete_repo_hook
DELETE_USER_HOOK = _delete_user_hook
PRE_PUSH_HOOK = _pre_push_hook
PUSH_HOOK = _push_hook
PRE_PULL_HOOK = _pre_pull_hook
PULL_HOOK = _pull_hook
CREATE_PULL_REQUEST = _create_pull_request_hook
REVIEW_PULL_REQUEST = _review_pull_request_hook
UPDATE_PULL_REQUEST = _update_pull_request_hook
MERGE_PULL_REQUEST = _merge_pull_request_hook
CLOSE_PULL_REQUEST = _close_pull_request_hook
@@ -0,0 +1,36 b''
# Example to trigger a HTTP call via an HTTP helper via post_push hook


@has_kwargs({
    'server_url': 'url of instance that triggered this hook',
    'config': 'path to .ini config used',
    'scm': 'type of version control "git", "hg", "svn"',
    'username': 'username of actor who triggered this event',
    'ip': 'ip address of actor who triggered this hook',
    'action': '',
    'repository': 'repository name',
    'repo_store_path': 'full path to where repositories are stored',
    'commit_ids': '',
    'hook_type': '',
    'user_agent': '',
})
def _push_hook(*args, **kwargs):
    """
    POST PUSH HOOK, this function will be executed after each push, it's
    executed after the built-in hook that RhodeCode uses for logging pushes.

    If the repository defines an `endpoint_url` extra field, POST a JSON
    payload to that endpoint and report the response back to the client.
    """

    from .helpers import http_call, extra_fields
    # returns a dict of key-val entries fetched from repository extra fields
    repo_extra_fields = extra_fields.run(**kwargs)

    # .get() already filters out a missing/empty endpoint_url, so the
    # previous nested `if endpoint:` re-check was redundant
    endpoint = repo_extra_fields.get('endpoint_url')
    if endpoint:
        data = {
            'some_key': 'val'
        }
        response = http_call.run(url=endpoint, json_data=data)
        return HookResponse(0, 'Called endpoint {}, with response {}'.format(
            endpoint, response))

    return HookResponse(0, '')
@@ -0,0 +1,36 b''
# Example to trigger a CI call via an HTTP helper via post_push hook


@has_kwargs({
    'server_url': 'url of instance that triggered this hook',
    'config': 'path to .ini config used',
    'scm': 'type of version control "git", "hg", "svn"',
    'username': 'username of actor who triggered this event',
    'ip': 'ip address of actor who triggered this hook',
    'action': '',
    'repository': 'repository name',
    'repo_store_path': 'full path to where repositories are stored',
    'commit_ids': '',
    'hook_type': '',
    'user_agent': '',
})
def _push_hook(*args, **kwargs):
    """
    POST PUSH HOOK, this function will be executed after each push, it's
    executed after the built-in hook that RhodeCode uses for logging pushes.

    If the repository defines an `endpoint_url` extra field, POST a JSON
    payload to that CI endpoint and report the response back to the client.
    """

    from .helpers import http_call, extra_fields
    # returns a dict of key-val entries fetched from repository extra fields
    repo_extra_fields = extra_fields.run(**kwargs)

    # .get() already filters out a missing/empty endpoint_url, so the
    # previous nested `if endpoint:` re-check was redundant
    endpoint = repo_extra_fields.get('endpoint_url')
    if endpoint:
        data = {
            'some_key': 'val'
        }
        response = http_call.run(url=endpoint, json_data=data)
        return HookResponse(0, 'Called endpoint {}, with response {}'.format(
            endpoint, response))

    return HookResponse(0, '')
1 NO CONTENT: new file 100644
1 NO CONTENT: new file 100644
@@ -0,0 +1,17 b''
1 # Copyright (C) 2016-2018 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
@@ -0,0 +1,40 b''
1 # -*- coding: utf-8 -*-
2 # Copyright (C) 2016-2018 RhodeCode GmbH
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
7 #
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
12 #
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
"""
use in hooks::

    from .helpers import extra_fields
    # returns a dict of key-val entries fetched from repository extra fields
    repo_extra_fields = extra_fields.run(**kwargs)

"""
28
29
def run(*args, **kwargs):
    """
    Return a dict mapping each extra-field key of the repository to the
    serialized field data (``field.get_dict()``).

    Expects ``repository`` (or the temporary ``REPOSITORY``) key in kwargs.
    """
    from rhodecode.model.db import Repository
    # use temp name then the main one propagated
    repo_name = kwargs.pop('REPOSITORY', None) or kwargs['repository']
    repo = Repository.get_by_repo_name(repo_name)

    # build {field_key: serialized_field} in one pass
    return {field.field_key: field.get_dict()
            for field in repo.extra_fields}
@@ -0,0 +1,61 b''
1 # -*- coding: utf-8 -*-
2 # Copyright (C) 2016-2018 RhodeCode GmbH
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
7 #
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
12 #
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
20 """
21 Extract and serialize commits taken from a list of commit_ids. This should
22 be used in post_push hook
23
24 us in hooks::
25
26 from .helpers import extract_post_commits
27 # returns list of dicts with key-val fetched from extra fields
28 commit_list = extract_post_commits.run(**kwargs)
29 """
30 import traceback
31
32
def run(*args, **kwargs):
    """
    Serialize the pushed commits found in ``kwargs['commit_ids']``.

    Returns a list of commit dicts, each enriched with a ``mentions`` key,
    or an empty list when there are no commits or extraction fails.
    """
    commit_ids = kwargs.get('commit_ids')
    if not commit_ids:
        # keep the return type consistent: callers always get a list
        # (previously this branch returned the int 0)
        return []

    # heavyweight imports are done after the cheap guard above
    from rhodecode.lib.utils2 import extract_mentioned_users
    from rhodecode.model.db import Repository

    # use temp name then the main one propagated
    repo_name = kwargs.pop('REPOSITORY', None) or kwargs['repository']

    repo = Repository.get_by_repo_name(repo_name)
    commits = []

    vcs_repo = repo.scm_instance(cache=False)
    try:
        for commit_id in commit_ids:
            cs = vcs_repo.get_changeset(commit_id)
            cs_data = cs.__json__()
            cs_data['mentions'] = extract_mentioned_users(cs_data['message'])
            # optionally add more logic to parse the commits, like reading extra
            # fields of repository to read managers of reviewers ?
            commits.append(cs_data)
    except Exception:
        print(traceback.format_exc())
        # we don't send any commits when a crash happens, only the full list
        # matters -- we short circuit then.
        return []
    return commits
@@ -0,0 +1,63 b''
1 # -*- coding: utf-8 -*-
2 # Copyright (C) 2016-2018 RhodeCode GmbH
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
7 #
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
12 #
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
20 """
use in hooks::
22
23 from .helpers import extract_pre_commits
24 # returns list of dicts with key-val fetched from extra fields
25 commit_list = extract_pre_commits.run(**kwargs)
26
27 """
28 import re
29 import collections
30
31
def get_hg_commits(repo, refs):
    """Placeholder: extract Mercurial commits for the given refs (not yet implemented)."""
    return []
35
36
def get_git_commits(repo, refs):
    """Placeholder: extract Git commits for the given refs (not yet implemented)."""
    return []
40
41
def run(*args, **kwargs):
    """
    Return pre-transaction commits for a push, dispatched by VCS type.

    Delegates to :func:`get_git_commits` or :func:`get_hg_commits`;
    any other VCS type yields an empty list.
    """
    from rhodecode.model.db import Repository

    vcs_type = kwargs['scm']
    # use temp name then the main one propagated
    repo_name = kwargs.pop('REPOSITORY', None) or kwargs['repository']

    repo = Repository.get_by_repo_name(repo_name)
    vcs_repo = repo.scm_instance(cache=False)

    # NOTE(review): the original looped over commit_ids building an unused
    # `new_environ` dict from each entry's 'hg_env' key -- dead code that
    # would also KeyError on git pushes (which carry 'git_env'); removed.
    if vcs_type == 'git':
        return get_git_commits(vcs_repo, kwargs['commit_ids'])

    if vcs_type == 'hg':
        return get_hg_commits(vcs_repo, kwargs['commit_ids'])

    return []
@@ -0,0 +1,36 b''
1 # -*- coding: utf-8 -*-
2 # Copyright (C) 2016-2018 RhodeCode GmbH
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
7 #
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
12 #
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
20 """
use in hooks::
22
23 from .helpers import http_call
24 # returns response after making a POST call
25 response = http_call.run(url=url, json_data=data)
26
27 """
28
29 from rhodecode.integrations.types.base import requests_retry_call
30
31
def run(url, json_data, method='post'):
    """
    Send `json_data` as JSON to `url` using the given HTTP `method` and
    return the response object.

    :param url: endpoint URL to call
    :param json_data: payload serialized as the JSON request body
    :param method: HTTP method name, e.g 'post' or 'get' (default: 'post')
    :raises requests.HTTPError: on a failed (4xx/5xx) response
    """
    requests_session = requests_retry_call()
    requests_session.verify = True  # Verify SSL
    # the original hard-coded .post(), silently ignoring `method`, and
    # returned raise_for_status() -- which is always None -- instead of
    # the response the docstring promises
    caller = getattr(requests_session, method.lower())
    resp = caller(url, json=json_data, timeout=60)
    resp.raise_for_status()  # raise exception on a failed request
    return resp
@@ -0,0 +1,431 b''
1 # Copyright (C) 2016-2018 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 from .utils import DotDict, HookResponse, has_kwargs
20
21
# Config shortcut to keep all configuration in one place
# Example: api_key = CONFIG.my_config.api_key
# DotDict allows attribute-style access to nested config values
CONFIG = DotDict(
    my_config=DotDict(
        api_key='<secret>',
    ),

)
30
31
32 @has_kwargs({
33 'repo_name': '',
34 'repo_type': '',
35 'description': '',
36 'private': '',
37 'created_on': '',
38 'enable_downloads': '',
39 'repo_id': '',
40 'user_id': '',
41 'enable_statistics': '',
42 'clone_uri': '',
43 'fork_id': '',
44 'group_id': '',
45 'created_by': ''
46 })
47 def _create_repo_hook(*args, **kwargs):
48 """
49 POST CREATE REPOSITORY HOOK. This function will be executed after
50 each repository is created. kwargs available:
51
52 """
53 return HookResponse(0, '')
54
55
56 @has_kwargs({
57 'group_name': '',
58 'group_parent_id': '',
59 'group_description': '',
60 'group_id': '',
61 'user_id': '',
62 'created_by': '',
63 'created_on': '',
64 'enable_locking': ''
65 })
66 def _create_repo_group_hook(*args, **kwargs):
67 """
68 POST CREATE REPOSITORY GROUP HOOK, this function will be
69 executed after each repository group is created. kwargs available:
70 """
71 return HookResponse(0, '')
72
73
74 @has_kwargs({
75 'username': '',
76 'password': '',
77 'email': '',
78 'firstname': '',
79 'lastname': '',
80 'active': '',
81 'admin': '',
82 'created_by': '',
83 })
84 def _pre_create_user_hook(*args, **kwargs):
85 """
86 PRE CREATE USER HOOK, this function will be executed before each
87 user is created, it returns a tuple of bool, reason.
88 If bool is False the user creation will be stopped and reason
89 will be displayed to the user.
90
91 Return HookResponse(1, reason) to block user creation
92
93 """
94
95 reason = 'allowed'
96 return HookResponse(0, reason)
97
98
99 @has_kwargs({
100 'username': '',
101 'full_name_or_username': '',
102 'full_contact': '',
103 'user_id': '',
104 'name': '',
105 'firstname': '',
106 'short_contact': '',
107 'admin': '',
108 'lastname': '',
109 'ip_addresses': '',
110 'extern_type': '',
111 'extern_name': '',
112 'email': '',
113 'api_key': '',
114 'api_keys': '',
115 'last_login': '',
116 'full_name': '',
117 'active': '',
118 'password': '',
119 'emails': '',
120 'inherit_default_permissions': '',
121 'created_by': '',
122 'created_on': '',
123 })
124 def _create_user_hook(*args, **kwargs):
125 """
126 POST CREATE USER HOOK, this function will be executed after each user is created
127 """
128 return HookResponse(0, '')
129
130
131 @has_kwargs({
132 'repo_name': '',
133 'repo_type': '',
134 'description': '',
135 'private': '',
136 'created_on': '',
137 'enable_downloads': '',
138 'repo_id': '',
139 'user_id': '',
140 'enable_statistics': '',
141 'clone_uri': '',
142 'fork_id': '',
143 'group_id': '',
144 'deleted_by': '',
145 'deleted_on': '',
146 })
147 def _delete_repo_hook(*args, **kwargs):
148 """
149 POST DELETE REPOSITORY HOOK, this function will be executed after
150 each repository deletion
151 """
152 return HookResponse(0, '')
153
154
155 @has_kwargs({
156 'username': '',
157 'full_name_or_username': '',
158 'full_contact': '',
159 'user_id': '',
160 'name': '',
161 'short_contact': '',
162 'admin': '',
163 'firstname': '',
164 'lastname': '',
165 'ip_addresses': '',
166 'email': '',
167 'api_key': '',
168 'last_login': '',
169 'full_name': '',
170 'active': '',
171 'password': '',
172 'emails': '',
173 'inherit_default_permissions': '',
174 'deleted_by': '',
175 })
176 def _delete_user_hook(*args, **kwargs):
177 """
178 POST DELETE USER HOOK, this function will be executed after each
179 user is deleted kwargs available:
180 """
181 return HookResponse(0, '')
182
183
184 # =============================================================================
185 # PUSH/PULL RELATED HOOKS
186 # =============================================================================
@has_kwargs({
    'server_url': 'url of instance that triggered this hook',
    'config': 'path to .ini config used',
    'scm': 'type of version control "git", "hg", "svn"',
    'username': 'username of actor who triggered this event',
    'ip': 'ip address of actor who triggered this hook',
    'action': '',
    'repository': 'repository name',
    'repo_store_path': 'full path to where repositories are stored',
    'commit_ids': 'pre transaction metadata for commit ids',
    'hook_type': '',
    'user_agent': 'Client user agent, e.g git or mercurial CLI version',
})
def _pre_push_hook(*args, **kwargs):
    """
    Pre push hook, executed BEFORE the push transaction is stored.
    To stop version control from storing the transaction and send a message to user
    use non-zero HookResponse with a message, e.g return HookResponse(1, 'Not allowed')

    This message will be shown back to client during PUSH operation

    Commit ids might look like that::

        [{u'hg_env|git_env': ...,
          u'multiple_heads': [],
          u'name': u'default',
          u'new_rev': u'd0befe0692e722e01d5677f27a104631cf798b69',
          u'old_rev': u'd0befe0692e722e01d5677f27a104631cf798b69',
          u'ref': u'',
          u'total_commits': 2,
          u'type': u'branch'}]
    """
    return HookResponse(0, '')
220
221
@has_kwargs({
    'server_url': 'url of instance that triggered this hook',
    'config': 'path to .ini config used',
    'scm': 'type of version control "git", "hg", "svn"',
    'username': 'username of actor who triggered this event',
    'ip': 'ip address of actor who triggered this hook',
    'action': '',
    'repository': 'repository name',
    'repo_store_path': 'full path to where repositories are stored',
    'commit_ids': 'list of pushed commit_ids (sha1)',
    'hook_type': '',
    'user_agent': 'Client user agent, e.g git or mercurial CLI version',
})
def _push_hook(*args, **kwargs):
    """
    POST PUSH HOOK, this function will be executed after each push, it's
    executed after the built-in hook that RhodeCode uses for logging pushes
    """
    return HookResponse(0, '')
241
242
@has_kwargs({
    'server_url': 'url of instance that triggered this hook',
    'repo_store_path': 'full path to where repositories are stored',
    'config': 'path to .ini config used',
    'scm': 'type of version control "git", "hg", "svn"',
    'username': 'username of actor who triggered this event',
    'ip': 'ip address of actor who triggered this hook',
    'action': '',
    'repository': 'repository name',
    'hook_type': '',
    'user_agent': 'Client user agent, e.g git or mercurial CLI version',
})
def _pre_pull_hook(*args, **kwargs):
    """
    Pre pull hook, executed BEFORE each code pull.
    Return a non-zero HookResponse to block the pull.
    """
    return HookResponse(0, '')
260
261
262 @has_kwargs({
263 'server_url': 'url of instance that triggered this hook',
264 'repo_store_path': 'full path to where repositories are stored',
265 'config': 'path to .ini config used',
266 'scm': 'type of version control "git", "hg", "svn"',
267 'username': 'username of actor who triggered this event',
268 'ip': 'ip address of actor who triggered this hook',
269 'action': '',
270 'repository': 'repository name',
271 'hook_type': '',
272 'user_agent': 'Client user agent, e.g git or mercurial CLI version',
273 })
274 def _pull_hook(*args, **kwargs):
275 """
276 This hook will be executed after each code pull.
277 """
278 return HookResponse(0, '')
279
280
281 # =============================================================================
282 # PULL REQUEST RELATED HOOKS
283 # =============================================================================
284 @has_kwargs({
285 'server_url': 'url of instance that triggered this hook',
286 'config': 'path to .ini config used',
287 'scm': 'type of version control "git", "hg", "svn"',
288 'username': 'username of actor who triggered this event',
289 'ip': 'ip address of actor who triggered this hook',
290 'action': '',
291 'repository': 'repository name',
292 'pull_request_id': '',
293 'url': '',
294 'title': '',
295 'description': '',
296 'status': '',
297 'created_on': '',
298 'updated_on': '',
299 'commit_ids': '',
300 'review_status': '',
301 'mergeable': '',
302 'source': '',
303 'target': '',
304 'author': '',
305 'reviewers': '',
306 })
307 def _create_pull_request_hook(*args, **kwargs):
308 """
309 This hook will be executed after creation of a pull request.
310 """
311 return HookResponse(0, '')
312
313
314 @has_kwargs({
315 'server_url': 'url of instance that triggered this hook',
316 'config': 'path to .ini config used',
317 'scm': 'type of version control "git", "hg", "svn"',
318 'username': 'username of actor who triggered this event',
319 'ip': 'ip address of actor who triggered this hook',
320 'action': '',
321 'repository': 'repository name',
322 'pull_request_id': '',
323 'url': '',
324 'title': '',
325 'description': '',
326 'status': '',
327 'created_on': '',
328 'updated_on': '',
329 'commit_ids': '',
330 'review_status': '',
331 'mergeable': '',
332 'source': '',
333 'target': '',
334 'author': '',
335 'reviewers': '',
336 })
337 def _review_pull_request_hook(*args, **kwargs):
338 """
339 This hook will be executed after review action was made on a pull request.
340 """
341 return HookResponse(0, '')
342
343
344 @has_kwargs({
345 'server_url': 'url of instance that triggered this hook',
346 'config': 'path to .ini config used',
347 'scm': 'type of version control "git", "hg", "svn"',
348 'username': 'username of actor who triggered this event',
349 'ip': 'ip address of actor who triggered this hook',
350 'action': '',
351 'repository': 'repository name',
352 'pull_request_id': '',
353 'url': '',
354 'title': '',
355 'description': '',
356 'status': '',
357 'created_on': '',
358 'updated_on': '',
359 'commit_ids': '',
360 'review_status': '',
361 'mergeable': '',
362 'source': '',
363 'target': '',
364 'author': '',
365 'reviewers': '',
366 })
367 def _update_pull_request_hook(*args, **kwargs):
368 """
369 This hook will be executed after pull requests has been updated with new commits.
370 """
371 return HookResponse(0, '')
372
373
374 @has_kwargs({
375 'server_url': 'url of instance that triggered this hook',
376 'config': 'path to .ini config used',
377 'scm': 'type of version control "git", "hg", "svn"',
378 'username': 'username of actor who triggered this event',
379 'ip': 'ip address of actor who triggered this hook',
380 'action': '',
381 'repository': 'repository name',
382 'pull_request_id': '',
383 'url': '',
384 'title': '',
385 'description': '',
386 'status': '',
387 'created_on': '',
388 'updated_on': '',
389 'commit_ids': '',
390 'review_status': '',
391 'mergeable': '',
392 'source': '',
393 'target': '',
394 'author': '',
395 'reviewers': '',
396 })
397 def _merge_pull_request_hook(*args, **kwargs):
398 """
399 This hook will be executed after merge of a pull request.
400 """
401 return HookResponse(0, '')
402
403
404 @has_kwargs({
405 'server_url': 'url of instance that triggered this hook',
406 'config': 'path to .ini config used',
407 'scm': 'type of version control "git", "hg", "svn"',
408 'username': 'username of actor who triggered this event',
409 'ip': 'ip address of actor who triggered this hook',
410 'action': '',
411 'repository': 'repository name',
412 'pull_request_id': '',
413 'url': '',
414 'title': '',
415 'description': '',
416 'status': '',
417 'created_on': '',
418 'updated_on': '',
419 'commit_ids': '',
420 'review_status': '',
421 'mergeable': '',
422 'source': '',
423 'target': '',
424 'author': '',
425 'reviewers': '',
426 })
427 def _close_pull_request_hook(*args, **kwargs):
428 """
429 This hook will be executed after close of a pull request.
430 """
431 return HookResponse(0, '')
@@ -0,0 +1,21 b''
# =============================================================================
# END OF UTILITY FUNCTIONS HERE
# =============================================================================

# Additional mappings that are not present in the pygments lexers
# used for building stats
# format is {'ext':['Names']} eg. {'py':['Python']} note: there can be
# more than one name for extension
# NOTE: that this will override any mappings in LANGUAGES_EXTENSIONS_MAP
# built by pygments
EXTRA_MAPPINGS = {'html': ['Text']}

# additional lexer definitions for custom files; this overrides pygments
# lexers, and uses the defined lexer name to colorize the files. Format is
# {'ext': 'lexer_name'}. The list of lexers can be printed running:
# >> python -c "import pprint;from pygments import lexers;
# pprint.pprint([(x[0], x[1]) for x in lexers.get_all_lexers()]);"

EXTRA_LEXERS = {
    'tt': 'vbnet'
}
@@ -0,0 +1,147 b''
1 # Copyright (C) 2016-2018 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 import os
20 import functools
21 import collections
22
23
class HookResponse(object):
    """
    Result of a hook call: `status` 0 means success, a non-zero status
    blocks the operation; `output` is a message shown back to the client.
    """

    def __init__(self, status, output):
        self.status = status
        self.output = output

    def __add__(self, other):
        """
        Combine two responses: the worst (highest) status wins and outputs
        are concatenated. Tolerates non-HookResponse operands by falling
        back to status 0 / empty output via getattr defaults.
        """
        other_status = getattr(other, 'status', 0)
        new_status = max(self.status, other_status)
        other_output = getattr(other, 'output', '')
        new_output = self.output + other_output

        return HookResponse(new_status, new_output)

    def __bool__(self):
        # truthy == success (status 0)
        return self.status == 0

    # Python 2 compatibility: this module still uses py2-era constructs
    # elsewhere (e.g. func.func_name); py2 tests truthiness via __nonzero__
    __nonzero__ = __bool__
39
40
class DotDict(dict):
    """
    dict subclass that also allows attribute-style access to its keys:
    ``d.key`` behaves like ``d['key']`` for keys not shadowed by real
    object attributes.
    """

    def __contains__(self, k):
        try:
            return dict.__contains__(self, k) or hasattr(self, k)
        except Exception:
            # narrowed from a bare `except:` -- containment checks should
            # never propagate, but SystemExit/KeyboardInterrupt must
            return False

    # only called if k not found in normal places
    def __getattr__(self, k):
        try:
            return object.__getattribute__(self, k)
        except AttributeError:
            try:
                return self[k]
            except KeyError:
                raise AttributeError(k)

    def __setattr__(self, k, v):
        try:
            object.__getattribute__(self, k)
        except AttributeError:
            try:
                self[k] = v
            except Exception:
                # narrowed from a bare `except:`
                raise AttributeError(k)
        else:
            object.__setattr__(self, k, v)

    def __delattr__(self, k):
        try:
            object.__getattribute__(self, k)
        except AttributeError:
            try:
                del self[k]
            except KeyError:
                raise AttributeError(k)
        else:
            object.__delattr__(self, k)

    def toDict(self):
        # recursively convert back into plain dicts
        return unserialize(self)

    def __repr__(self):
        keys = list(self.keys())
        keys.sort()
        args = ', '.join(['%s=%r' % (key, self[key]) for key in keys])
        return '%s(%s)' % (self.__class__.__name__, args)

    @staticmethod
    def fromDict(d):
        # recursively convert plain dicts into DotDict instances
        return serialize(d)
93
94
def serialize(x):
    """Recursively turn plain dicts into DotDict instances, keeping list/tuple types."""
    if isinstance(x, dict):
        return DotDict((key, serialize(value)) for key, value in x.items())
    if isinstance(x, (list, tuple)):
        return type(x)(serialize(item) for item in x)
    return x
102
103
def unserialize(x):
    """Recursively turn DotDict-like mappings back into plain dicts, keeping list/tuple types."""
    if isinstance(x, dict):
        return {key: unserialize(value) for key, value in x.items()}
    if isinstance(x, (list, tuple)):
        return type(x)(unserialize(item) for item in x)
    return x
111
112
113 def _verify_kwargs(func_name, expected_parameters, kwargs):
114 """
115 Verify that exactly `expected_parameters` are passed in as `kwargs`.
116 """
117 expected_parameters = set(expected_parameters)
118 kwargs_keys = set(kwargs.keys())
119 if kwargs_keys != expected_parameters:
120 missing_kwargs = expected_parameters - kwargs_keys
121 unexpected_kwargs = kwargs_keys - expected_parameters
122 raise AssertionError(
123 "func:%s: missing parameters: %r, unexpected parameters: %s" %
124 (func_name, missing_kwargs, unexpected_kwargs))
125
126
def has_kwargs(required_args):
    """
    Decorator to verify extension call arguments.

    :param required_args: mapping of required kwarg name -> description;
        only the keys are used for validation.
    """
    def wrap(func):
        # functools.wraps preserves the wrapped function's metadata
        # (functools is already imported at module level)
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # func.__name__ works on both py2 and py3; the original used
            # the py2-only func.func_name
            _verify_kwargs(func.__name__, required_args.keys(), kwargs)
            # in case there's `calls` defined on module we store the data
            maybe_log_call(func.__name__, args, kwargs)
            return func(*args, **kwargs)
        return wrapper
    return wrap
141
142
def maybe_log_call(name, args, kwargs):
    """
    Record a hook call on ``rcextensions.calls`` when that attribute exists
    (opt-in call capture, e.g. for tests); otherwise do nothing.
    """
    from rhodecode.config import rcextensions
    if hasattr(rcextensions, 'calls'):
        calls = rcextensions.calls
        # `calls` looks like a defaultdict(list)-style mapping -- TODO confirm
        calls[name].append((args, kwargs))
@@ -1,272 +1,275 b''
1 1 # Nix environment for the community edition
2 2 #
3 3 # This shall be as lean as possible, just producing the enterprise-ce
4 4 # derivation. For advanced tweaks to pimp up the development environment we use
5 5 # "shell.nix" so that it does not have to clutter this file.
6 6 #
7 7 # Configuration, set values in "~/.nixpkgs/config.nix".
8 8 # example
9 9 # {
10 10 # # Thoughts on how to configure the dev environment
11 11 # rc = {
12 12 # codeInternalUrl = "https://usr:token@internal-code.rhodecode.com";
13 13 # sources = {
14 14 # rhodecode-vcsserver = "/home/user/work/rhodecode-vcsserver";
15 15 # rhodecode-enterprise-ce = "/home/user/work/rhodecode-enterprise-ce";
16 16 # rhodecode-enterprise-ee = "/home/user/work/rhodecode-enterprise-ee";
17 17 # };
18 18 # };
19 19 # }
20 20
21 21 args@
22 22 { pythonPackages ? "python27Packages"
23 23 , pythonExternalOverrides ? self: super: {}
24 24 , doCheck ? false
25 25 , ...
26 26 }:
27 27
28 28 let
29 29 # Use nixpkgs from args or import them. We use this indirect approach
30 30 # through args to be able to use the name `pkgs` for our customized packages.
31 31 # Otherwise we will end up with an infinite recursion.
32 32 pkgs = args.pkgs or (import <nixpkgs> { });
33 33
34 34 # Works with the new python-packages, still can fallback to the old
35 35 # variant.
36 36 basePythonPackagesUnfix = basePythonPackages.__unfix__ or (
37 37 self: basePythonPackages.override (a: { inherit self; }));
38 38
39 39 # Evaluates to the last segment of a file system path.
40 40 basename = path: with pkgs.lib; last (splitString "/" path);
41 41
42 42 # source code filter used as argument to builtins.filterSource.
43 43 src-filter = path: type: with pkgs.lib;
44 44 let
45 45 ext = last (splitString "." path);
46 46 in
47 47 !builtins.elem (basename path) [
48 48 ".git" ".hg" "__pycache__" ".eggs" ".idea" ".dev"
49 49 "bower_components" "node_modules"
50 50 "build" "data" "result" "tmp"] &&
51 51 !builtins.elem ext ["egg-info" "pyc"] &&
52 52 # TODO: johbo: This check is wrong, since "path" contains an absolute path,
53 53 # it would still be good to restore it since we want to ignore "result-*".
54 54 !hasPrefix "result" path;
55 55
56 56 sources =
57 57 let
58 58 inherit (pkgs.lib) all isString attrValues;
59 59 sourcesConfig = pkgs.config.rc.sources or {};
60 60 in
61 61 # Ensure that sources are configured as strings. Using a path
62 62 # would result in a copy into the nix store.
63 63 assert all isString (attrValues sourcesConfig);
64 64 sourcesConfig;
65 65
66 66 version = builtins.readFile "${rhodecode-enterprise-ce-src}/rhodecode/VERSION";
67 67 rhodecode-enterprise-ce-src = builtins.filterSource src-filter ./.;
68 68
69 69 buildBowerComponents = pkgs.buildBowerComponents;
70 70 nodeEnv = import ./pkgs/node-default.nix {
71 71 inherit pkgs;
72 72 };
73 73 nodeDependencies = nodeEnv.shell.nodeDependencies;
74 74
75 75 bowerComponents = buildBowerComponents {
76 76 name = "enterprise-ce-${version}";
77 77 generated = ./pkgs/bower-packages.nix;
78 78 src = rhodecode-enterprise-ce-src;
79 79 };
80 80
81 81 rhodecode-testdata-src = sources.rhodecode-testdata or (
82 82 pkgs.fetchhg {
83 83 url = "https://code.rhodecode.com/upstream/rc_testdata";
84 84 rev = "v0.10.0";
85 85 sha256 = "0zn9swwvx4vgw4qn8q3ri26vvzgrxn15x6xnjrysi1bwmz01qjl0";
86 86 });
87 87
88 88 rhodecode-testdata = import "${rhodecode-testdata-src}/default.nix" {
89 89 inherit
90 90 doCheck
91 91 pkgs
92 92 pythonPackages;
93 93 };
94 94
95 95 pythonLocalOverrides = self: super: {
96 96 rhodecode-enterprise-ce =
97 97 let
98 98 linkNodeAndBowerPackages = ''
99 99 export RHODECODE_CE_PATH=${rhodecode-enterprise-ce-src}
100 100
101 101 echo "[BEGIN]: Link node packages"
102 102 rm -fr node_modules
103 103 mkdir node_modules
104 104 # johbo: Linking individual packages allows us to run "npm install"
105 105 # inside of a shell to try things out. Re-entering the shell will
106 106 # restore a clean environment.
107 107 ln -s ${nodeDependencies}/lib/node_modules/* node_modules/
108 108 echo "[DONE]: Link node packages"
109 109
110 110 echo "[BEGIN]: Link bower packages"
111 111 rm -fr bower_components
112 112 mkdir bower_components
113 113 ln -s ${bowerComponents}/bower_components/* bower_components/
114 114 echo "[DONE]: Link bower packages"
115 115 '';
116 116
117 117 releaseName = "RhodeCodeEnterpriseCE-${version}";
118 118 in super.rhodecode-enterprise-ce.override (attrs: {
119 119 inherit
120 120 doCheck
121 121 version;
122 122
123 123 name = "rhodecode-enterprise-ce-${version}";
124 124 releaseName = releaseName;
125 125 src = rhodecode-enterprise-ce-src;
126 126 dontStrip = true; # prevent strip, we don't need it.
127 127
128 128 # expose the following attributes outside
129 129 passthru = {
130 130 inherit
131 131 rhodecode-testdata
132 132 bowerComponents
133 133 linkNodeAndBowerPackages
134 134 myPythonPackagesUnfix
135 135 pythonLocalOverrides
136 136 pythonCommunityOverrides;
137 137
138 138 pythonPackages = self;
139 139 };
140 140
141 141 buildInputs =
142 142 attrs.buildInputs or [] ++ [
143 143 rhodecode-testdata
144 144 pkgs.nodePackages.bower
145 145 pkgs.nodePackages.grunt-cli
146 146 ];
147 147
148 148 #NOTE: option to inject additional propagatedBuildInputs
149 149 propagatedBuildInputs =
150 150 attrs.propagatedBuildInputs or [] ++ [
151 151
152 152 ];
153 153
154 154 LC_ALL = "en_US.UTF-8";
155 155 LOCALE_ARCHIVE =
156 156 if pkgs.stdenv.isLinux
157 157 then "${pkgs.glibcLocales}/lib/locale/locale-archive"
158 158 else "";
159 159
160 160 # Add bin directory to path so that tests can find 'rhodecode'.
161 161 preCheck = ''
162 162 export PATH="$out/bin:$PATH"
163 163 '';
164 164
165 165 # custom check phase for testing
166 166 checkPhase = ''
167 167 runHook preCheck
168 168 PYTHONHASHSEED=random py.test -vv -p no:sugar -r xw --cov-config=.coveragerc --cov=rhodecode --cov-report=term-missing rhodecode
169 169 runHook postCheck
170 170 '';
171 171
172 172 postCheck = ''
173 173 echo "Cleanup of rhodecode/tests"
174 174 rm -rf $out/lib/${self.python.libPrefix}/site-packages/rhodecode/tests
175 175 '';
176 176
177 177 preBuild = ''
178 178 echo "Building frontend assets"
179 179 ${linkNodeAndBowerPackages}
180 180 grunt
181 181 rm -fr node_modules
182 182 '';
183 183
184 184 postInstall = ''
185 185 # check required files
186 186 if [ ! -f rhodecode/public/js/scripts.js ]; then
187 187 echo "Missing scripts.js"
188 188 exit 1
189 189 fi
190 190 if [ ! -f rhodecode/public/css/style.css ]; then
191 191 echo "Missing style.css"
192 192 exit 1
193 193 fi
194 194
195 195 echo "Writing enterprise-ce meta information for rccontrol to nix-support/rccontrol"
196 196 mkdir -p $out/nix-support/rccontrol
197 197 cp -v rhodecode/VERSION $out/nix-support/rccontrol/version
198 198 echo "[DONE]: enterprise-ce meta information for rccontrol written"
199 199
200 200 mkdir -p $out/etc
201 201 cp configs/production.ini $out/etc
202 202 echo "[DONE]: saved enterprise-ce production.ini into $out/etc"
203 203
204 cp -r rhodecode/config/rcextensions $out/etc/rcextensions.tmpl
205 echo "[DONE]: saved enterprise-ce rcextensions into $out/etc/rcextensions.tmpl"
206
204 207 # python based programs need to be wrapped
205 208 mkdir -p $out/bin
206 209
207 210 # required binaries from dependencies
208 211 ln -s ${self.supervisor}/bin/supervisorctl $out/bin/
209 212 ln -s ${self.supervisor}/bin/supervisord $out/bin/
210 213 ln -s ${self.pastescript}/bin/paster $out/bin/
211 214 ln -s ${self.channelstream}/bin/channelstream $out/bin/
212 215 ln -s ${self.celery}/bin/celery $out/bin/
213 216 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
214 217 ln -s ${self.pyramid}/bin/prequest $out/bin/
215 218 ln -s ${self.pyramid}/bin/pserve $out/bin/
216 219
217 220 echo "[DONE]: created symlinks into $out/bin"
218 221 DEPS="$out/bin/supervisorctl \
219 222 $out/bin/supervisord \
220 223 $out/bin/paster \
221 224 $out/bin/channelstream \
222 225 $out/bin/celery \
223 226 $out/bin/gunicorn \
224 227 $out/bin/prequest \
225 228 $out/bin/pserve"
226 229
227 230 # wrap only dependency scripts, they require to have full PYTHONPATH set
228 231 # to be able to import all packages
229 232 for file in $DEPS;
230 233 do
231 234 wrapProgram $file \
232 235 --prefix PATH : $PATH \
233 236 --prefix PYTHONPATH : $PYTHONPATH \
234 237 --set PYTHONHASHSEED random
235 238 done
236 239
237 240 echo "[DONE]: enterprise-ce binary wrapping"
238 241
239 242 # rhodecode-tools don't need wrapping
240 243 ln -s ${self.rhodecode-tools}/bin/rhodecode-* $out/bin/
241 244
242 245 '';
243 246 });
244 247
245 248 };
246 249
247 250 basePythonPackages = with builtins;
248 251 if isAttrs pythonPackages then
249 252 pythonPackages
250 253 else
251 254 getAttr pythonPackages pkgs;
252 255
253 256 pythonGeneratedPackages = import ./pkgs/python-packages.nix {
254 257 inherit pkgs;
255 258 inherit (pkgs) fetchurl fetchgit fetchhg;
256 259 };
257 260
258 261 pythonCommunityOverrides = import ./pkgs/python-packages-overrides.nix {
259 262 inherit pkgs basePythonPackages;
260 263 };
261 264
262 265 # Apply all overrides and fix the final package set
263 266 myPythonPackagesUnfix = with pkgs.lib;
264 267 (extends pythonExternalOverrides
265 268 (extends pythonLocalOverrides
266 269 (extends pythonCommunityOverrides
267 270 (extends pythonGeneratedPackages
268 271 basePythonPackagesUnfix))));
269 272
270 273 myPythonPackages = (pkgs.lib.fix myPythonPackagesUnfix);
271 274
272 275 in myPythonPackages.rhodecode-enterprise-ce
@@ -1,25 +1,36 b''
1 1 .. _extensions-hooks-ref:
2 2
3 3 Extensions & Hooks
4 4 ==================
5 5
6 6 The extensions & hooks section references three concepts regularly,
7 7 so to clarify what is meant each time, read the following definitions:
8 8
9 9 * **Plugin**: A Plugin is software that adds a specific feature to
10 10 an existing software application.
11 11 * **Extension**: An extension extends the capabilities of,
12 12 or the data available to, an existing software application.
13 13 * **Hook**: A hook intercepts function calls, messages, or events passed
14 14 between software components and can be used to trigger plugins, or their
15 15 extensions.
16 16
17 .. toctree::
17
18 Hooks
19 -----
20
21 Within |RCM| there are two types of supported hooks.
18 22
19 rcx
20 install-ext
21 config-ext
22 extensions
23 hooks
24 full-blown-example
25 int-slack
23 * **Internal built-in hooks**: The internal |hg|, |git| or |svn| hooks are
24 triggered by different VCS operations, like push, pull,
25 or clone and are non-configurable, but you can add your own VCS hooks,
26 see :ref:`custom-hooks`.
27 * **Custom rcextensions hooks**: User defined hooks centre around the lifecycle of
28 certain actions such as |repo| creation, user creation etc. The actions
29 these hooks trigger can be rejected based on the API permissions of the
30 user calling them.
31
32 For instructions on how to use the custom `rcextensions`
33 see :ref:`integrations-rcextensions` section.
34
35
36
@@ -1,57 +1,60 b''
1 1 .. _integrations:
2 2
3 3 Integrations
4 4 ------------
5 5
6 Rhodecode supports integrations with external services for various events,
6 |RCE| supports integrations with external services for various events,
7 7 such as commit pushes and pull requests. Multiple integrations of the same type
8 8 can be added at the same time; this is useful for posting different events to
9 9 different Slack channels, for example.
10 10
11 11 Supported integrations
12 12 ^^^^^^^^^^^^^^^^^^^^^^
13 13
14 ============================ ============ =====================================
15 Type/Name |RC| Edition Description
16 ============================ ============ =====================================
17 :ref:`integrations-slack` |RCCEshort| https://slack.com/
18 :ref:`integrations-hipchat` |RCCEshort| https://www.hipchat.com/
19 :ref:`integrations-webhook` |RCCEshort| POST events as `json` to a custom url
20 :ref:`integrations-ci` |RCCEshort| Trigger Builds for Common CI Systems
21 :ref:`integrations-email` |RCCEshort| Send repo push commits by email
22 :ref:`integrations-jenkins` |RCEEshort| Trigger Builds for Jenkins CI System
23 :ref:`integrations-redmine` |RCEEshort| Close/Resolve/Reference Redmine issues
24 :ref:`integrations-jira` |RCEEshort| Close/Resolve/Reference JIRA issues
25 ============================ ============ =====================================
14 ================================ ============ ========================================
15 Type/Name |RC| Edition Description
16 ================================ ============ ========================================
17 :ref:`integrations-webhook` |RCCEshort| Trigger events as `json` to a custom url
18 :ref:`integrations-slack` |RCCEshort| Integrate with https://slack.com/
19 :ref:`integrations-hipchat` |RCCEshort| Integrate with https://www.hipchat.com/
20 :ref:`integrations-email` |RCCEshort| Send repo push commits by email
21 :ref:`integrations-ci` |RCCEshort| Trigger Builds for Common CI Systems
22 :ref:`integrations-rcextensions` |RCCEshort| Advanced low-level integration framework
23
24 :ref:`integrations-jenkins` |RCEEshort| Trigger Builds for Jenkins CI System
25 :ref:`integrations-redmine` |RCEEshort| Close/Resolve/Reference Redmine issues
26 :ref:`integrations-jira` |RCEEshort| Close/Resolve/Reference JIRA issues
27 ================================ ============ ========================================
26 28
27 29 .. _creating-integrations:
28 30
29 31 Creating an Integration
30 32 ^^^^^^^^^^^^^^^^^^^^^^^
31 33
32 34 Integrations can be added globally via the admin UI:
33 35
34 36 :menuselection:`Admin --> Integrations`
35 37
36 38 or per repository in each repository's settings:
37 39
38 40 :menuselection:`Admin --> Repositories --> Edit --> Integrations`
39 41
40 42 To create an integration, select the type from the list in the *Create New
41 43 Integration* section.
42 44
43 45 The *Current Integrations* section shows existing integrations that have been
44 46 created along with their type (eg. Slack) and enabled status.
45 47
46 48 See pages specific to each type of integration for more instructions:
47 49
48 50 .. toctree::
49 51
50 52 slack
51 53 hipchat
52 54 redmine
53 55 jira
54 56 webhook
55 57 email
56 58 ci
57 59 jenkins
60 integrations-rcextensions
@@ -1,677 +1,578 b''
1 1 .. _tools-cli:
2 2
3 3 |RCT| CLI
4 4 ---------
5 5
6 6 The commands available with |RCT| can be split into three categories:
7 7
8 8 - Remotely executable commands that can be run from your local machine once you
9 9 have your connection details to |RCE| configured.
10 10 - Locally executable commands that can be run on the server to carry out
11 11 general maintenance.
12 12 - Local configuration commands used to help set up your |RCT| configuration.
13 13
14 14
15 15 rhodecode-tools
16 16 ---------------
17 17
18 18 Use |RCT| to setup automation, run the indexer, and install extensions for
19 19 your |RCM| instances. Options:
20 20
21 21 .. rst-class:: dl-horizontal
22 22
23 23 \ - -apihost <api_host>
24 24 Set the API host value.
25 25
26 26 \ - -apikey <apikey_value>
27 27 Set the API key value.
28 28
29 29 \-c, - -config <config_file>
30 30 Create a configuration file. The default file is created
31 31 in ``~/.rhoderc``
32 32
33 33 \ - -save-config
34 34 Save the configuration file.
35 35
36 36 \ - -show-config
37 37 Show the current configuration values.
38 38
39 39 \ - -format {json,pretty}
40 40 Set the formatted representation.
41 41
42 42 Example usage:
43 43
44 44 .. code-block:: bash
45 45
46 46 $ rhodecode-tools --apikey=key --apihost=http://rhodecode.server \
47 47 --save-config
48 48
49 49 rhodecode-api
50 50 -------------
51 51
52 52 The |RC| API lets you connect to |RCE| and carry out management tasks from a
53 53 remote machine, for more information about the API, see the :ref:`api`. To
54 54 pass arguments on the command-line use the ``method:option`` syntax.
55 55
56 56 Example usage:
57 57
58 58 .. code-block:: bash
59 59
60 60 # Run the get_repos API call and sample output
61 61 $ rhodecode-api --instance-name=enterprise-1 create_repo \
62 62 repo_name:brand-new repo_type:hg description:repo-desc
63 63
64 64 {
65 65 "error": null,
66 66 "id": 1110,
67 67 "result": {
68 68 "msg": "Created new repository `brand-new`",
69 69 "success": true,
70 70 "task": null
71 71 }
72 72 }
73 73
74 74 Options:
75 75
76 76 .. rst-class:: dl-horizontal
77 77
78 78 \ - -api-cache-only
79 79 Requires a cache to be present when running this call
80 80
81 81 \ - -api-cache-rebuild
82 82 Replaces existing cached values with new ones from server
83 83
84 84 \ - -api-cache <PATH>
85 85 Use a special cache dir to read responses from instead of the server
86 86
87 87 \ - -api-cert-verify
88 88 Verify the endpoint ssl certificate
89 89
90 90 \ - -api-cert <PATH>
91 91 Path to alternate CA bundle.
92 92
93 93 \ - -apihost <api_host>
94 94 Set the API host value.
95 95
96 96 \ - -apikey <apikey_value>
97 97 Set the API key value.
98 98
99 99 \ - -instance-name <instance-id>
100 100 Set the instance name
101 101
102 102 \-I, - -install-dir <DIR>
103 103 Location of application instances
104 104
105 105 \-c, - -config <.rhoderc-file>
106 106 Location of the :file:`.rhoderc`
107 107
108 108 \-F, - -format {json,pretty}
109 109 Set the formatted representation.
110 110
111 111 \-h, - -help
112 112 Show help messages.
113 113
114 114 \-v, - -verbose
115 115 Enable verbose messaging
116 116
117 117 rhodecode-cleanup-gists
118 118 -----------------------
119 119
120 120 Use this to delete gists within |RCM|. Options:
121 121
122 122 .. rst-class:: dl-horizontal
123 123
124 124 \-c, - -config <config_file>
125 125 Set the file path to the configuration file. The default file is
126 126 :file:`/home/{user}/.rhoderc`
127 127
128 128 \ - -corrupted
129 129 Remove gists with corrupted metadata.
130 130
131 131 \ - -dont-ask
132 132 Remove gists without asking for confirmation.
133 133
134 134 \-h, - -help
135 135 Show help messages. current configuration values.
136 136
137 137 \ - -instance-name <instance-id>
138 138 Set the instance name.
139 139
140 140 \-R, - -repo-dir
141 141 Set the repository file path.
142 142
143 143 \ - -version
144 144 Display your |RCT| version.
145 145
146 146 Example usage:
147 147
148 148 .. code-block:: bash
149 149
150 150 # Clean up gists related to an instance
151 151 $ rhodecode-cleanup-gists --instance-name=enterprise-1
152 152 Scanning for gists in /home/brian/repos/.rc_gist_store...
153 153 preparing to remove [3] found gists
154 154
155 155 # Clean up corrupted gists in an instance
156 156 $ rhodecode-cleanup-gists --instance-name=enterprise-1 --corrupted
157 157 Scanning for gists in /home/brian/repos/.rc_gist_store...
158 158 preparing to remove [2] found gists
159 159 the following gists will be archived:
160 160 * EXPIRED: BAD METADATA | /home/brian/repos/.rc_gist_store/5
161 161 * EXPIRED: BAD METADATA | /home/brian/repos/.rc_gist_store/8FtC
162 162 are you sure you want to archive them? [y/N]: y
163 163 removing gist /home/brian/repos/.rc_gist_store/5
164 164 removing gist /home/brian/repos/.rc_gist_store/8FtCKdcbRKmEvRzTVsEt
165 165
166 166 rhodecode-cleanup-repos
167 167 -----------------------
168 168
169 169 Use this to manage |repos| and |repo| groups within |RCM|. Options:
170 170
171 171 .. rst-class:: dl-horizontal
172 172
173 173 \-c, - -config <config_file>
174 174 Set the file path to the configuration file. The default file is
175 175 :file:`/home/{user}/.rhoderc`.
176 176
177 177 \-h, - -help
178 178 Show help messages. current configuration values.
179 179
180 180 \ - -interactive
181 181 Enable an interactive prompt for each repository when deleting.
182 182
183 183 \ - -include-groups
184 184 Remove repository groups.
185 185
186 186 \ - -instance-name <instance-id>
187 187 Set the instance name.
188 188
189 189 \ - -list-only
190 190 Display repositories selected for deletion.
191 191
192 192 \ - -older-than <str>
193 193 Delete repositories older that a specified time.
194 194 You can use the following suffixes; d for days, h for hours,
195 195 m for minutes, s for seconds.
196 196
197 197 \-R, - -repo-dir
198 198 Set the repository file path
199 199
200 200 Example usage:
201 201
202 202 .. code-block:: bash
203 203
204 204 # Cleaning up repos using tools installed with RCE 350 and above
205 205 $ ~/.rccontrol/enterprise-4/profile/bin/rhodecode-cleanup-repos \
206 206 --instance-name=enterprise-4 --older-than=1d
207 207 Scanning for repositories in /home/brian/repos...
208 208 preparing to remove [2] found repositories older than 1 day, 0:00:00 (1d)
209 209
210 210 the following repositories will be deleted completely:
211 211 * REMOVED: 2015-08-05 00:23:18 | /home/brian/repos/rm__20150805_002318_831
212 212 * REMOVED: 2015-08-04 01:22:10 | /home/brian/repos/rm__20150804_012210_336
213 213 are you sure you want to remove them? [y/N]:
214 214
215 215 # Clean up repos older than 1 year
216 216 # If using virtualenv and pre RCE 350 tools installation
217 217 (venv)$ rhodecode-cleanup-repos --instance-name=enterprise-1 \
218 218 --older-than=365d
219 219
220 220 Scanning for repositories in /home/brian/repos...
221 221 preparing to remove [343] found repositories older than 365 days
222 222
223 223 # clean up repos older than 3 days
224 224 # If using virtualenv and pre RCE 350 tools installation
225 225 (venv)$ rhodecode-cleanup-repos --instance-name=enterprise-1 \
226 226 --older-than=3d
227 227 Scanning for repositories in /home/brian/repos...
228 228 preparing to remove [3] found repositories older than 3 days
229 229
230 230 .. _tools-config:
231 231
232 232 rhodecode-config
233 233 ----------------
234 234
235 235 Use this to create or update a |RCE| configuration file on the local machine.
236 236
237 237 .. rst-class:: dl-horizontal
238 238
239 239 \- -filename </path/to/config_file>
240 240 Set the file path to the |RCE| configuration file.
241 241
242 242 \- -show-defaults
243 243 Display the defaults set in the |RCE| configuration file.
244 244
245 245 \- -update
246 246 Update the configuration with the new settings passed on the command
247 247 line.
248 248
249 249 .. code-block:: bash
250 250
251 251 # Create a new config file
252 252 $ rhodecode-config --filename=dev.ini
253 253 Wrote new config file in /Users/user/dev.ini
254 254
255 255 # Update config value for given section:
256 256 $ rhodecode-config --update --filename=prod.ini [handler_console]level=INFO
257 257
258 258 $ rhodecode-config --filename=dev.ini --show-defaults
259 259 lang=en
260 260 cpu_number=4
261 261 uuid=<function <lambda> at 0x10d86ac08>
262 262 license_token=ff1e-aa9c-bb66-11e5
263 263 host=127.0.0.1
264 264 here=/Users/brian
265 265 error_aggregation_service=None
266 266 database_url=sqlite:///%(here)s/rhodecode.db?timeout=30
267 267 git_path=git
268 268 http_server=waitress
269 269 port=5000
270 270
271 271 .. _tools-rhodecode-extensions:
272 272
273 273 rhodecode-extensions
274 274 --------------------
275 275
276 |RCT| adds additional mapping for :ref:`indexing-ref`, statistics, and adds
277 additional code for push/pull/create/delete |repo| hooks. These hooks can be
278 used to send signals to build-bots such as jenkins. Options:
279
280 .. rst-class:: dl-horizontal
281
282 \-c, - -config <config_file>
283 Create a configuration file. The default file is created
284 in ``~/.rhoderc``
285
286 \-h, - -help
287 Show help messages.
288
289 \-F, - -format {json,pretty}
290 Set the formatted representation.
291
292 \-I, - -install-dir <str>
293 Set the location of the |RCE| installation. The default location is
294 :file:`/home/{user}/.rccontrol/`.
295
296 \ - -ini-file <str>
297 Path to the :file:`rhodecode.ini` file for that instance.
298
299 \ - -instance-name <instance-id>
300 Set the instance name.
301
302 \ - -plugins
303 Add plugins to your |RCE| installation. See the
304 :ref:`extensions-hooks-ref` section for more details.
305
306 \ - -version
307 Display your |RCT| version.
308
309
310 Once installed, you will see a :file:`rcextensions` folder in the instance
311 directory, for example :file:`home/{user}/.rccontrol/{instance-id}/rcextensions`
312
313 To install ``rcextensions``, use the following example:
314
315 .. code-block:: bash
316
317 # install extensions on the given instance
318 # If using virtualenv prior to RCE 350
319 (venv)$ rhodecode-extensions --instance-name=enterprise-1 \
320 --ini-file=rhodecode.ini
321 Writen new extensions file to rcextensions
322
323 # install extensions with additional plugins on the given instance
324 (venv)$ rhodecode-extensions --instance-name=enterprise-1 \
325 --ini-file=rhodecode.ini --plugins
326 Writen new extensions file to rcextensions
327
328 # installing extensions from 350 onwards
329 # as they are packaged with RCE
330 $ .rccontrol/enterprise-4/profile/bin/rhodecode-extensions --plugins \
331 --instance-name=enterprise-4 --ini-file=rhodecode.ini
332
333 Writen new extensions file to rcextensions
334
335 See the new extensions inside this directory for more details about the
336 additional hooks available, for example see the ``push_post.py`` file.
337
338 .. code-block:: python
339
340 import urllib
341 import urllib2
342
343 def run(*args, **kwargs):
344 """
345 Extra params
346
347 :param URL: url to send the data to
348 """
349
350 url = kwargs.pop('URL', None)
351 if url:
352 from rhodecode.lib.compat import json
353 from rhodecode.model.db import Repository
354
355 repo = Repository.get_by_repo_name(kwargs['repository'])
356 changesets = []
357 vcs_repo = repo.scm_instance_no_cache()
358 for r in kwargs['pushed_revs']:
359 cs = vcs_repo.get_changeset(r)
360 changesets.append(json.dumps(cs))
361
362 kwargs['pushed_revs'] = changesets
363 headers = {
364 'User-Agent': 'RhodeCode-SCM web hook',
365 'Content-type': 'application/x-www-form-urlencoded; charset=UTF-8',
366 'Accept': 'text/javascript, text/html, application/xml, '
367 'text/xml, */*',
368 'Accept-Encoding': 'gzip,deflate,sdch',
369 }
370
371 data = kwargs
372 data = urllib.urlencode(data)
373 req = urllib2.Request(url, data, headers)
374 response = urllib2.urlopen(req)
375 response.read()
376 return 0
276 Since version 4.14, `rcextensions` are shipped together with |RCE|. Please
277 check the :ref:`integrations-rcextensions` section for usage instructions.
377 278
378 279
379 280 rhodecode-gist
380 281 --------------
381 282
382 283 Use this to create, list, show, or delete gists within |RCM|. Options:
383 284
384 285 .. rst-class:: dl-horizontal
385 286
386 287 \ - -api-cache-only
387 288 Requires a cache to be present when running this call
388 289
389 290 \ - -api-cache-rebuild
390 291 Replaces existing cached values with new ones from server
391 292
392 293 \ - -api-cache PATH
393 294 Use a special cache dir to read responses from instead of the server
394 295
395 296 \ - -api-cert-verify
396 297 Verify the endpoint ssl certificate
397 298
398 299 \ - -api-cert PATH
399 300 Path to alternate CA bundle.
400 301
401 302 \ - -apihost <api_host>
402 303 Set the API host value.
403 304
404 305 \ - -apikey <apikey_value>
405 306 Set the API key value.
406 307
407 308 \-c, - -config <config_file>
408 309 Create a configuration file.
409 310 The default file is created in :file:`~/.rhoderc`
410 311
411 312 \ - -create <gistname>
412 313 create the gist
413 314
414 315 \-d, - -description <str>
415 316 Set gist description
416 317
417 318 \ - -delete <gistid>
418 319 Delete the gist
419 320
420 321 \-f, - -file
421 322 Specify the filename The file extension will enable syntax highlighting.
422 323
423 324 \-F, - -format {json,pretty}
424 325 Set the formatted representation.
425 326
426 327 \ - -help
427 328 Show help messages.
428 329
429 330 \-I, - -install-dir <DIR>
430 331 Location of application instances
431 332
432 333 \ - -instance-name <instance-id>
433 334 Set the instance name.
434 335
435 336 \ - -list
436 337 Display instance gists.
437 338
438 339 \-l, --lifetime <minutes>
439 340 Set the gist lifetime. The default value is (-1) forever
440 341
441 342 \ - -show <gistname>
442 343 Show the content of the gist
443 344
444 345 \-o, - -open
445 346 After creating Gist open it in browser
446 347
447 348 \-p, - -private
448 349 Create a private gist
449 350
450 351 \ - -version
451 352 Display your |RCT| version.
452 353
453 354 Example usage:
454 355
455 356 .. code-block:: bash
456 357
457 358 # List the gists in an instance
458 359 (venv)brian@ubuntu:~$ rhodecode-gist --instance-name=enterprise-1 list
459 360 {
460 361 "error": null,
461 362 "id": 7102,
462 363 "result": [
463 364 {
464 365 "access_id": "2",
465 366 "content": null,
466 367 "created_on": "2015-01-19T12:52:26.494",
467 368 "description": "A public gust",
468 369 "expires": -1.0,
469 370 "gist_id": 2,
470 371 "type": "public",
471 372 "url": "http://127.0.0.1:10003/_admin/gists/2"
472 373 },
473 374 {
474 375 "access_id": "7gs6BsSEC4pKUEPLz5AB",
475 376 "content": null,
476 377 "created_on": "2015-01-19T11:27:40.812",
477 378 "description": "Gist testing API",
478 379 "expires": -1.0,
479 380 "gist_id": 1,
480 381 "type": "private",
481 382 "url": "http://127.0.0.1:10003/_admin/gists/7gs6BsSEC4pKUEPLz5AB"
482 383 }
483 384 ]
484 385 }
485 386
486 387 # delete a particular gist
487 388 # You use the access_id to specify the gist to delete
488 389 (venv)brian@ubuntu:~$ rhodecode-gist delete 2 --instance-name=enterprise-1
489 390 {
490 391 "error": null,
491 392 "id": 6284,
492 393 "result": {
493 394 "gist": null,
494 395 "msg": "deleted gist ID:2"
495 396 }
496 397 }
497 398
498 399 # cat a file and pipe to new gist
499 400 # This is if you are using virtualenv
500 401 (venv)$ cat ~/.rhoderc | rhodecode-gist --instance-name=enterprise-1 \
501 402 -d '.rhoderc copy' create
502 403
503 404 {
504 405 "error": null,
505 406 "id": 5374,
506 407 "result": {
507 408 "gist": {
508 409 "access_id": "7",
509 410 "content": null,
510 411 "created_on": "2015-01-26T11:31:58.774",
511 412 "description": ".rhoderc copy",
512 413 "expires": -1.0,
513 414 "gist_id": 7,
514 415 "type": "public",
515 416 "url": "http://127.0.0.1:10003/_admin/gists/7"
516 417 },
517 418 "msg": "created new gist"
518 419 }
519 420 }
520 421
521 422 # Cat a file and pipe to gist
522 423 # in RCE 3.5.0 tools and above
523 424 $ cat ~/.rhoderc | ~/.rccontrol/{instance-id}/profile/bin/rhodecode-gist \
524 425 --instance-name=enterprise-4 -d '.rhoderc copy' create
525 426 {
526 427 "error": null,
527 428 "id": 9253,
528 429 "result": {
529 430 "gist": {
530 431 "access_id": "4",
531 432 "acl_level": "acl_public",
532 433 "content": null,
533 434 "created_on": "2015-08-20T05:54:11.250",
534 435 "description": ".rhoderc copy",
535 436 "expires": -1.0,
536 437 "gist_id": 4,
537 438 "modified_at": "2015-08-20T05:54:11.250",
538 439 "type": "public",
539 440 "url": "http://127.0.0.1:10000/_admin/gists/4"
540 441 },
541 442 "msg": "created new gist"
542 443 }
543 444 }
544 445
545 446
546 447 rhodecode-index
547 448 ---------------
548 449
549 450 More detailed information regarding setting up the indexer is available in
550 451 the :ref:`indexing-ref` section. Options:
551 452
552 453 .. rst-class:: dl-horizontal
553 454
554 455 \ - -api-cache-only
555 456 Requires a cache to be present when running this call
556 457
557 458 \ - -api-cache-rebuild
558 459 Replaces existing cached values with new ones from server
559 460
560 461 \ - -api-cache PATH
561 462 Use a special cache dir to read responses from instead of the server
562 463
563 464 \ - -api-cert-verify
564 465 Verify the endpoint ssl certificate
565 466
566 467 \ - -api-cert PATH
567 468 Path to alternate CA bundle.
568 469
569 470 \ - -apihost <api_host>
570 471 Set the API host value.
571 472
572 473 \ - -apikey <apikey_value>
573 474 Set the API key value.
574 475
575 476 \-c, --config <config_file>
576 477 Create a configuration file.
577 478 The default file is created in :file:`~/.rhoderc`
578 479
579 480 \ - -create-mapping <PATH>
580 481 Creates an example mapping configuration for indexer.
581 482
582 483 \-F, - -format {json,pretty}
583 484 Set the formatted representation.
584 485
585 486 \-h, - -help
586 487 Show help messages.
587 488
588 489 \ - -instance-name <instance-id>
589 490 Set the instance name
590 491
591 492 \-I, - -install-dir <DIR>
592 493 Location of application instances
593 494
594 495 \-m, - -mapping <file_name>
595 496 Parse the output to the .ini mapping file.
596 497
597 498 \ - -optimize
598 499 Optimize index for performance by amalgamating multiple index files
599 500 into one. Greatly increases incremental indexing speed.
600 501
601 502 \-R, - -repo-dir <DIRECTORY>
602 503 Location of repositories
603 504
604 505 \ - -source <PATH>
605 506 Use a special source JSON file to feed the indexer
606 507
607 508 \ - -version
608 509 Display your |RCT| version.
609 510
610 511 Example usage:
611 512
612 513 .. code-block:: bash
613 514
614 515 # Run the indexer
615 516 $ ~/.rccontrol/enterprise-4/profile/bin/rhodecode-index \
616 517 --instance-name=enterprise-4
617 518
618 519 # Run indexer based on mapping.ini file
619 520 # This is using pre-350 virtualenv
620 521 (venv)$ rhodecode-index --instance-name=enterprise-1
621 522
622 523 # Index from the command line without creating
623 524 # the .rhoderc file
624 525 $ rhodecode-index --apikey=key --apihost=http://rhodecode.server \
625 526 --instance-name=enterprise-2 --save-config
626 527
627 528 # Create the indexing mapping file
628 529 $ ~/.rccontrol/enterprise-4/profile/bin/rhodecode-index \
629 530 --create-mapping mapping.ini --instance-name=enterprise-4
630 531
631 532 .. _tools-rhodecode-list-instance:
632 533
633 534 rhodecode-list-instances
634 535 ------------------------
635 536
636 537 Use this command to list the instance details configured in the
637 538 :file:`~/.rhoderc` file.
638 539
639 540 .. code-block:: bash
640 541
641 542 $ .rccontrol/enterprise-1/profile/bin/rhodecode-list-instances
642 543 [instance:production] - Config only
643 544 API-HOST: https://some.url.com
644 545 API-KEY: some.auth.token
645 546
646 547 [instance:development] - Config only
647 548 API-HOST: http://some.ip.address
648 549 API-KEY: some.auth.token
649 550
650 551
651 552 .. _tools-setup-config:
652 553
653 554 rhodecode-setup-config
654 555 ----------------------
655 556
656 557 Use this command to create the ``~.rhoderc`` file required by |RCT| to access
657 558 remote instances.
658 559
659 560 .. rst-class:: dl-horizontal
660 561
661 562 \- -instance-name <name>
662 563 Specify the instance name in the :file:`~/.rhoderc`
663 564
664 565 \api_host <hostname>
665 566 Create a configuration file. The default file is created
666 567 in ``~/.rhoderc``
667 568
668 569 \api_key <auth-token>
669 570 Create a configuration file. The default file is created
670 571 in ``~/.rhoderc``
671 572
672 573
673 574 .. code-block:: bash
674 575
675 576 (venv)$ rhodecode-setup-config --instance-name=tea api_host=URL api_key=xyz
676 577 Config not found under /Users/username/.rhoderc, creating a new one
677 578 Wrote new configuration into /Users/username/.rhoderc
@@ -1,76 +1,66 b''
1 1 .. _tools-overview:
2 2
3 3 |RCT| Overview
4 4 --------------
5 5
6 6 To install |RCT| correctly, see the installation steps covered in
7 7 :ref:`install-tools`, and :ref:`config-rhoderc`.
8 8
9 9 Once |RCT| is installed, and the :file:`/home/{user}/.rhoderc` file is
10 10 configured you can then use |RCT| on each |RCM| instance to carry out admin
11 11 tasks. Use the following example to configure that file,
12 12 and once configured see the :ref:`tools-cli` for more details.
13 13
14 14 .. note::
15 15
16 16 |RCT| require |PY| 2.7 to run.
17 17
18 18 .. code-block:: bash
19 19
20 20 # Get the status of each instance you wish to use with Tools
21 21 (venv)brian@ubuntu:~$ rccontrol status
22 22
23 23 - NAME: momentum-1
24 24 - STATUS: RUNNING
25 25 - TYPE: Momentum
26 26 - VERSION: 3.0.0-nightly-momentum
27 27 - URL: http://127.0.0.1:10003
28 28
29 29 - NAME: momentum-3
30 30 - STATUS: RUNNING
31 31 - TYPE: Momentum
32 32 - VERSION: 3.0.0-nightly-momentum
33 33 - URL: http://127.0.0.1:10007
34 34
35 35 Example :file:`/home/{user}/.rhoderc` file.
36 36
37 37 .. code-block:: ini
38 38
39 39 # Configure the .rhoderc file for each instance
40 40 # API keys found in your instance
41 41 [instance:enterprise-1]
42 42 api_host = http://127.0.0.1:10003/
43 43 api_key = 91fdbdc257289c46633ef5aab274412911de1ba9
44 44 repo_dir = /home/brian/repos
45 45
46 46 [instance:enterprise-3]
47 47 api_host = http://127.0.0.1:10007/
48 48 api_key = 5a925f65438d29f8d6ced8ab8e8c3d305998d1d9
49 49 repo_dir = /home/brian/testing-repos/
50 50
51 51
52 52 Example usage of |RCT| after |RCE| 3.5.0. From this version onwards |RCT| is
53 53 packaged with |RCE| by default.
54 54
55 55 .. code-block:: bash
56 56
57 $ .rccontrol/enterprise-4/profile/bin/rhodecode-extensions --plugins \
58 --instance-name=enterprise-4 --ini-file=rhodecode.ini
57 $ .rccontrol/enterprise-4/profile/bin/rhodecode-api --instance-name=enterprise-4 get_ip [11:56:57 on 05/10/2018]
59 58
60 Writen new extensions file to rcextensions
61 Copied hipchat_push_notify.py plugin to rcextensions
62 Copied jira_pr_flow.py plugin to rcextensions
63 Copied default_reviewers.py plugin to rcextensions
64 Copied extract_commits.py plugin to rcextensions
65 Copied extract_issues.py plugin to rcextensions
66 Copied redmine_pr_flow.py plugin to rcextensions
67 Copied extra_fields.py plugin to rcextensions
68 Copied jira_smart_commits.py plugin to rcextensions
69 Copied http_notify.py plugin to rcextensions
70 Copied slack_push_notify.py plugin to rcextensions
71 Copied slack_message.py plugin to rcextensions
72 Copied extract_jira_issues.py plugin to rcextensions
73 Copied extract_redmine_issues.py plugin to rcextensions
74 Copied redmine_smart_commits.py plugin to rcextensions
75 Copied send_mail.py plugin to rcextensions
76
59 {
60 "error": null,
61 "id": 1000,
62 "result": {
63 "server_ip_addr": "1.2.3.4",
64 "user_ips": []
65 }
66 }
@@ -1,1233 +1,1233 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import mock
21 21 import pytest
22 22
23 23 import rhodecode
24 24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 25 from rhodecode.lib.vcs.nodes import FileNode
26 26 from rhodecode.lib import helpers as h
27 27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 28 from rhodecode.model.db import (
29 29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment, Repository)
30 30 from rhodecode.model.meta import Session
31 31 from rhodecode.model.pull_request import PullRequestModel
32 32 from rhodecode.model.user import UserModel
33 33 from rhodecode.tests import (
34 34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35 35 from rhodecode.tests.utils import AssertResponse
36 36
37 37
38 38 def route_path(name, params=None, **kwargs):
39 39 import urllib
40 40
41 41 base_url = {
42 42 'repo_changelog': '/{repo_name}/changelog',
43 43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
44 44 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
45 45 'pullrequest_show_all': '/{repo_name}/pull-request',
46 46 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
47 47 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
48 48 'pullrequest_repo_destinations': '/{repo_name}/pull-request/repo-destinations',
49 49 'pullrequest_new': '/{repo_name}/pull-request/new',
50 50 'pullrequest_create': '/{repo_name}/pull-request/create',
51 51 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
52 52 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
53 53 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
54 54 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
55 55 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
56 56 }[name].format(**kwargs)
57 57
58 58 if params:
59 59 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
60 60 return base_url
61 61
62 62
63 63 @pytest.mark.usefixtures('app', 'autologin_user')
64 64 @pytest.mark.backends("git", "hg")
65 65 class TestPullrequestsView(object):
66 66
67 67 def test_index(self, backend):
68 68 self.app.get(route_path(
69 69 'pullrequest_new',
70 70 repo_name=backend.repo_name))
71 71
72 72 def test_option_menu_create_pull_request_exists(self, backend):
73 73 repo_name = backend.repo_name
74 74 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
75 75
76 76 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
77 77 'pullrequest_new', repo_name=repo_name)
78 78 response.mustcontain(create_pr_link)
79 79
80 80 def test_create_pr_form_with_raw_commit_id(self, backend):
81 81 repo = backend.repo
82 82
83 83 self.app.get(
84 84 route_path('pullrequest_new', repo_name=repo.repo_name,
85 85 commit=repo.get_commit().raw_id),
86 86 status=200)
87 87
88 88 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
89 89 @pytest.mark.parametrize('range_diff', ["0", "1"])
90 90 def test_show(self, pr_util, pr_merge_enabled, range_diff):
91 91 pull_request = pr_util.create_pull_request(
92 92 mergeable=pr_merge_enabled, enable_notifications=False)
93 93
94 94 response = self.app.get(route_path(
95 95 'pullrequest_show',
96 96 repo_name=pull_request.target_repo.scm_instance().name,
97 97 pull_request_id=pull_request.pull_request_id,
98 98 params={'range-diff': range_diff}))
99 99
100 100 for commit_id in pull_request.revisions:
101 101 response.mustcontain(commit_id)
102 102
103 103 assert pull_request.target_ref_parts.type in response
104 104 assert pull_request.target_ref_parts.name in response
105 105 target_clone_url = pull_request.target_repo.clone_url()
106 106 assert target_clone_url in response
107 107
108 108 assert 'class="pull-request-merge"' in response
109 109 if pr_merge_enabled:
110 110 response.mustcontain('Pull request reviewer approval is pending')
111 111 else:
112 112 response.mustcontain('Server-side pull request merging is disabled.')
113 113
114 114 if range_diff == "1":
115 115 response.mustcontain('Turn off: Show the diff as commit range')
116 116
117 117 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
118 118 # Logout
119 119 response = self.app.post(
120 120 h.route_path('logout'),
121 121 params={'csrf_token': csrf_token})
122 122 # Login as regular user
123 123 response = self.app.post(h.route_path('login'),
124 124 {'username': TEST_USER_REGULAR_LOGIN,
125 125 'password': 'test12'})
126 126
127 127 pull_request = pr_util.create_pull_request(
128 128 author=TEST_USER_REGULAR_LOGIN)
129 129
130 130 response = self.app.get(route_path(
131 131 'pullrequest_show',
132 132 repo_name=pull_request.target_repo.scm_instance().name,
133 133 pull_request_id=pull_request.pull_request_id))
134 134
135 135 response.mustcontain('Server-side pull request merging is disabled.')
136 136
137 137 assert_response = response.assert_response()
138 138 # for regular user without a merge permissions, we don't see it
139 139 assert_response.no_element_exists('#close-pull-request-action')
140 140
141 141 user_util.grant_user_permission_to_repo(
142 142 pull_request.target_repo,
143 143 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
144 144 'repository.write')
145 145 response = self.app.get(route_path(
146 146 'pullrequest_show',
147 147 repo_name=pull_request.target_repo.scm_instance().name,
148 148 pull_request_id=pull_request.pull_request_id))
149 149
150 150 response.mustcontain('Server-side pull request merging is disabled.')
151 151
152 152 assert_response = response.assert_response()
153 153 # now regular user has a merge permissions, we have CLOSE button
154 154 assert_response.one_element_exists('#close-pull-request-action')
155 155
156 156 def test_show_invalid_commit_id(self, pr_util):
157 157 # Simulating invalid revisions which will cause a lookup error
158 158 pull_request = pr_util.create_pull_request()
159 159 pull_request.revisions = ['invalid']
160 160 Session().add(pull_request)
161 161 Session().commit()
162 162
163 163 response = self.app.get(route_path(
164 164 'pullrequest_show',
165 165 repo_name=pull_request.target_repo.scm_instance().name,
166 166 pull_request_id=pull_request.pull_request_id))
167 167
168 168 for commit_id in pull_request.revisions:
169 169 response.mustcontain(commit_id)
170 170
171 171 def test_show_invalid_source_reference(self, pr_util):
172 172 pull_request = pr_util.create_pull_request()
173 173 pull_request.source_ref = 'branch:b:invalid'
174 174 Session().add(pull_request)
175 175 Session().commit()
176 176
177 177 self.app.get(route_path(
178 178 'pullrequest_show',
179 179 repo_name=pull_request.target_repo.scm_instance().name,
180 180 pull_request_id=pull_request.pull_request_id))
181 181
182 182 def test_edit_title_description(self, pr_util, csrf_token):
183 183 pull_request = pr_util.create_pull_request()
184 184 pull_request_id = pull_request.pull_request_id
185 185
186 186 response = self.app.post(
187 187 route_path('pullrequest_update',
188 188 repo_name=pull_request.target_repo.repo_name,
189 189 pull_request_id=pull_request_id),
190 190 params={
191 191 'edit_pull_request': 'true',
192 192 'title': 'New title',
193 193 'description': 'New description',
194 194 'csrf_token': csrf_token})
195 195
196 196 assert_session_flash(
197 197 response, u'Pull request title & description updated.',
198 198 category='success')
199 199
200 200 pull_request = PullRequest.get(pull_request_id)
201 201 assert pull_request.title == 'New title'
202 202 assert pull_request.description == 'New description'
203 203
204 204 def test_edit_title_description_closed(self, pr_util, csrf_token):
205 205 pull_request = pr_util.create_pull_request()
206 206 pull_request_id = pull_request.pull_request_id
207 207 repo_name = pull_request.target_repo.repo_name
208 208 pr_util.close()
209 209
210 210 response = self.app.post(
211 211 route_path('pullrequest_update',
212 212 repo_name=repo_name, pull_request_id=pull_request_id),
213 213 params={
214 214 'edit_pull_request': 'true',
215 215 'title': 'New title',
216 216 'description': 'New description',
217 217 'csrf_token': csrf_token}, status=200)
218 218 assert_session_flash(
219 219 response, u'Cannot update closed pull requests.',
220 220 category='error')
221 221
222 222 def test_update_invalid_source_reference(self, pr_util, csrf_token):
223 223 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
224 224
225 225 pull_request = pr_util.create_pull_request()
226 226 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
227 227 Session().add(pull_request)
228 228 Session().commit()
229 229
230 230 pull_request_id = pull_request.pull_request_id
231 231
232 232 response = self.app.post(
233 233 route_path('pullrequest_update',
234 234 repo_name=pull_request.target_repo.repo_name,
235 235 pull_request_id=pull_request_id),
236 236 params={'update_commits': 'true',
237 237 'csrf_token': csrf_token})
238 238
239 239 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
240 240 UpdateFailureReason.MISSING_SOURCE_REF])
241 241 assert_session_flash(response, expected_msg, category='error')
242 242
243 243 def test_missing_target_reference(self, pr_util, csrf_token):
244 244 from rhodecode.lib.vcs.backends.base import MergeFailureReason
245 245 pull_request = pr_util.create_pull_request(
246 246 approved=True, mergeable=True)
247 247 pull_request.target_ref = 'branch:invalid-branch:invalid-commit-id'
248 248 Session().add(pull_request)
249 249 Session().commit()
250 250
251 251 pull_request_id = pull_request.pull_request_id
252 252 pull_request_url = route_path(
253 253 'pullrequest_show',
254 254 repo_name=pull_request.target_repo.repo_name,
255 255 pull_request_id=pull_request_id)
256 256
257 257 response = self.app.get(pull_request_url)
258 258
259 259 assertr = AssertResponse(response)
260 260 expected_msg = PullRequestModel.MERGE_STATUS_MESSAGES[
261 261 MergeFailureReason.MISSING_TARGET_REF]
262 262 assertr.element_contains(
263 263 'span[data-role="merge-message"]', str(expected_msg))
264 264
265 265 def test_comment_and_close_pull_request_custom_message_approved(
266 266 self, pr_util, csrf_token, xhr_header):
267 267
268 268 pull_request = pr_util.create_pull_request(approved=True)
269 269 pull_request_id = pull_request.pull_request_id
270 270 author = pull_request.user_id
271 271 repo = pull_request.target_repo.repo_id
272 272
273 273 self.app.post(
274 274 route_path('pullrequest_comment_create',
275 275 repo_name=pull_request.target_repo.scm_instance().name,
276 276 pull_request_id=pull_request_id),
277 277 params={
278 278 'close_pull_request': '1',
279 279 'text': 'Closing a PR',
280 280 'csrf_token': csrf_token},
281 281 extra_environ=xhr_header,)
282 282
283 283 journal = UserLog.query()\
284 284 .filter(UserLog.user_id == author)\
285 285 .filter(UserLog.repository_id == repo) \
286 286 .order_by('user_log_id') \
287 287 .all()
288 288 assert journal[-1].action == 'repo.pull_request.close'
289 289
290 290 pull_request = PullRequest.get(pull_request_id)
291 291 assert pull_request.is_closed()
292 292
293 293 status = ChangesetStatusModel().get_status(
294 294 pull_request.source_repo, pull_request=pull_request)
295 295 assert status == ChangesetStatus.STATUS_APPROVED
296 296 comments = ChangesetComment().query() \
297 297 .filter(ChangesetComment.pull_request == pull_request) \
298 298 .order_by(ChangesetComment.comment_id.asc())\
299 299 .all()
300 300 assert comments[-1].text == 'Closing a PR'
301 301
302 302 def test_comment_force_close_pull_request_rejected(
303 303 self, pr_util, csrf_token, xhr_header):
304 304 pull_request = pr_util.create_pull_request()
305 305 pull_request_id = pull_request.pull_request_id
306 306 PullRequestModel().update_reviewers(
307 307 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
308 308 pull_request.author)
309 309 author = pull_request.user_id
310 310 repo = pull_request.target_repo.repo_id
311 311
312 312 self.app.post(
313 313 route_path('pullrequest_comment_create',
314 314 repo_name=pull_request.target_repo.scm_instance().name,
315 315 pull_request_id=pull_request_id),
316 316 params={
317 317 'close_pull_request': '1',
318 318 'csrf_token': csrf_token},
319 319 extra_environ=xhr_header)
320 320
321 321 pull_request = PullRequest.get(pull_request_id)
322 322
323 323 journal = UserLog.query()\
324 324 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
325 325 .order_by('user_log_id') \
326 326 .all()
327 327 assert journal[-1].action == 'repo.pull_request.close'
328 328
329 329 # check only the latest status, not the review status
330 330 status = ChangesetStatusModel().get_status(
331 331 pull_request.source_repo, pull_request=pull_request)
332 332 assert status == ChangesetStatus.STATUS_REJECTED
333 333
334 334 def test_comment_and_close_pull_request(
335 335 self, pr_util, csrf_token, xhr_header):
336 336 pull_request = pr_util.create_pull_request()
337 337 pull_request_id = pull_request.pull_request_id
338 338
339 339 response = self.app.post(
340 340 route_path('pullrequest_comment_create',
341 341 repo_name=pull_request.target_repo.scm_instance().name,
342 342 pull_request_id=pull_request.pull_request_id),
343 343 params={
344 344 'close_pull_request': 'true',
345 345 'csrf_token': csrf_token},
346 346 extra_environ=xhr_header)
347 347
348 348 assert response.json
349 349
350 350 pull_request = PullRequest.get(pull_request_id)
351 351 assert pull_request.is_closed()
352 352
353 353 # check only the latest status, not the review status
354 354 status = ChangesetStatusModel().get_status(
355 355 pull_request.source_repo, pull_request=pull_request)
356 356 assert status == ChangesetStatus.STATUS_REJECTED
357 357
358 358 def test_create_pull_request(self, backend, csrf_token):
359 359 commits = [
360 360 {'message': 'ancestor'},
361 361 {'message': 'change'},
362 362 {'message': 'change2'},
363 363 ]
364 364 commit_ids = backend.create_master_repo(commits)
365 365 target = backend.create_repo(heads=['ancestor'])
366 366 source = backend.create_repo(heads=['change2'])
367 367
368 368 response = self.app.post(
369 369 route_path('pullrequest_create', repo_name=source.repo_name),
370 370 [
371 371 ('source_repo', source.repo_name),
372 372 ('source_ref', 'branch:default:' + commit_ids['change2']),
373 373 ('target_repo', target.repo_name),
374 374 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
375 375 ('common_ancestor', commit_ids['ancestor']),
376 376 ('pullrequest_title', 'Title'),
377 377 ('pullrequest_desc', 'Description'),
378 378 ('description_renderer', 'markdown'),
379 379 ('__start__', 'review_members:sequence'),
380 380 ('__start__', 'reviewer:mapping'),
381 381 ('user_id', '1'),
382 382 ('__start__', 'reasons:sequence'),
383 383 ('reason', 'Some reason'),
384 384 ('__end__', 'reasons:sequence'),
385 385 ('__start__', 'rules:sequence'),
386 386 ('__end__', 'rules:sequence'),
387 387 ('mandatory', 'False'),
388 388 ('__end__', 'reviewer:mapping'),
389 389 ('__end__', 'review_members:sequence'),
390 390 ('__start__', 'revisions:sequence'),
391 391 ('revisions', commit_ids['change']),
392 392 ('revisions', commit_ids['change2']),
393 393 ('__end__', 'revisions:sequence'),
394 394 ('user', ''),
395 395 ('csrf_token', csrf_token),
396 396 ],
397 397 status=302)
398 398
399 399 location = response.headers['Location']
400 400 pull_request_id = location.rsplit('/', 1)[1]
401 401 assert pull_request_id != 'new'
402 402 pull_request = PullRequest.get(int(pull_request_id))
403 403
404 404 # check that we have now both revisions
405 405 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
406 406 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
407 407 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
408 408 assert pull_request.target_ref == expected_target_ref
409 409
410 410 def test_reviewer_notifications(self, backend, csrf_token):
411 411 # We have to use the app.post for this test so it will create the
412 412 # notifications properly with the new PR
413 413 commits = [
414 414 {'message': 'ancestor',
415 415 'added': [FileNode('file_A', content='content_of_ancestor')]},
416 416 {'message': 'change',
417 417 'added': [FileNode('file_a', content='content_of_change')]},
418 418 {'message': 'change-child'},
419 419 {'message': 'ancestor-child', 'parents': ['ancestor'],
420 420 'added': [
421 421 FileNode('file_B', content='content_of_ancestor_child')]},
422 422 {'message': 'ancestor-child-2'},
423 423 ]
424 424 commit_ids = backend.create_master_repo(commits)
425 425 target = backend.create_repo(heads=['ancestor-child'])
426 426 source = backend.create_repo(heads=['change'])
427 427
428 428 response = self.app.post(
429 429 route_path('pullrequest_create', repo_name=source.repo_name),
430 430 [
431 431 ('source_repo', source.repo_name),
432 432 ('source_ref', 'branch:default:' + commit_ids['change']),
433 433 ('target_repo', target.repo_name),
434 434 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
435 435 ('common_ancestor', commit_ids['ancestor']),
436 436 ('pullrequest_title', 'Title'),
437 437 ('pullrequest_desc', 'Description'),
438 438 ('description_renderer', 'markdown'),
439 439 ('__start__', 'review_members:sequence'),
440 440 ('__start__', 'reviewer:mapping'),
441 441 ('user_id', '2'),
442 442 ('__start__', 'reasons:sequence'),
443 443 ('reason', 'Some reason'),
444 444 ('__end__', 'reasons:sequence'),
445 445 ('__start__', 'rules:sequence'),
446 446 ('__end__', 'rules:sequence'),
447 447 ('mandatory', 'False'),
448 448 ('__end__', 'reviewer:mapping'),
449 449 ('__end__', 'review_members:sequence'),
450 450 ('__start__', 'revisions:sequence'),
451 451 ('revisions', commit_ids['change']),
452 452 ('__end__', 'revisions:sequence'),
453 453 ('user', ''),
454 454 ('csrf_token', csrf_token),
455 455 ],
456 456 status=302)
457 457
458 458 location = response.headers['Location']
459 459
460 460 pull_request_id = location.rsplit('/', 1)[1]
461 461 assert pull_request_id != 'new'
462 462 pull_request = PullRequest.get(int(pull_request_id))
463 463
464 464 # Check that a notification was made
465 465 notifications = Notification.query()\
466 466 .filter(Notification.created_by == pull_request.author.user_id,
467 467 Notification.type_ == Notification.TYPE_PULL_REQUEST,
468 468 Notification.subject.contains(
469 469 "wants you to review pull request #%s" % pull_request_id))
470 470 assert len(notifications.all()) == 1
471 471
472 472 # Change reviewers and check that a notification was made
473 473 PullRequestModel().update_reviewers(
474 474 pull_request.pull_request_id, [(1, [], False, [])],
475 475 pull_request.author)
476 476 assert len(notifications.all()) == 2
477 477
478 478 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
479 479 csrf_token):
480 480 commits = [
481 481 {'message': 'ancestor',
482 482 'added': [FileNode('file_A', content='content_of_ancestor')]},
483 483 {'message': 'change',
484 484 'added': [FileNode('file_a', content='content_of_change')]},
485 485 {'message': 'change-child'},
486 486 {'message': 'ancestor-child', 'parents': ['ancestor'],
487 487 'added': [
488 488 FileNode('file_B', content='content_of_ancestor_child')]},
489 489 {'message': 'ancestor-child-2'},
490 490 ]
491 491 commit_ids = backend.create_master_repo(commits)
492 492 target = backend.create_repo(heads=['ancestor-child'])
493 493 source = backend.create_repo(heads=['change'])
494 494
495 495 response = self.app.post(
496 496 route_path('pullrequest_create', repo_name=source.repo_name),
497 497 [
498 498 ('source_repo', source.repo_name),
499 499 ('source_ref', 'branch:default:' + commit_ids['change']),
500 500 ('target_repo', target.repo_name),
501 501 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
502 502 ('common_ancestor', commit_ids['ancestor']),
503 503 ('pullrequest_title', 'Title'),
504 504 ('pullrequest_desc', 'Description'),
505 505 ('description_renderer', 'markdown'),
506 506 ('__start__', 'review_members:sequence'),
507 507 ('__start__', 'reviewer:mapping'),
508 508 ('user_id', '1'),
509 509 ('__start__', 'reasons:sequence'),
510 510 ('reason', 'Some reason'),
511 511 ('__end__', 'reasons:sequence'),
512 512 ('__start__', 'rules:sequence'),
513 513 ('__end__', 'rules:sequence'),
514 514 ('mandatory', 'False'),
515 515 ('__end__', 'reviewer:mapping'),
516 516 ('__end__', 'review_members:sequence'),
517 517 ('__start__', 'revisions:sequence'),
518 518 ('revisions', commit_ids['change']),
519 519 ('__end__', 'revisions:sequence'),
520 520 ('user', ''),
521 521 ('csrf_token', csrf_token),
522 522 ],
523 523 status=302)
524 524
525 525 location = response.headers['Location']
526 526
527 527 pull_request_id = location.rsplit('/', 1)[1]
528 528 assert pull_request_id != 'new'
529 529 pull_request = PullRequest.get(int(pull_request_id))
530 530
531 531 # target_ref has to point to the ancestor's commit_id in order to
532 532 # show the correct diff
533 533 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
534 534 assert pull_request.target_ref == expected_target_ref
535 535
536 536 # Check generated diff contents
537 537 response = response.follow()
538 538 assert 'content_of_ancestor' not in response.body
539 539 assert 'content_of_ancestor-child' not in response.body
540 540 assert 'content_of_change' in response.body
541 541
542 542 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
543 543 # Clear any previous calls to rcextensions
544 544 rhodecode.EXTENSIONS.calls.clear()
545 545
546 546 pull_request = pr_util.create_pull_request(
547 547 approved=True, mergeable=True)
548 548 pull_request_id = pull_request.pull_request_id
549 549 repo_name = pull_request.target_repo.scm_instance().name,
550 550
551 551 response = self.app.post(
552 552 route_path('pullrequest_merge',
553 553 repo_name=str(repo_name[0]),
554 554 pull_request_id=pull_request_id),
555 555 params={'csrf_token': csrf_token}).follow()
556 556
557 557 pull_request = PullRequest.get(pull_request_id)
558 558
559 559 assert response.status_int == 200
560 560 assert pull_request.is_closed()
561 561 assert_pull_request_status(
562 562 pull_request, ChangesetStatus.STATUS_APPROVED)
563 563
564 564 # Check the relevant log entries were added
565 565 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
566 566 actions = [log.action for log in user_logs]
567 567 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
568 568 expected_actions = [
569 569 u'repo.pull_request.close',
570 570 u'repo.pull_request.merge',
571 571 u'repo.pull_request.comment.create'
572 572 ]
573 573 assert actions == expected_actions
574 574
575 575 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
576 576 actions = [log for log in user_logs]
577 577 assert actions[-1].action == 'user.push'
578 578 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
579 579
580 580 # Check post_push rcextension was really executed
581 push_calls = rhodecode.EXTENSIONS.calls['post_push']
581 push_calls = rhodecode.EXTENSIONS.calls['_push_hook']
582 582 assert len(push_calls) == 1
583 583 unused_last_call_args, last_call_kwargs = push_calls[0]
584 584 assert last_call_kwargs['action'] == 'push'
585 assert last_call_kwargs['pushed_revs'] == pr_commit_ids
585 assert last_call_kwargs['commit_ids'] == pr_commit_ids
586 586
587 587 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
588 588 pull_request = pr_util.create_pull_request(mergeable=False)
589 589 pull_request_id = pull_request.pull_request_id
590 590 pull_request = PullRequest.get(pull_request_id)
591 591
592 592 response = self.app.post(
593 593 route_path('pullrequest_merge',
594 594 repo_name=pull_request.target_repo.scm_instance().name,
595 595 pull_request_id=pull_request.pull_request_id),
596 596 params={'csrf_token': csrf_token}).follow()
597 597
598 598 assert response.status_int == 200
599 599 response.mustcontain(
600 600 'Merge is not currently possible because of below failed checks.')
601 601 response.mustcontain('Server-side pull request merging is disabled.')
602 602
603 603 @pytest.mark.skip_backends('svn')
604 604 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
605 605 pull_request = pr_util.create_pull_request(mergeable=True)
606 606 pull_request_id = pull_request.pull_request_id
607 607 repo_name = pull_request.target_repo.scm_instance().name
608 608
609 609 response = self.app.post(
610 610 route_path('pullrequest_merge',
611 611 repo_name=repo_name,
612 612 pull_request_id=pull_request_id),
613 613 params={'csrf_token': csrf_token}).follow()
614 614
615 615 assert response.status_int == 200
616 616
617 617 response.mustcontain(
618 618 'Merge is not currently possible because of below failed checks.')
619 619 response.mustcontain('Pull request reviewer approval is pending.')
620 620
621 621 def test_merge_pull_request_renders_failure_reason(
622 622 self, user_regular, csrf_token, pr_util):
623 623 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
624 624 pull_request_id = pull_request.pull_request_id
625 625 repo_name = pull_request.target_repo.scm_instance().name
626 626
627 627 model_patcher = mock.patch.multiple(
628 628 PullRequestModel,
629 629 merge_repo=mock.Mock(return_value=MergeResponse(
630 630 True, False, 'STUB_COMMIT_ID', MergeFailureReason.PUSH_FAILED)),
631 631 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
632 632
633 633 with model_patcher:
634 634 response = self.app.post(
635 635 route_path('pullrequest_merge',
636 636 repo_name=repo_name,
637 637 pull_request_id=pull_request_id),
638 638 params={'csrf_token': csrf_token}, status=302)
639 639
640 640 assert_session_flash(response, PullRequestModel.MERGE_STATUS_MESSAGES[
641 641 MergeFailureReason.PUSH_FAILED])
642 642
643 643 def test_update_source_revision(self, backend, csrf_token):
644 644 commits = [
645 645 {'message': 'ancestor'},
646 646 {'message': 'change'},
647 647 {'message': 'change-2'},
648 648 ]
649 649 commit_ids = backend.create_master_repo(commits)
650 650 target = backend.create_repo(heads=['ancestor'])
651 651 source = backend.create_repo(heads=['change'])
652 652
653 653 # create pr from a in source to A in target
654 654 pull_request = PullRequest()
655 655 pull_request.source_repo = source
656 656 # TODO: johbo: Make sure that we write the source ref this way!
657 657 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
658 658 branch=backend.default_branch_name, commit_id=commit_ids['change'])
659 659 pull_request.target_repo = target
660 660
661 661 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
662 662 branch=backend.default_branch_name,
663 663 commit_id=commit_ids['ancestor'])
664 664 pull_request.revisions = [commit_ids['change']]
665 665 pull_request.title = u"Test"
666 666 pull_request.description = u"Description"
667 667 pull_request.author = UserModel().get_by_username(
668 668 TEST_USER_ADMIN_LOGIN)
669 669 Session().add(pull_request)
670 670 Session().commit()
671 671 pull_request_id = pull_request.pull_request_id
672 672
673 673 # source has ancestor - change - change-2
674 674 backend.pull_heads(source, heads=['change-2'])
675 675
676 676 # update PR
677 677 self.app.post(
678 678 route_path('pullrequest_update',
679 679 repo_name=target.repo_name,
680 680 pull_request_id=pull_request_id),
681 681 params={'update_commits': 'true',
682 682 'csrf_token': csrf_token})
683 683
684 684 # check that we have now both revisions
685 685 pull_request = PullRequest.get(pull_request_id)
686 686 assert pull_request.revisions == [
687 687 commit_ids['change-2'], commit_ids['change']]
688 688
689 689 # TODO: johbo: this should be a test on its own
690 690 response = self.app.get(route_path(
691 691 'pullrequest_new',
692 692 repo_name=target.repo_name))
693 693 assert response.status_int == 200
694 694 assert 'Pull request updated to' in response.body
695 695 assert 'with 1 added, 0 removed commits.' in response.body
696 696
697 697 def test_update_target_revision(self, backend, csrf_token):
698 698 commits = [
699 699 {'message': 'ancestor'},
700 700 {'message': 'change'},
701 701 {'message': 'ancestor-new', 'parents': ['ancestor']},
702 702 {'message': 'change-rebased'},
703 703 ]
704 704 commit_ids = backend.create_master_repo(commits)
705 705 target = backend.create_repo(heads=['ancestor'])
706 706 source = backend.create_repo(heads=['change'])
707 707
708 708 # create pr from a in source to A in target
709 709 pull_request = PullRequest()
710 710 pull_request.source_repo = source
711 711 # TODO: johbo: Make sure that we write the source ref this way!
712 712 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
713 713 branch=backend.default_branch_name, commit_id=commit_ids['change'])
714 714 pull_request.target_repo = target
715 715 # TODO: johbo: Target ref should be branch based, since tip can jump
716 716 # from branch to branch
717 717 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
718 718 branch=backend.default_branch_name,
719 719 commit_id=commit_ids['ancestor'])
720 720 pull_request.revisions = [commit_ids['change']]
721 721 pull_request.title = u"Test"
722 722 pull_request.description = u"Description"
723 723 pull_request.author = UserModel().get_by_username(
724 724 TEST_USER_ADMIN_LOGIN)
725 725 Session().add(pull_request)
726 726 Session().commit()
727 727 pull_request_id = pull_request.pull_request_id
728 728
729 729 # target has ancestor - ancestor-new
730 730 # source has ancestor - ancestor-new - change-rebased
731 731 backend.pull_heads(target, heads=['ancestor-new'])
732 732 backend.pull_heads(source, heads=['change-rebased'])
733 733
734 734 # update PR
735 735 self.app.post(
736 736 route_path('pullrequest_update',
737 737 repo_name=target.repo_name,
738 738 pull_request_id=pull_request_id),
739 739 params={'update_commits': 'true',
740 740 'csrf_token': csrf_token},
741 741 status=200)
742 742
743 743 # check that we have now both revisions
744 744 pull_request = PullRequest.get(pull_request_id)
745 745 assert pull_request.revisions == [commit_ids['change-rebased']]
746 746 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
747 747 branch=backend.default_branch_name,
748 748 commit_id=commit_ids['ancestor-new'])
749 749
750 750 # TODO: johbo: This should be a test on its own
751 751 response = self.app.get(route_path(
752 752 'pullrequest_new',
753 753 repo_name=target.repo_name))
754 754 assert response.status_int == 200
755 755 assert 'Pull request updated to' in response.body
756 756 assert 'with 1 added, 1 removed commits.' in response.body
757 757
758 758 def test_update_target_revision_with_removal_of_1_commit_git(self, backend_git, csrf_token):
759 759 backend = backend_git
760 760 commits = [
761 761 {'message': 'master-commit-1'},
762 762 {'message': 'master-commit-2-change-1'},
763 763 {'message': 'master-commit-3-change-2'},
764 764
765 765 {'message': 'feat-commit-1', 'parents': ['master-commit-1']},
766 766 {'message': 'feat-commit-2'},
767 767 ]
768 768 commit_ids = backend.create_master_repo(commits)
769 769 target = backend.create_repo(heads=['master-commit-3-change-2'])
770 770 source = backend.create_repo(heads=['feat-commit-2'])
771 771
772 772 # create pr from a in source to A in target
773 773 pull_request = PullRequest()
774 774 pull_request.source_repo = source
775 775 # TODO: johbo: Make sure that we write the source ref this way!
776 776 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
777 777 branch=backend.default_branch_name,
778 778 commit_id=commit_ids['master-commit-3-change-2'])
779 779
780 780 pull_request.target_repo = target
781 781 # TODO: johbo: Target ref should be branch based, since tip can jump
782 782 # from branch to branch
783 783 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
784 784 branch=backend.default_branch_name,
785 785 commit_id=commit_ids['feat-commit-2'])
786 786
787 787 pull_request.revisions = [
788 788 commit_ids['feat-commit-1'],
789 789 commit_ids['feat-commit-2']
790 790 ]
791 791 pull_request.title = u"Test"
792 792 pull_request.description = u"Description"
793 793 pull_request.author = UserModel().get_by_username(
794 794 TEST_USER_ADMIN_LOGIN)
795 795 Session().add(pull_request)
796 796 Session().commit()
797 797 pull_request_id = pull_request.pull_request_id
798 798
799 799 # PR is created, now we simulate a force-push into target,
800 800 # that drops a 2 last commits
801 801 vcsrepo = target.scm_instance()
802 802 vcsrepo.config.clear_section('hooks')
803 803 vcsrepo.run_git_command(['reset', '--soft', 'HEAD~2'])
804 804
805 805 # update PR
806 806 self.app.post(
807 807 route_path('pullrequest_update',
808 808 repo_name=target.repo_name,
809 809 pull_request_id=pull_request_id),
810 810 params={'update_commits': 'true',
811 811 'csrf_token': csrf_token},
812 812 status=200)
813 813
814 814 response = self.app.get(route_path(
815 815 'pullrequest_new',
816 816 repo_name=target.repo_name))
817 817 assert response.status_int == 200
818 818 response.mustcontain('Pull request updated to')
819 819 response.mustcontain('with 0 added, 0 removed commits.')
820 820
821 821 def test_update_of_ancestor_reference(self, backend, csrf_token):
822 822 commits = [
823 823 {'message': 'ancestor'},
824 824 {'message': 'change'},
825 825 {'message': 'change-2'},
826 826 {'message': 'ancestor-new', 'parents': ['ancestor']},
827 827 {'message': 'change-rebased'},
828 828 ]
829 829 commit_ids = backend.create_master_repo(commits)
830 830 target = backend.create_repo(heads=['ancestor'])
831 831 source = backend.create_repo(heads=['change'])
832 832
833 833 # create pr from a in source to A in target
834 834 pull_request = PullRequest()
835 835 pull_request.source_repo = source
836 836 # TODO: johbo: Make sure that we write the source ref this way!
837 837 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
838 838 branch=backend.default_branch_name,
839 839 commit_id=commit_ids['change'])
840 840 pull_request.target_repo = target
841 841 # TODO: johbo: Target ref should be branch based, since tip can jump
842 842 # from branch to branch
843 843 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
844 844 branch=backend.default_branch_name,
845 845 commit_id=commit_ids['ancestor'])
846 846 pull_request.revisions = [commit_ids['change']]
847 847 pull_request.title = u"Test"
848 848 pull_request.description = u"Description"
849 849 pull_request.author = UserModel().get_by_username(
850 850 TEST_USER_ADMIN_LOGIN)
851 851 Session().add(pull_request)
852 852 Session().commit()
853 853 pull_request_id = pull_request.pull_request_id
854 854
855 855 # target has ancestor - ancestor-new
856 856 # source has ancestor - ancestor-new - change-rebased
857 857 backend.pull_heads(target, heads=['ancestor-new'])
858 858 backend.pull_heads(source, heads=['change-rebased'])
859 859
860 860 # update PR
861 861 self.app.post(
862 862 route_path('pullrequest_update',
863 863 repo_name=target.repo_name,
864 864 pull_request_id=pull_request_id),
865 865 params={'update_commits': 'true',
866 866 'csrf_token': csrf_token},
867 867 status=200)
868 868
869 869 # Expect the target reference to be updated correctly
870 870 pull_request = PullRequest.get(pull_request_id)
871 871 assert pull_request.revisions == [commit_ids['change-rebased']]
872 872 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
873 873 branch=backend.default_branch_name,
874 874 commit_id=commit_ids['ancestor-new'])
875 875 assert pull_request.target_ref == expected_target_ref
876 876
877 877 def test_remove_pull_request_branch(self, backend_git, csrf_token):
878 878 branch_name = 'development'
879 879 commits = [
880 880 {'message': 'initial-commit'},
881 881 {'message': 'old-feature'},
882 882 {'message': 'new-feature', 'branch': branch_name},
883 883 ]
884 884 repo = backend_git.create_repo(commits)
885 885 commit_ids = backend_git.commit_ids
886 886
887 887 pull_request = PullRequest()
888 888 pull_request.source_repo = repo
889 889 pull_request.target_repo = repo
890 890 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
891 891 branch=branch_name, commit_id=commit_ids['new-feature'])
892 892 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
893 893 branch=backend_git.default_branch_name,
894 894 commit_id=commit_ids['old-feature'])
895 895 pull_request.revisions = [commit_ids['new-feature']]
896 896 pull_request.title = u"Test"
897 897 pull_request.description = u"Description"
898 898 pull_request.author = UserModel().get_by_username(
899 899 TEST_USER_ADMIN_LOGIN)
900 900 Session().add(pull_request)
901 901 Session().commit()
902 902
903 903 vcs = repo.scm_instance()
904 904 vcs.remove_ref('refs/heads/{}'.format(branch_name))
905 905
906 906 response = self.app.get(route_path(
907 907 'pullrequest_show',
908 908 repo_name=repo.repo_name,
909 909 pull_request_id=pull_request.pull_request_id))
910 910
911 911 assert response.status_int == 200
912 912 assert_response = AssertResponse(response)
913 913 assert_response.element_contains(
914 914 '#changeset_compare_view_content .alert strong',
915 915 'Missing commits')
916 916 assert_response.element_contains(
917 917 '#changeset_compare_view_content .alert',
918 918 'This pull request cannot be displayed, because one or more'
919 919 ' commits no longer exist in the source repository.')
920 920
921 921 def test_strip_commits_from_pull_request(
922 922 self, backend, pr_util, csrf_token):
923 923 commits = [
924 924 {'message': 'initial-commit'},
925 925 {'message': 'old-feature'},
926 926 {'message': 'new-feature', 'parents': ['initial-commit']},
927 927 ]
928 928 pull_request = pr_util.create_pull_request(
929 929 commits, target_head='initial-commit', source_head='new-feature',
930 930 revisions=['new-feature'])
931 931
932 932 vcs = pr_util.source_repository.scm_instance()
933 933 if backend.alias == 'git':
934 934 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
935 935 else:
936 936 vcs.strip(pr_util.commit_ids['new-feature'])
937 937
938 938 response = self.app.get(route_path(
939 939 'pullrequest_show',
940 940 repo_name=pr_util.target_repository.repo_name,
941 941 pull_request_id=pull_request.pull_request_id))
942 942
943 943 assert response.status_int == 200
944 944 assert_response = AssertResponse(response)
945 945 assert_response.element_contains(
946 946 '#changeset_compare_view_content .alert strong',
947 947 'Missing commits')
948 948 assert_response.element_contains(
949 949 '#changeset_compare_view_content .alert',
950 950 'This pull request cannot be displayed, because one or more'
951 951 ' commits no longer exist in the source repository.')
952 952 assert_response.element_contains(
953 953 '#update_commits',
954 954 'Update commits')
955 955
956 956 def test_strip_commits_and_update(
957 957 self, backend, pr_util, csrf_token):
958 958 commits = [
959 959 {'message': 'initial-commit'},
960 960 {'message': 'old-feature'},
961 961 {'message': 'new-feature', 'parents': ['old-feature']},
962 962 ]
963 963 pull_request = pr_util.create_pull_request(
964 964 commits, target_head='old-feature', source_head='new-feature',
965 965 revisions=['new-feature'], mergeable=True)
966 966
967 967 vcs = pr_util.source_repository.scm_instance()
968 968 if backend.alias == 'git':
969 969 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
970 970 else:
971 971 vcs.strip(pr_util.commit_ids['new-feature'])
972 972
973 973 response = self.app.post(
974 974 route_path('pullrequest_update',
975 975 repo_name=pull_request.target_repo.repo_name,
976 976 pull_request_id=pull_request.pull_request_id),
977 977 params={'update_commits': 'true',
978 978 'csrf_token': csrf_token})
979 979
980 980 assert response.status_int == 200
981 981 assert response.body == 'true'
982 982
983 983 # Make sure that after update, it won't raise 500 errors
984 984 response = self.app.get(route_path(
985 985 'pullrequest_show',
986 986 repo_name=pr_util.target_repository.repo_name,
987 987 pull_request_id=pull_request.pull_request_id))
988 988
989 989 assert response.status_int == 200
990 990 assert_response = AssertResponse(response)
991 991 assert_response.element_contains(
992 992 '#changeset_compare_view_content .alert strong',
993 993 'Missing commits')
994 994
995 995 def test_branch_is_a_link(self, pr_util):
996 996 pull_request = pr_util.create_pull_request()
997 997 pull_request.source_ref = 'branch:origin:1234567890abcdef'
998 998 pull_request.target_ref = 'branch:target:abcdef1234567890'
999 999 Session().add(pull_request)
1000 1000 Session().commit()
1001 1001
1002 1002 response = self.app.get(route_path(
1003 1003 'pullrequest_show',
1004 1004 repo_name=pull_request.target_repo.scm_instance().name,
1005 1005 pull_request_id=pull_request.pull_request_id))
1006 1006 assert response.status_int == 200
1007 1007 assert_response = AssertResponse(response)
1008 1008
1009 1009 origin = assert_response.get_element('.pr-origininfo .tag')
1010 1010 origin_children = origin.getchildren()
1011 1011 assert len(origin_children) == 1
1012 1012 target = assert_response.get_element('.pr-targetinfo .tag')
1013 1013 target_children = target.getchildren()
1014 1014 assert len(target_children) == 1
1015 1015
1016 1016 expected_origin_link = route_path(
1017 1017 'repo_changelog',
1018 1018 repo_name=pull_request.source_repo.scm_instance().name,
1019 1019 params=dict(branch='origin'))
1020 1020 expected_target_link = route_path(
1021 1021 'repo_changelog',
1022 1022 repo_name=pull_request.target_repo.scm_instance().name,
1023 1023 params=dict(branch='target'))
1024 1024 assert origin_children[0].attrib['href'] == expected_origin_link
1025 1025 assert origin_children[0].text == 'branch: origin'
1026 1026 assert target_children[0].attrib['href'] == expected_target_link
1027 1027 assert target_children[0].text == 'branch: target'
1028 1028
1029 1029 def test_bookmark_is_not_a_link(self, pr_util):
1030 1030 pull_request = pr_util.create_pull_request()
1031 1031 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
1032 1032 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
1033 1033 Session().add(pull_request)
1034 1034 Session().commit()
1035 1035
1036 1036 response = self.app.get(route_path(
1037 1037 'pullrequest_show',
1038 1038 repo_name=pull_request.target_repo.scm_instance().name,
1039 1039 pull_request_id=pull_request.pull_request_id))
1040 1040 assert response.status_int == 200
1041 1041 assert_response = AssertResponse(response)
1042 1042
1043 1043 origin = assert_response.get_element('.pr-origininfo .tag')
1044 1044 assert origin.text.strip() == 'bookmark: origin'
1045 1045 assert origin.getchildren() == []
1046 1046
1047 1047 target = assert_response.get_element('.pr-targetinfo .tag')
1048 1048 assert target.text.strip() == 'bookmark: target'
1049 1049 assert target.getchildren() == []
1050 1050
1051 1051 def test_tag_is_not_a_link(self, pr_util):
1052 1052 pull_request = pr_util.create_pull_request()
1053 1053 pull_request.source_ref = 'tag:origin:1234567890abcdef'
1054 1054 pull_request.target_ref = 'tag:target:abcdef1234567890'
1055 1055 Session().add(pull_request)
1056 1056 Session().commit()
1057 1057
1058 1058 response = self.app.get(route_path(
1059 1059 'pullrequest_show',
1060 1060 repo_name=pull_request.target_repo.scm_instance().name,
1061 1061 pull_request_id=pull_request.pull_request_id))
1062 1062 assert response.status_int == 200
1063 1063 assert_response = AssertResponse(response)
1064 1064
1065 1065 origin = assert_response.get_element('.pr-origininfo .tag')
1066 1066 assert origin.text.strip() == 'tag: origin'
1067 1067 assert origin.getchildren() == []
1068 1068
1069 1069 target = assert_response.get_element('.pr-targetinfo .tag')
1070 1070 assert target.text.strip() == 'tag: target'
1071 1071 assert target.getchildren() == []
1072 1072
1073 1073 @pytest.mark.parametrize('mergeable', [True, False])
1074 1074 def test_shadow_repository_link(
1075 1075 self, mergeable, pr_util, http_host_only_stub):
1076 1076 """
1077 1077 Check that the pull request summary page displays a link to the shadow
1078 1078 repository if the pull request is mergeable. If it is not mergeable
1079 1079 the link should not be displayed.
1080 1080 """
1081 1081 pull_request = pr_util.create_pull_request(
1082 1082 mergeable=mergeable, enable_notifications=False)
1083 1083 target_repo = pull_request.target_repo.scm_instance()
1084 1084 pr_id = pull_request.pull_request_id
1085 1085 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1086 1086 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1087 1087
1088 1088 response = self.app.get(route_path(
1089 1089 'pullrequest_show',
1090 1090 repo_name=target_repo.name,
1091 1091 pull_request_id=pr_id))
1092 1092
1093 1093 assertr = AssertResponse(response)
1094 1094 if mergeable:
1095 1095 assertr.element_value_contains('input.pr-mergeinfo', shadow_url)
1096 1096 assertr.element_value_contains('input.pr-mergeinfo ', 'pr-merge')
1097 1097 else:
1098 1098 assertr.no_element_exists('.pr-mergeinfo')
1099 1099
1100 1100
1101 1101 @pytest.mark.usefixtures('app')
1102 1102 @pytest.mark.backends("git", "hg")
1103 1103 class TestPullrequestsControllerDelete(object):
1104 1104 def test_pull_request_delete_button_permissions_admin(
1105 1105 self, autologin_user, user_admin, pr_util):
1106 1106 pull_request = pr_util.create_pull_request(
1107 1107 author=user_admin.username, enable_notifications=False)
1108 1108
1109 1109 response = self.app.get(route_path(
1110 1110 'pullrequest_show',
1111 1111 repo_name=pull_request.target_repo.scm_instance().name,
1112 1112 pull_request_id=pull_request.pull_request_id))
1113 1113
1114 1114 response.mustcontain('id="delete_pullrequest"')
1115 1115 response.mustcontain('Confirm to delete this pull request')
1116 1116
1117 1117 def test_pull_request_delete_button_permissions_owner(
1118 1118 self, autologin_regular_user, user_regular, pr_util):
1119 1119 pull_request = pr_util.create_pull_request(
1120 1120 author=user_regular.username, enable_notifications=False)
1121 1121
1122 1122 response = self.app.get(route_path(
1123 1123 'pullrequest_show',
1124 1124 repo_name=pull_request.target_repo.scm_instance().name,
1125 1125 pull_request_id=pull_request.pull_request_id))
1126 1126
1127 1127 response.mustcontain('id="delete_pullrequest"')
1128 1128 response.mustcontain('Confirm to delete this pull request')
1129 1129
1130 1130 def test_pull_request_delete_button_permissions_forbidden(
1131 1131 self, autologin_regular_user, user_regular, user_admin, pr_util):
1132 1132 pull_request = pr_util.create_pull_request(
1133 1133 author=user_admin.username, enable_notifications=False)
1134 1134
1135 1135 response = self.app.get(route_path(
1136 1136 'pullrequest_show',
1137 1137 repo_name=pull_request.target_repo.scm_instance().name,
1138 1138 pull_request_id=pull_request.pull_request_id))
1139 1139 response.mustcontain(no=['id="delete_pullrequest"'])
1140 1140 response.mustcontain(no=['Confirm to delete this pull request'])
1141 1141
1142 1142 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1143 1143 self, autologin_regular_user, user_regular, user_admin, pr_util,
1144 1144 user_util):
1145 1145
1146 1146 pull_request = pr_util.create_pull_request(
1147 1147 author=user_admin.username, enable_notifications=False)
1148 1148
1149 1149 user_util.grant_user_permission_to_repo(
1150 1150 pull_request.target_repo, user_regular,
1151 1151 'repository.write')
1152 1152
1153 1153 response = self.app.get(route_path(
1154 1154 'pullrequest_show',
1155 1155 repo_name=pull_request.target_repo.scm_instance().name,
1156 1156 pull_request_id=pull_request.pull_request_id))
1157 1157
1158 1158 response.mustcontain('id="open_edit_pullrequest"')
1159 1159 response.mustcontain('id="delete_pullrequest"')
1160 1160 response.mustcontain(no=['Confirm to delete this pull request'])
1161 1161
1162 1162 def test_delete_comment_returns_404_if_comment_does_not_exist(
1163 1163 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1164 1164
1165 1165 pull_request = pr_util.create_pull_request(
1166 1166 author=user_admin.username, enable_notifications=False)
1167 1167
1168 1168 self.app.post(
1169 1169 route_path(
1170 1170 'pullrequest_comment_delete',
1171 1171 repo_name=pull_request.target_repo.scm_instance().name,
1172 1172 pull_request_id=pull_request.pull_request_id,
1173 1173 comment_id=1024404),
1174 1174 extra_environ=xhr_header,
1175 1175 params={'csrf_token': csrf_token},
1176 1176 status=404
1177 1177 )
1178 1178
1179 1179 def test_delete_comment(
1180 1180 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1181 1181
1182 1182 pull_request = pr_util.create_pull_request(
1183 1183 author=user_admin.username, enable_notifications=False)
1184 1184 comment = pr_util.create_comment()
1185 1185 comment_id = comment.comment_id
1186 1186
1187 1187 response = self.app.post(
1188 1188 route_path(
1189 1189 'pullrequest_comment_delete',
1190 1190 repo_name=pull_request.target_repo.scm_instance().name,
1191 1191 pull_request_id=pull_request.pull_request_id,
1192 1192 comment_id=comment_id),
1193 1193 extra_environ=xhr_header,
1194 1194 params={'csrf_token': csrf_token},
1195 1195 status=200
1196 1196 )
1197 1197 assert response.body == 'true'
1198 1198
1199 1199 @pytest.mark.parametrize('url_type', [
1200 1200 'pullrequest_new',
1201 1201 'pullrequest_create',
1202 1202 'pullrequest_update',
1203 1203 'pullrequest_merge',
1204 1204 ])
1205 1205 def test_pull_request_is_forbidden_on_archived_repo(
1206 1206 self, autologin_user, backend, xhr_header, user_util, url_type):
1207 1207
1208 1208 # create a temporary repo
1209 1209 source = user_util.create_repo(repo_type=backend.alias)
1210 1210 repo_name = source.repo_name
1211 1211 repo = Repository.get_by_repo_name(repo_name)
1212 1212 repo.archived = True
1213 1213 Session().commit()
1214 1214
1215 1215 response = self.app.get(
1216 1216 route_path(url_type, repo_name=repo_name, pull_request_id=1), status=302)
1217 1217
1218 1218 msg = 'Action not supported for archived repository.'
1219 1219 assert_session_flash(response, msg)
1220 1220
1221 1221
1222 1222 def assert_pull_request_status(pull_request, expected_status):
1223 1223 status = ChangesetStatusModel().calculated_review_status(
1224 1224 pull_request=pull_request)
1225 1225 assert status == expected_status
1226 1226
1227 1227
1228 1228 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1229 1229 @pytest.mark.usefixtures("autologin_user")
1230 1230 def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1231 1231 response = app.get(
1232 1232 route_path(route, repo_name=backend_svn.repo_name), status=404)
1233 1233
@@ -1,465 +1,493 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2013-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 Set of hooks run by RhodeCode Enterprise
24 24 """
25 25
26 26 import os
27 27 import collections
28 28 import logging
29 29
30 30 import rhodecode
31 31 from rhodecode import events
32 32 from rhodecode.lib import helpers as h
33 33 from rhodecode.lib import audit_logger
34 34 from rhodecode.lib.utils2 import safe_str
35 35 from rhodecode.lib.exceptions import (
36 36 HTTPLockedRC, HTTPBranchProtected, UserCreationError)
37 37 from rhodecode.model.db import Repository, User
38 38
39 39 log = logging.getLogger(__name__)
40 40
41 41
42 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
42 class HookResponse(object):
43 def __init__(self, status, output):
44 self.status = status
45 self.output = output
46
47 def __add__(self, other):
48 other_status = getattr(other, 'status', 0)
49 new_status = max(self.status, other_status)
50 other_output = getattr(other, 'output', '')
51 new_output = self.output + other_output
52
53 return HookResponse(new_status, new_output)
54
55 def __bool__(self):
56 return self.status == 0
43 57
44 58
45 59 def is_shadow_repo(extras):
46 60 """
47 61 Returns ``True`` if this is an action executed against a shadow repository.
48 62 """
49 63 return extras['is_shadow_repo']
50 64
51 65
52 66 def _get_scm_size(alias, root_path):
53 67
54 68 if not alias.startswith('.'):
55 69 alias += '.'
56 70
57 71 size_scm, size_root = 0, 0
58 72 for path, unused_dirs, files in os.walk(safe_str(root_path)):
59 73 if path.find(alias) != -1:
60 74 for f in files:
61 75 try:
62 76 size_scm += os.path.getsize(os.path.join(path, f))
63 77 except OSError:
64 78 pass
65 79 else:
66 80 for f in files:
67 81 try:
68 82 size_root += os.path.getsize(os.path.join(path, f))
69 83 except OSError:
70 84 pass
71 85
72 86 size_scm_f = h.format_byte_size_binary(size_scm)
73 87 size_root_f = h.format_byte_size_binary(size_root)
74 88 size_total_f = h.format_byte_size_binary(size_root + size_scm)
75 89
76 90 return size_scm_f, size_root_f, size_total_f
77 91
78 92
79 93 # actual hooks called by Mercurial internally, and GIT by our Python Hooks
80 94 def repo_size(extras):
81 95 """Present size of repository after push."""
82 96 repo = Repository.get_by_repo_name(extras.repository)
83 97 vcs_part = safe_str(u'.%s' % repo.repo_type)
84 98 size_vcs, size_root, size_total = _get_scm_size(vcs_part,
85 99 repo.repo_full_path)
86 100 msg = ('Repository `%s` size summary %s:%s repo:%s total:%s\n'
87 101 % (repo.repo_name, vcs_part, size_vcs, size_root, size_total))
88 102 return HookResponse(0, msg)
89 103
90 104
91 105 def pre_push(extras):
92 106 """
93 107 Hook executed before pushing code.
94 108
95 109 It bans pushing when the repository is locked.
96 110 """
97 111
98 112 user = User.get_by_username(extras.username)
99 113 output = ''
100 114 if extras.locked_by[0] and user.user_id != int(extras.locked_by[0]):
101 115 locked_by = User.get(extras.locked_by[0]).username
102 116 reason = extras.locked_by[2]
103 117 # this exception is interpreted in git/hg middlewares and based
104 118 # on that proper return code is server to client
105 119 _http_ret = HTTPLockedRC(
106 120 _locked_by_explanation(extras.repository, locked_by, reason))
107 121 if str(_http_ret.code).startswith('2'):
108 122 # 2xx Codes don't raise exceptions
109 123 output = _http_ret.title
110 124 else:
111 125 raise _http_ret
112 126
127 hook_response = ''
113 128 if not is_shadow_repo(extras):
114 129 if extras.commit_ids and extras.check_branch_perms:
115 130
116 131 auth_user = user.AuthUser()
117 132 repo = Repository.get_by_repo_name(extras.repository)
118 133 affected_branches = []
119 134 if repo.repo_type == 'hg':
120 135 for entry in extras.commit_ids:
121 136 if entry['type'] == 'branch':
122 137 is_forced = bool(entry['multiple_heads'])
123 138 affected_branches.append([entry['name'], is_forced])
124 139 elif repo.repo_type == 'git':
125 140 for entry in extras.commit_ids:
126 141 if entry['type'] == 'heads':
127 142 is_forced = bool(entry['pruned_sha'])
128 143 affected_branches.append([entry['name'], is_forced])
129 144
130 145 for branch_name, is_forced in affected_branches:
131 146
132 147 rule, branch_perm = auth_user.get_rule_and_branch_permission(
133 148 extras.repository, branch_name)
134 149 if not branch_perm:
135 150 # no branch permission found for this branch, just keep checking
136 151 continue
137 152
138 153 if branch_perm == 'branch.push_force':
139 154 continue
140 155 elif branch_perm == 'branch.push' and is_forced is False:
141 156 continue
142 157 elif branch_perm == 'branch.push' and is_forced is True:
143 158 halt_message = 'Branch `{}` changes rejected by rule {}. ' \
144 159 'FORCE PUSH FORBIDDEN.'.format(branch_name, rule)
145 160 else:
146 161 halt_message = 'Branch `{}` changes rejected by rule {}.'.format(
147 162 branch_name, rule)
148 163
149 164 if halt_message:
150 165 _http_ret = HTTPBranchProtected(halt_message)
151 166 raise _http_ret
152 167
153 168 # Propagate to external components. This is done after checking the
154 169 # lock, for consistent behavior.
155 pre_push_extension(repo_store_path=Repository.base_path(), **extras)
170 hook_response = pre_push_extension(
171 repo_store_path=Repository.base_path(), **extras)
156 172 events.trigger(events.RepoPrePushEvent(
157 173 repo_name=extras.repository, extras=extras))
158 174
159 return HookResponse(0, output)
175 return HookResponse(0, output) + hook_response
160 176
161 177
162 178 def pre_pull(extras):
163 179 """
164 180 Hook executed before pulling the code.
165 181
166 182 It bans pulling when the repository is locked.
167 183 """
168 184
169 185 output = ''
170 186 if extras.locked_by[0]:
171 187 locked_by = User.get(extras.locked_by[0]).username
172 188 reason = extras.locked_by[2]
173 189 # this exception is interpreted in git/hg middlewares and based
174 190 # on that proper return code is server to client
175 191 _http_ret = HTTPLockedRC(
176 192 _locked_by_explanation(extras.repository, locked_by, reason))
177 193 if str(_http_ret.code).startswith('2'):
178 194 # 2xx Codes don't raise exceptions
179 195 output = _http_ret.title
180 196 else:
181 197 raise _http_ret
182 198
183 199 # Propagate to external components. This is done after checking the
184 200 # lock, for consistent behavior.
201 hook_response = ''
185 202 if not is_shadow_repo(extras):
186 pre_pull_extension(**extras)
203 extras.hook_type = extras.hook_type or 'pre_pull'
204 hook_response = pre_pull_extension(
205 repo_store_path=Repository.base_path(), **extras)
187 206 events.trigger(events.RepoPrePullEvent(
188 207 repo_name=extras.repository, extras=extras))
189 208
190 return HookResponse(0, output)
209 return HookResponse(0, output) + hook_response
191 210
192 211
def post_pull(extras):
    """
    Hook executed after client pulls the code.

    Audit-logs the pull, optionally takes a pull-lock on the repository,
    reports an existing lock back to the client, and propagates the event
    to rcextensions and the internal event bus.

    :param extras: AttributeDict carrying the vcs operation context
    :returns: HookResponse combined with any rcextensions hook output
    """
    # audit-log the pull operation
    audit_user = audit_logger.UserWrap(
        username=extras.username, ip_addr=extras.ip)
    audit_repo = audit_logger.RepoWrap(repo_name=extras.repository)
    audit_logger.store(
        'user.pull', action_data={'user_agent': extras.user_agent},
        user=audit_user, repo=audit_repo, commit=True)

    output = ''
    # make_lock is tri-state: False, True, None. Only take a lock on True.
    if extras.make_lock is True and not is_shadow_repo(extras):
        user = User.get_by_username(extras.username)
        Repository.lock(
            Repository.get_by_repo_name(extras.repository),
            user.user_id, lock_reason=Repository.LOCK_PULL)
        output += 'Made lock on repo `%s`' % (extras.repository,)

    if extras.locked_by[0]:
        locked_by = User.get(extras.locked_by[0]).username
        reason = extras.locked_by[2]
        _http_ret = HTTPLockedRC(
            _locked_by_explanation(extras.repository, locked_by, reason))
        if str(_http_ret.code).startswith('2'):
            # 2xx codes don't raise exceptions; only inform the client
            output += _http_ret.title

    # Propagate to external components (rcextensions + event system).
    hook_response = ''
    if not is_shadow_repo(extras):
        extras.hook_type = extras.hook_type or 'post_pull'
        hook_response = post_pull_extension(
            repo_store_path=Repository.base_path(), **extras)
        events.trigger(events.RepoPullEvent(
            repo_name=extras.repository, extras=extras))

    return HookResponse(0, output) + hook_response
231 252
232 253
def post_push(extras):
    """
    Hook executed after user pushes to the repository.

    Audit-logs the push (with the pushed commit ids), releases a pull-lock
    when requested, reports existing locks, prints pull-request creation
    links for any new refs, and propagates the event to rcextensions and
    the internal event bus.

    :param extras: AttributeDict carrying the vcs operation context
    :returns: HookResponse combined with any rcextensions hook output
    """
    commit_ids = extras.commit_ids

    # audit-log the push call; cap the stored commit id list
    audit_user = audit_logger.UserWrap(
        username=extras.username, ip_addr=extras.ip)
    audit_repo = audit_logger.RepoWrap(repo_name=extras.repository)
    audit_logger.store(
        'user.push', action_data={
            'user_agent': extras.user_agent,
            'commit_ids': commit_ids[:400]},
        user=audit_user, repo=audit_repo, commit=True)

    output = ''
    # make_lock is tri-state: False, True, None. Only release a lock on False.
    if extras.make_lock is False and not is_shadow_repo(extras):
        Repository.unlock(Repository.get_by_repo_name(extras.repository))
        output += 'Released lock on repo `%s`\n' % extras.repository

    if extras.locked_by[0]:
        locked_by = User.get(extras.locked_by[0]).username
        reason = extras.locked_by[2]
        _http_ret = HTTPLockedRC(
            _locked_by_explanation(extras.repository, locked_by, reason))
        # TODO: johbo: if not?
        if str(_http_ret.code).startswith('2'):
            # 2xx codes don't raise exceptions; only inform the client
            output += _http_ret.title

    if extras.new_refs:
        # advertise "open pull request" links for every freshly pushed ref
        tmpl = (
            extras.server_url + '/' + extras.repository +
            "/pull-request/new?{ref_type}={ref_name}")
        for branch_name in extras.new_refs['branches']:
            output += 'RhodeCode: open pull request link: {}\n'.format(
                tmpl.format(ref_type='branch', ref_name=branch_name))

        for book_name in extras.new_refs['bookmarks']:
            output += 'RhodeCode: open pull request link: {}\n'.format(
                tmpl.format(ref_type='bookmark', ref_name=book_name))

    # Propagate to external components (rcextensions + event system).
    hook_response = ''
    if not is_shadow_repo(extras):
        hook_response = post_push_extension(
            repo_store_path=Repository.base_path(), **extras)
        events.trigger(events.RepoPushEvent(
            repo_name=extras.repository, pushed_commit_ids=commit_ids,
            extras=extras))

    output += 'RhodeCode: push completed\n'
    return HookResponse(0, output) + hook_response
290 309
291 310
def _locked_by_explanation(repo_name, user_name, reason):
    """Return the human-readable message shown when a repository is locked."""
    return (
        'Repository `%s` locked by user `%s`. Reason:`%s`'
        % (repo_name, user_name, reason))
297 316
298 317
def check_allowed_create_user(user_dict, created_by, **kwargs):
    """
    Run the optional PRE_CREATE_USER rcextension and veto user creation
    when it reports failure.

    :param user_dict: attributes of the user about to be created
    :param created_by: the user performing the creation
    :raises UserCreationError: when the hook returns a non-zero status;
        the hook's output is used as the rejection reason
    """
    # pre create hooks
    if not pre_create_user.is_active():
        return
    hook_result = pre_create_user(created_by=created_by, **user_dict)
    # a zero status from the extension means creation is allowed
    if hook_result.status != 0:
        raise UserCreationError(hook_result.output)
305 326
306 327
class ExtensionCallback(object):
    """
    Forwards a given call to rcextensions, sanitizes keyword arguments.

    Does check if there is an extension active for that hook. If it is
    there, it will forward all `kwargs_keys` keyword arguments to the
    extension callback.
    """

    def __init__(self, hook_name, kwargs_keys):
        # attribute name looked up on rhodecode.EXTENSIONS
        self._hook_name = hook_name
        # whitelist of keyword arguments forwarded to the extension
        self._kwargs_keys = set(kwargs_keys)

    def __call__(self, *args, **kwargs):
        log.debug('Calling extension callback for %s', self._hook_name)

        # collect only the whitelisted kwargs; a missing key is a caller bug
        forwarded = {}
        for key in self._kwargs_keys:
            try:
                forwarded[key] = kwargs[key]
            except KeyError:
                log.error('Failed to fetch %s key. Expected keys: %s',
                          key, self._kwargs_keys)
                raise

        # backward compat for removed api_key of old hooks. This way it works
        # with older rcextensions that require api_key to be present
        if self._hook_name in ['CREATE_USER_HOOK', 'DELETE_USER_HOOK']:
            forwarded['api_key'] = '_DEPRECATED_'

        callback = self._get_callback()
        if not callback:
            log.debug('extensions callback not found skipping...')
            return
        return callback(**forwarded)

    def is_active(self):
        # True when rcextensions defines a callback for this hook
        return hasattr(rhodecode.EXTENSIONS, self._hook_name)

    def _get_callback(self):
        return getattr(rhodecode.EXTENSIONS, self._hook_name, None)
340 368
341 369
# --- rcextensions entry points -------------------------------------------
# Each instance below wires one rcextensions hook name to the whitelist of
# keyword arguments that will be forwarded to the user-defined callback.

pre_pull_extension = ExtensionCallback(
    hook_name='PRE_PULL_HOOK',
    kwargs_keys=(
        'server_url', 'config', 'scm', 'username', 'ip', 'action',
        'repository', 'hook_type', 'user_agent', 'repo_store_path',))


post_pull_extension = ExtensionCallback(
    hook_name='PULL_HOOK',
    kwargs_keys=(
        'server_url', 'config', 'scm', 'username', 'ip', 'action',
        'repository', 'hook_type', 'user_agent', 'repo_store_path',))


pre_push_extension = ExtensionCallback(
    hook_name='PRE_PUSH_HOOK',
    kwargs_keys=(
        'server_url', 'config', 'scm', 'username', 'ip', 'action',
        'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',))


post_push_extension = ExtensionCallback(
    hook_name='PUSH_HOOK',
    kwargs_keys=(
        'server_url', 'config', 'scm', 'username', 'ip', 'action',
        'repository', 'repo_store_path', 'commit_ids', 'hook_type', 'user_agent',))


pre_create_user = ExtensionCallback(
    hook_name='PRE_CREATE_USER_HOOK',
    kwargs_keys=(
        'username', 'password', 'email', 'firstname', 'lastname', 'active',
        'admin', 'created_by'))


# pull-request lifecycle hooks all share the same argument whitelist
log_create_pull_request = ExtensionCallback(
    hook_name='CREATE_PULL_REQUEST',
    kwargs_keys=(
        'server_url', 'config', 'scm', 'username', 'ip', 'action',
        'repository', 'pull_request_id', 'url', 'title', 'description',
        'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
        'mergeable', 'source', 'target', 'author', 'reviewers'))


log_merge_pull_request = ExtensionCallback(
    hook_name='MERGE_PULL_REQUEST',
    kwargs_keys=(
        'server_url', 'config', 'scm', 'username', 'ip', 'action',
        'repository', 'pull_request_id', 'url', 'title', 'description',
        'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
        'mergeable', 'source', 'target', 'author', 'reviewers'))


log_close_pull_request = ExtensionCallback(
    hook_name='CLOSE_PULL_REQUEST',
    kwargs_keys=(
        'server_url', 'config', 'scm', 'username', 'ip', 'action',
        'repository', 'pull_request_id', 'url', 'title', 'description',
        'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
        'mergeable', 'source', 'target', 'author', 'reviewers'))


log_review_pull_request = ExtensionCallback(
    hook_name='REVIEW_PULL_REQUEST',
    kwargs_keys=(
        'server_url', 'config', 'scm', 'username', 'ip', 'action',
        'repository', 'pull_request_id', 'url', 'title', 'description',
        'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
        'mergeable', 'source', 'target', 'author', 'reviewers'))


log_update_pull_request = ExtensionCallback(
    hook_name='UPDATE_PULL_REQUEST',
    kwargs_keys=(
        'server_url', 'config', 'scm', 'username', 'ip', 'action',
        'repository', 'pull_request_id', 'url', 'title', 'description',
        'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
        'mergeable', 'source', 'target', 'author', 'reviewers'))


log_create_user = ExtensionCallback(
    hook_name='CREATE_USER_HOOK',
    kwargs_keys=(
        'username', 'full_name_or_username', 'full_contact', 'user_id',
        'name', 'firstname', 'short_contact', 'admin', 'lastname',
        'ip_addresses', 'extern_type', 'extern_name',
        'email', 'api_keys', 'last_login',
        'full_name', 'active', 'password', 'emails',
        'inherit_default_permissions', 'created_by', 'created_on'))


log_delete_user = ExtensionCallback(
    hook_name='DELETE_USER_HOOK',
    kwargs_keys=(
        'username', 'full_name_or_username', 'full_contact', 'user_id',
        'name', 'firstname', 'short_contact', 'admin', 'lastname',
        'ip_addresses',
        'email', 'last_login',
        'full_name', 'active', 'password', 'emails',
        'inherit_default_permissions', 'deleted_by'))


log_create_repository = ExtensionCallback(
    hook_name='CREATE_REPO_HOOK',
    kwargs_keys=(
        'repo_name', 'repo_type', 'description', 'private', 'created_on',
        'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
        'clone_uri', 'fork_id', 'group_id', 'created_by'))


log_delete_repository = ExtensionCallback(
    hook_name='DELETE_REPO_HOOK',
    kwargs_keys=(
        'repo_name', 'repo_type', 'description', 'private', 'created_on',
        'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
        'clone_uri', 'fork_id', 'group_id', 'deleted_by', 'deleted_on'))


log_create_repository_group = ExtensionCallback(
    hook_name='CREATE_REPO_GROUP_HOOK',
    kwargs_keys=(
        'group_name', 'group_parent_id', 'group_description',
        'group_id', 'user_id', 'created_by', 'created_on',
        'enable_locking'))
@@ -1,162 +1,163 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import webob
22 22 from pyramid.threadlocal import get_current_request
23 23
24 24 from rhodecode import events
25 25 from rhodecode.lib import hooks_base
26 26 from rhodecode.lib import utils2
27 27
28 28
def _get_rc_scm_extras(username, repo_name, repo_alias, action):
    """
    Build the AttributeDict of vcs operation context ("extras") used by
    the hook functions in :mod:`rhodecode.lib.hooks_base`.
    """
    # TODO: johbo: Replace by vcs_operation_context and remove fully
    from rhodecode.lib.base import vcs_operation_context
    check_locking = action in ('pull', 'push')

    request = get_current_request()

    # fall back to a blank WSGI environ when running outside a request
    dummy_environ = webob.Request.blank('').environ
    try:
        environ = request.environ or dummy_environ
    except TypeError:
        # we might use this outside of request context
        environ = dummy_environ

    extras = vcs_operation_context(
        environ, repo_name, username, action, repo_alias, check_locking)
    return utils2.AttributeDict(extras)
47 47
48 48
def trigger_post_push_hook(
        username, action, hook_type, repo_name, repo_alias, commit_ids):
    """
    Triggers push action hooks

    :param username: username who pushes
    :param action: push/push_local/push_remote
    :param hook_type: type of the executed hook, e.g. post_push; forwarded
        to rcextensions via the extras
    :param repo_name: name of repo
    :param repo_alias: the type of SCM repo
    :param commit_ids: list of commit ids that we pushed
    """
    extras = _get_rc_scm_extras(username, repo_name, repo_alias, action)
    extras.commit_ids = commit_ids
    extras.hook_type = hook_type
    hooks_base.post_push(extras)
63 64
64 65
def trigger_log_create_pull_request_hook(username, repo_name, repo_alias,
                                         pull_request):
    """
    Triggers create pull request action hooks

    :param username: username who creates the pull request
    :param repo_name: name of target repo
    :param repo_alias: the type of SCM target repo
    :param pull_request: the pull request that was created
    """
    # pull-request hooks only apply to hg/git repositories
    if repo_alias not in ('hg', 'git'):
        return

    extras = _get_rc_scm_extras(
        username, repo_name, repo_alias, 'create_pull_request')
    events.trigger(events.PullRequestCreateEvent(pull_request))
    extras.update(pull_request.get_api_data())
    hooks_base.log_create_pull_request(**extras)
83 84
84 85
def trigger_log_merge_pull_request_hook(username, repo_name, repo_alias,
                                        pull_request):
    """
    Triggers merge pull request action hooks

    :param username: username who merges the pull request
    :param repo_name: name of target repo
    :param repo_alias: the type of SCM target repo
    :param pull_request: the pull request that was merged
    """
    # pull-request hooks only apply to hg/git repositories
    if repo_alias not in ('hg', 'git'):
        return

    extras = _get_rc_scm_extras(username, repo_name, repo_alias,
                                'merge_pull_request')
    events.trigger(events.PullRequestMergeEvent(pull_request))
    extras.update(pull_request.get_api_data())
    hooks_base.log_merge_pull_request(**extras)
103 104
104 105
def trigger_log_close_pull_request_hook(username, repo_name, repo_alias,
                                        pull_request):
    """
    Triggers close pull request action hooks

    :param username: username who closes the pull request
    :param repo_name: name of target repo
    :param repo_alias: the type of SCM target repo
    :param pull_request: the pull request that was closed
    """
    # pull-request hooks only apply to hg/git repositories
    if repo_alias not in ('hg', 'git'):
        return

    extras = _get_rc_scm_extras(username, repo_name, repo_alias,
                                'close_pull_request')
    events.trigger(events.PullRequestCloseEvent(pull_request))
    extras.update(pull_request.get_api_data())
    hooks_base.log_close_pull_request(**extras)
123 124
124 125
def trigger_log_review_pull_request_hook(username, repo_name, repo_alias,
                                         pull_request):
    """
    Triggers review status change pull request action hooks

    :param username: username who changed the review status
    :param repo_name: name of target repo
    :param repo_alias: the type of SCM target repo
    :param pull_request: the pull request that review status changed
    """
    # pull-request hooks only apply to hg/git repositories
    if repo_alias not in ('hg', 'git'):
        return

    extras = _get_rc_scm_extras(username, repo_name, repo_alias,
                                'review_pull_request')
    events.trigger(events.PullRequestReviewEvent(pull_request))
    extras.update(pull_request.get_api_data())
    hooks_base.log_review_pull_request(**extras)
143 144
144 145
def trigger_log_update_pull_request_hook(username, repo_name, repo_alias,
                                         pull_request):
    """
    Triggers update pull request action hooks

    :param username: username who updates the pull request
    :param repo_name: name of target repo
    :param repo_alias: the type of SCM target repo
    :param pull_request: the pull request that was updated
    """
    # pull-request hooks only apply to hg/git repositories
    if repo_alias not in ('hg', 'git'):
        return

    extras = _get_rc_scm_extras(username, repo_name, repo_alias,
                                'update_pull_request')
    events.trigger(events.PullRequestUpdateEvent(pull_request))
    extras.update(pull_request.get_api_data())
    hooks_base.log_update_pull_request(**extras)
@@ -1,781 +1,781 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Utilities library for RhodeCode
23 23 """
24 24
25 25 import datetime
26 26 import decorator
27 27 import json
28 28 import logging
29 29 import os
30 30 import re
31 import sys
31 32 import shutil
32 33 import tempfile
33 34 import traceback
34 35 import tarfile
35 36 import warnings
36 37 import hashlib
37 38 from os.path import join as jn
38 39
39 40 import paste
40 41 import pkg_resources
41 42 from webhelpers.text import collapse, remove_formatting, strip_tags
42 43 from mako import exceptions
43 44 from pyramid.threadlocal import get_current_registry
44 45 from rhodecode.lib.request import Request
45 46
46 from rhodecode.lib.fakemod import create_module
47 47 from rhodecode.lib.vcs.backends.base import Config
48 48 from rhodecode.lib.vcs.exceptions import VCSError
49 49 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
50 50 from rhodecode.lib.utils2 import (
51 51 safe_str, safe_unicode, get_current_rhodecode_user, md5, sha1)
52 52 from rhodecode.model import meta
53 53 from rhodecode.model.db import (
54 54 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
55 55 from rhodecode.model.meta import Session
56 56
57 57
58 58 log = logging.getLogger(__name__)
59 59
60 60 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
61 61
62 62 # String which contains characters that are not allowed in slug names for
63 63 # repositories or repository groups. It is properly escaped to use it in
64 64 # regular expressions.
65 65 SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
66 66
67 67 # Regex that matches forbidden characters in repo/group slugs.
68 68 SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS))
69 69
70 70 # Regex that matches allowed characters in repo/group slugs.
71 71 SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS))
72 72
73 73 # Regex that matches whole repo/group slugs.
74 74 SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS))
75 75
76 76 _license_cache = None
77 77
78 78
def repo_name_slug(value):
    """
    Return slug of name of repository
    This function is called on each creation/modification
    of repository to prevent bad names in repo
    """
    # strip formatting, drop forbidden characters, then normalize whitespace
    # and repeated separators into single dashes
    slug = remove_formatting(value)
    slug = SLUG_BAD_CHAR_RE.sub('', slug)
    slug = re.sub(r'\s+', '-', slug)
    return collapse(slug, '-')
92 92
93 93
94 94 #==============================================================================
95 95 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
96 96 #==============================================================================
def get_repo_slug(request):
    """
    Extract the repository name from a request object.

    Prefers the resolved db reference (translates the example.com/_<id>
    form into the proper repo name), falling back to the pyramid
    matchdict. Trailing slashes are stripped.
    """
    repo_name = ''

    if hasattr(request, 'db_repo'):
        # resolved db reference is authoritative
        repo_name = request.db_repo.repo_name
    else:
        match = getattr(request, 'matchdict', None)
        if match:
            # pyramid routing
            repo_name = match.get('repo_name')

    return repo_name.rstrip('/') if repo_name else repo_name
111 111
112 112
def get_repo_group_slug(request):
    """
    Extract the repository group name from a request object.

    Prefers the resolved db reference (translates the example.com/_<id>
    form into the proper group name), falling back to the pyramid
    matchdict. Trailing slashes are stripped.
    """
    group_name = ''

    if hasattr(request, 'db_repo_group'):
        # resolved db reference is authoritative
        group_name = request.db_repo_group.group_name
    else:
        match = getattr(request, 'matchdict', None)
        if match:
            # pyramid routing
            group_name = match.get('repo_group_name')

    return group_name.rstrip('/') if group_name else group_name
126 126
127 127
def get_user_group_slug(request):
    """
    Extract the user group name from a request object.

    Prefers the resolved db reference; otherwise resolves the pyramid
    matchdict's `user_group_id` or `user_group_name` through the
    UserGroup model. Returns None on any lookup failure.
    """
    if hasattr(request, 'db_user_group'):
        return request.db_user_group.users_group_name

    match = getattr(request, 'matchdict', None)
    if not match:
        return ''

    group_id = match.get('user_group_id')
    group_name = match.get('user_group_name')
    resolved = group_id
    try:
        if group_id:
            resolved = UserGroup.get(group_id)
        elif group_name:
            resolved = UserGroup.get_by_group_name(group_name)

        if resolved:
            return resolved.users_group_name
    except Exception:
        log.exception('Failed to get user group by id and name')
        # catch all failures here
        return None

    return resolved
151 151
152 152
def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
    """
    Scans given path for repos and return (name,(type,path)) tuple

    :param path: path to scan for repositories
    :param recursive: recursive search and return names with subdirs in front
    :param skip_removed_repos: skip directories matching the "removed repo"
        rename pattern
    """

    # remove ending slash for better results
    path = path.rstrip(os.sep)
    log.debug('now scanning in %s location recursive:%s...', path, recursive)

    def _walk(top):
        entries = _get_dirpaths(top)
        if not _is_dir_writable(top):
            log.warning('repo path without write access: %s', top)

        for entry in entries:
            if os.path.isfile(os.path.join(top, entry)):
                continue
            entry_path = os.path.join(top, entry)

            # skip removed repos
            if skip_removed_repos and REMOVED_REPO_PAT.match(entry):
                continue

            # skip .<something> dirs
            if entry.startswith('.'):
                continue

            try:
                scm_info = get_scm(entry_path)
                # yield the repo name relative to the scanned root
                yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
            except VCSError:
                if not recursive:
                    continue
                # not a repo itself; descend looking for nested repos
                if os.path.isdir(entry_path):
                    for inner_scm in _walk(entry_path):
                        yield inner_scm

    return _walk(path)
196 196
197 197
def _get_dirpaths(p):
    """
    List directory entries of `p`, dropping entries that could not be
    decoded to the same string type as `p`. Returns [] when `p` is not
    readable.
    """
    try:
        # OS-independent way of checking if we have at least read-only
        # access or not.
        entries = os.listdir(p)
    except OSError:
        log.warning('ignoring repo path without read access: %s', p)
        return []

    # os.listdir has a tweak: given unicode input it tries to decode the
    # entries and returns unicode objects; entries it cannot decode come
    # back as byte strings and cause issues. Those are filtered out here
    # until a solid solution for path handling has been built.
    expected_type = type(p)

    def _decodable(entry):
        if type(entry) is not expected_type:
            log.error(
                u"Ignoring path %s since it cannot be decoded into unicode.",
                # Using "repr" to make sure that we see the byte value in case
                # of support.
                repr(entry))
            return False
        return True

    return [entry for entry in entries if _decodable(entry)]
228 228
229 229
def _is_dir_writable(path):
    """
    Probe if `path` is writable.

    Due to trouble on Cygwin / Windows, this is actually probing if it is
    possible to create a file inside of `path`, stat does not produce reliable
    results in this case.
    """
    try:
        with tempfile.TemporaryFile(dir=path):
            pass
    except OSError:
        return False
    else:
        return True
244 244
245 245
def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
    """
    Returns True if given path is a valid repository False otherwise.
    If expect_scm param is given also, compare if given scm is the same
    as expected from scm parameter. If explicit_scm is given don't try to
    detect the scm, just use the given one to check if repo is valid

    :param repo_name: repository name relative to `base_path`
    :param base_path: repository store root
    :param expect_scm: expected scm alias to compare against, if given
    :param explicit_scm: skip detection and validate with this backend
    :param config: optional vcs config passed to the explicit backend

    :return True: if given path is a valid repository
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
    log.debug('Checking if `%s` is a valid path for repository. '
              'Explicit type: %s', repo_name, explicit_scm)

    try:
        if explicit_scm:
            # trust the caller's backend choice instead of detecting
            scms = [get_scm_backend(explicit_scm)(
                full_path, config=config).alias]
        else:
            scms = get_scm(full_path)

        if expect_scm:
            return scms[0] == expect_scm
        log.debug('path: %s is an vcs object:%s', full_path, scms)
        return True
    except VCSError:
        log.debug('path: %s is not a valid repo !', full_path)
        return False
279 279
280 280
def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
    """
    Returns True if given path is a repository group, False otherwise

    :param repo_group_name: name of the repository group, relative to
        `base_path`
    :param base_path: repository store root to resolve against
    :param skip_path_check: when True, do not require the directory to
        exist on the filesystem
    """
    full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
    log.debug('Checking if `%s` is a valid path for repository group',
              repo_group_name)

    # check if it's not a repo
    if is_valid_repo(repo_group_name, base_path):
        log.debug('Repo called %s exist, it is not a valid repo group', repo_group_name)
        return False

    try:
        # we need to check bare git repos at higher level
        # since we might match branches/hooks/info/objects or possible
        # other things inside bare git repo
        maybe_repo = os.path.dirname(full_path)
        if maybe_repo == base_path:
            # skip root level repo check, we know root location CANNOT BE a repo group
            return False

        scm_ = get_scm(maybe_repo)
        log.debug('path: %s is a vcs object:%s, not valid repo group', full_path, scm_)
        return False
    except VCSError:
        pass

    # check if it's a valid path
    if skip_path_check or os.path.isdir(full_path):
        log.debug('path: %s is a valid repo group !', full_path)
        return True

    log.debug('path: %s is not a valid repo group !', full_path)
    return False
319 319
320 320
def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
    """
    Interactively ask a yes/no question on the console.

    :param prompt: text shown to the user
    :param retries: number of invalid answers tolerated before giving up
    :param complaint: message printed after an unrecognized answer
    :raises IOError: when the user exhausts all retries
    """
    attempts_left = retries
    while True:
        answer = raw_input(prompt)
        if answer.lower() in ('y', 'ye', 'yes'):
            return True
        if answer.lower() in ('n', 'no', 'nop', 'nope'):
            return False
        attempts_left -= 1
        if attempts_left < 0:
            raise IOError
        print(complaint)
332 332
# propagated from mercurial documentation: the hgrc config sections that
# RhodeCode recognizes as valid "ui" settings sections
ui_sections = [
    'alias', 'auth',
    'decode/encode', 'defaults',
    'diff', 'email',
    'extensions', 'format',
    'merge-patterns', 'merge-tools',
    'hooks', 'http_proxy',
    'smtp', 'patch',
    'paths', 'profiling',
    'server', 'trusted',
    'ui', 'web', ]
345 345
346 346
def config_data_from_db(clear_session=True, repo=None):
    """
    Read the configuration data from the database and return configuration
    tuples.

    :param clear_session: remove the db session after reading
    :param repo: optional repository to scope the vcs settings to
    :returns: list of (section, key, value) tuples
    """
    from rhodecode.model.settings import VcsSettingsModel

    config = []

    session = meta.Session()
    settings_model = VcsSettingsModel(repo=repo, sa=session)

    ui_settings = settings_model.get_ui_settings()

    ui_data = []
    for setting in ui_settings:
        if setting.active:
            ui_data.append((setting.section, setting.key, setting.value))
            config.append((
                safe_str(setting.section), safe_str(setting.key),
                safe_str(setting.value)))
            if setting.key == 'push_ssl':
                # force set push_ssl requirement to False, rhodecode
                # handles that
                config.append((
                    safe_str(setting.section), safe_str(setting.key), False))
    log.debug(
        'settings ui from db: %s',
        ','.join('[{}] {}={}'.format(*entry) for entry in ui_data))
    if clear_session:
        meta.Session.remove()

    # TODO: mikhail: probably it makes no sense to re-read hooks information.
    # It's already there and activated/deactivated
    skip_entries = []
    enabled_hook_classes = get_enabled_hook_classes(ui_settings)
    if 'pull' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
    if 'push' not in enabled_hook_classes:
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
        skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))

    config = [entry for entry in config if entry[:2] not in skip_entries]

    return config
393 393
394 394
def make_db_config(clear_session=True, repo=None):
    """
    Create a :class:`Config` instance based on the values in the database.
    """
    config = Config()
    for section, option, value in config_data_from_db(
            clear_session=clear_session, repo=repo):
        config.set(section, option, value)
    return config
404 404
405 405
def get_enabled_hook_classes(ui_settings):
    """
    Return the enabled hook classes.

    :param ui_settings: List of ui_settings as returned
        by :meth:`VcsSettingsModel.get_ui_settings`

    :return: a list with the enabled hook classes. The order is not guaranteed.
    :rtype: list
    """
    hook_names = {
        RhodeCodeUi.HOOK_PUSH: 'push',
        RhodeCodeUi.HOOK_PULL: 'pull',
        RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
    }

    enabled_hooks = []
    for section, key, value, active in ui_settings:
        # only active entries in the 'hooks' section count
        if section == 'hooks' and active:
            hook = hook_names.get(key)
            if hook:
                enabled_hooks.append(hook)

    return enabled_hooks
433 433
434 434
def set_rhodecode_config(config):
    """
    Updates pyramid config with new settings from database

    :param config: dict-like pyramid settings object to update in place
    """
    from rhodecode.model.settings import SettingsModel

    db_settings = SettingsModel().get_all_settings()
    for key, value in db_settings.items():
        config[key] = value
446 446
447 447
def get_rhodecode_realm():
    """
    Return the rhodecode authentication realm stored in the database.
    """
    from rhodecode.model.settings import SettingsModel

    realm_setting = SettingsModel().get_setting_by_name('realm')
    return safe_str(realm_setting.app_settings_value)
455 455
456 456
def get_rhodecode_base_path():
    """
    Returns the base path. The base path is the filesystem path which points
    to the repository store.
    """
    from rhodecode.model.settings import SettingsModel

    store_path_ui = SettingsModel().get_ui_by_section_and_key('paths', '/')
    return safe_str(store_path_ui.ui_value)
465 465
466 466
def map_groups(path):
    """
    Given a full path to a repository, create all nested groups that this
    repo is inside. This function creates parent-child relationships between
    groups and creates default perms for all new groups.

    :param path: full path to repository
    :return: the innermost (deepest) RepoGroup created or found, or None if
        the path has no group components
    """
    from rhodecode.model.repo_group import RepoGroupModel
    sa = meta.Session()
    groups = path.split(Repository.NAME_SEP)
    parent = None
    group = None

    # last element is repo in nested groups structure
    groups = groups[:-1]
    rgm = RepoGroupModel(sa)
    owner = User.get_first_super_admin()
    for lvl, group_name in enumerate(groups):
        # rebuild the full group name from the path components seen so far
        group_name = '/'.join(groups[:lvl] + [group_name])
        group = RepoGroup.get_by_group_name(group_name)
        desc = '%s group' % group_name

        # skip folders that are now removed repos
        if REMOVED_REPO_PAT.match(group_name):
            break

        if group is None:
            log.debug('creating group level: %s group_name: %s',
                      lvl, group_name)
            group = RepoGroup(group_name, parent)
            group.group_description = desc
            group.user = owner
            sa.add(group)
            perm_obj = rgm._create_default_perms(group)
            sa.add(perm_obj)
            # flush (not commit) so the group gets an id usable by children;
            # the caller is responsible for the final commit
            sa.flush()

        # each created/found group becomes the parent of the next level
        parent = group
    return group
507 507
508 508
def repo2db_mapper(initial_repo_list, remove_obsolete=False):
    """
    maps all repos given in initial_repo_list, non existing repositories
    are created, if remove_obsolete is True it also checks for db entries
    that are not in initial_repo_list and removes them.

    :param initial_repo_list: dict of repositories found by scanning methods,
        mapping repo name to a scm repository object
    :param remove_obsolete: check for obsolete entries in database
    :return: tuple of (added, removed) name lists
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.repo_group import RepoGroupModel
    from rhodecode.model.settings import SettingsModel

    sa = meta.Session()
    repo_model = RepoModel()
    user = User.get_first_super_admin()
    added = []

    # creation defaults
    defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')
    private = defs.get('repo_private')

    for name, repo in initial_repo_list.items():
        # ensure all parent repo groups exist for nested repo paths
        group = map_groups(name)
        unicode_name = safe_unicode(name)
        db_repo = repo_model.get_by_repo_name(unicode_name)
        # found repo that is on filesystem not in RhodeCode database
        if not db_repo:
            log.info('repository %s not found, creating now', name)
            added.append(name)
            desc = (repo.description
                    if repo.description != 'unknown'
                    else '%s repository' % name)

            db_repo = repo_model._create_repo(
                repo_name=name,
                repo_type=repo.alias,
                description=desc,
                repo_group=getattr(group, 'group_id', None),
                owner=user,
                enable_locking=enable_locking,
                enable_downloads=enable_downloads,
                enable_statistics=enable_statistics,
                private=private,
                state=Repository.STATE_CREATED
            )
            sa.commit()
            # we added that repo just now, and make sure we updated server info
            if db_repo.repo_type == 'git':
                git_repo = db_repo.scm_instance()
                # update repository server-info
                log.debug('Running update server info')
                git_repo._update_server_info()

            db_repo.update_commit_cache()

        # re-install hooks for every scanned repo, new or existing
        config = db_repo._config
        config.set('extensions', 'largefiles', '')
        repo = db_repo.scm_instance(config=config)
        repo.install_hooks()

    removed = []
    if remove_obsolete:
        # remove from database those repositories that are not in the filesystem
        for repo in sa.query(Repository).all():
            if repo.repo_name not in initial_repo_list.keys():
                log.debug("Removing non-existing repository found in db `%s`",
                          repo.repo_name)
                try:
                    RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
                    sa.commit()
                    removed.append(repo.repo_name)
                except Exception:
                    # don't hold further removals on error
                    log.error(traceback.format_exc())
                    sa.rollback()

        def splitter(full_repo_name):
            # return the group portion of 'group/repo', or None when the
            # repo lives at the top level
            _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
            gr_name = None
            if len(_parts) == 2:
                gr_name = _parts[0]
            return gr_name

        initial_repo_group_list = [splitter(x) for x in
                                   initial_repo_list.keys() if splitter(x)]

        # remove from database those repository groups that are not in the
        # filesystem due to parent child relationships we need to delete them
        # in a specific order of most nested first
        all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
        nested_sort = lambda gr: len(gr.split('/'))
        for group_name in sorted(all_groups, key=nested_sort, reverse=True):
            if group_name not in initial_repo_group_list:
                repo_group = RepoGroup.get_by_group_name(group_name)
                # skip groups that still have children, or that actually do
                # exist on the filesystem
                if (repo_group.children.all() or
                        not RepoGroupModel().check_exist_filesystem(
                            group_name=group_name, exc_on_failure=False)):
                    continue

                log.info(
                    'Removing non-existing repository group found in db `%s`',
                    group_name)
                try:
                    RepoGroupModel(sa).delete(group_name, fs_remove=False)
                    sa.commit()
                    removed.append(group_name)
                except Exception:
                    # don't hold further removals on error
                    log.exception(
                        'Unable to remove repository group `%s`',
                        group_name)
                    sa.rollback()
                    raise

    return added, removed
628 628
629 629
def load_rcextensions(root_path):
    """
    Import the ``rcextensions`` package located under ``root_path`` and
    register it as ``rhodecode.EXTENSIONS``.

    Any ``EXTRA_MAPPINGS`` dict defined by the extensions is merged into
    the known language-extensions map.

    :param root_path: directory containing the ``rcextensions`` package
    """
    import rhodecode
    from rhodecode.config import conf

    path = os.path.join(root_path)
    # make the rcextensions package importable; guard against growing
    # sys.path with duplicates when this is called more than once
    if path not in sys.path:
        sys.path.append(path)
    try:
        rcextensions = __import__('rcextensions')
    except ImportError:
        # Logger.warn is deprecated; use warning()
        log.warning('Unable to load rcextensions from %s', path)
        rcextensions = None

    if rcextensions:
        log.debug('Found rcextensions module loaded %s...', rcextensions)
        rhodecode.EXTENSIONS = rcextensions

    # Additional mappings that are not present in the pygments lexers
    conf.LANGUAGES_EXTENSIONS_MAP.update(
        getattr(rhodecode.EXTENSIONS, 'EXTRA_MAPPINGS', {}))
649 649
650 650
def get_custom_lexer(extension):
    """
    returns a custom lexer if it is defined in rcextensions module, or None
    if there's no custom lexer defined
    """
    import rhodecode
    from pygments import lexers

    # custom override made by RhodeCode
    if extension in ['mako']:
        return lexers.get_lexer_by_name('html+mako')

    # check if we didn't define this extension as other lexer
    if not rhodecode.EXTENSIONS:
        return None
    extra_lexers = getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
    if extra_lexers and extension in extra_lexers:
        lexer_name = extra_lexers[extension]
        return lexers.get_lexer_by_name(lexer_name)
    return None
668 668
669 669
670 670 #==============================================================================
671 671 # TEST FUNCTIONS AND CREATORS
672 672 #==============================================================================
def create_test_index(repo_location, config):
    """
    Makes default test index.
    """
    import rc_testdata

    index_destination = os.path.dirname(config['search.location'])
    rc_testdata.extract_search_index('vcs_search_index', index_destination)
681 681
682 682
def create_test_directory(test_path):
    """
    Create test directory if it doesn't exist.
    """
    if os.path.isdir(test_path):
        return
    log.debug('Creating testdir %s', test_path)
    os.makedirs(test_path)
690 690
691 691
def create_test_database(test_path, config):
    """
    Makes a fresh database.

    :param test_path: base path used to derive test root paths for settings
    :param config: dict-like settings, must provide 'sqlalchemy.db1.url'
        and 'here'
    """
    from rhodecode.lib.db_manage import DbManage

    # PART ONE create db
    dbconf = config['sqlalchemy.db1.url']
    log.debug('making test db %s', dbconf)

    dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
                        tests=True, cli_args={'force_ask': True})
    # NOTE: the call order below follows the DbManage bootstrap sequence;
    # tables must exist before settings/users/permissions are created
    dbmanage.create_tables(override=True)
    dbmanage.set_db_version()
    # for tests dynamically set new root paths based on generated content
    dbmanage.create_settings(dbmanage.config_prompt(test_path))
    dbmanage.create_default_user()
    dbmanage.create_test_admin_and_users()
    dbmanage.create_permissions()
    dbmanage.populate_default_permissions()
    Session().commit()
713 713
714 714
def create_test_repositories(test_path, config):
    """
    Creates test repositories in the temporary directory. Repositories are
    extracted from archives within the rc_testdata package.

    :param test_path: destination directory for the extracted repositories
    :param config: dict-like settings, must provide 'search.location' and
        'cache_dir'
    """
    import rc_testdata
    from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO

    log.debug('making test vcs repositories')

    idx_path = config['search.location']
    data_path = config['cache_dir']

    # clean index and data
    if idx_path and os.path.exists(idx_path):
        log.debug('remove %s', idx_path)
        shutil.rmtree(idx_path)

    if data_path and os.path.exists(data_path):
        log.debug('remove %s', data_path)
        shutil.rmtree(data_path)

    rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
    rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))

    # Note: Subversion is in the process of being integrated with the system,
    # until we have a properly packed version of the test svn repository, this
    # tries to copy over the repo from a package "rc_testdata"
    svn_repo_path = rc_testdata.get_svn_repo_archive()
    with tarfile.open(svn_repo_path) as tar:
        # NOTE(review): extractall without member filtering is only safe
        # because the archive ships inside our own rc_testdata package
        tar.extractall(jn(test_path, SVN_REPO))
746 746
747 747
def password_changed(auth_user, session):
    """
    Check whether the password of ``auth_user`` differs from the password
    hash remembered in the web session.

    :return: True when the hashes differ, False for default/anonymous users
    """
    # Never report password change in case of default user or anonymous user.
    if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
        return False

    current_hash = md5(auth_user.password) if auth_user.password else None
    session_user = session.get('rhodecode_user', {})
    remembered_hash = session_user.get('password', '')
    return current_hash != remembered_hash
757 757
758 758
def read_opensource_licenses():
    """
    Return the parsed ``config/licenses.json`` shipped with rhodecode,
    cached at module level after the first read.
    """
    global _license_cache

    if not _license_cache:
        raw_licenses = pkg_resources.resource_string(
            'rhodecode', 'config/licenses.json')
        _license_cache = json.loads(raw_licenses)

    return _license_cache
768 768
769 769
def generate_platform_uuid():
    """
    Generates platform UUID based on it's name

    :return: sha256 hexdigest of the platform description, or the string
        'UNDEFINED' if it could not be computed
    """
    import platform

    try:
        uuid_list = [platform.platform()]
        # encode explicitly: hashlib requires bytes on unicode-native
        # interpreters, and ascii platform strings encode identically on both
        return hashlib.sha256(':'.join(uuid_list).encode('utf-8')).hexdigest()
    except Exception as e:
        log.error('Failed to generate host uuid: %s', e)
        return 'UNDEFINED'
@@ -1,1730 +1,1731 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31 import collections
32 32
33 33 from pyramid.threadlocal import get_current_request
34 34
35 35 from rhodecode import events
36 36 from rhodecode.translation import lazy_ugettext#, _
37 37 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 38 from rhodecode.lib import audit_logger
39 39 from rhodecode.lib.compat import OrderedDict
40 40 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 41 from rhodecode.lib.markup_renderer import (
42 42 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
43 43 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
44 44 from rhodecode.lib.vcs.backends.base import (
45 45 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
46 46 from rhodecode.lib.vcs.conf import settings as vcs_settings
47 47 from rhodecode.lib.vcs.exceptions import (
48 48 CommitDoesNotExistError, EmptyRepositoryError)
49 49 from rhodecode.model import BaseModel
50 50 from rhodecode.model.changeset_status import ChangesetStatusModel
51 51 from rhodecode.model.comment import CommentsModel
52 52 from rhodecode.model.db import (
53 53 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
54 54 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
55 55 from rhodecode.model.meta import Session
56 56 from rhodecode.model.notification import NotificationModel, \
57 57 EmailNotificationModel
58 58 from rhodecode.model.scm import ScmModel
59 59 from rhodecode.model.settings import VcsSettingsModel
60 60
61 61
62 62 log = logging.getLogger(__name__)
63 63
64 64
# Data structure to hold the response data when updating commits during a pull
# request update.
# Fields (presumably, based on usage elsewhere -- confirm against callers):
# executed -- whether the update actually ran; reason -- status/failure code;
# new/old -- state after/before the update; changes -- the commit changes;
# source_changed/target_changed -- which side of the pull request moved.
UpdateResponse = collections.namedtuple('UpdateResponse', [
    'executed', 'reason', 'new', 'old', 'changes',
    'source_changed', 'target_changed'])
70 70
71 71
class PullRequestModel(BaseModel):
    """Model layer encapsulating pull request operations."""

    # SQLAlchemy model class this model operates on
    cls = PullRequest

    # number of context lines used when rendering diffs
    DIFF_CONTEXT = 3

    # translatable, user-facing explanations keyed by merge failure reason
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            'This pull request cannot be merged because of an unhandled'
            ' exception.'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of merge conflicts.'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            'This pull request could not be merged because push to target'
            ' failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            'This pull request cannot be merged because the target is not a'
            ' head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            'This pull request cannot be merged because the source contains'
            ' more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            'This pull request cannot be merged because the target has'
            ' multiple heads.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            'This pull request cannot be merged because the target repository'
            ' is locked.'),
        MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
            'This pull request cannot be merged because the target or the '
            'source reference is missing.'),
        MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be merged because the target '
            'reference is missing.'),
        MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be merged because the source '
            'reference is missing.'),
        MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of conflicts related '
            'to sub repositories.'),
    }

    # translatable, user-facing explanations keyed by update failure reason
    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source and target have not changed.'),
        UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be updated because the target '
            'reference is missing.'),
        UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be updated because the source '
            'reference is missing.'),
    }
132 132
    def __get_pull_request(self, pull_request):
        # Resolve an id or instance into a PullRequest/PullRequestVersion
        # model object.
        return self._get_instance((
            PullRequest, PullRequestVersion), pull_request)
136 136
137 137 def _check_perms(self, perms, pull_request, user, api=False):
138 138 if not api:
139 139 return h.HasRepoPermissionAny(*perms)(
140 140 user=user, repo_name=pull_request.target_repo.repo_name)
141 141 else:
142 142 return h.HasRepoPermissionAnyApi(*perms)(
143 143 user=user, repo_name=pull_request.target_repo.repo_name)
144 144
145 145 def check_user_read(self, pull_request, user, api=False):
146 146 _perms = ('repository.admin', 'repository.write', 'repository.read',)
147 147 return self._check_perms(_perms, pull_request, user, api)
148 148
149 149 def check_user_merge(self, pull_request, user, api=False):
150 150 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
151 151 return self._check_perms(_perms, pull_request, user, api)
152 152
153 153 def check_user_update(self, pull_request, user, api=False):
154 154 owner = user.user_id == pull_request.user_id
155 155 return self.check_user_merge(pull_request, user, api) or owner
156 156
157 157 def check_user_delete(self, pull_request, user):
158 158 owner = user.user_id == pull_request.user_id
159 159 _perms = ('repository.admin',)
160 160 return self._check_perms(_perms, pull_request, user) or owner
161 161
162 162 def check_user_change_status(self, pull_request, user, api=False):
163 163 reviewer = user.user_id in [x.user_id for x in
164 164 pull_request.reviewers]
165 165 return self.check_user_update(pull_request, user, api) or reviewer
166 166
167 167 def check_user_comment(self, pull_request, user):
168 168 owner = user.user_id == pull_request.user_id
169 169 return self.check_user_read(pull_request, user) or owner
170 170
    def get(self, pull_request):
        """Return the pull request instance for the given id or instance."""
        return self.__get_pull_request(pull_request)
173 173
    def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
                               opened_by=None, order_by=None,
                               order_dir='desc'):
        """
        Build (but do not execute) the base query used by count_all/get_all.

        :param repo_name: target or source repo name, may be falsy for all
        :param source: when True filter by source repo instead of target
        :param statuses: optional list of pull request statuses
        :param opened_by: optional list of author user ids
        :param order_by: one of 'name_raw', 'title', 'updated_on_raw',
            'target_repo', or None for no explicit ordering
        :param order_dir: 'asc' or 'desc'
        """
        repo = None
        if repo_name:
            repo = self._get_repo(repo_name)

        q = PullRequest.query()

        # source or target
        if repo and source:
            q = q.filter(PullRequest.source_repo == repo)
        elif repo:
            q = q.filter(PullRequest.target_repo == repo)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        # opened by filter
        if opened_by:
            q = q.filter(PullRequest.user_id.in_(opened_by))

        if order_by:
            # map external sort keys to model columns
            order_map = {
                'name_raw': PullRequest.pull_request_id,
                'title': PullRequest.title,
                'updated_on_raw': PullRequest.updated_on,
                'target_repo': PullRequest.target_repo_id
            }
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
210 210
211 211 def count_all(self, repo_name, source=False, statuses=None,
212 212 opened_by=None):
213 213 """
214 214 Count the number of pull requests for a specific repository.
215 215
216 216 :param repo_name: target or source repo
217 217 :param source: boolean flag to specify if repo_name refers to source
218 218 :param statuses: list of pull request statuses
219 219 :param opened_by: author user of the pull request
220 220 :returns: int number of pull requests
221 221 """
222 222 q = self._prepare_get_all_query(
223 223 repo_name, source=source, statuses=statuses, opened_by=opened_by)
224 224
225 225 return q.count()
226 226
227 227 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
228 228 offset=0, length=None, order_by=None, order_dir='desc'):
229 229 """
230 230 Get all pull requests for a specific repository.
231 231
232 232 :param repo_name: target or source repo
233 233 :param source: boolean flag to specify if repo_name refers to source
234 234 :param statuses: list of pull request statuses
235 235 :param opened_by: author user of the pull request
236 236 :param offset: pagination offset
237 237 :param length: length of returned list
238 238 :param order_by: order of the returned list
239 239 :param order_dir: 'asc' or 'desc' ordering direction
240 240 :returns: list of pull requests
241 241 """
242 242 q = self._prepare_get_all_query(
243 243 repo_name, source=source, statuses=statuses, opened_by=opened_by,
244 244 order_by=order_by, order_dir=order_dir)
245 245
246 246 if length:
247 247 pull_requests = q.limit(length).offset(offset).all()
248 248 else:
249 249 pull_requests = q.all()
250 250
251 251 return pull_requests
252 252
253 253 def count_awaiting_review(self, repo_name, source=False, statuses=None,
254 254 opened_by=None):
255 255 """
256 256 Count the number of pull requests for a specific repository that are
257 257 awaiting review.
258 258
259 259 :param repo_name: target or source repo
260 260 :param source: boolean flag to specify if repo_name refers to source
261 261 :param statuses: list of pull request statuses
262 262 :param opened_by: author user of the pull request
263 263 :returns: int number of pull requests
264 264 """
265 265 pull_requests = self.get_awaiting_review(
266 266 repo_name, source=source, statuses=statuses, opened_by=opened_by)
267 267
268 268 return len(pull_requests)
269 269
270 270 def get_awaiting_review(self, repo_name, source=False, statuses=None,
271 271 opened_by=None, offset=0, length=None,
272 272 order_by=None, order_dir='desc'):
273 273 """
274 274 Get all pull requests for a specific repository that are awaiting
275 275 review.
276 276
277 277 :param repo_name: target or source repo
278 278 :param source: boolean flag to specify if repo_name refers to source
279 279 :param statuses: list of pull request statuses
280 280 :param opened_by: author user of the pull request
281 281 :param offset: pagination offset
282 282 :param length: length of returned list
283 283 :param order_by: order of the returned list
284 284 :param order_dir: 'asc' or 'desc' ordering direction
285 285 :returns: list of pull requests
286 286 """
287 287 pull_requests = self.get_all(
288 288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
289 289 order_by=order_by, order_dir=order_dir)
290 290
291 291 _filtered_pull_requests = []
292 292 for pr in pull_requests:
293 293 status = pr.calculated_review_status()
294 294 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
295 295 ChangesetStatus.STATUS_UNDER_REVIEW]:
296 296 _filtered_pull_requests.append(pr)
297 297 if length:
298 298 return _filtered_pull_requests[offset:offset+length]
299 299 else:
300 300 return _filtered_pull_requests
301 301
302 302 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
303 303 opened_by=None, user_id=None):
304 304 """
305 305 Count the number of pull requests for a specific repository that are
306 306 awaiting review from a specific user.
307 307
308 308 :param repo_name: target or source repo
309 309 :param source: boolean flag to specify if repo_name refers to source
310 310 :param statuses: list of pull request statuses
311 311 :param opened_by: author user of the pull request
312 312 :param user_id: reviewer user of the pull request
313 313 :returns: int number of pull requests
314 314 """
315 315 pull_requests = self.get_awaiting_my_review(
316 316 repo_name, source=source, statuses=statuses, opened_by=opened_by,
317 317 user_id=user_id)
318 318
319 319 return len(pull_requests)
320 320
321 321 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
322 322 opened_by=None, user_id=None, offset=0,
323 323 length=None, order_by=None, order_dir='desc'):
324 324 """
325 325 Get all pull requests for a specific repository that are awaiting
326 326 review from a specific user.
327 327
328 328 :param repo_name: target or source repo
329 329 :param source: boolean flag to specify if repo_name refers to source
330 330 :param statuses: list of pull request statuses
331 331 :param opened_by: author user of the pull request
332 332 :param user_id: reviewer user of the pull request
333 333 :param offset: pagination offset
334 334 :param length: length of returned list
335 335 :param order_by: order of the returned list
336 336 :param order_dir: 'asc' or 'desc' ordering direction
337 337 :returns: list of pull requests
338 338 """
339 339 pull_requests = self.get_all(
340 340 repo_name, source=source, statuses=statuses, opened_by=opened_by,
341 341 order_by=order_by, order_dir=order_dir)
342 342
343 343 _my = PullRequestModel().get_not_reviewed(user_id)
344 344 my_participation = []
345 345 for pr in pull_requests:
346 346 if pr in _my:
347 347 my_participation.append(pr)
348 348 _filtered_pull_requests = my_participation
349 349 if length:
350 350 return _filtered_pull_requests[offset:offset+length]
351 351 else:
352 352 return _filtered_pull_requests
353 353
354 354 def get_not_reviewed(self, user_id):
355 355 return [
356 356 x.pull_request for x in PullRequestReviewers.query().filter(
357 357 PullRequestReviewers.user_id == user_id).all()
358 358 ]
359 359
    def _prepare_participating_query(self, user_id=None, statuses=None,
                                     order_by=None, order_dir='desc'):
        """
        Build (but do not execute) the query for pull requests the user
        participates in, either as author or as reviewer.

        :param user_id: optional user id; when falsy no user filter is applied
        :param statuses: optional list of pull request statuses
        :param order_by: one of 'name_raw', 'title', 'updated_on_raw',
            'target_repo', or None for no explicit ordering
        :param order_dir: 'asc' or 'desc'
        """
        # NOTE(review): this initial query is discarded and rebuilt when
        # user_id is given -- redundant, but kept as-is
        q = PullRequest.query()
        if user_id:
            reviewers_subquery = Session().query(
                PullRequestReviewers.pull_request_id).filter(
                PullRequestReviewers.user_id == user_id).subquery()
            # author of the PR, or listed in its reviewers
            user_filter = or_(
                PullRequest.user_id == user_id,
                PullRequest.pull_request_id.in_(reviewers_subquery)
            )
            q = PullRequest.query().filter(user_filter)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        if order_by:
            # map external sort keys to model columns
            order_map = {
                'name_raw': PullRequest.pull_request_id,
                'title': PullRequest.title,
                'updated_on_raw': PullRequest.updated_on,
                'target_repo': PullRequest.target_repo_id
            }
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
390 390
391 391 def count_im_participating_in(self, user_id=None, statuses=None):
392 392 q = self._prepare_participating_query(user_id, statuses=statuses)
393 393 return q.count()
394 394
395 395 def get_im_participating_in(
396 396 self, user_id=None, statuses=None, offset=0,
397 397 length=None, order_by=None, order_dir='desc'):
398 398 """
399 399 Get all Pull requests that i'm participating in, or i have opened
400 400 """
401 401
402 402 q = self._prepare_participating_query(
403 403 user_id, statuses=statuses, order_by=order_by,
404 404 order_dir=order_dir)
405 405
406 406 if length:
407 407 pull_requests = q.limit(length).offset(offset).all()
408 408 else:
409 409 pull_requests = q.all()
410 410
411 411 return pull_requests
412 412
    def get_versions(self, pull_request):
        """
        Return all saved versions of the given pull request, ordered by
        version id ascending (oldest first).

        NOTE(review): the original docstring claimed "descending", but the
        query sorts ascending; the code behavior is kept unchanged.
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()
421 421
    def get_pr_version(self, pull_request_id, version=None):
        """
        Resolve a pull request together with an optional version of it.

        :param pull_request_id: id of the main pull request
        :param version: a PullRequestVersion id, the string 'latest', or
            None for the live (unversioned) pull request
        :return: tuple of (original pull request, resolved object,
            display object, at_version marker)
        """
        at_version = None

        if version and version == 'latest':
            # 'latest' resolves to the live pull request itself
            pull_request_ver = PullRequest.get(pull_request_id)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_obj
            at_version = 'latest'
        elif version:
            # a concrete saved version; raises 404 if it doesn't exist
            pull_request_ver = PullRequestVersion.get_or_404(version)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_ver.pull_request
            at_version = pull_request_ver.pull_request_version_id
        else:
            _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
                pull_request_id)

        pull_request_display_obj = PullRequest.get_pr_display_object(
            pull_request_obj, _org_pull_request_obj)

        return _org_pull_request_obj, pull_request_obj, \
            pull_request_display_obj, at_version
444 444
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, title, description=None,
               description_renderer=None,
               reviewer_data=None, translator=None, auth_user=None):
        """
        Create a new pull request together with its reviewer entries.

        :param created_by: user (object/id/username) opening the pull request
        :param source_repo: source repository (object/id/name)
        :param source_ref: source ref string
        :param target_repo: target repository (object/id/name)
        :param target_ref: target ref string
        :param revisions: list of commit ids the pull request consists of
        :param reviewers: iterable of ``(user_id, reasons, mandatory, rules)``
            tuples describing the initial reviewers
        :param title: pull request title
        :param description: optional description text
        :param description_renderer: renderer used for the description
        :param reviewer_data: reviewer rule snapshot stored on the PR
        :param translator: translation function; taken from the current
            request when not given
        :param auth_user: acting user, defaults to ``created_by``'s AuthUser
        :return: the newly created ``PullRequest``
        """
        translator = translator or get_current_request().translate

        created_by_user = self._get_user(created_by)
        auth_user = auth_user or created_by_user.AuthUser()
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.description_renderer = description_renderer
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data

        Session().add(pull_request)
        # flush so the pull request gets its id before reviewer rows are added
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # we check if this particular reviewer is member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # can be that user is member of more but we pick the first same,
                    # same as default reviewers algo
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
            Session().flush()

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early at this point. This has to do with a fact
        # that before queries do some row-locking. And because of that
        # we need to commit and finish transaction before below validate call
        # that for large repos could be long resulting in long row locks
        Session().commit()

        # prepare workspace, and run initial merge simulation
        MergeCheck.validate(
            pull_request, auth_user=auth_user, translator=translator)

        self.notify_reviewers(pull_request, reviewer_ids)
        self._trigger_pull_request_hook(
            pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request
549 549
550 550 def _trigger_pull_request_hook(self, pull_request, user, action):
551 551 pull_request = self.__get_pull_request(pull_request)
552 552 target_scm = pull_request.target_repo.scm_instance()
553 553 if action == 'create':
554 554 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
555 555 elif action == 'merge':
556 556 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
557 557 elif action == 'close':
558 558 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
559 559 elif action == 'review_status_change':
560 560 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
561 561 elif action == 'update':
562 562 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
563 563 else:
564 564 return
565 565
566 566 trigger_hook(
567 567 username=user.username,
568 568 repo_name=pull_request.target_repo.repo_name,
569 569 repo_alias=target_scm.alias,
570 570 pull_request=pull_request)
571 571
572 572 def _get_commit_ids(self, pull_request):
573 573 """
574 574 Return the commit ids of the merged pull request.
575 575
576 576 This method is not dealing correctly yet with the lack of autoupdates
577 577 nor with the implicit target updates.
578 578 For example: if a commit in the source repo is already in the target it
579 579 will be reported anyways.
580 580 """
581 581 merge_rev = pull_request.merge_rev
582 582 if merge_rev is None:
583 583 raise ValueError('This pull request was not merged yet')
584 584
585 585 commit_ids = list(pull_request.revisions)
586 586 if merge_rev not in commit_ids:
587 587 commit_ids.append(merge_rev)
588 588
589 589 return commit_ids
590 590
    def merge_repo(self, pull_request, user, extras):
        """
        Merge the pull request into its target repository.

        On success the merge is recorded via a closing comment and an
        audit log entry; on failure the pull request is left untouched.
        Returns the backend merge response either way.
        """
        log.debug("Merging pull request %s", pull_request.pull_request_id)
        # mark hook calls originating from this server-side merge
        extras['user_agent'] = 'internal-merge'
        merge_state = self._merge_pull_request(pull_request, user, extras)
        if merge_state.executed:
            log.debug(
                "Merge was successful, updating the pull request comments.")
            self._comment_and_close_pr(pull_request, user, merge_state)

            self._log_audit_action(
                'repo.pull_request.merge',
                {'merge_state': merge_state.__dict__},
                user, pull_request)

        else:
            log.warn("Merge failed, not updating the pull request.")
        return merge_state
607 608
    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        """
        Execute the actual VCS merge of the pull request into its target.

        A hooks callback daemon runs for the duration of the merge so
        repository hooks can call back into the application. Returns the
        backend merge response object.
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        # merge commit message rendered from the configured template
        message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)

        # make sure the target ref points at its current commit
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            user_name = user.short_contact
            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state
648 649
    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        """
        Post-merge bookkeeping: record the merge commit on the pull
        request, leave a closing comment, invalidate repo caches and fire
        the 'merge' hook.
        """
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        # NOTE(review): closing_pr=True marks this as the closing comment
        CommentsModel().create(
            text=safe_unicode(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self._trigger_pull_request_hook(pull_request, user, 'merge')
670 671
671 672 def has_valid_update_type(self, pull_request):
672 673 source_ref_type = pull_request.source_ref_parts.type
673 674 return source_ref_type in ['book', 'branch', 'tag']
674 675
    def update_commits(self, pull_request):
        """
        Get the updated list of commits for the pull request
        and return the new pull request version and the list
        of commits processed by this update action.

        Returns an ``UpdateResponse`` describing whether the update ran,
        why it may have been skipped, and the resulting commit changes.
        """
        pull_request = self.__get_pull_request(pull_request)
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_id = pull_request.source_ref_parts.commit_id

        target_ref_type = pull_request.target_ref_parts.type
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_id = pull_request.target_ref_parts.commit_id

        # only movable refs (bookmark/branch/tag) can be updated
        if not self.has_valid_update_type(pull_request):
            log.debug(
                "Skipping update of pull request %s due to ref type: %s",
                pull_request, source_ref_type)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.WRONG_REF_TYPE,
                old=pull_request, new=None, changes=None,
                source_changed=False, target_changed=False)

        # source repo
        source_repo = pull_request.source_repo.scm_instance()
        try:
            source_commit = source_repo.get_commit(commit_id=source_ref_name)
        except CommitDoesNotExistError:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_SOURCE_REF,
                old=pull_request, new=None, changes=None,
                source_changed=False, target_changed=False)

        source_changed = source_ref_id != source_commit.raw_id

        # target repo
        target_repo = pull_request.target_repo.scm_instance()
        try:
            target_commit = target_repo.get_commit(commit_id=target_ref_name)
        except CommitDoesNotExistError:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_TARGET_REF,
                old=pull_request, new=None, changes=None,
                source_changed=False, target_changed=False)
        target_changed = target_ref_id != target_commit.raw_id

        if not (source_changed or target_changed):
            log.debug("Nothing changed in pull request %s", pull_request)
            # NOTE(review): the two kwargs below look swapped; harmless in
            # this branch because both flags are False here, but confirm.
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.NO_CHANGE,
                old=pull_request, new=None, changes=None,
                source_changed=target_changed, target_changed=source_changed)

        change_in_found = 'target repo' if target_changed else 'source repo'
        log.debug('Updating pull request because of change in %s detected',
                  change_in_found)

        # Finally there is a need for an update, in case of source change
        # we create a new version, else just an update
        if source_changed:
            pull_request_version = self._create_version_from_snapshot(pull_request)
            self._link_comments_to_version(pull_request_version)
        else:
            # target-only change: reuse the latest existing version (if any)
            try:
                ver = pull_request.versions[-1]
            except IndexError:
                ver = None

            pull_request.pull_request_version_id = \
                ver.pull_request_version_id if ver else None
            pull_request_version = pull_request

        try:
            if target_ref_type in ('tag', 'branch', 'book'):
                target_commit = target_repo.get_commit(target_ref_name)
            else:
                target_commit = target_repo.get_commit(target_ref_id)
        except CommitDoesNotExistError:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_TARGET_REF,
                old=pull_request, new=None, changes=None,
                source_changed=source_changed, target_changed=target_changed)

        # re-compute commit ids
        old_commit_ids = pull_request.revisions
        pre_load = ["author", "branch", "date", "message"]
        commit_ranges = target_repo.compare(
            target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
            pre_load=pre_load)

        ancestor = target_repo.get_common_ancestor(
            target_commit.raw_id, source_commit.raw_id, source_repo)

        pull_request.source_ref = '%s:%s:%s' % (
            source_ref_type, source_ref_name, source_commit.raw_id)
        pull_request.target_ref = '%s:%s:%s' % (
            target_ref_type, target_ref_name, ancestor)

        pull_request.revisions = [
            commit.raw_id for commit in reversed(commit_ranges)]
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        new_commit_ids = pull_request.revisions

        old_diff_data, new_diff_data = self._generate_update_diffs(
            pull_request, pull_request_version)

        # calculate commit and file changes
        changes = self._calculate_commit_id_changes(
            old_commit_ids, new_commit_ids)
        file_changes = self._calculate_file_changes(
            old_diff_data, new_diff_data)

        # set comments as outdated if DIFFS changed
        CommentsModel().outdate_comments(
            pull_request, old_diff_data=old_diff_data,
            new_diff_data=new_diff_data)

        commit_changes = (changes.added or changes.removed)
        file_node_changes = (
            file_changes.added or file_changes.modified or file_changes.removed)
        pr_has_changes = commit_changes or file_node_changes

        # Add an automatic comment to the pull request, in case
        # anything has changed
        if pr_has_changes:
            update_comment = CommentsModel().create(
                text=self._render_update_message(changes, file_changes),
                repo=pull_request.target_repo,
                user=pull_request.author,
                pull_request=pull_request,
                send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

            # Update status to "Under Review" for added commits
            for commit_id in changes.added:
                ChangesetStatusModel().set_status(
                    repo=pull_request.source_repo,
                    status=ChangesetStatus.STATUS_UNDER_REVIEW,
                    comment=update_comment,
                    user=pull_request.author,
                    pull_request=pull_request,
                    revision=commit_id)

        log.debug(
            'Updated pull request %s, added_ids: %s, common_ids: %s, '
            'removed_ids: %s', pull_request.pull_request_id,
            changes.added, changes.common, changes.removed)
        log.debug(
            'Updated pull request with the following file changes: %s',
            file_changes)

        log.info(
            "Updated pull request %s from commit %s to commit %s, "
            "stored new version %s of this pull request.",
            pull_request.pull_request_id, source_ref_id,
            pull_request.source_ref_parts.commit_id,
            pull_request_version.pull_request_version_id)
        Session().commit()
        self._trigger_pull_request_hook(
            pull_request, pull_request.author, 'update')

        return UpdateResponse(
            executed=True, reason=UpdateFailureReason.NONE,
            old=pull_request, new=pull_request_version, changes=changes,
            source_changed=source_changed, target_changed=target_changed)
846 847
847 848 def _create_version_from_snapshot(self, pull_request):
848 849 version = PullRequestVersion()
849 850 version.title = pull_request.title
850 851 version.description = pull_request.description
851 852 version.status = pull_request.status
852 853 version.created_on = datetime.datetime.now()
853 854 version.updated_on = pull_request.updated_on
854 855 version.user_id = pull_request.user_id
855 856 version.source_repo = pull_request.source_repo
856 857 version.source_ref = pull_request.source_ref
857 858 version.target_repo = pull_request.target_repo
858 859 version.target_ref = pull_request.target_ref
859 860
860 861 version._last_merge_source_rev = pull_request._last_merge_source_rev
861 862 version._last_merge_target_rev = pull_request._last_merge_target_rev
862 863 version.last_merge_status = pull_request.last_merge_status
863 864 version.shadow_merge_ref = pull_request.shadow_merge_ref
864 865 version.merge_rev = pull_request.merge_rev
865 866 version.reviewer_data = pull_request.reviewer_data
866 867
867 868 version.revisions = pull_request.revisions
868 869 version.pull_request = pull_request
869 870 Session().add(version)
870 871 Session().flush()
871 872
872 873 return version
873 874
    def _generate_update_diffs(self, pull_request, pull_request_version):
        """
        Compute the parsed diffs of the previous version and of the
        current pull request state; used to detect file changes and to
        outdate comments after an update.

        :return: tuple of prepared ``DiffProcessor`` instances
            ``(old_diff_data, new_diff_data)``.
        """

        # extra context requested by the comments model, presumably so
        # comment positions can be matched between diffs -- TODO confirm
        diff_context = (
            self.DIFF_CONTEXT +
            CommentsModel.needed_extra_diff_context())

        # diff of the previous (versioned) state
        source_repo = pull_request_version.source_repo
        source_ref_id = pull_request_version.source_ref_parts.commit_id
        target_ref_id = pull_request_version.target_ref_parts.commit_id
        old_diff = self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id, context=diff_context)

        # diff of the current state
        source_repo = pull_request.source_repo
        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id

        new_diff = self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id, context=diff_context)

        old_diff_data = diffs.DiffProcessor(old_diff)
        old_diff_data.prepare()
        new_diff_data = diffs.DiffProcessor(new_diff)
        new_diff_data.prepare()

        return old_diff_data, new_diff_data
899 900
    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        comments = ChangesetComment.query()\
            .filter(
                # TODO: johbo: Should we query for the repo at all here?
                # Pending decision on how comments of PRs are to be related
                # to either the source repo, the target repo or no repo at all.
                ChangesetComment.repo_id == pull_request.target_repo.repo_id,
                ChangesetComment.pull_request == pull_request,
                # `== None` is intentional; SQLAlchemy renders it as IS NULL
                ChangesetComment.pull_request_version == None)\
            .order_by(ChangesetComment.comment_id.asc())

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)
925 926
926 927 def _calculate_commit_id_changes(self, old_ids, new_ids):
927 928 added = [x for x in new_ids if x not in old_ids]
928 929 common = [x for x in new_ids if x in old_ids]
929 930 removed = [x for x in old_ids if x not in new_ids]
930 931 total = new_ids
931 932 return ChangeTuple(added, common, removed, total)
932 933
    def _calculate_file_changes(self, old_diff_data, new_diff_data):
        """
        Compare two prepared diffs and classify files as added, modified
        or removed, based on a hash of each file's raw diff text.

        :return: ``FileChangeTuple(added, modified, removed)`` of filenames
        """

        # filename -> md5 of its raw diff in the old version
        old_files = OrderedDict()
        for diff_data in old_diff_data.parsed_diff:
            old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])

        added_files = []
        modified_files = []
        removed_files = []
        for diff_data in new_diff_data.parsed_diff:
            new_filename = diff_data['filename']
            new_hash = md5_safe(diff_data['raw_diff'])

            old_hash = old_files.get(new_filename)
            if not old_hash:
                # file is not present in old diff, means it's added
                added_files.append(new_filename)
            else:
                if new_hash != old_hash:
                    modified_files.append(new_filename)
                # now remove a file from old, since we have seen it already
                del old_files[new_filename]

        # removed files is when there are present in old, but not in NEW,
        # since we remove old files that are present in new diff, left-overs
        # if any should be the removed files
        removed_files.extend(old_files.keys())

        return FileChangeTuple(added_files, modified_files, removed_files)
962 963
963 964 def _render_update_message(self, changes, file_changes):
964 965 """
965 966 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
966 967 so it's always looking the same disregarding on which default
967 968 renderer system is using.
968 969
969 970 :param changes: changes named tuple
970 971 :param file_changes: file changes named tuple
971 972
972 973 """
973 974 new_status = ChangesetStatus.get_status_lbl(
974 975 ChangesetStatus.STATUS_UNDER_REVIEW)
975 976
976 977 changed_files = (
977 978 file_changes.added + file_changes.modified + file_changes.removed)
978 979
979 980 params = {
980 981 'under_review_label': new_status,
981 982 'added_commits': changes.added,
982 983 'removed_commits': changes.removed,
983 984 'changed_files': changed_files,
984 985 'added_files': file_changes.added,
985 986 'modified_files': file_changes.modified,
986 987 'removed_files': file_changes.removed,
987 988 }
988 989 renderer = RstTemplateRenderer()
989 990 return renderer.render('pull_request_update.mako', **params)
990 991
991 992 def edit(self, pull_request, title, description, description_renderer, user):
992 993 pull_request = self.__get_pull_request(pull_request)
993 994 old_data = pull_request.get_api_data(with_merge_state=False)
994 995 if pull_request.is_closed():
995 996 raise ValueError('This pull request is closed')
996 997 if title:
997 998 pull_request.title = title
998 999 pull_request.description = description
999 1000 pull_request.updated_on = datetime.datetime.now()
1000 1001 pull_request.description_renderer = description_renderer
1001 1002 Session().add(pull_request)
1002 1003 self._log_audit_action(
1003 1004 'repo.pull_request.edit', {'old_data': old_data},
1004 1005 user, pull_request)
1005 1006
    def update_reviewers(self, pull_request, reviewer_data, user):
        """
        Update the reviewers in the pull request

        :param pull_request: the pr to update
        :param reviewer_data: list of tuples
            [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
        :param user: user performing the change, used for audit logging
        :return: tuple of (added user ids, removed user ids)
        :raises ValueError: when the pull request is already closed
        """
        pull_request = self.__get_pull_request(pull_request)
        if pull_request.is_closed():
            raise ValueError('This pull request is closed')

        reviewers = {}
        for user_id, reasons, mandatory, rules in reviewer_data:
            # entries may arrive as ids or usernames; normalize to user_id
            if isinstance(user_id, (int, basestring)):
                user_id = self._get_user(user_id).user_id
            reviewers[user_id] = {
                'reasons': reasons, 'mandatory': mandatory}

        reviewers_ids = set(reviewers.keys())
        current_reviewers = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request ==
                    pull_request).all()
        current_reviewers_ids = set([x.user.user_id for x in current_reviewers])

        ids_to_add = reviewers_ids.difference(current_reviewers_ids)
        ids_to_remove = current_reviewers_ids.difference(reviewers_ids)

        log.debug("Adding %s reviewers", ids_to_add)
        log.debug("Removing %s reviewers", ids_to_remove)
        changed = False
        for uid in ids_to_add:
            changed = True
            _usr = self._get_user(uid)
            reviewer = PullRequestReviewers()
            reviewer.user = _usr
            reviewer.pull_request = pull_request
            reviewer.reasons = reviewers[uid]['reasons']
            # NOTE(marcink): mandatory shouldn't be changed now
            # reviewer.mandatory = reviewers[uid]['reasons']
            Session().add(reviewer)
            self._log_audit_action(
                'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
                user, pull_request)

        for uid in ids_to_remove:
            changed = True
            # NOTE(review): this rebinds the `reviewers` dict from above;
            # safe only because the dict is not used past this point.
            reviewers = PullRequestReviewers.query()\
                .filter(PullRequestReviewers.user_id == uid,
                        PullRequestReviewers.pull_request == pull_request)\
                .all()
            # use .all() in case we accidentally added the same person twice
            # this CAN happen due to the lack of DB checks
            for obj in reviewers:
                old_data = obj.get_dict()
                Session().delete(obj)
                self._log_audit_action(
                    'repo.pull_request.reviewer.delete',
                    {'old_data': old_data}, user, pull_request)

        if changed:
            pull_request.updated_on = datetime.datetime.now()
            Session().add(pull_request)

        self.notify_reviewers(pull_request, ids_to_add)
        return ids_to_add, ids_to_remove
1072 1073
1073 1074 def get_url(self, pull_request, request=None, permalink=False):
1074 1075 if not request:
1075 1076 request = get_current_request()
1076 1077
1077 1078 if permalink:
1078 1079 return request.route_url(
1079 1080 'pull_requests_global',
1080 1081 pull_request_id=pull_request.pull_request_id,)
1081 1082 else:
1082 1083 return request.route_url('pullrequest_show',
1083 1084 repo_name=safe_str(pull_request.target_repo.repo_name),
1084 1085 pull_request_id=pull_request.pull_request_id,)
1085 1086
1086 1087 def get_shadow_clone_url(self, pull_request, request=None):
1087 1088 """
1088 1089 Returns qualified url pointing to the shadow repository. If this pull
1089 1090 request is closed there is no shadow repository and ``None`` will be
1090 1091 returned.
1091 1092 """
1092 1093 if pull_request.is_closed():
1093 1094 return None
1094 1095 else:
1095 1096 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1096 1097 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1097 1098
    def notify_reviewers(self, pull_request, reviewers_ids):
        """
        Create in-app notifications and emails for newly added reviewers.

        :param reviewers_ids: iterable of user ids to notify; a no-op when
            it is empty.
        """
        # notification to reviewers
        if not reviewers_ids:
            return

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = reviewers_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        # template variables for the notification email
        kwargs = {
            'user': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
        }

        # pre-generate the subject for notification itself
        (subject,
         _h, _e,  # we don't care about those
         body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=pull_request.author,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
1156 1157
    def delete(self, pull_request, user):
        """
        Delete a pull request: clean its merge workspace, write an audit
        entry, then remove the database row.
        """
        pull_request = self.__get_pull_request(pull_request)
        old_data = pull_request.get_api_data(with_merge_state=False)
        self._cleanup_merge_workspace(pull_request)
        self._log_audit_action(
            'repo.pull_request.delete', {'old_data': old_data},
            user, pull_request)
        Session().delete(pull_request)
1165 1166
    def close_pull_request(self, pull_request, user):
        """
        Close a pull request without merging it: clean the merge
        workspace, flip the status to closed, fire the 'close' hook and
        write an audit entry.
        """
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        pull_request.status = PullRequest.STATUS_CLOSED
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        self._trigger_pull_request_hook(
            pull_request, pull_request.author, 'close')

        pr_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1178 1179
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):
        """
        Close a pull request leaving a status-changing comment.

        The final status is APPROVED when the calculated review votes add
        up to approval, otherwise REJECTED.

        :return: tuple of (comment, status)
        """

        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        Session().flush()
        events.trigger(events.PullRequestCommentEvent(pull_request, comment))
        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self._trigger_pull_request_hook(
                pull_request, user, 'review_status_change')

        # finally close the PR
        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, user)

        return comment, status
1234 1235
def merge_status(self, pull_request, translator=None,
                 force_shadow_repo_refresh=False):
    """
    Return ``(mergeable, message)`` describing whether `pull_request`
    can be merged server-side.

    :param translator: optional translation callable; resolved from the
        current request when not given
    :param force_shadow_repo_refresh: force a refresh of the shadow
        repository merge simulation
    """
    _ = translator or get_current_request().translate

    # guard clauses: feature disabled / PR closed / incompatible repos
    if not self._is_merge_enabled(pull_request):
        return False, _('Server-side pull request merging is disabled.')

    if pull_request.is_closed():
        return False, _('This pull request is closed.')

    possible, msg = self._check_repo_requirements(
        target=pull_request.target_repo,
        source=pull_request.source_repo,
        translator=_)
    if not possible:
        return possible, msg

    try:
        response = self._try_merge(
            pull_request,
            force_shadow_repo_refresh=force_shadow_repo_refresh)
        log.debug("Merge response: %s", response)
        return response.possible, self.merge_status_message(
            response.failure_reason)
    except NotImplementedError:
        return False, _('Pull request merging is not supported.')
1260 1261
1261 1262 def _check_repo_requirements(self, target, source, translator):
1262 1263 """
1263 1264 Check if `target` and `source` have compatible requirements.
1264 1265
1265 1266 Currently this is just checking for largefiles.
1266 1267 """
1267 1268 _ = translator
1268 1269 target_has_largefiles = self._has_largefiles(target)
1269 1270 source_has_largefiles = self._has_largefiles(source)
1270 1271 merge_possible = True
1271 1272 message = u''
1272 1273
1273 1274 if target_has_largefiles != source_has_largefiles:
1274 1275 merge_possible = False
1275 1276 if source_has_largefiles:
1276 1277 message = _(
1277 1278 'Target repository large files support is disabled.')
1278 1279 else:
1279 1280 message = _(
1280 1281 'Source repository large files support is disabled.')
1281 1282
1282 1283 return merge_possible, message
1283 1284
def _has_largefiles(self, repo):
    """True when the largefiles ui extension is present and active for `repo`."""
    ui_entries = VcsSettingsModel(repo=repo).get_ui_settings(
        'extensions', 'largefiles')
    return ui_entries and ui_entries[0].active
1288 1289
def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
    """
    Try to merge the pull request and return the merge status.

    Performs a dry-run merge (via the shadow repository) only when the
    cached merge state is stale or a refresh is forced; otherwise the
    last stored merge status is returned.

    :param force_shadow_repo_refresh: always re-run the merge simulation
    :returns: a ``MergeResponse``
    """
    log.debug(
        "Trying out if the pull request %s can be merged. Force_refresh=%s",
        pull_request.pull_request_id, force_shadow_repo_refresh)
    target_vcs = pull_request.target_repo.scm_instance()

    # Refresh the target reference.
    try:
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)
    except CommitDoesNotExistError:
        # target ref vanished (e.g. branch deleted) -> cannot merge
        merge_state = MergeResponse(
            False, False, None, MergeFailureReason.MISSING_TARGET_REF)
        return merge_state

    target_locked = pull_request.target_repo.locked
    if target_locked and target_locked[0]:
        log.debug("The target repository is locked.")
        merge_state = MergeResponse(
            False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
    elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
            pull_request, target_ref):
        log.debug("Refreshing the merge status of the repository.")
        merge_state = self._refresh_merge_state(
            pull_request, target_vcs, target_ref)
    else:
        # cached state is still valid; rebuild the response from it
        possible = pull_request.\
            last_merge_status == MergeFailureReason.NONE
        merge_state = MergeResponse(
            possible, False, None, pull_request.last_merge_status)

    return merge_state
1324 1325
def _refresh_reference(self, reference, vcs_repository):
    """
    Return a copy of `reference` updated to the commit currently pointed
    at in `vcs_repository` (branches/bookmarks may have moved).
    """
    if reference.type in ('branch', 'book'):
        lookup = reference.name
    else:
        lookup = reference.commit_id
    current_commit = vcs_repository.get_commit(lookup)
    return Reference(reference.type, reference.name, current_commit.raw_id)
1334 1335
1335 1336 def _needs_merge_state_refresh(self, pull_request, target_reference):
1336 1337 return not(
1337 1338 pull_request.revisions and
1338 1339 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1339 1340 target_reference.commit_id == pull_request._last_merge_target_rev)
1340 1341
def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
    """
    Run a dry-run merge in the shadow workspace and persist the result
    on the pull request (last merge revs, status and shadow merge ref).

    :returns: the ``MergeResponse`` from the dry-run merge
    """
    workspace_id = self._workspace_id(pull_request)
    source_vcs = pull_request.source_repo.scm_instance()
    repo_id = pull_request.target_repo.repo_id
    use_rebase = self._use_rebase_for_merging(pull_request)
    close_branch = self._close_branch_before_merging(pull_request)
    merge_state = target_vcs.merge(
        repo_id, workspace_id,
        target_reference, source_vcs, pull_request.source_ref_parts,
        dry_run=True, use_rebase=use_rebase,
        close_branch=close_branch)

    # Do not store the response if there was an unknown error.
    if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
        pull_request._last_merge_source_rev = \
            pull_request.source_ref_parts.commit_id
        pull_request._last_merge_target_rev = target_reference.commit_id
        pull_request.last_merge_status = merge_state.failure_reason
        pull_request.shadow_merge_ref = merge_state.merge_ref
        # commit immediately so the cached state survives this request
        Session().add(pull_request)
        Session().commit()

    return merge_state
1364 1365
1365 1366 def _workspace_id(self, pull_request):
1366 1367 workspace_id = 'pr-%s' % pull_request.pull_request_id
1367 1368 return workspace_id
1368 1369
def merge_status_message(self, status_code):
    """
    Return a human friendly error message for the given merge status code.
    """
    messages = self.MERGE_STATUS_MESSAGES
    return messages[status_code]
1374 1375
def generate_repo_data(self, repo, commit_id=None, branch=None,
                       bookmark=None, translator=None):
    """
    Build a dict describing `repo` (owner, link, description and its
    selectable refs) for the pull-request source/target selectors.
    """
    from rhodecode.model.repo import RepoModel

    all_refs, selected_ref = \
        self._get_repo_pullrequest_sources(
            repo.scm_instance(), commit_id=commit_id,
            branch=branch, bookmark=bookmark, translator=translator)

    # reshape (refs, group-name) pairs into the select2 widget format
    refs_select2 = [
        {'text': group_name,
         'children': [{'id': ref[0], 'text': ref[1]} for ref in group_refs]}
        for group_refs, group_name in all_refs]

    owner = repo.user
    return {
        'user': {
            'user_id': owner.user_id,
            'username': owner.username,
            'firstname': owner.first_name,
            'lastname': owner.last_name,
            'gravatar_link': h.gravatar_url(owner.email, 14),
        },
        'name': repo.repo_name,
        'link': RepoModel().get_url(repo),
        'description': h.chop_at_smart(repo.description_safe, '\n'),
        'refs': {
            'all_refs': all_refs,
            'selected_ref': selected_ref,
            'select2_refs': refs_select2
        }
    }
1406 1407
def generate_pullrequest_title(self, source, source_ref, target):
    """Return the default title for a new pull request."""
    return u'%s#%s to %s' % (source, source_ref, target)
1413 1414
1414 1415 def _cleanup_merge_workspace(self, pull_request):
1415 1416 # Merging related cleanup
1416 1417 repo_id = pull_request.target_repo.repo_id
1417 1418 target_scm = pull_request.target_repo.scm_instance()
1418 1419 workspace_id = self._workspace_id(pull_request)
1419 1420
1420 1421 try:
1421 1422 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1422 1423 except NotImplementedError:
1423 1424 pass
1424 1425
def _get_repo_pullrequest_sources(
        self, repo, commit_id=None, branch=None, bookmark=None,
        translator=None):
    """
    Return a structure with repo's interesting commits, suitable for
    the selectors in pullrequest controller

    :param commit_id: a commit that must be in the list somehow
        and selected by default
    :param branch: a branch that must be in the list and selected
        by default - even if closed
    :param bookmark: a bookmark that must be in the list and selected
    :returns: tuple of ``(groups, selected)`` where groups is a list of
        ``(group_refs, group_name)`` pairs and selected is the ref key
        (``type:name:id``) picked by default, or None
    :raises CommitDoesNotExistError: when an explicit ref was requested
        but no matching ref exists
    :raises EmptyRepositoryError: when the repository has no commits
    """
    _ = translator or get_current_request().translate

    commit_id = safe_str(commit_id) if commit_id else None
    branch = safe_str(branch) if branch else None
    bookmark = safe_str(bookmark) if bookmark else None

    selected = None

    # order matters: first source that has commit_id in it will be selected
    sources = []
    sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
    sources.append(('branch', repo.branches.items(), _('Branches'), branch))

    if commit_id:
        ref_commit = (h.short_id(commit_id), commit_id)
        sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

    sources.append(
        ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
    )

    groups = []
    for group_key, ref_list, group_name, match in sources:
        group_refs = []
        for ref_name, ref_id in ref_list:
            # ref key format: "type:name:commit_id"
            ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
            group_refs.append((ref_key, ref_name))

            if not selected:
                # a ref wins when the requested commit_id or the group's
                # match value equals either its id or its name
                if set([commit_id, match]) & set([ref_id, ref_name]):
                    selected = ref_key

        if group_refs:
            groups.append((group_refs, group_name))

    if not selected:
        ref = commit_id or branch or bookmark
        if ref:
            # an explicit ref was asked for but never matched above
            raise CommitDoesNotExistError(
                'No commit refs could be found matching: %s' % ref)
        elif repo.DEFAULT_BRANCH_NAME in repo.branches:
            # fall back to the default branch when nothing was requested
            selected = 'branch:%s:%s' % (
                repo.DEFAULT_BRANCH_NAME,
                repo.branches[repo.DEFAULT_BRANCH_NAME]
            )
        elif repo.commit_ids:
            # make the user select in this case
            selected = None
        else:
            raise EmptyRepositoryError()
    return groups, selected
1489 1490
def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
    """Return the diff between two refs of `source_repo`."""
    compute_diff = self._get_diff_from_pr_or_version
    return compute_diff(
        source_repo, source_ref_id, target_ref_id, context=context)
1493 1494
def _get_diff_from_pr_or_version(
        self, source_repo, source_ref_id, target_ref_id, context):
    """
    Compute the vcs diff between `target_ref_id` and `source_ref_id`
    inside `source_repo` (db Repository or vcs backend instance).
    """
    target_commit = source_repo.get_commit(
        commit_id=safe_str(target_ref_id))
    source_commit = source_repo.get_commit(
        commit_id=safe_str(source_ref_id))

    # accept either a db Repository or an already-resolved vcs backend
    if isinstance(source_repo, Repository):
        vcs_repo = source_repo.scm_instance()
    else:
        vcs_repo = source_repo

    # TODO: johbo: In the context of an update, we cannot reach
    # the old commit anymore with our normal mechanisms. It needs
    # some sort of special support in the vcs layer to avoid this
    # workaround.
    is_empty_git = (
        vcs_repo.alias == 'git' and
        source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID)
    if is_empty_git:
        source_commit.raw_id = safe_str(source_ref_id)

    log.debug('calculating diff between '
              'source_ref:%s and target_ref:%s for repo `%s`',
              target_ref_id, source_ref_id,
              safe_unicode(vcs_repo.path))

    return vcs_repo.get_diff(
        commit1=target_commit, commit2=source_commit, context=context)
1521 1522
1522 1523 def _is_merge_enabled(self, pull_request):
1523 1524 return self._get_general_setting(
1524 1525 pull_request, 'rhodecode_pr_merge_enabled')
1525 1526
1526 1527 def _use_rebase_for_merging(self, pull_request):
1527 1528 repo_type = pull_request.target_repo.repo_type
1528 1529 if repo_type == 'hg':
1529 1530 return self._get_general_setting(
1530 1531 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1531 1532 elif repo_type == 'git':
1532 1533 return self._get_general_setting(
1533 1534 pull_request, 'rhodecode_git_use_rebase_for_merging')
1534 1535
1535 1536 return False
1536 1537
1537 1538 def _close_branch_before_merging(self, pull_request):
1538 1539 repo_type = pull_request.target_repo.repo_type
1539 1540 if repo_type == 'hg':
1540 1541 return self._get_general_setting(
1541 1542 pull_request, 'rhodecode_hg_close_branch_before_merging')
1542 1543 elif repo_type == 'git':
1543 1544 return self._get_general_setting(
1544 1545 pull_request, 'rhodecode_git_close_branch_before_merging')
1545 1546
1546 1547 return False
1547 1548
def _get_general_setting(self, pull_request, settings_key, default=False):
    """Read one general VCS setting for the PR's target repository."""
    model = VcsSettingsModel(repo=pull_request.target_repo)
    return model.get_general_settings().get(settings_key, default)
1552 1553
def _log_audit_action(self, action, action_data, user, pull_request):
    """Store an audit-log entry against the PR's target repository."""
    audit_logger.store(
        action=action,
        action_data=action_data,
        user=user,
        repo=pull_request.target_repo,
    )
1559 1560
def get_reviewer_functions(self):
    """
    Fetches functions for validation and fetching default reviewers.
    If available we use the EE package, else we fallback to CE
    package functions
    """
    try:
        # prefer the EE implementation when installed
        from rc_reviewers.utils import get_default_reviewers_data, \
            validate_default_reviewers
    except ImportError:
        from rhodecode.apps.repository.utils import get_default_reviewers_data
        from rhodecode.apps.repository.utils import validate_default_reviewers

    return get_default_reviewers_data, validate_default_reviewers
1576 1577
1577 1578
class MergeCheck(object):
    """
    Perform Merge Checks and returns a check object which stores information
    about merge errors, and merge conditions
    """
    # keys used in `error_details` for the individual checks
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'

    def __init__(self):
        # review status of the PR; filled in by `validate`
        self.review_status = None
        # result of the simulated merge; filled in by `validate`
        self.merge_possible = None
        self.merge_msg = ''
        # True once any check failed
        self.failed = None
        # list of [error_type, message] pairs, in check order
        self.errors = []
        # per-check details, keyed by the *_CHECK constants above
        self.error_details = OrderedDict()

    def push_error(self, error_type, message, error_key, details):
        """Record a failed check on this instance."""
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, auth_user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
        """
        Run all merge checks for `pull_request` as `auth_user`.

        Checks, in order: merge permission, target-branch rules, review
        status, unresolved TODOs and the simulated merge itself.

        :param fail_early: return right after the first failed check
        :param force_shadow_repo_refresh: force refresh of the shadow repo
            merge simulation
        :returns: a populated `MergeCheck` instance
        """
        _ = translator
        merge_check = cls()

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(
            pull_request, auth_user)
        if not user_allowed_to_merge:
            # BUGFIX: this previously logged "approval is pending" — a
            # copy/paste of the review-status branch below.
            log.debug("MergeCheck: cannot merge, no merge permission.")

            msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # permission to merge into the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial we can always figure out the branch from the commit
            # in case of bookmark
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm and branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error(
                'warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, {} "
                      "unresolved todos left.".format(len(todos)))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, here is the filesystem simulation + shadow repo
        merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)
        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        if not merge_status:
            log.debug(
                "MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        """
        Return informational merge conditions (merge strategy and whether
        the source branch will be closed/deleted), keyed for the UI.
        """
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            # BUGFIX: close_msg was unbound (NameError) for repo types
            # other than hg/git; default to the hg wording.
            close_msg = _('Source branch will be closed after merge.')
            if repo_type == 'git':
                close_msg = _('Source branch will be deleted after merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details
1725 1726
# Commit-level change summary between two versions of a pull request.
ChangeTuple = collections.namedtuple(
    'ChangeTuple', 'added common removed total')

# File-level change summary between two versions of a pull request.
FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', 'added modified removed')
@@ -1,833 +1,834 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Scm model for RhodeCode
23 23 """
24 24
25 25 import os.path
26 26 import traceback
27 27 import logging
28 28 import cStringIO
29 29
30 30 from sqlalchemy import func
31 31 from zope.cachedescriptors.property import Lazy as LazyProperty
32 32
33 33 import rhodecode
34 34 from rhodecode.lib.vcs import get_backend
35 35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
36 36 from rhodecode.lib.vcs.nodes import FileNode
37 37 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 38 from rhodecode.lib import helpers as h, rc_cache
39 39 from rhodecode.lib.auth import (
40 40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
41 41 HasUserGroupPermissionAny)
42 42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
43 43 from rhodecode.lib import hooks_utils
44 44 from rhodecode.lib.utils import (
45 45 get_filesystem_repos, make_db_config)
46 46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
47 47 from rhodecode.lib.system_info import get_system_info
48 48 from rhodecode.model import BaseModel
49 49 from rhodecode.model.db import (
50 50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 51 PullRequest)
52 52 from rhodecode.model.settings import VcsSettingsModel
53 53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
class UserTemp(object):
    """Minimal stand-in object carrying only a user id."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)
64 64
65 65
class RepoTemp(object):
    """Minimal stand-in object carrying only a repository id."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)
72 72
73 73
class SimpleCachedRepoList(object):
    """
    Iterate repositories without instantiating their scm backends.

    Yields one small dict per repository the permission set allows;
    ordering metadata is kept for callers.
    """

    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading '-' in order_by means descending order
        self.reversed = (order_by or '').startswith('-')
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        for db_repo in self.db_repo_list:
            # permission check happens at iteration time, per repo
            allowed = HasRepoPermissionAny(*self.perm_set)(
                db_repo.repo_name, 'SimpleCachedRepoList check')
            if not allowed:
                continue

            yield {
                'name': db_repo.repo_name,
                'dbrepo': db_repo.get_dict(),
                'dbrepo_fork': db_repo.fork.get_dict() if db_repo.fork else {},
            }
109 109
110 110
111 111 class _PermCheckIterator(object):
112 112
113 113 def __init__(
114 114 self, obj_list, obj_attr, perm_set, perm_checker,
115 115 extra_kwargs=None):
116 116 """
117 117 Creates iterator from given list of objects, additionally
118 118 checking permission for them from perm_set var
119 119
120 120 :param obj_list: list of db objects
121 121 :param obj_attr: attribute of object to pass into perm_checker
122 122 :param perm_set: list of permissions to check
123 123 :param perm_checker: callable to check permissions against
124 124 """
125 125 self.obj_list = obj_list
126 126 self.obj_attr = obj_attr
127 127 self.perm_set = perm_set
128 128 self.perm_checker = perm_checker
129 129 self.extra_kwargs = extra_kwargs or {}
130 130
131 131 def __len__(self):
132 132 return len(self.obj_list)
133 133
134 134 def __repr__(self):
135 135 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
136 136
137 137 def __iter__(self):
138 138 checker = self.perm_checker(*self.perm_set)
139 139 for db_obj in self.obj_list:
140 140 # check permission at this level
141 141 name = getattr(db_obj, self.obj_attr, None)
142 142 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
143 143 continue
144 144
145 145 yield db_obj
146 146
147 147
class RepoList(_PermCheckIterator):
    """Permission-filtered iterator over repository rows."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        super(RepoList, self).__init__(
            obj_list=db_repo_list,
            obj_attr='repo_name',
            perm_set=perm_set or [
                'repository.read', 'repository.write', 'repository.admin'],
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
160 160
161 161
class RepoGroupList(_PermCheckIterator):
    """Permission-filtered iterator over repository-group rows."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            obj_attr='group_name',
            perm_set=perm_set or ['group.read', 'group.write', 'group.admin'],
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
173 173
174 174
class UserGroupList(_PermCheckIterator):
    """Permission-filtered iterator over user-group rows."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name',
            perm_set=perm_set or [
                'usergroup.read', 'usergroup.write', 'usergroup.admin'],
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
186 186
187 187
188 188 class ScmModel(BaseModel):
189 189 """
190 190 Generic Scm Model
191 191 """
192 192
@LazyProperty
def repos_path(self):
    """Repositories root path, as configured in the database."""
    return VcsSettingsModel(sa=self.sa).get_repos_location()
201 201
def repo_scan(self, repos_path=None):
    """
    Listing of repositories in given path. This path should not be a
    repository itself. Return a dictionary of repository objects

    :param repos_path: path to directory containing repositories
    """
    if repos_path is None:
        repos_path = self.repos_path

    log.info('scanning for repositories in %s', repos_path)

    config = make_db_config()
    config.set('extensions', 'largefiles', '')
    found = {}

    for name, path in get_filesystem_repos(repos_path, recursive=True):
        # name need to be decomposed and put back together using the /
        # since this is internal storage separator for rhodecode
        name = Repository.normalize_repo_name(name)

        try:
            if name in found:
                raise RepositoryError('Duplicate repository name %s '
                                      'found in %s' % (name, path))
            if path[0] in rhodecode.BACKENDS:
                backend = get_backend(path[0])
                found[name] = backend(path[1], config=config)
        except OSError:
            # unreadable repo directory -> skip it
            continue
    log.debug('found %s paths with repositories', len(found))
    return found
235 235
def get_repos(self, all_repos=None, sort_key=None):
    """
    Return a `SimpleCachedRepoList` over `all_repos`, or over all
    top-level repositories from the database when not given.

    :param all_repos: optional pre-filtered list of repository rows
    :param sort_key: initial sorting of repositories
    """
    if all_repos is None:
        all_repos = self.sa.query(Repository)\
            .filter(Repository.group_id == None)\
            .order_by(func.lower(Repository.repo_name)).all()
    return SimpleCachedRepoList(
        all_repos, repos_path=self.repos_path, order_by=sort_key)
253 253
def get_repo_groups(self, all_groups=None):
    """Return top-level repo groups the default permission set allows."""
    if all_groups is None:
        all_groups = RepoGroup.query()\
            .filter(RepoGroup.group_parent_id == None).all()
    return list(RepoGroupList(all_groups))
259 259
def mark_for_invalidation(self, repo_name, delete=False):
    """
    Mark caches of this repo invalid in the database. `delete` flag
    removes the cache entries

    :param repo_name: the repo_name for which caches should be marked
        invalid, or deleted
    :param delete: delete the entry keys instead of setting bool
        flag on them, and also purge caches used by the dogpile
    """
    repo = Repository.get_by_repo_name(repo_name)

    # silently no-op when the repository does not exist
    if repo:
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=repo.repo_id)
        CacheKey.set_invalidate(invalidation_namespace, delete=delete)

        repo_id = repo.repo_id
        config = repo._config
        # NOTE(review): empty value presumably disables the largefiles
        # extension while refreshing the commit cache — confirm
        config.set('extensions', 'largefiles', '')
        repo.update_commit_cache(config=config, cs_cache=None)
        if delete:
            # also purge the dogpile cache region for this repository
            cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
            rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
284 284
def toggle_following_repo(self, follow_repo_id, user_id):
    """Follow `follow_repo_id` for `user_id`, or unfollow when already followed."""

    following = self.sa.query(UserFollowing)\
        .filter(UserFollowing.follows_repo_id == follow_repo_id)\
        .filter(UserFollowing.user_id == user_id).scalar()

    if following is not None:
        # already following -> remove the link
        try:
            self.sa.delete(following)
        except Exception:
            log.error(traceback.format_exc())
            raise
        return

    try:
        following = UserFollowing()
        following.user_id = user_id
        following.follows_repo_id = follow_repo_id
        self.sa.add(following)
    except Exception:
        log.error(traceback.format_exc())
        raise
307 307
def toggle_following_user(self, follow_user_id, user_id):
    """Follow `follow_user_id` for `user_id`, or unfollow when already followed."""
    following = self.sa.query(UserFollowing)\
        .filter(UserFollowing.follows_user_id == follow_user_id)\
        .filter(UserFollowing.user_id == user_id).scalar()

    if following is not None:
        # already following -> remove the link
        try:
            self.sa.delete(following)
        except Exception:
            log.error(traceback.format_exc())
            raise
        return

    try:
        following = UserFollowing()
        following.user_id = user_id
        following.follows_user_id = follow_user_id
        self.sa.add(following)
    except Exception:
        log.error(traceback.format_exc())
        raise
329 329
330 330 def is_following_repo(self, repo_name, user_id, cache=False):
331 331 r = self.sa.query(Repository)\
332 332 .filter(Repository.repo_name == repo_name).scalar()
333 333
334 334 f = self.sa.query(UserFollowing)\
335 335 .filter(UserFollowing.follows_repository == r)\
336 336 .filter(UserFollowing.user_id == user_id).scalar()
337 337
338 338 return f is not None
339 339
340 340 def is_following_user(self, username, user_id, cache=False):
341 341 u = User.get_by_username(username)
342 342
343 343 f = self.sa.query(UserFollowing)\
344 344 .filter(UserFollowing.follows_user == u)\
345 345 .filter(UserFollowing.user_id == user_id).scalar()
346 346
347 347 return f is not None
348 348
349 349 def get_followers(self, repo):
350 350 repo = self._get_repo(repo)
351 351
352 352 return self.sa.query(UserFollowing)\
353 353 .filter(UserFollowing.follows_repository == repo).count()
354 354
355 355 def get_forks(self, repo):
356 356 repo = self._get_repo(repo)
357 357 return self.sa.query(Repository)\
358 358 .filter(Repository.fork == repo).count()
359 359
360 360 def get_pull_requests(self, repo):
361 361 repo = self._get_repo(repo)
362 362 return self.sa.query(PullRequest)\
363 363 .filter(PullRequest.target_repo == repo)\
364 364 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
365 365
366 366 def mark_as_fork(self, repo, fork, user):
367 367 repo = self._get_repo(repo)
368 368 fork = self._get_repo(fork)
369 369 if fork and repo.repo_id == fork.repo_id:
370 370 raise Exception("Cannot set repository as fork of itself")
371 371
372 372 if fork and repo.repo_type != fork.repo_type:
373 373 raise RepositoryError(
374 374 "Cannot set repository as fork of repository with other type")
375 375
376 376 repo.fork = fork
377 377 self.sa.add(repo)
378 378 return repo
379 379
380 380 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
381 381 dbrepo = self._get_repo(repo)
382 382 remote_uri = remote_uri or dbrepo.clone_uri
383 383 if not remote_uri:
384 384 raise Exception("This repository doesn't have a clone uri")
385 385
386 386 repo = dbrepo.scm_instance(cache=False)
387 387 repo.config.clear_section('hooks')
388 388
389 389 try:
390 390 # NOTE(marcink): add extra validation so we skip invalid urls
391 391 # this is due this tasks can be executed via scheduler without
392 392 # proper validation of remote_uri
393 393 if validate_uri:
394 394 config = make_db_config(clear_session=False)
395 395 url_validator(remote_uri, dbrepo.repo_type, config)
396 396 except InvalidCloneUrl:
397 397 raise
398 398
399 399 repo_name = dbrepo.repo_name
400 400 try:
401 401 # TODO: we need to make sure those operations call proper hooks !
402 402 repo.fetch(remote_uri)
403 403
404 404 self.mark_for_invalidation(repo_name)
405 405 except Exception:
406 406 log.error(traceback.format_exc())
407 407 raise
408 408
409 409 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
410 410 dbrepo = self._get_repo(repo)
411 411 remote_uri = remote_uri or dbrepo.push_uri
412 412 if not remote_uri:
413 413 raise Exception("This repository doesn't have a clone uri")
414 414
415 415 repo = dbrepo.scm_instance(cache=False)
416 416 repo.config.clear_section('hooks')
417 417
418 418 try:
419 419 # NOTE(marcink): add extra validation so we skip invalid urls
420 420 # this is due this tasks can be executed via scheduler without
421 421 # proper validation of remote_uri
422 422 if validate_uri:
423 423 config = make_db_config(clear_session=False)
424 424 url_validator(remote_uri, dbrepo.repo_type, config)
425 425 except InvalidCloneUrl:
426 426 raise
427 427
428 428 try:
429 429 repo.push(remote_uri)
430 430 except Exception:
431 431 log.error(traceback.format_exc())
432 432 raise
433 433
434 434 def commit_change(self, repo, repo_name, commit, user, author, message,
435 435 content, f_path):
436 436 """
437 437 Commits changes
438 438
439 439 :param repo: SCM instance
440 440
441 441 """
442 442 user = self._get_user(user)
443 443
444 444 # decoding here will force that we have proper encoded values
445 445 # in any other case this will throw exceptions and deny commit
446 446 content = safe_str(content)
447 447 path = safe_str(f_path)
448 448 # message and author needs to be unicode
449 449 # proper backend should then translate that into required type
450 450 message = safe_unicode(message)
451 451 author = safe_unicode(author)
452 452 imc = repo.in_memory_commit
453 453 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
454 454 try:
455 455 # TODO: handle pre-push action !
456 456 tip = imc.commit(
457 457 message=message, author=author, parents=[commit],
458 458 branch=commit.branch)
459 459 except Exception as e:
460 460 log.error(traceback.format_exc())
461 461 raise IMCCommitError(str(e))
462 462 finally:
463 463 # always clear caches, if commit fails we want fresh object also
464 464 self.mark_for_invalidation(repo_name)
465 465
466 466 # We trigger the post-push action
467 467 hooks_utils.trigger_post_push_hook(
468 username=user.username, action='push_local', repo_name=repo_name,
469 repo_alias=repo.alias, commit_ids=[tip.raw_id])
468 username=user.username, action='push_local', hook_type='post_push',
469 repo_name=repo_name, repo_alias=repo.alias, commit_ids=[tip.raw_id])
470 470 return tip
471 471
472 472 def _sanitize_path(self, f_path):
473 473 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
474 474 raise NonRelativePathError('%s is not an relative path' % f_path)
475 475 if f_path:
476 476 f_path = os.path.normpath(f_path)
477 477 return f_path
478 478
479 479 def get_dirnode_metadata(self, request, commit, dir_node):
480 480 if not dir_node.is_dir():
481 481 return []
482 482
483 483 data = []
484 484 for node in dir_node:
485 485 if not node.is_file():
486 486 # we skip file-nodes
487 487 continue
488 488
489 489 last_commit = node.last_commit
490 490 last_commit_date = last_commit.date
491 491 data.append({
492 492 'name': node.name,
493 493 'size': h.format_byte_size_binary(node.size),
494 494 'modified_at': h.format_date(last_commit_date),
495 495 'modified_ts': last_commit_date.isoformat(),
496 496 'revision': last_commit.revision,
497 497 'short_id': last_commit.short_id,
498 498 'message': h.escape(last_commit.message),
499 499 'author': h.escape(last_commit.author),
500 500 'user_profile': h.gravatar_with_user(
501 501 request, last_commit.author),
502 502 })
503 503
504 504 return data
505 505
506 506 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
507 507 extended_info=False, content=False, max_file_bytes=None):
508 508 """
509 509 recursive walk in root dir and return a set of all path in that dir
510 510 based on repository walk function
511 511
512 512 :param repo_name: name of repository
513 513 :param commit_id: commit id for which to list nodes
514 514 :param root_path: root path to list
515 515 :param flat: return as a list, if False returns a dict with description
516 516 :param max_file_bytes: will not return file contents over this limit
517 517
518 518 """
519 519 _files = list()
520 520 _dirs = list()
521 521 try:
522 522 _repo = self._get_repo(repo_name)
523 523 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
524 524 root_path = root_path.lstrip('/')
525 525 for __, dirs, files in commit.walk(root_path):
526 526 for f in files:
527 527 _content = None
528 528 _data = f.unicode_path
529 529 over_size_limit = (max_file_bytes is not None
530 530 and f.size > max_file_bytes)
531 531
532 532 if not flat:
533 533 _data = {
534 534 "name": h.escape(f.unicode_path),
535 535 "type": "file",
536 536 }
537 537 if extended_info:
538 538 _data.update({
539 539 "md5": f.md5,
540 540 "binary": f.is_binary,
541 541 "size": f.size,
542 542 "extension": f.extension,
543 543 "mimetype": f.mimetype,
544 544 "lines": f.lines()[0]
545 545 })
546 546
547 547 if content:
548 548 full_content = None
549 549 if not f.is_binary and not over_size_limit:
550 550 full_content = safe_str(f.content)
551 551
552 552 _data.update({
553 553 "content": full_content,
554 554 })
555 555 _files.append(_data)
556 556 for d in dirs:
557 557 _data = d.unicode_path
558 558 if not flat:
559 559 _data = {
560 560 "name": h.escape(d.unicode_path),
561 561 "type": "dir",
562 562 }
563 563 if extended_info:
564 564 _data.update({
565 565 "md5": None,
566 566 "binary": None,
567 567 "size": None,
568 568 "extension": None,
569 569 })
570 570 if content:
571 571 _data.update({
572 572 "content": None
573 573 })
574 574 _dirs.append(_data)
575 575 except RepositoryError:
576 576 log.debug("Exception in get_nodes", exc_info=True)
577 577 raise
578 578
579 579 return _dirs, _files
580 580
581 581 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
582 582 author=None, trigger_push_hook=True):
583 583 """
584 584 Commits given multiple nodes into repo
585 585
586 586 :param user: RhodeCode User object or user_id, the commiter
587 587 :param repo: RhodeCode Repository object
588 588 :param message: commit message
589 589 :param nodes: mapping {filename:{'content':content},...}
590 590 :param parent_commit: parent commit, can be empty than it's
591 591 initial commit
592 592 :param author: author of commit, cna be different that commiter
593 593 only for git
594 594 :param trigger_push_hook: trigger push hooks
595 595
596 596 :returns: new commited commit
597 597 """
598 598
599 599 user = self._get_user(user)
600 600 scm_instance = repo.scm_instance(cache=False)
601 601
602 602 processed_nodes = []
603 603 for f_path in nodes:
604 604 f_path = self._sanitize_path(f_path)
605 605 content = nodes[f_path]['content']
606 606 f_path = safe_str(f_path)
607 607 # decoding here will force that we have proper encoded values
608 608 # in any other case this will throw exceptions and deny commit
609 609 if isinstance(content, (basestring,)):
610 610 content = safe_str(content)
611 611 elif isinstance(content, (file, cStringIO.OutputType,)):
612 612 content = content.read()
613 613 else:
614 614 raise Exception('Content is of unrecognized type %s' % (
615 615 type(content)
616 616 ))
617 617 processed_nodes.append((f_path, content))
618 618
619 619 message = safe_unicode(message)
620 620 commiter = user.full_contact
621 621 author = safe_unicode(author) if author else commiter
622 622
623 623 imc = scm_instance.in_memory_commit
624 624
625 625 if not parent_commit:
626 626 parent_commit = EmptyCommit(alias=scm_instance.alias)
627 627
628 628 if isinstance(parent_commit, EmptyCommit):
629 629 # EmptyCommit means we we're editing empty repository
630 630 parents = None
631 631 else:
632 632 parents = [parent_commit]
633 633 # add multiple nodes
634 634 for path, content in processed_nodes:
635 635 imc.add(FileNode(path, content=content))
636 636 # TODO: handle pre push scenario
637 637 tip = imc.commit(message=message,
638 638 author=author,
639 639 parents=parents,
640 640 branch=parent_commit.branch)
641 641
642 642 self.mark_for_invalidation(repo.repo_name)
643 643 if trigger_push_hook:
644 644 hooks_utils.trigger_post_push_hook(
645 645 username=user.username, action='push_local',
646 646 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
647 hook_type='post_push',
647 648 commit_ids=[tip.raw_id])
648 649 return tip
649 650
650 651 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
651 652 author=None, trigger_push_hook=True):
652 653 user = self._get_user(user)
653 654 scm_instance = repo.scm_instance(cache=False)
654 655
655 656 message = safe_unicode(message)
656 657 commiter = user.full_contact
657 658 author = safe_unicode(author) if author else commiter
658 659
659 660 imc = scm_instance.in_memory_commit
660 661
661 662 if not parent_commit:
662 663 parent_commit = EmptyCommit(alias=scm_instance.alias)
663 664
664 665 if isinstance(parent_commit, EmptyCommit):
665 666 # EmptyCommit means we we're editing empty repository
666 667 parents = None
667 668 else:
668 669 parents = [parent_commit]
669 670
670 671 # add multiple nodes
671 672 for _filename, data in nodes.items():
672 673 # new filename, can be renamed from the old one, also sanitaze
673 674 # the path for any hack around relative paths like ../../ etc.
674 675 filename = self._sanitize_path(data['filename'])
675 676 old_filename = self._sanitize_path(_filename)
676 677 content = data['content']
677 678
678 679 filenode = FileNode(old_filename, content=content)
679 680 op = data['op']
680 681 if op == 'add':
681 682 imc.add(filenode)
682 683 elif op == 'del':
683 684 imc.remove(filenode)
684 685 elif op == 'mod':
685 686 if filename != old_filename:
686 687 # TODO: handle renames more efficient, needs vcs lib
687 688 # changes
688 689 imc.remove(filenode)
689 690 imc.add(FileNode(filename, content=content))
690 691 else:
691 692 imc.change(filenode)
692 693
693 694 try:
694 695 # TODO: handle pre push scenario
695 696 # commit changes
696 697 tip = imc.commit(message=message,
697 698 author=author,
698 699 parents=parents,
699 700 branch=parent_commit.branch)
700 701 except NodeNotChangedError:
701 702 raise
702 703 except Exception as e:
703 704 log.exception("Unexpected exception during call to imc.commit")
704 705 raise IMCCommitError(str(e))
705 706 finally:
706 707 # always clear caches, if commit fails we want fresh object also
707 708 self.mark_for_invalidation(repo.repo_name)
708 709
709 710 if trigger_push_hook:
710 711 hooks_utils.trigger_post_push_hook(
711 username=user.username, action='push_local',
712 username=user.username, action='push_local', hook_type='post_push',
712 713 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
713 714 commit_ids=[tip.raw_id])
714 715
715 716 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
716 717 author=None, trigger_push_hook=True):
717 718 """
718 719 Deletes given multiple nodes into `repo`
719 720
720 721 :param user: RhodeCode User object or user_id, the committer
721 722 :param repo: RhodeCode Repository object
722 723 :param message: commit message
723 724 :param nodes: mapping {filename:{'content':content},...}
724 725 :param parent_commit: parent commit, can be empty than it's initial
725 726 commit
726 727 :param author: author of commit, cna be different that commiter only
727 728 for git
728 729 :param trigger_push_hook: trigger push hooks
729 730
730 731 :returns: new commit after deletion
731 732 """
732 733
733 734 user = self._get_user(user)
734 735 scm_instance = repo.scm_instance(cache=False)
735 736
736 737 processed_nodes = []
737 738 for f_path in nodes:
738 739 f_path = self._sanitize_path(f_path)
739 740 # content can be empty but for compatabilty it allows same dicts
740 741 # structure as add_nodes
741 742 content = nodes[f_path].get('content')
742 743 processed_nodes.append((f_path, content))
743 744
744 745 message = safe_unicode(message)
745 746 commiter = user.full_contact
746 747 author = safe_unicode(author) if author else commiter
747 748
748 749 imc = scm_instance.in_memory_commit
749 750
750 751 if not parent_commit:
751 752 parent_commit = EmptyCommit(alias=scm_instance.alias)
752 753
753 754 if isinstance(parent_commit, EmptyCommit):
754 755 # EmptyCommit means we we're editing empty repository
755 756 parents = None
756 757 else:
757 758 parents = [parent_commit]
758 759 # add multiple nodes
759 760 for path, content in processed_nodes:
760 761 imc.remove(FileNode(path, content=content))
761 762
762 763 # TODO: handle pre push scenario
763 764 tip = imc.commit(message=message,
764 765 author=author,
765 766 parents=parents,
766 767 branch=parent_commit.branch)
767 768
768 769 self.mark_for_invalidation(repo.repo_name)
769 770 if trigger_push_hook:
770 771 hooks_utils.trigger_post_push_hook(
771 username=user.username, action='push_local',
772 username=user.username, action='push_local', hook_type='post_push',
772 773 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
773 774 commit_ids=[tip.raw_id])
774 775 return tip
775 776
776 777 def strip(self, repo, commit_id, branch):
777 778 scm_instance = repo.scm_instance(cache=False)
778 779 scm_instance.config.clear_section('hooks')
779 780 scm_instance.strip(commit_id, branch)
780 781 self.mark_for_invalidation(repo.repo_name)
781 782
782 783 def get_unread_journal(self):
783 784 return self.sa.query(UserLog).count()
784 785
785 786 def get_repo_landing_revs(self, translator, repo=None):
786 787 """
787 788 Generates select option with tags branches and bookmarks (for hg only)
788 789 grouped by type
789 790
790 791 :param repo:
791 792 """
792 793 _ = translator
793 794 repo = self._get_repo(repo)
794 795
795 796 hist_l = [
796 797 ['rev:tip', _('latest tip')]
797 798 ]
798 799 choices = [
799 800 'rev:tip'
800 801 ]
801 802
802 803 if not repo:
803 804 return choices, hist_l
804 805
805 806 repo = repo.scm_instance()
806 807
807 808 branches_group = (
808 809 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
809 810 for b in repo.branches],
810 811 _("Branches"))
811 812 hist_l.append(branches_group)
812 813 choices.extend([x[0] for x in branches_group[0]])
813 814
814 815 if repo.alias == 'hg':
815 816 bookmarks_group = (
816 817 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
817 818 for b in repo.bookmarks],
818 819 _("Bookmarks"))
819 820 hist_l.append(bookmarks_group)
820 821 choices.extend([x[0] for x in bookmarks_group[0]])
821 822
822 823 tags_group = (
823 824 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
824 825 for t in repo.tags],
825 826 _("Tags"))
826 827 hist_l.append(tags_group)
827 828 choices.extend([x[0] for x in tags_group[0]])
828 829
829 830 return choices, hist_l
830 831
831 832 def get_server_info(self, environ=None):
832 833 server_info = get_system_info(environ)
833 834 return server_info
@@ -1,122 +1,123 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.tests.events.conftest import EventCatcher
24 24
25 25 from rhodecode.lib import hooks_base, utils2
26 26 from rhodecode.model.repo import RepoModel
27 27 from rhodecode.events.repo import (
28 28 RepoPrePullEvent, RepoPullEvent,
29 29 RepoPrePushEvent, RepoPushEvent,
30 30 RepoPreCreateEvent, RepoCreateEvent,
31 31 RepoPreDeleteEvent, RepoDeleteEvent,
32 32 )
33 33
34 34
35 35 @pytest.fixture
36 36 def scm_extras(user_regular, repo_stub):
37 37 extras = utils2.AttributeDict({
38 38 'ip': '127.0.0.1',
39 39 'username': user_regular.username,
40 40 'user_id': user_regular.user_id,
41 41 'action': '',
42 42 'repository': repo_stub.repo_name,
43 43 'scm': repo_stub.scm_instance().alias,
44 44 'config': '',
45 45 'repo_store': '',
46 46 'server_url': 'http://example.com',
47 47 'make_lock': None,
48 'user-agent': 'some-client',
48 'user_agent': 'some-client',
49 49 'locked_by': [None],
50 50 'commit_ids': ['a' * 40] * 3,
51 'hook_type': 'scm_extras_test',
51 52 'is_shadow_repo': False,
52 53 })
53 54 return extras
54 55
55 56
56 57 # TODO: dan: make the serialization tests complete json comparisons
57 58 @pytest.mark.parametrize('EventClass', [
58 59 RepoPreCreateEvent, RepoCreateEvent,
59 60 RepoPreDeleteEvent, RepoDeleteEvent,
60 61 ])
61 62 def test_repo_events_serialized(config_stub, repo_stub, EventClass):
62 63 event = EventClass(repo_stub)
63 64 data = event.as_dict()
64 65 assert data['name'] == EventClass.name
65 66 assert data['repo']['repo_name'] == repo_stub.repo_name
66 67 assert data['repo']['url']
67 68 assert data['repo']['permalink_url']
68 69
69 70
70 71 @pytest.mark.parametrize('EventClass', [
71 72 RepoPrePullEvent, RepoPullEvent, RepoPrePushEvent
72 73 ])
73 74 def test_vcs_repo_events_serialize(config_stub, repo_stub, scm_extras, EventClass):
74 75 event = EventClass(repo_name=repo_stub.repo_name, extras=scm_extras)
75 76 data = event.as_dict()
76 77 assert data['name'] == EventClass.name
77 78 assert data['repo']['repo_name'] == repo_stub.repo_name
78 79 assert data['repo']['url']
79 80 assert data['repo']['permalink_url']
80 81
81 82
82 83 @pytest.mark.parametrize('EventClass', [RepoPushEvent])
83 84 def test_vcs_repo_push_event_serialize(config_stub, repo_stub, scm_extras, EventClass):
84 85 event = EventClass(repo_name=repo_stub.repo_name,
85 86 pushed_commit_ids=scm_extras['commit_ids'],
86 87 extras=scm_extras)
87 88 data = event.as_dict()
88 89 assert data['name'] == EventClass.name
89 90 assert data['repo']['repo_name'] == repo_stub.repo_name
90 91 assert data['repo']['url']
91 92 assert data['repo']['permalink_url']
92 93
93 94
94 95 def test_create_delete_repo_fires_events(backend):
95 96 with EventCatcher() as event_catcher:
96 97 repo = backend.create_repo()
97 98 assert event_catcher.events_types == [RepoPreCreateEvent, RepoCreateEvent]
98 99
99 100 with EventCatcher() as event_catcher:
100 101 RepoModel().delete(repo)
101 102 assert event_catcher.events_types == [RepoPreDeleteEvent, RepoDeleteEvent]
102 103
103 104
104 105 def test_pull_fires_events(scm_extras):
105 106 with EventCatcher() as event_catcher:
106 107 hooks_base.pre_push(scm_extras)
107 108 assert event_catcher.events_types == [RepoPrePushEvent]
108 109
109 110 with EventCatcher() as event_catcher:
110 111 hooks_base.post_push(scm_extras)
111 112 assert event_catcher.events_types == [RepoPushEvent]
112 113
113 114
114 115 def test_push_fires_events(scm_extras):
115 116 with EventCatcher() as event_catcher:
116 117 hooks_base.pre_pull(scm_extras)
117 118 assert event_catcher.events_types == [RepoPrePullEvent]
118 119
119 120 with EventCatcher() as event_catcher:
120 121 hooks_base.post_pull(scm_extras)
121 122 assert event_catcher.events_types == [RepoPullEvent]
122 123
@@ -1,144 +1,151 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 from rhodecode.model.db import Session, UserLog
24 24 from rhodecode.lib import hooks_base, utils2
25 25
26 26
27 27 def test_post_push_truncates_commits(user_regular, repo_stub):
28 28 extras = {
29 29 'ip': '127.0.0.1',
30 30 'username': user_regular.username,
31 31 'user_id': user_regular.user_id,
32 32 'action': 'push_local',
33 33 'repository': repo_stub.repo_name,
34 34 'scm': 'git',
35 35 'config': '',
36 36 'server_url': 'http://example.com',
37 37 'make_lock': None,
38 38 'user_agent': 'some-client',
39 39 'locked_by': [None],
40 40 'commit_ids': ['abcde12345' * 4] * 30000,
41 'hook_type': 'large_push_test_type',
41 42 'is_shadow_repo': False,
42 43 }
43 44 extras = utils2.AttributeDict(extras)
44 45
45 46 hooks_base.post_push(extras)
46 47
47 48 # Calculate appropriate action string here
48 49 commit_ids = extras.commit_ids[:400]
49 50
50 51 entry = UserLog.query().order_by('-user_log_id').first()
51 52 assert entry.action == 'user.push'
52 53 assert entry.action_data['commit_ids'] == commit_ids
53 54 Session().delete(entry)
54 55 Session().commit()
55 56
56 57
57 58 def assert_called_with_mock(callable_, expected_mock_name):
58 59 mock_obj = callable_.call_args[0][0]
59 60 mock_name = mock_obj._mock_new_parent._mock_new_name
60 61 assert mock_name == expected_mock_name
61 62
62 63
63 64 @pytest.fixture
64 65 def hook_extras(user_regular, repo_stub):
65 66 extras = utils2.AttributeDict({
66 67 'ip': '127.0.0.1',
67 68 'username': user_regular.username,
68 69 'user_id': user_regular.user_id,
69 70 'action': 'push',
70 71 'repository': repo_stub.repo_name,
71 72 'scm': '',
72 73 'config': '',
73 74 'repo_store': '',
74 75 'server_url': 'http://example.com',
75 76 'make_lock': None,
76 77 'user_agent': 'some-client',
77 78 'locked_by': [None],
78 79 'commit_ids': [],
80 'hook_type': 'test_type',
79 81 'is_shadow_repo': False,
80 82 })
81 83 return extras
82 84
83 85
84 86 @pytest.mark.parametrize('func, extension, event', [
85 87 (hooks_base.pre_push, 'pre_push_extension', 'RepoPrePushEvent'),
86 88 (hooks_base.post_push, 'post_pull_extension', 'RepoPushEvent'),
87 89 (hooks_base.pre_pull, 'pre_pull_extension', 'RepoPrePullEvent'),
88 90 (hooks_base.post_pull, 'post_push_extension', 'RepoPullEvent'),
89 91 ])
90 92 def test_hooks_propagate(func, extension, event, hook_extras):
91 93 """
92 94 Tests that our hook code propagates to rhodecode extensions and triggers
93 95 the appropriate event.
94 96 """
95 extension_mock = mock.Mock()
97 class ExtensionMock(mock.Mock):
98 @property
99 def output(self):
100 return 'MOCK'
101
102 extension_mock = ExtensionMock()
96 103 events_mock = mock.Mock()
97 104 patches = {
98 105 'Repository': mock.Mock(),
99 106 'events': events_mock,
100 107 extension: extension_mock,
101 108 }
102 109
103 110 # Clear shadow repo flag.
104 111 hook_extras.is_shadow_repo = False
105 112
106 113 # Execute hook function.
107 114 with mock.patch.multiple(hooks_base, **patches):
108 115 func(hook_extras)
109 116
110 117 # Assert that extensions are called and event was fired.
111 118 extension_mock.called_once()
112 119 assert_called_with_mock(events_mock.trigger, event)
113 120
114 121
115 122 @pytest.mark.parametrize('func, extension, event', [
116 123 (hooks_base.pre_push, 'pre_push_extension', 'RepoPrePushEvent'),
117 124 (hooks_base.post_push, 'post_pull_extension', 'RepoPushEvent'),
118 125 (hooks_base.pre_pull, 'pre_pull_extension', 'RepoPrePullEvent'),
119 126 (hooks_base.post_pull, 'post_push_extension', 'RepoPullEvent'),
120 127 ])
121 128 def test_hooks_propagates_not_on_shadow(func, extension, event, hook_extras):
122 129 """
123 130 If hooks are called by a request to a shadow repo we only want to run our
124 131 internal hooks code but not external ones like rhodecode extensions or
125 132 trigger an event.
126 133 """
127 134 extension_mock = mock.Mock()
128 135 events_mock = mock.Mock()
129 136 patches = {
130 137 'Repository': mock.Mock(),
131 138 'events': events_mock,
132 139 extension: extension_mock,
133 140 }
134 141
135 142 # Set shadow repo flag.
136 143 hook_extras.is_shadow_repo = True
137 144
138 145 # Execute hook function.
139 146 with mock.patch.multiple(hooks_base, **patches):
140 147 func(hook_extras)
141 148
142 149 # Assert that extensions are *not* called and event was *not* fired.
143 150 assert not extension_mock.called
144 151 assert not events_mock.trigger.called
@@ -1,869 +1,870 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 import textwrap
24 24
25 25 import rhodecode
26 26 from rhodecode.lib.utils2 import safe_unicode
27 27 from rhodecode.lib.vcs.backends import get_backend
28 28 from rhodecode.lib.vcs.backends.base import (
29 29 MergeResponse, MergeFailureReason, Reference)
30 30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 31 from rhodecode.lib.vcs.nodes import FileNode
32 32 from rhodecode.model.comment import CommentsModel
33 33 from rhodecode.model.db import PullRequest, Session
34 34 from rhodecode.model.pull_request import PullRequestModel
35 35 from rhodecode.model.user import UserModel
36 36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37 37
38 38
39 39 pytestmark = [
40 40 pytest.mark.backends("git", "hg"),
41 41 ]
42 42
43 43
44 44 @pytest.mark.usefixtures('config_stub')
45 45 class TestPullRequestModel(object):
46 46
47 47 @pytest.fixture
48 48 def pull_request(self, request, backend, pr_util):
49 49 """
50 50 A pull request combined with multiples patches.
51 51 """
52 52 BackendClass = get_backend(backend.alias)
53 53 self.merge_patcher = mock.patch.object(
54 54 BackendClass, 'merge', return_value=MergeResponse(
55 55 False, False, None, MergeFailureReason.UNKNOWN))
56 56 self.workspace_remove_patcher = mock.patch.object(
57 57 BackendClass, 'cleanup_merge_workspace')
58 58
59 59 self.workspace_remove_mock = self.workspace_remove_patcher.start()
60 60 self.merge_mock = self.merge_patcher.start()
61 61 self.comment_patcher = mock.patch(
62 62 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
63 63 self.comment_patcher.start()
64 64 self.notification_patcher = mock.patch(
65 65 'rhodecode.model.notification.NotificationModel.create')
66 66 self.notification_patcher.start()
67 67 self.helper_patcher = mock.patch(
68 68 'rhodecode.lib.helpers.route_path')
69 69 self.helper_patcher.start()
70 70
71 71 self.hook_patcher = mock.patch.object(PullRequestModel,
72 72 '_trigger_pull_request_hook')
73 73 self.hook_mock = self.hook_patcher.start()
74 74
75 75 self.invalidation_patcher = mock.patch(
76 76 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
77 77 self.invalidation_mock = self.invalidation_patcher.start()
78 78
79 79 self.pull_request = pr_util.create_pull_request(
80 80 mergeable=True, name_suffix=u'ąć')
81 81 self.source_commit = self.pull_request.source_ref_parts.commit_id
82 82 self.target_commit = self.pull_request.target_ref_parts.commit_id
83 83 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
84 84 self.repo_id = self.pull_request.target_repo.repo_id
85 85
86 86 @request.addfinalizer
87 87 def cleanup_pull_request():
88 88 calls = [mock.call(
89 89 self.pull_request, self.pull_request.author, 'create')]
90 90 self.hook_mock.assert_has_calls(calls)
91 91
92 92 self.workspace_remove_patcher.stop()
93 93 self.merge_patcher.stop()
94 94 self.comment_patcher.stop()
95 95 self.notification_patcher.stop()
96 96 self.helper_patcher.stop()
97 97 self.hook_patcher.stop()
98 98 self.invalidation_patcher.stop()
99 99
100 100 return self.pull_request
101 101
102 102 def test_get_all(self, pull_request):
103 103 prs = PullRequestModel().get_all(pull_request.target_repo)
104 104 assert isinstance(prs, list)
105 105 assert len(prs) == 1
106 106
107 107 def test_count_all(self, pull_request):
108 108 pr_count = PullRequestModel().count_all(pull_request.target_repo)
109 109 assert pr_count == 1
110 110
111 111 def test_get_awaiting_review(self, pull_request):
112 112 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
113 113 assert isinstance(prs, list)
114 114 assert len(prs) == 1
115 115
116 116 def test_count_awaiting_review(self, pull_request):
117 117 pr_count = PullRequestModel().count_awaiting_review(
118 118 pull_request.target_repo)
119 119 assert pr_count == 1
120 120
121 121 def test_get_awaiting_my_review(self, pull_request):
122 122 PullRequestModel().update_reviewers(
123 123 pull_request, [(pull_request.author, ['author'], False, [])],
124 124 pull_request.author)
125 125 prs = PullRequestModel().get_awaiting_my_review(
126 126 pull_request.target_repo, user_id=pull_request.author.user_id)
127 127 assert isinstance(prs, list)
128 128 assert len(prs) == 1
129 129
130 130 def test_count_awaiting_my_review(self, pull_request):
131 131 PullRequestModel().update_reviewers(
132 132 pull_request, [(pull_request.author, ['author'], False, [])],
133 133 pull_request.author)
134 134 pr_count = PullRequestModel().count_awaiting_my_review(
135 135 pull_request.target_repo, user_id=pull_request.author.user_id)
136 136 assert pr_count == 1
137 137
138 138 def test_delete_calls_cleanup_merge(self, pull_request):
139 139 repo_id = pull_request.target_repo.repo_id
140 140 PullRequestModel().delete(pull_request, pull_request.author)
141 141
142 142 self.workspace_remove_mock.assert_called_once_with(
143 143 repo_id, self.workspace_id)
144 144
145 145 def test_close_calls_cleanup_and_hook(self, pull_request):
146 146 PullRequestModel().close_pull_request(
147 147 pull_request, pull_request.author)
148 148 repo_id = pull_request.target_repo.repo_id
149 149
150 150 self.workspace_remove_mock.assert_called_once_with(
151 151 repo_id, self.workspace_id)
152 152 self.hook_mock.assert_called_with(
153 153 self.pull_request, self.pull_request.author, 'close')
154 154
155 155 def test_merge_status(self, pull_request):
156 156 self.merge_mock.return_value = MergeResponse(
157 157 True, False, None, MergeFailureReason.NONE)
158 158
159 159 assert pull_request._last_merge_source_rev is None
160 160 assert pull_request._last_merge_target_rev is None
161 161 assert pull_request.last_merge_status is None
162 162
163 163 status, msg = PullRequestModel().merge_status(pull_request)
164 164 assert status is True
165 165 assert msg.eval() == 'This pull request can be automatically merged.'
166 166 self.merge_mock.assert_called_with(
167 167 self.repo_id, self.workspace_id,
168 168 pull_request.target_ref_parts,
169 169 pull_request.source_repo.scm_instance(),
170 170 pull_request.source_ref_parts, dry_run=True,
171 171 use_rebase=False, close_branch=False)
172 172
173 173 assert pull_request._last_merge_source_rev == self.source_commit
174 174 assert pull_request._last_merge_target_rev == self.target_commit
175 175 assert pull_request.last_merge_status is MergeFailureReason.NONE
176 176
177 177 self.merge_mock.reset_mock()
178 178 status, msg = PullRequestModel().merge_status(pull_request)
179 179 assert status is True
180 180 assert msg.eval() == 'This pull request can be automatically merged.'
181 181 assert self.merge_mock.called is False
182 182
183 183 def test_merge_status_known_failure(self, pull_request):
184 184 self.merge_mock.return_value = MergeResponse(
185 185 False, False, None, MergeFailureReason.MERGE_FAILED)
186 186
187 187 assert pull_request._last_merge_source_rev is None
188 188 assert pull_request._last_merge_target_rev is None
189 189 assert pull_request.last_merge_status is None
190 190
191 191 status, msg = PullRequestModel().merge_status(pull_request)
192 192 assert status is False
193 193 assert (
194 194 msg.eval() ==
195 195 'This pull request cannot be merged because of merge conflicts.')
196 196 self.merge_mock.assert_called_with(
197 197 self.repo_id, self.workspace_id,
198 198 pull_request.target_ref_parts,
199 199 pull_request.source_repo.scm_instance(),
200 200 pull_request.source_ref_parts, dry_run=True,
201 201 use_rebase=False, close_branch=False)
202 202
203 203 assert pull_request._last_merge_source_rev == self.source_commit
204 204 assert pull_request._last_merge_target_rev == self.target_commit
205 205 assert (
206 206 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
207 207
208 208 self.merge_mock.reset_mock()
209 209 status, msg = PullRequestModel().merge_status(pull_request)
210 210 assert status is False
211 211 assert (
212 212 msg.eval() ==
213 213 'This pull request cannot be merged because of merge conflicts.')
214 214 assert self.merge_mock.called is False
215 215
216 216 def test_merge_status_unknown_failure(self, pull_request):
217 217 self.merge_mock.return_value = MergeResponse(
218 218 False, False, None, MergeFailureReason.UNKNOWN)
219 219
220 220 assert pull_request._last_merge_source_rev is None
221 221 assert pull_request._last_merge_target_rev is None
222 222 assert pull_request.last_merge_status is None
223 223
224 224 status, msg = PullRequestModel().merge_status(pull_request)
225 225 assert status is False
226 226 assert msg.eval() == (
227 227 'This pull request cannot be merged because of an unhandled'
228 228 ' exception.')
229 229 self.merge_mock.assert_called_with(
230 230 self.repo_id, self.workspace_id,
231 231 pull_request.target_ref_parts,
232 232 pull_request.source_repo.scm_instance(),
233 233 pull_request.source_ref_parts, dry_run=True,
234 234 use_rebase=False, close_branch=False)
235 235
236 236 assert pull_request._last_merge_source_rev is None
237 237 assert pull_request._last_merge_target_rev is None
238 238 assert pull_request.last_merge_status is None
239 239
240 240 self.merge_mock.reset_mock()
241 241 status, msg = PullRequestModel().merge_status(pull_request)
242 242 assert status is False
243 243 assert msg.eval() == (
244 244 'This pull request cannot be merged because of an unhandled'
245 245 ' exception.')
246 246 assert self.merge_mock.called is True
247 247
248 248 def test_merge_status_when_target_is_locked(self, pull_request):
249 249 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
250 250 status, msg = PullRequestModel().merge_status(pull_request)
251 251 assert status is False
252 252 assert msg.eval() == (
253 253 'This pull request cannot be merged because the target repository'
254 254 ' is locked.')
255 255
256 256 def test_merge_status_requirements_check_target(self, pull_request):
257 257
258 258 def has_largefiles(self, repo):
259 259 return repo == pull_request.source_repo
260 260
261 261 patcher = mock.patch.object(
262 262 PullRequestModel, '_has_largefiles', has_largefiles)
263 263 with patcher:
264 264 status, msg = PullRequestModel().merge_status(pull_request)
265 265
266 266 assert status is False
267 267 assert msg == 'Target repository large files support is disabled.'
268 268
269 269 def test_merge_status_requirements_check_source(self, pull_request):
270 270
271 271 def has_largefiles(self, repo):
272 272 return repo == pull_request.target_repo
273 273
274 274 patcher = mock.patch.object(
275 275 PullRequestModel, '_has_largefiles', has_largefiles)
276 276 with patcher:
277 277 status, msg = PullRequestModel().merge_status(pull_request)
278 278
279 279 assert status is False
280 280 assert msg == 'Source repository large files support is disabled.'
281 281
282 282 def test_merge(self, pull_request, merge_extras):
283 283 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
284 284 merge_ref = Reference(
285 285 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
286 286 self.merge_mock.return_value = MergeResponse(
287 287 True, True, merge_ref, MergeFailureReason.NONE)
288 288
289 289 merge_extras['repository'] = pull_request.target_repo.repo_name
290 290 PullRequestModel().merge_repo(
291 291 pull_request, pull_request.author, extras=merge_extras)
292 292
293 293 message = (
294 294 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
295 295 u'\n\n {pr_title}'.format(
296 296 pr_id=pull_request.pull_request_id,
297 297 source_repo=safe_unicode(
298 298 pull_request.source_repo.scm_instance().name),
299 299 source_ref_name=pull_request.source_ref_parts.name,
300 300 pr_title=safe_unicode(pull_request.title)
301 301 )
302 302 )
303 303 self.merge_mock.assert_called_with(
304 304 self.repo_id, self.workspace_id,
305 305 pull_request.target_ref_parts,
306 306 pull_request.source_repo.scm_instance(),
307 307 pull_request.source_ref_parts,
308 308 user_name=user.short_contact, user_email=user.email, message=message,
309 309 use_rebase=False, close_branch=False
310 310 )
311 311 self.invalidation_mock.assert_called_once_with(
312 312 pull_request.target_repo.repo_name)
313 313
314 314 self.hook_mock.assert_called_with(
315 315 self.pull_request, self.pull_request.author, 'merge')
316 316
317 317 pull_request = PullRequest.get(pull_request.pull_request_id)
318 318 assert (
319 319 pull_request.merge_rev ==
320 320 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
321 321
322 322 def test_merge_failed(self, pull_request, merge_extras):
323 323 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
324 324 merge_ref = Reference(
325 325 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
326 326 self.merge_mock.return_value = MergeResponse(
327 327 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
328 328
329 329 merge_extras['repository'] = pull_request.target_repo.repo_name
330 330 PullRequestModel().merge_repo(
331 331 pull_request, pull_request.author, extras=merge_extras)
332 332
333 333 message = (
334 334 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
335 335 u'\n\n {pr_title}'.format(
336 336 pr_id=pull_request.pull_request_id,
337 337 source_repo=safe_unicode(
338 338 pull_request.source_repo.scm_instance().name),
339 339 source_ref_name=pull_request.source_ref_parts.name,
340 340 pr_title=safe_unicode(pull_request.title)
341 341 )
342 342 )
343 343 self.merge_mock.assert_called_with(
344 344 self.repo_id, self.workspace_id,
345 345 pull_request.target_ref_parts,
346 346 pull_request.source_repo.scm_instance(),
347 347 pull_request.source_ref_parts,
348 348 user_name=user.short_contact, user_email=user.email, message=message,
349 349 use_rebase=False, close_branch=False
350 350 )
351 351
352 352 pull_request = PullRequest.get(pull_request.pull_request_id)
353 353 assert self.invalidation_mock.called is False
354 354 assert pull_request.merge_rev is None
355 355
356 356 def test_get_commit_ids(self, pull_request):
357 357 # The PR has been not merget yet, so expect an exception
358 358 with pytest.raises(ValueError):
359 359 PullRequestModel()._get_commit_ids(pull_request)
360 360
361 361 # Merge revision is in the revisions list
362 362 pull_request.merge_rev = pull_request.revisions[0]
363 363 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
364 364 assert commit_ids == pull_request.revisions
365 365
366 366 # Merge revision is not in the revisions list
367 367 pull_request.merge_rev = 'f000' * 10
368 368 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
369 369 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
370 370
371 371 def test_get_diff_from_pr_version(self, pull_request):
372 372 source_repo = pull_request.source_repo
373 373 source_ref_id = pull_request.source_ref_parts.commit_id
374 374 target_ref_id = pull_request.target_ref_parts.commit_id
375 375 diff = PullRequestModel()._get_diff_from_pr_or_version(
376 376 source_repo, source_ref_id, target_ref_id, context=6)
377 377 assert 'file_1' in diff.raw
378 378
379 379 def test_generate_title_returns_unicode(self):
380 380 title = PullRequestModel().generate_pullrequest_title(
381 381 source='source-dummy',
382 382 source_ref='source-ref-dummy',
383 383 target='target-dummy',
384 384 )
385 385 assert type(title) == unicode
386 386
387 387
388 388 @pytest.mark.usefixtures('config_stub')
389 389 class TestIntegrationMerge(object):
390 390 @pytest.mark.parametrize('extra_config', (
391 391 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
392 392 ))
393 393 def test_merge_triggers_push_hooks(
394 394 self, pr_util, user_admin, capture_rcextensions, merge_extras,
395 395 extra_config):
396
396 397 pull_request = pr_util.create_pull_request(
397 398 approved=True, mergeable=True)
398 399 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
399 400 merge_extras['repository'] = pull_request.target_repo.repo_name
400 401 Session().commit()
401 402
402 403 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
403 404 merge_state = PullRequestModel().merge_repo(
404 405 pull_request, user_admin, extras=merge_extras)
405 406
406 407 assert merge_state.executed
407 assert 'pre_push' in capture_rcextensions
408 assert 'post_push' in capture_rcextensions
408 assert '_pre_push_hook' in capture_rcextensions
409 assert '_push_hook' in capture_rcextensions
409 410
410 411 def test_merge_can_be_rejected_by_pre_push_hook(
411 412 self, pr_util, user_admin, capture_rcextensions, merge_extras):
412 413 pull_request = pr_util.create_pull_request(
413 414 approved=True, mergeable=True)
414 415 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
415 416 merge_extras['repository'] = pull_request.target_repo.repo_name
416 417 Session().commit()
417 418
418 419 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
419 420 pre_pull.side_effect = RepositoryError("Disallow push!")
420 421 merge_status = PullRequestModel().merge_repo(
421 422 pull_request, user_admin, extras=merge_extras)
422 423
423 424 assert not merge_status.executed
424 425 assert 'pre_push' not in capture_rcextensions
425 426 assert 'post_push' not in capture_rcextensions
426 427
427 428 def test_merge_fails_if_target_is_locked(
428 429 self, pr_util, user_regular, merge_extras):
429 430 pull_request = pr_util.create_pull_request(
430 431 approved=True, mergeable=True)
431 432 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
432 433 pull_request.target_repo.locked = locked_by
433 434 # TODO: johbo: Check if this can work based on the database, currently
434 435 # all data is pre-computed, that's why just updating the DB is not
435 436 # enough.
436 437 merge_extras['locked_by'] = locked_by
437 438 merge_extras['repository'] = pull_request.target_repo.repo_name
438 439 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
439 440 Session().commit()
440 441 merge_status = PullRequestModel().merge_repo(
441 442 pull_request, user_regular, extras=merge_extras)
442 443 assert not merge_status.executed
443 444
444 445
445 446 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
446 447 (False, 1, 0),
447 448 (True, 0, 1),
448 449 ])
449 450 def test_outdated_comments(
450 451 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
451 452 pull_request = pr_util.create_pull_request()
452 453 pr_util.create_inline_comment(file_path='not_in_updated_diff')
453 454
454 455 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
455 456 pr_util.add_one_commit()
456 457 assert_inline_comments(
457 458 pull_request, visible=inlines_count, outdated=outdated_count)
458 459 outdated_comment_mock.assert_called_with(pull_request)
459 460
460 461
461 462 @pytest.fixture
462 463 def merge_extras(user_regular):
463 464 """
464 465 Context for the vcs operation when running a merge.
465 466 """
466 467 extras = {
467 468 'ip': '127.0.0.1',
468 469 'username': user_regular.username,
469 470 'user_id': user_regular.user_id,
470 471 'action': 'push',
471 472 'repository': 'fake_target_repo_name',
472 473 'scm': 'git',
473 474 'config': 'fake_config_ini_path',
474 475 'repo_store': '',
475 476 'make_lock': None,
476 477 'locked_by': [None, None, None],
477 478 'server_url': 'http://test.example.com:5000',
478 479 'hooks': ['push', 'pull'],
479 480 'is_shadow_repo': False,
480 481 }
481 482 return extras
482 483
483 484
484 485 @pytest.mark.usefixtures('config_stub')
485 486 class TestUpdateCommentHandling(object):
486 487
487 488 @pytest.fixture(autouse=True, scope='class')
488 489 def enable_outdated_comments(self, request, baseapp):
489 490 config_patch = mock.patch.dict(
490 491 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
491 492 config_patch.start()
492 493
493 494 @request.addfinalizer
494 495 def cleanup():
495 496 config_patch.stop()
496 497
497 498 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
498 499 commits = [
499 500 {'message': 'a'},
500 501 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
501 502 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
502 503 ]
503 504 pull_request = pr_util.create_pull_request(
504 505 commits=commits, target_head='a', source_head='b', revisions=['b'])
505 506 pr_util.create_inline_comment(file_path='file_b')
506 507 pr_util.add_one_commit(head='c')
507 508
508 509 assert_inline_comments(pull_request, visible=1, outdated=0)
509 510
510 511 def test_comment_stays_unflagged_on_change_above(self, pr_util):
511 512 original_content = ''.join(
512 513 ['line {}\n'.format(x) for x in range(1, 11)])
513 514 updated_content = 'new_line_at_top\n' + original_content
514 515 commits = [
515 516 {'message': 'a'},
516 517 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
517 518 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
518 519 ]
519 520 pull_request = pr_util.create_pull_request(
520 521 commits=commits, target_head='a', source_head='b', revisions=['b'])
521 522
522 523 with outdated_comments_patcher():
523 524 comment = pr_util.create_inline_comment(
524 525 line_no=u'n8', file_path='file_b')
525 526 pr_util.add_one_commit(head='c')
526 527
527 528 assert_inline_comments(pull_request, visible=1, outdated=0)
528 529 assert comment.line_no == u'n9'
529 530
530 531 def test_comment_stays_unflagged_on_change_below(self, pr_util):
531 532 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
532 533 updated_content = original_content + 'new_line_at_end\n'
533 534 commits = [
534 535 {'message': 'a'},
535 536 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
536 537 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
537 538 ]
538 539 pull_request = pr_util.create_pull_request(
539 540 commits=commits, target_head='a', source_head='b', revisions=['b'])
540 541 pr_util.create_inline_comment(file_path='file_b')
541 542 pr_util.add_one_commit(head='c')
542 543
543 544 assert_inline_comments(pull_request, visible=1, outdated=0)
544 545
545 546 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
546 547 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
547 548 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
548 549 change_lines = list(base_lines)
549 550 change_lines.insert(6, 'line 6a added\n')
550 551
551 552 # Changes on the last line of sight
552 553 update_lines = list(change_lines)
553 554 update_lines[0] = 'line 1 changed\n'
554 555 update_lines[-1] = 'line 12 changed\n'
555 556
556 557 def file_b(lines):
557 558 return FileNode('file_b', ''.join(lines))
558 559
559 560 commits = [
560 561 {'message': 'a', 'added': [file_b(base_lines)]},
561 562 {'message': 'b', 'changed': [file_b(change_lines)]},
562 563 {'message': 'c', 'changed': [file_b(update_lines)]},
563 564 ]
564 565
565 566 pull_request = pr_util.create_pull_request(
566 567 commits=commits, target_head='a', source_head='b', revisions=['b'])
567 568 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
568 569
569 570 with outdated_comments_patcher():
570 571 pr_util.add_one_commit(head='c')
571 572 assert_inline_comments(pull_request, visible=0, outdated=1)
572 573
573 574 @pytest.mark.parametrize("change, content", [
574 575 ('changed', 'changed\n'),
575 576 ('removed', ''),
576 577 ], ids=['changed', 'removed'])
577 578 def test_comment_flagged_on_change(self, pr_util, change, content):
578 579 commits = [
579 580 {'message': 'a'},
580 581 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
581 582 {'message': 'c', change: [FileNode('file_b', content)]},
582 583 ]
583 584 pull_request = pr_util.create_pull_request(
584 585 commits=commits, target_head='a', source_head='b', revisions=['b'])
585 586 pr_util.create_inline_comment(file_path='file_b')
586 587
587 588 with outdated_comments_patcher():
588 589 pr_util.add_one_commit(head='c')
589 590 assert_inline_comments(pull_request, visible=0, outdated=1)
590 591
591 592
592 593 @pytest.mark.usefixtures('config_stub')
593 594 class TestUpdateChangedFiles(object):
594 595
595 596 def test_no_changes_on_unchanged_diff(self, pr_util):
596 597 commits = [
597 598 {'message': 'a'},
598 599 {'message': 'b',
599 600 'added': [FileNode('file_b', 'test_content b\n')]},
600 601 {'message': 'c',
601 602 'added': [FileNode('file_c', 'test_content c\n')]},
602 603 ]
603 604 # open a PR from a to b, adding file_b
604 605 pull_request = pr_util.create_pull_request(
605 606 commits=commits, target_head='a', source_head='b', revisions=['b'],
606 607 name_suffix='per-file-review')
607 608
608 609 # modify PR adding new file file_c
609 610 pr_util.add_one_commit(head='c')
610 611
611 612 assert_pr_file_changes(
612 613 pull_request,
613 614 added=['file_c'],
614 615 modified=[],
615 616 removed=[])
616 617
617 618 def test_modify_and_undo_modification_diff(self, pr_util):
618 619 commits = [
619 620 {'message': 'a'},
620 621 {'message': 'b',
621 622 'added': [FileNode('file_b', 'test_content b\n')]},
622 623 {'message': 'c',
623 624 'changed': [FileNode('file_b', 'test_content b modified\n')]},
624 625 {'message': 'd',
625 626 'changed': [FileNode('file_b', 'test_content b\n')]},
626 627 ]
627 628 # open a PR from a to b, adding file_b
628 629 pull_request = pr_util.create_pull_request(
629 630 commits=commits, target_head='a', source_head='b', revisions=['b'],
630 631 name_suffix='per-file-review')
631 632
632 633 # modify PR modifying file file_b
633 634 pr_util.add_one_commit(head='c')
634 635
635 636 assert_pr_file_changes(
636 637 pull_request,
637 638 added=[],
638 639 modified=['file_b'],
639 640 removed=[])
640 641
641 642 # move the head again to d, which rollbacks change,
642 643 # meaning we should indicate no changes
643 644 pr_util.add_one_commit(head='d')
644 645
645 646 assert_pr_file_changes(
646 647 pull_request,
647 648 added=[],
648 649 modified=[],
649 650 removed=[])
650 651
651 652 def test_updated_all_files_in_pr(self, pr_util):
652 653 commits = [
653 654 {'message': 'a'},
654 655 {'message': 'b', 'added': [
655 656 FileNode('file_a', 'test_content a\n'),
656 657 FileNode('file_b', 'test_content b\n'),
657 658 FileNode('file_c', 'test_content c\n')]},
658 659 {'message': 'c', 'changed': [
659 660 FileNode('file_a', 'test_content a changed\n'),
660 661 FileNode('file_b', 'test_content b changed\n'),
661 662 FileNode('file_c', 'test_content c changed\n')]},
662 663 ]
663 664 # open a PR from a to b, changing 3 files
664 665 pull_request = pr_util.create_pull_request(
665 666 commits=commits, target_head='a', source_head='b', revisions=['b'],
666 667 name_suffix='per-file-review')
667 668
668 669 pr_util.add_one_commit(head='c')
669 670
670 671 assert_pr_file_changes(
671 672 pull_request,
672 673 added=[],
673 674 modified=['file_a', 'file_b', 'file_c'],
674 675 removed=[])
675 676
676 677 def test_updated_and_removed_all_files_in_pr(self, pr_util):
677 678 commits = [
678 679 {'message': 'a'},
679 680 {'message': 'b', 'added': [
680 681 FileNode('file_a', 'test_content a\n'),
681 682 FileNode('file_b', 'test_content b\n'),
682 683 FileNode('file_c', 'test_content c\n')]},
683 684 {'message': 'c', 'removed': [
684 685 FileNode('file_a', 'test_content a changed\n'),
685 686 FileNode('file_b', 'test_content b changed\n'),
686 687 FileNode('file_c', 'test_content c changed\n')]},
687 688 ]
688 689 # open a PR from a to b, removing 3 files
689 690 pull_request = pr_util.create_pull_request(
690 691 commits=commits, target_head='a', source_head='b', revisions=['b'],
691 692 name_suffix='per-file-review')
692 693
693 694 pr_util.add_one_commit(head='c')
694 695
695 696 assert_pr_file_changes(
696 697 pull_request,
697 698 added=[],
698 699 modified=[],
699 700 removed=['file_a', 'file_b', 'file_c'])
700 701
701 702
702 703 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
703 704 model = PullRequestModel()
704 705 pull_request = pr_util.create_pull_request()
705 706 pr_util.update_source_repository()
706 707
707 708 model.update_commits(pull_request)
708 709
709 710 # Expect that it has a version entry now
710 711 assert len(model.get_versions(pull_request)) == 1
711 712
712 713
713 714 def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
714 715 pull_request = pr_util.create_pull_request()
715 716 model = PullRequestModel()
716 717 model.update_commits(pull_request)
717 718
718 719 # Expect that it still has no versions
719 720 assert len(model.get_versions(pull_request)) == 0
720 721
721 722
722 723 def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
723 724 model = PullRequestModel()
724 725 pull_request = pr_util.create_pull_request()
725 726 comment = pr_util.create_comment()
726 727 pr_util.update_source_repository()
727 728
728 729 model.update_commits(pull_request)
729 730
730 731 # Expect that the comment is linked to the pr version now
731 732 assert comment.pull_request_version == model.get_versions(pull_request)[0]
732 733
733 734
734 735 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
735 736 model = PullRequestModel()
736 737 pull_request = pr_util.create_pull_request()
737 738 pr_util.update_source_repository()
738 739 pr_util.update_source_repository()
739 740
740 741 model.update_commits(pull_request)
741 742
742 743 # Expect to find a new comment about the change
743 744 expected_message = textwrap.dedent(
744 745 """\
745 746 Pull request updated. Auto status change to |under_review|
746 747
747 748 .. role:: added
748 749 .. role:: removed
749 750 .. parsed-literal::
750 751
751 752 Changed commits:
752 753 * :added:`1 added`
753 754 * :removed:`0 removed`
754 755
755 756 Changed files:
756 757 * `A file_2 <#a_c--92ed3b5f07b4>`_
757 758
758 759 .. |under_review| replace:: *"Under Review"*"""
759 760 )
760 761 pull_request_comments = sorted(
761 762 pull_request.comments, key=lambda c: c.modified_at)
762 763 update_comment = pull_request_comments[-1]
763 764 assert update_comment.text == expected_message
764 765
765 766
766 767 def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
767 768 pull_request = pr_util.create_pull_request()
768 769
769 770 # Avoiding default values
770 771 pull_request.status = PullRequest.STATUS_CLOSED
771 772 pull_request._last_merge_source_rev = "0" * 40
772 773 pull_request._last_merge_target_rev = "1" * 40
773 774 pull_request.last_merge_status = 1
774 775 pull_request.merge_rev = "2" * 40
775 776
776 777 # Remember automatic values
777 778 created_on = pull_request.created_on
778 779 updated_on = pull_request.updated_on
779 780
780 781 # Create a new version of the pull request
781 782 version = PullRequestModel()._create_version_from_snapshot(pull_request)
782 783
783 784 # Check attributes
784 785 assert version.title == pr_util.create_parameters['title']
785 786 assert version.description == pr_util.create_parameters['description']
786 787 assert version.status == PullRequest.STATUS_CLOSED
787 788
788 789 # versions get updated created_on
789 790 assert version.created_on != created_on
790 791
791 792 assert version.updated_on == updated_on
792 793 assert version.user_id == pull_request.user_id
793 794 assert version.revisions == pr_util.create_parameters['revisions']
794 795 assert version.source_repo == pr_util.source_repository
795 796 assert version.source_ref == pr_util.create_parameters['source_ref']
796 797 assert version.target_repo == pr_util.target_repository
797 798 assert version.target_ref == pr_util.create_parameters['target_ref']
798 799 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
799 800 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
800 801 assert version.last_merge_status == pull_request.last_merge_status
801 802 assert version.merge_rev == pull_request.merge_rev
802 803 assert version.pull_request == pull_request
803 804
804 805
805 806 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
806 807 version1 = pr_util.create_version_of_pull_request()
807 808 comment_linked = pr_util.create_comment(linked_to=version1)
808 809 comment_unlinked = pr_util.create_comment()
809 810 version2 = pr_util.create_version_of_pull_request()
810 811
811 812 PullRequestModel()._link_comments_to_version(version2)
812 813
813 814 # Expect that only the new comment is linked to version2
814 815 assert (
815 816 comment_unlinked.pull_request_version_id ==
816 817 version2.pull_request_version_id)
817 818 assert (
818 819 comment_linked.pull_request_version_id ==
819 820 version1.pull_request_version_id)
820 821 assert (
821 822 comment_unlinked.pull_request_version_id !=
822 823 comment_linked.pull_request_version_id)
823 824
824 825
825 826 def test_calculate_commits():
826 827 old_ids = [1, 2, 3]
827 828 new_ids = [1, 3, 4, 5]
828 829 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
829 830 assert change.added == [4, 5]
830 831 assert change.common == [1, 3]
831 832 assert change.removed == [2]
832 833 assert change.total == [1, 3, 4, 5]
833 834
834 835
835 836 def assert_inline_comments(pull_request, visible=None, outdated=None):
836 837 if visible is not None:
837 838 inline_comments = CommentsModel().get_inline_comments(
838 839 pull_request.target_repo.repo_id, pull_request=pull_request)
839 840 inline_cnt = CommentsModel().get_inline_comments_count(
840 841 inline_comments)
841 842 assert inline_cnt == visible
842 843 if outdated is not None:
843 844 outdated_comments = CommentsModel().get_outdated_comments(
844 845 pull_request.target_repo.repo_id, pull_request)
845 846 assert len(outdated_comments) == outdated
846 847
847 848
848 849 def assert_pr_file_changes(
849 850 pull_request, added=None, modified=None, removed=None):
850 851 pr_versions = PullRequestModel().get_versions(pull_request)
851 852 # always use first version, ie original PR to calculate changes
852 853 pull_request_version = pr_versions[0]
853 854 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
854 855 pull_request, pull_request_version)
855 856 file_changes = PullRequestModel()._calculate_file_changes(
856 857 old_diff_data, new_diff_data)
857 858
858 859 assert added == file_changes.added, \
859 860 'expected added:%s vs value:%s' % (added, file_changes.added)
860 861 assert modified == file_changes.modified, \
861 862 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
862 863 assert removed == file_changes.removed, \
863 864 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
864 865
865 866
866 867 def outdated_comments_patcher(use_outdated=True):
867 868 return mock.patch.object(
868 869 CommentsModel, 'use_outdated_comments',
869 870 return_value=use_outdated)
@@ -1,1886 +1,1887 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 23 import hashlib
24 24 import os
25 25 import re
26 26 import pprint
27 27 import shutil
28 28 import socket
29 29 import subprocess32
30 30 import time
31 31 import uuid
32 32 import dateutil.tz
33 33 import functools
34 34
35 35 import mock
36 36 import pyramid.testing
37 37 import pytest
38 38 import colander
39 39 import requests
40 40 import pyramid.paster
41 41
42 42 import rhodecode
43 43 from rhodecode.lib.utils2 import AttributeDict
44 44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 45 from rhodecode.model.comment import CommentsModel
46 46 from rhodecode.model.db import (
47 47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
48 48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 49 from rhodecode.model.meta import Session
50 50 from rhodecode.model.pull_request import PullRequestModel
51 51 from rhodecode.model.repo import RepoModel
52 52 from rhodecode.model.repo_group import RepoGroupModel
53 53 from rhodecode.model.user import UserModel
54 54 from rhodecode.model.settings import VcsSettingsModel
55 55 from rhodecode.model.user_group import UserGroupModel
56 56 from rhodecode.model.integration import IntegrationModel
57 57 from rhodecode.integrations import integration_type_registry
58 58 from rhodecode.integrations.types.base import IntegrationTypeBase
59 59 from rhodecode.lib.utils import repo2db_mapper
60 60 from rhodecode.lib.vcs import create_vcsserver_proxy
61 61 from rhodecode.lib.vcs.backends import get_backend
62 62 from rhodecode.lib.vcs.nodes import FileNode
63 63 from rhodecode.tests import (
64 64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
65 65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
66 66 TEST_USER_REGULAR_PASS)
67 67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
68 68 from rhodecode.tests.fixture import Fixture
69 69 from rhodecode.config import utils as config_utils
70 70
71 71 def _split_comma(value):
72 72 return value.split(',')
73 73
74 74
def pytest_addoption(parser):
    """
    Register RhodeCode-specific command line options with pytest:
    backend/database selection, Appenlight tracking, per-database
    connection strings, and performance-test tuning.
    """
    parser.addoption(
        '--keep-tmp-path', action='store_true',
        help="Keep the test temporary directories")
    parser.addoption(
        '--backends', action='store', type=_split_comma,
        default=['git', 'hg', 'svn'],
        help="Select which backends to test for backend specific tests.")
    parser.addoption(
        '--dbs', action='store', type=_split_comma,
        default=['sqlite'],
        help="Select which database to test for database specific tests. "
             "Possible options are sqlite,postgres,mysql")
    parser.addoption(
        '--appenlight', '--ae', action='store_true',
        help="Track statistics in appenlight.")
    parser.addoption(
        '--appenlight-api-key', '--ae-key',
        help="API key for Appenlight.")
    parser.addoption(
        '--appenlight-url', '--ae-url',
        default="https://ae.rhodecode.com",
        help="Appenlight service URL, defaults to https://ae.rhodecode.com")
    parser.addoption(
        '--sqlite-connection-string', action='store',
        default='', help="Connection string for the dbs tests with SQLite")
    parser.addoption(
        '--postgres-connection-string', action='store',
        default='', help="Connection string for the dbs tests with Postgres")
    parser.addoption(
        '--mysql-connection-string', action='store',
        default='', help="Connection string for the dbs tests with MySQL")
    parser.addoption(
        '--repeat', type=int, default=100,
        help="Number of repetitions in performance tests.")
110 110
111 111
def pytest_configure(config):
    # NOTE(review): the imported name is never used; presumably the module
    # applies its monkey patches at import time — confirm `patches` has
    # import-time side effects before removing this line.
    from rhodecode.config import patches
115 115
def pytest_collection_modifyitems(session, config, items):
    # Honour the nose-style ``__test__ = False`` marker during the
    # nose -> pytest transition: anything flagged that way is removed
    # from the collected items, in place.
    items[:] = [item for item in items if getattr(item.obj, '__test__', True)]
121 121
122 122
def pytest_generate_tests(metafunc):
    # Support test generation based on --backend parameter
    if 'backend_alias' in metafunc.fixturenames:
        backends = get_backends_from_metafunc(metafunc)
        # scope is deliberately left unset; parametrize falls back to the
        # fixture's own scope
        scope = None
        if not backends:
            pytest.skip("Not enabled for any of selected backends")
        metafunc.parametrize('backend_alias', backends, scope=scope)
    elif hasattr(metafunc.function, 'backends'):
        # No backend_alias fixture requested, but the test is still marked
        # backend-specific: skip it outright if none of its backends are
        # selected on the command line.
        backends = get_backends_from_metafunc(metafunc)
        if not backends:
            pytest.skip("Not enabled for any of selected backends")
135 135
136 136
def get_backends_from_metafunc(metafunc):
    """
    Return the backends enabled for a test: the intersection of the
    ``--backends`` command line selection and the backends the test itself
    declares support for.
    """
    selected = set(metafunc.config.getoption('--backends'))
    if hasattr(metafunc.function, 'backends'):
        # Explicit per-test declaration via pytest.mark.backends
        supported = metafunc.definition.get_closest_marker('backends').args
    elif hasattr(metafunc.cls, 'backend_alias'):
        # Legacy class attribute for tests not yet using pytest.mark.backends
        supported = [metafunc.cls.backend_alias]
    else:
        # No declaration: the test supports everything that was selected.
        supported = metafunc.config.getoption('--backends')
    return selected.intersection(supported)
150 150
151 151
@pytest.fixture(scope='session', autouse=True)
def activate_example_rcextensions(request):
    """
    Patch in the builtin rcextensions module which verifies passed in kwargs.

    Installs ``rhodecode.config.rcextensions`` as the active extensions
    module for the whole test session and attaches a fresh ``calls``
    dict that records every entry point invocation (consumed by the
    `capture_rcextensions` fixture). The previously active module is
    restored on session teardown.
    """
    # Defect fixed: this span of the scraped diff contained both the removed
    # (rhodecode.tests.other.example_rcextensions) and the added lines; it is
    # resolved here to the post-change version using the builtin rcextensions.
    from rhodecode.config import rcextensions

    old_extensions = rhodecode.EXTENSIONS
    rhodecode.EXTENSIONS = rcextensions
    # Fresh call-recording dict per session; entry points append into it.
    rhodecode.EXTENSIONS.calls = collections.defaultdict(list)

    @request.addfinalizer
    def cleanup():
        rhodecode.EXTENSIONS = old_extensions
166 167
@pytest.fixture
def capture_rcextensions():
    """
    Returns the recorded calls to entry points in rcextensions.

    The dict is cleared first so each test observes only its own calls.
    """
    calls = rhodecode.EXTENSIONS.calls
    calls.clear()
    # Note: At this moment, it is still the empty dict, but that will
    # be filled during the test run and since it is a reference this
    # is enough to make it work.
    return calls
178 179
179 180
@pytest.fixture(scope='session')
def http_environ_session():
    """
    Allow to use "http_environ" in session scope.
    """
    # Delegates to the plain function so session- and function-scoped
    # fixtures share one implementation.
    return plain_http_environ()
186 187
187 188
def plain_http_host_stub():
    """
    Plain-function variant of the `http_host_stub` fixture; returns the
    host:port string used as HTTP_HOST throughout the test run.
    """
    return 'example.com:80'
193 194
194 195
@pytest.fixture
def http_host_stub():
    """
    Value of HTTP_HOST in the test run.
    """
    # Thin fixture wrapper over the plain function (usable outside fixtures).
    return plain_http_host_stub()
201 202
202 203
def plain_http_host_only_stub():
    """
    Host part (without the port) of the stub HTTP_HOST value.
    """
    host, _, _port = plain_http_host_stub().partition(':')
    return host
208 209
209 210
@pytest.fixture
def http_host_only_stub():
    """
    Value of HTTP_HOST in the test run.
    """
    # Host only, no port — see plain_http_host_only_stub.
    return plain_http_host_only_stub()
216 217
217 218
def plain_http_environ():
    """
    HTTP extra environ keys.

    Used by the test application and as well for setting up the pylons
    environment. In the case of the fixture "app" it should be possible
    to override this for a specific test case.
    """
    return {
        'SERVER_NAME': plain_http_host_only_stub(),
        'SERVER_PORT': plain_http_host_stub().split(':')[1],
        'HTTP_HOST': plain_http_host_stub(),
        'HTTP_USER_AGENT': 'rc-test-agent',
        'REQUEST_METHOD': 'GET'
    }
233 234
234 235
@pytest.fixture
def http_environ():
    """
    HTTP extra environ keys.

    Used by the test application and as well for setting up the pylons
    environment. In the case of the fixture "app" it should be possible
    to override this for a specific test case.
    """
    return plain_http_environ()
245 246
246 247
@pytest.fixture(scope='session')
def baseapp(ini_config, vcsserver, http_environ_session):
    """
    Session-scoped Pyramid application built from the test ini file.

    Depends on `vcsserver` so backend communication works before the app
    is created.
    """
    from rhodecode.lib.pyramid_utils import get_app_config
    from rhodecode.config.middleware import make_pyramid_app

    print("Using the RhodeCode configuration:{}".format(ini_config))
    pyramid.paster.setup_logging(ini_config)

    settings = get_app_config(ini_config)
    app = make_pyramid_app({'__file__': ini_config}, **settings)

    return app
259 260
260 261
@pytest.fixture(scope='function')
def app(request, config_stub, baseapp, http_environ):
    """
    Function-scoped WebTest wrapper around the session `baseapp`,
    pre-loaded with the stub HTTP environ. Also exposed as ``self.app``
    for class-based tests.
    """
    app = CustomTestApp(
        baseapp,
        extra_environ=http_environ)
    if request.cls:
        request.cls.app = app
    return app
269 270
270 271
@pytest.fixture(scope='session')
def app_settings(baseapp, ini_config):
    """
    Settings dictionary used to create the app.

    Parses the ini file and passes the result through the sanitize and apply
    defaults mechanism in `rhodecode.config.middleware`.
    """
    return baseapp.config.get_settings()
280 281
281 282
@pytest.fixture(scope='session')
def db_connection(ini_settings):
    # Initialize the database connection once per session from the ini
    # settings; fixture is used for its side effect only.
    config_utils.initialize_database(ini_settings)
286 287
287 288
# Result bundle of `_autologin_user`: the CSRF token of the logged-in
# session plus the `rhodecode_user` object taken from that session.
LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
289 290
290 291
def _autologin_user(app, *args):
    """
    Log a user in via the web app and return a `LoginData` with the
    session's CSRF token and user. Extra *args are forwarded to
    `login_user_session` (credentials; defaults to the admin user).
    """
    session = login_user_session(app, *args)
    csrf_token = rhodecode.lib.auth.get_csrf_token(session)
    return LoginData(csrf_token, session['rhodecode_user'])
295 296
296 297
@pytest.fixture
def autologin_user(app):
    """
    Utility fixture which makes sure that the admin user is logged in
    """
    # No credentials passed: _autologin_user defaults to the admin login.
    return _autologin_user(app)
303 304
304 305
@pytest.fixture
def autologin_regular_user(app):
    """
    Utility fixture which makes sure that the regular user is logged in
    """
    return _autologin_user(
        app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
312 313
313 314
@pytest.fixture(scope='function')
def csrf_token(request, autologin_user):
    # CSRF token of the auto-logged-in admin session.
    return autologin_user.csrf_token
317 318
318 319
@pytest.fixture(scope='function')
def xhr_header(request):
    # Extra header that makes a test request look like an AJAX/XHR call.
    return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
322 323
323 324
@pytest.fixture
def real_crypto_backend(monkeypatch):
    """
    Switch the production crypto backend on for this test.

    During the test run the crypto backend is replaced with a faster
    implementation based on the MD5 algorithm.
    """
    # `rhodecode.is_test = False` is what selects the production backend.
    monkeypatch.setattr(rhodecode, 'is_test', False)
333 334
334 335
@pytest.fixture(scope='class')
def index_location(request, baseapp):
    """
    Full-text search index location from the app settings; also exposed
    as ``self.index_location`` for class-based tests.
    """
    index_location = baseapp.config.get_settings()['search.location']
    if request.cls:
        request.cls.index_location = index_location
    return index_location
341 342
342 343
@pytest.fixture(scope='session', autouse=True)
def tests_tmp_path(request):
    """
    Create temporary directory to be used during the test session.

    The directory is removed at session end unless ``--keep-tmp-path``
    was passed on the command line.
    """
    if not os.path.exists(TESTS_TMP_PATH):
        os.makedirs(TESTS_TMP_PATH)

    if not request.config.getoption('--keep-tmp-path'):
        @request.addfinalizer
        def remove_tmp_path():
            shutil.rmtree(TESTS_TMP_PATH)

    return TESTS_TMP_PATH
357 358
358 359
@pytest.fixture
def test_repo_group(request):
    """
    Create a temporary repository group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    # Timestamp suffix keeps group names unique across test runs.
    repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
    repo_group = fixture.create_repo_group(repogroupid)

    def _cleanup():
        fixture.destroy_repo_group(repogroupid)

    request.addfinalizer(_cleanup)
    return repo_group
374 375
375 376
@pytest.fixture
def test_user_group(request):
    """
    Create a temporary user group, and destroy it after
    usage automatically
    """
    fixture = Fixture()
    # Timestamp suffix keeps group names unique across test runs.
    usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
    user_group = fixture.create_user_group(usergroupid)

    def _cleanup():
        fixture.destroy_user_group(user_group)

    request.addfinalizer(_cleanup)
    return user_group
391 392
392 393
@pytest.fixture(scope='session')
def test_repo(request):
    """
    Session-scoped container of read-only test repositories; created on
    demand and destroyed at session end.
    """
    container = TestRepoContainer()
    request.addfinalizer(container._cleanup)
    return container
398 399
399 400
class TestRepoContainer(object):
    """
    Container for test repositories which are used read only.

    Repositories will be created on demand and re-used during the lifetime
    of this object.

    Usage to get the svn test repository "minimal"::

       test_repo = TestContainer()
       repo = test_repo('minimal', 'svn')

    """

    # Maps backend alias -> function that extracts the named dump into a
    # working repository on disk.
    dump_extractors = {
        'git': utils.extract_git_repo_from_dump,
        'hg': utils.extract_hg_repo_from_dump,
        'svn': utils.extract_svn_repo_from_dump,
    }

    def __init__(self):
        self._cleanup_repos = []
        self._fixture = Fixture()
        # Cache: (dump_name, backend_alias) -> repo_id
        self._repos = {}

    def __call__(self, dump_name, backend_alias, config=None):
        # Only the repo_id is cached; a fresh DB instance is fetched on
        # every call, avoiding stale/detached ORM objects.
        key = (dump_name, backend_alias)
        if key not in self._repos:
            repo = self._create_repo(dump_name, backend_alias, config)
            self._repos[key] = repo.repo_id
        return Repository.get(self._repos[key])

    def _create_repo(self, dump_name, backend_alias, config):
        # Extract the dump to disk, then register it in the database.
        repo_name = '%s-%s' % (backend_alias, dump_name)
        backend_class = get_backend(backend_alias)
        dump_extractor = self.dump_extractors[backend_alias]
        repo_path = dump_extractor(dump_name, repo_name)

        vcs_repo = backend_class(repo_path, config=config)
        repo2db_mapper({repo_name: vcs_repo})

        repo = RepoModel().get_by_repo_name(repo_name)
        self._cleanup_repos.append(repo_name)
        return repo

    def _cleanup(self):
        # Destroy in reverse creation order.
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)
448 449
449 450
def backend_base(request, backend_alias, baseapp, test_repo):
    """
    Build a `Backend` helper for *backend_alias*, skipping the test when
    that backend was not selected via ``--backends`` or is excluded by
    skip/xfail backend marks.
    """
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    backend = Backend(
        alias=backend_alias,
        repo_name=repo_name,
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend
465 466
466 467
@pytest.fixture
def backend(request, backend_alias, baseapp, test_repo):
    """
    Parametrized fixture which represents a single backend implementation.

    It respects the option `--backends` to focus the test run on specific
    backend implementations.

    It also supports `pytest.mark.xfail_backends` to mark tests as failing
    for specific backends. This is intended as a utility for incremental
    development of a new backend implementation.
    """
    return backend_base(request, backend_alias, baseapp, test_repo)
480 481
481 482
# Non-parametrized variants pinned to one specific backend each.
@pytest.fixture
def backend_git(request, baseapp, test_repo):
    return backend_base(request, 'git', baseapp, test_repo)


@pytest.fixture
def backend_hg(request, baseapp, test_repo):
    return backend_base(request, 'hg', baseapp, test_repo)


@pytest.fixture
def backend_svn(request, baseapp, test_repo):
    return backend_base(request, 'svn', baseapp, test_repo)
495 496
496 497
@pytest.fixture
def backend_random(backend_git):
    """
    Use this to express that your tests need "a backend.

    A few of our tests need a backend, so that we can run the code. This
    fixture is intended to be used for such cases. It will pick one of the
    backends and run the tests.

    The fixture `backend` would run the test multiple times for each
    available backend which is a pure waste of time if the test is
    independent of the backend type.
    """
    # TODO: johbo: Change this to pick a random backend
    return backend_git
512 513
513 514
@pytest.fixture
def backend_stub(backend_git):
    """
    Use this to express that your tests need a backend stub

    TODO: mikhail: Implement a real stub logic instead of returning
    a git backend
    """
    return backend_git
523 524
524 525
@pytest.fixture
def repo_stub(backend_stub):
    """
    Use this to express that your tests need a repository stub
    """
    # Creates a fresh (git-backed) repository via the stub backend.
    return backend_stub.create_repo()
531 532
532 533
class Backend(object):
    """
    Represents the test configuration for one supported backend

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
    _master_repo = None
    # NOTE(review): class-level default; instances rebind (never mutate)
    # this in _add_commits_to_repo, so it is not shared after the first
    # commit creation.
    _commit_ids = {}

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        self.alias = alias
        self.repo_name = repo_name
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        # Read-only access to a shared dump-based test repository.
        return self._test_repo_container(key, self.alias)

    def create_test_repo(self, key, config=None):
        # Like __getitem__, but allows passing a vcs config object.
        return self._test_repo_container(key, self.alias, config)

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository
        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name in case the backend does have a
        default branch. In the other cases it will point to a valid head
        which can serve as the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        head_id = (
            vcsrepo.DEFAULT_BRANCH_NAME or
            vcsrepo.commit_ids[-1])
        return head_id

    @property
    def commit_ids(self):
        """
        Returns the list of commits for the last created repository
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This allows to easily create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        return self._commit_ids

    def create_repo(
            self, commits=None, number_of_commits=0, heads=None,
            name_suffix=u'', bare=False, **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
           Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
           commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of of commit
           names which shall be pulled in from the master repository.
        :param name_suffix: adds special suffix to generated repo name
        :param bare: set a repo as bare (no checkout)
        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(
            self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        # Fall back to auto-generated placeholder commits if none given.
        commits = commits or [
            {'message': 'Commit %s of %s' % (x, self.repo_name)}
            for x in range(number_of_commits)]
        self._add_commits_to_repo(repo.scm_instance(), commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads):
        """
        Make sure that repo contains all commits mentioned in `heads`
        """
        vcsmaster = self._master_repo.scm_instance()
        vcsrepo = repo.scm_instance()
        # Disable hooks so the pull does not trigger integrations.
        vcsrepo.config.clear_section('hooks')
        commit_ids = [self._commit_ids[h] for h in heads]
        vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)

    def create_fork(self):
        # Fork the current repo; the fork becomes the new "current" repo.
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return repo

    def new_repo_name(self, suffix=u''):
        # Reserve a fresh unique repo name (registered for cleanup) without
        # actually creating the repository.
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        # Test name with invalid chars replaced, plus a per-backend counter.
        return u"%s_%s" % (
            self.invalid_repo_name.sub(u'_', self._test_name),
            len(self._cleanup_repos))

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {'added': [
                FileNode(filename, content=content),
            ]},
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        # Destroy in reverse creation order (forks before their origin).
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == 'git':
            refs = {}
            for message in self._commit_ids:
                # TODO: mikhail: do more special chars replacements
                ref_name = 'refs/test-refs/{}'.format(
                    message.replace(' ', ''))
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        for ref_name in refs:
            repo.set_refs(ref_name, refs[ref_name])
704 705
705 706
def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Build a `VcsBackend` helper for *backend_alias*, skipping the test when
    that backend was not selected via ``--backends`` or is excluded by
    skip/xfail backend marks.
    """
    if backend_alias not in request.config.getoption('--backends'):
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    repo_path = os.path.join(tests_tmp_path, repo_name)
    backend = VcsBackend(
        alias=backend_alias,
        repo_path=repo_path,
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend
722 723
723 724
@pytest.fixture
def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    See the fixture `backend` for more details. This one implements the same
    concept, but on vcs level. So it does not provide model instances etc.

    Parameters are generated dynamically, see :func:`pytest_generate_tests`
    for how this works.
    """
    return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
736 737
737 738
# Non-parametrized variants pinned to one specific vcs backend each.
@pytest.fixture
def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
    return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)


@pytest.fixture
def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
    return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)


@pytest.fixture
def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
    return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
751 752
752 753
@pytest.fixture
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git
761 762
762 763
class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.
    """

    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        # Read-only vcs-level access to a shared dump-based repository.
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo of the last
        repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
                    bare=False):
        # Create a fresh on-disk repository (optionally cloned from
        # `_clone_repo`) and register it for cleanup.
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
        self._cleanup_repos.append(repo)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, repo_name)}
            for x in xrange(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        # Reserve a fresh directory path without creating a repository.
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        # Test name with invalid chars replaced, plus a counter.
        return "%s_%s" % (
            self.invalid_repo_name.sub('_', self._test_name),
            len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        # Commit a single file via the in-memory commit API.
        imc = repo.in_memory_commit
        imc.add(FileNode(filename, content=content))
        imc.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic')

    def ensure_file(self, filename, content='Test content\n'):
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)
840 841
841 842
def _add_commits_to_repo(vcs_repo, commits):
    """
    Create the given commit dicts in *vcs_repo* via its in-memory commit API.

    Each entry may carry ``message``, ``author``, ``date``, ``branch``,
    ``parents`` (list of earlier commit messages) and ``added``/``changed``/
    ``removed`` node lists. Returns a dict mapping commit message -> raw_id
    (empty if *commits* is falsy).
    """
    commit_ids = {}
    if not commits:
        return commit_ids

    imc = vcs_repo.in_memory_commit
    commit = None

    for idx, commit in enumerate(commits):
        message = unicode(commit.get('message', 'Commit %s' % idx))

        for node in commit.get('added', []):
            imc.add(FileNode(node.path, content=node.content))
        for node in commit.get('changed', []):
            imc.change(FileNode(node.path, content=node.content))
        for node in commit.get('removed', []):
            imc.remove(FileNode(node.path))

        # Parents are referenced by the message of a previously created
        # commit in this same batch.
        parents = [
            vcs_repo.get_commit(commit_id=commit_ids[p])
            for p in commit.get('parents', [])]

        # An empty commit would fail; add a placeholder file instead.
        operations = ('added', 'changed', 'removed')
        if not any((commit.get(o) for o in operations)):
            imc.add(FileNode('file_%s' % idx, content=message))

        commit = imc.commit(
            message=message,
            author=unicode(commit.get('author', 'Automatic')),
            date=commit.get('date'),
            branch=commit.get('branch'),
            parents=parents)

        commit_ids[commit.message] = commit.raw_id

    return commit_ids
878 879
879 880
@pytest.fixture
def reposerver(request):
    """
    Allows to serve a backend repository
    """
    # Server processes are terminated automatically at test teardown.
    repo_server = RepoServer()
    request.addfinalizer(repo_server.cleanup)
    return repo_server
889 890
890 891
class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    # URL of the last served repository; None until serve() is called.
    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        # Only svn repositories can be served; other backends raise.
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        proc = subprocess32.Popen(
            ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
             '--root', vcsrepo.path])
        self._cleanup_servers.append(proc)
        self.url = 'svn://localhost'

    def cleanup(self):
        for proc in self._cleanup_servers:
            proc.terminate()
916 917
917 918
@pytest.fixture
def pr_util(backend, request, config_stub):
    """
    Pull request helper for model and functional tests.

    Returns a :class:`PRTestUtility` bound to the parametrized `backend`
    (so this fixture inherits the backend parameterization); its
    `cleanup` method runs as a test finalizer.
    """
    pr_helper = PRTestUtility(backend)
    request.addfinalizer(pr_helper.cleanup)
    return pr_helper
933 934
934 935
class PRTestUtility(object):
    """
    Helper around one pull request for tests.

    Creates the source/target repositories and the pull request itself,
    and offers utilities for comments, versions, status votes and
    updating commits. `cleanup()` deletes the pull request and stops all
    patchers started by this helper.
    """

    # Lazily populated state shared by the helper methods below.
    pull_request = None
    pull_request_id = None
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None,
            title=u"Test", description=u"Description"):
        """
        Create (once) and return the pull request under test.

        Repeated calls return the already created pull request. When no
        `commits` are given, a default three commit history is set up with
        `c1` as target head, `c2` as source head and `c2` as the single
        reviewed revision.
        """
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            # A freshly created pull request must not have versions yet.
            assert model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request

    def approve(self):
        """Record an "approved" status vote for every reviewer."""
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        """Close the pull request acting as its author."""
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        # Build a "branch:<name>:<commit_id>" reference, resolving the
        # commit id from the commit's message via self.commit_ids.
        reference = '%s:%s:%s' % (
            'branch',
            self.backend.default_branch_name,
            self.commit_ids[commit_message])
        return reference

    def _get_reviewers(self):
        # Reviewer tuples as consumed by PullRequestModel().create();
        # presumably (username, reasons, mandatory, rules) — confirm
        # against the model signature.
        return [
            (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
            (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
        ]

    def update_source_repository(self, head=None):
        """Pull the given head (default ``c3``) into the source repo."""
        heads = [head or 'c3']
        self.backend.pull_heads(self.source_repository, heads=heads)

    def add_one_commit(self, head=None):
        """
        Add one commit to the source repo, update the pull request and
        return the id of the newly included commit.
        """
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        """
        Strip the tip commit from the source repo, update the pull
        request and return the removed commit id.
        """
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        """
        Create a general comment on the pull request; optionally link all
        comments to the pull request version `linked_to`.
        """
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        """
        Create an inline comment on `file_path` at `line_no`; optionally
        link all comments to the pull request version `linked_to`.
        """
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        """Snapshot the pull request and return the created version."""
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(
            pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        """Set `status` on the pull request for every given reviewer."""
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        """
        Patch VcsSettingsModel.get_general_settings so the pull request
        merge setting reports `value`.
        """
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        """Delete the pull request (if any) and stop all patchers."""
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request, pull_request.author)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()
1117 1118
1118 1119
@pytest.fixture
def user_admin(baseapp):
    """
    The built-in admin test account as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1126 1127
1127 1128
@pytest.fixture
def user_regular(baseapp):
    """
    The built-in regular test account as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1135 1136
1136 1137
@pytest.fixture
def user_util(request, db_connection):
    """
    A `UserUtility` named after the current test node, with its cleanup
    registered as a finalizer.
    """
    helper = UserUtility(test_name=request.node.name)
    request.addfinalizer(helper.cleanup)
    return helper
1145 1146
1146 1147
# TODO: johbo: Split this up into utilities per domain or something similar
class UserUtility(object):
    """
    Factory for users, user groups, repositories, repository groups and
    the permissions between them.

    Every `create_*`/`grant_*` call records what it made so that
    `cleanup()` can revoke and destroy it again in a safe order.
    """

    def __init__(self, test_name="test"):
        # Prefix created object names with the (sanitized) test name so
        # leftovers can be traced back to the test that made them.
        self._test_name = self._sanitize_name(test_name)
        self.fixture = Fixture()
        # ids and id-pairs of created objects / granted permissions,
        # consumed by cleanup().
        self.repo_group_ids = []
        self.repos_ids = []
        self.user_ids = []
        self.user_group_ids = []
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        self.user_permissions = []

    def _sanitize_name(self, name):
        # Square brackets show up in parametrized test node names; replace
        # them so the name is safe to embed in object names.
        for char in ['[', ']']:
            name = name.replace(char, '_')
        return name

    def create_repo_group(
            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        """Create a repo group named after the test plus a running count."""
        group_name = "{prefix}_repogroup_{count}".format(
            prefix=self._test_name,
            count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(
            group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
                    auto_cleanup=True, repo_type='hg', bare=False):
        """Create a repository, optionally inside the repo group `parent`."""
        repo_name = "{prefix}_repository_{count}".format(
            prefix=self._test_name,
            count=len(self.repos_ids))

        repository = self.fixture.create_repo(
            repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
        if auto_cleanup:
            self.repos_ids.append(repository.repo_id)
        return repository

    def create_user(self, auto_cleanup=True, **kwargs):
        """Create a user; extra kwargs are passed to the fixture."""
        user_name = "{prefix}_user_{count}".format(
            prefix=self._test_name,
            count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_additional_user_email(self, user, email):
        """Attach an extra email address to `user`."""
        uem = self.fixture.create_additional_user_email(user=user, email=email)
        return uem

    def create_user_with_group(self):
        """Create a user plus a user group containing that user."""
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
                          auto_cleanup=True, **kwargs):
        """Create a user group and optionally add `members` to it."""
        group_name = "{prefix}_usergroup_{count}".format(
            prefix=self._test_name,
            count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(
            group_name, cur_user=owner, **kwargs)

        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        """
        Record a global permission for `user_name`.

        NOTE(review): this only disables inherited defaults and records
        the pair for later revocation; the grant itself appears to happen
        elsewhere — confirm against callers.
        """
        self._inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(
            self, repo_group, user, permission_name):
        """Grant `user` a permission on `repo_group` (tracked for cleanup)."""
        permission = RepoGroupModel().grant_user_permission(
            repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append(
            (repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(
            self, repo_group, user_group, permission_name):
        """Grant `user_group` a permission on `repo_group` (tracked)."""
        permission = RepoGroupModel().grant_user_group_permission(
            repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append(
            (repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(
            self, repo, user, permission_name):
        """Grant `user` a permission on `repo` (tracked for cleanup)."""
        permission = RepoModel().grant_user_permission(
            repo, user, permission_name)
        self.user_repo_permission_ids.append(
            (repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(
            self, repo, user_group, permission_name):
        """Grant `user_group` a permission on `repo` (tracked)."""
        permission = RepoModel().grant_user_group_permission(
            repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append(
            (repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(
            self, target_user_group, user, permission_name):
        """Grant `user` a permission on `target_user_group` (tracked)."""
        permission = UserGroupModel().grant_user_permission(
            target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append(
            (target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(
            self, target_user_group, user_group, permission_name):
        """Grant `user_group` a permission on `target_user_group` (tracked)."""
        permission = UserGroupModel().grant_user_group_permission(
            target_user_group, user_group, permission_name)
        self.user_group_user_group_permission_ids.append(
            (target_user_group.users_group_id, user_group.users_group_id))
        return permission

    def revoke_user_permission(self, user_name, permission_name):
        """Re-enable inherited defaults and revoke the global permission."""
        self._inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)

    def _inherit_default_user_permissions(self, user_name, value):
        # Toggle whether the user inherits the default user's permissions.
        user = UserModel().get_by_username(user_name)
        user.inherit_default_permissions = value
        Session().add(user)
        Session().commit()

    def cleanup(self):
        """Destroy everything created through this utility.

        Order matters: permissions first, then repos, repo groups, user
        groups and finally users.
        """
        self._cleanup_permissions()
        self._cleanup_repos()
        self._cleanup_repo_groups()
        self._cleanup_user_groups()
        self._cleanup_users()

    def _cleanup_permissions(self):
        if self.user_permissions:
            for user_name, permission_name in self.user_permissions:
                self.revoke_user_permission(user_name, permission_name)

        for permission in self.user_repo_permission_ids:
            RepoModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_permission_ids:
            RepoModel().revoke_user_group_permission(*permission)

        for permission in self.user_repo_group_permission_ids:
            RepoGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_group_permission_ids:
            RepoGroupModel().revoke_user_group_permission(*permission)

        for permission in self.user_user_group_permission_ids:
            UserGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_user_group_permission_ids:
            UserGroupModel().revoke_user_group_permission(*permission)

    def _cleanup_repo_groups(self):
        # Python 2 cmp-style comparator: groups with deeper paths sort
        # first so nested groups are destroyed before their parents.
        def _repo_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = RepoGroup.get(first_group_id)
            second_group = RepoGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.group_name.split('/')) if first_group else 0)
            second_group_parts = (
                len(second_group.group_name.split('/')) if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        sorted_repo_group_ids = sorted(
            self.repo_group_ids, cmp=_repo_group_compare)
        for repo_group_id in sorted_repo_group_ids:
            self.fixture.destroy_repo_group(repo_group_id)

    def _cleanup_repos(self):
        # Destroy repositories in ascending id order (creation order).
        sorted_repos_ids = sorted(self.repos_ids)
        for repo_id in sorted_repos_ids:
            self.fixture.destroy_repo(repo_id)

    def _cleanup_user_groups(self):
        # Same deepest-path-first ordering as for repo groups.
        def _user_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = UserGroup.get(first_group_id)
            second_group = UserGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.users_group_name.split('/'))
                if first_group else 0)
            second_group_parts = (
                len(second_group.users_group_name.split('/'))
                if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        sorted_user_group_ids = sorted(
            self.user_group_ids, cmp=_user_group_compare)
        for user_group_id in sorted_user_group_ids:
            self.fixture.destroy_user_group(user_group_id)

    def _cleanup_users(self):
        for user_id in self.user_ids:
            self.fixture.destroy_user(user_id)
1364 1365
1365 1366
1366 1367 # TODO: Think about moving this into a pytest-pyro package and make it a
1367 1368 # pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """
    Attach a remote traceback to the test report when available.

    VCSServer attaches this information as the attribute
    `_vcs_server_traceback` to the exception instance.
    """
    outcome = yield
    test_report = outcome.get_result()
    exc_info = call.excinfo
    if exc_info:
        _add_vcsserver_remote_traceback(test_report, exc_info.value)
1380 1381
1381 1382
1382 1383 def _add_vcsserver_remote_traceback(report, exc):
1383 1384 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1384 1385
1385 1386 if vcsserver_traceback:
1386 1387 section = 'VCSServer remote traceback ' + report.when
1387 1388 report.sections.append((section, vcsserver_traceback))
1388 1389
1389 1390
@pytest.fixture(scope='session')
def testrun():
    """Session-wide metadata identifying this test run."""
    return {
        'uuid': uuid.uuid4(),
        'start': datetime.datetime.utcnow().isoformat(),
        'timestamp': int(time.time()),
    }
1397 1398
1398 1399
@pytest.fixture(autouse=True)
def collect_appenlight_stats(request, testrun):
    """
    This fixture reports memory consumption of single tests.

    It gathers data based on `psutil` and sends them to Appenlight. The
    option ``--appenlight`` has to be used to enable this fixture and the
    API key for your application has to be provided in
    ``--appenlight-api-key``.
    """
    try:
        # cygwin cannot have yet psutil support.
        import psutil
    except ImportError:
        return

    if not request.config.getoption('--appenlight'):
        return
    else:
        # Only request the baseapp fixture if appenlight tracking is
        # enabled. This will speed up a test run of unit tests by 2 to 3
        # seconds if appenlight is not enabled.
        baseapp = request.getfuncargvalue("baseapp")
    url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
    client = AppenlightClient(
        url=url,
        api_key=request.config.getoption('--appenlight-api-key'),
        namespace=request.node.nodeid,
        request=str(testrun['uuid']),
        testrun=testrun)

    client.collect({
        'message': "Starting",
    })

    # Take a "before" snapshot of the VCSServer process memory; run_gc()
    # first so the numbers are not skewed by collectable garbage.
    server_and_port = baseapp.config.get_settings()['vcs.server']
    protocol = baseapp.config.get_settings()['vcs.server.protocol']
    server = create_vcsserver_proxy(server_and_port, protocol)
    with server:
        vcs_pid = server.get_pid()
        server.run_gc()
        vcs_process = psutil.Process(vcs_pid)
        mem = vcs_process.memory_info()
        client.tag_before('vcsserver.rss', mem.rss)
        client.tag_before('vcsserver.vms', mem.vms)

    # "Before" snapshot of the test process itself.
    test_process = psutil.Process()
    mem = test_process.memory_info()
    client.tag_before('test.rss', mem.rss)
    client.tag_before('test.vms', mem.vms)

    client.tag_before('time', time.time())

    @request.addfinalizer
    def send_stats():
        # Take the "after" snapshots and ship everything to Appenlight
        # once the test is finished.
        client.tag_after('time', time.time())
        with server:
            gc_stats = server.run_gc()
            for tag, value in gc_stats.items():
                client.tag_after(tag, value)
            mem = vcs_process.memory_info()
            client.tag_after('vcsserver.rss', mem.rss)
            client.tag_after('vcsserver.vms', mem.vms)

        mem = test_process.memory_info()
        client.tag_after('test.rss', mem.rss)
        client.tag_after('test.vms', mem.vms)

        client.collect({
            'message': "Finished",
        })
        client.send_stats()

    return client
1472 1473
1473 1474
class AppenlightClient():
    """
    Collects log entries and before/after tags for one test and pushes
    them to an Appenlight endpoint via `send_stats()`.
    """

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        self.testrun = testrun or {}
        # Accumulated state, flushed by send_stats().
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []

    def tag_before(self, tag, value):
        """Record a tag value taken before the test ran."""
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        """Record a tag value taken after the test ran."""
        self.tags_after[tag] = value

    def collect(self, data):
        """Queue one log entry, filling in the configured defaults."""
        defaults = []
        if self.add_server:
            defaults.append(('server', self.server))
        if self.add_timestamp:
            defaults.append(('date', datetime.datetime.utcnow().isoformat()))
        if self.namespace:
            defaults.append(('namespace', self.namespace))
        if self.request:
            defaults.append(('request', self.request))
        for key, value in defaults:
            data.setdefault(key, value)
        self.stats.append(data)

    def send_stats(self):
        """POST all queued entries plus the collected tags."""
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                # Deltas only exist for tags present both before and after
                # and with subtractable values.
                tags.append((key + '.delta', self.tags_after[key] - value))
            except Exception:
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if response.status_code != 200:
            pprint.pprint(self.stats)
            print(response.headers)
            print(response.text)
            raise Exception('Sending to appenlight failed')
1543 1544
1544 1545
@pytest.fixture
def gist_util(request, db_connection):
    """
    A `GistUtility` whose cleanup runs as a test finalizer.
    """
    helper = GistUtility()
    request.addfinalizer(helper.cleanup)
    return helper
1553 1554
1554 1555
class GistUtility(object):
    """Creates gists through `Fixture` and destroys them on cleanup."""

    def __init__(self):
        self.fixture = Fixture()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        """Create a gist and remember its id for cleanup."""
        new_gist = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(new_gist.gist_id)
        return new_gist

    def cleanup(self):
        """Destroy every gist created through this utility."""
        for gist_id in self.gist_ids:
            self.fixture.destroy_gists(str(gist_id))
1568 1569
1569 1570
@pytest.fixture
def enabled_backends(request):
    """A copy of the backend names enabled via the test options."""
    return list(request.config.option.backends)
1574 1575
1575 1576
@pytest.fixture
def settings_util(request, db_connection):
    """
    A `SettingsUtility` whose cleanup runs as a test finalizer.
    """
    helper = SettingsUtility()
    request.addfinalizer(helper.cleanup)
    return helper
1584 1585
1585 1586
class SettingsUtility(object):
    """
    Factory for global and per-repository RhodeCode settings / ui rows.

    Rows created with `cleanup=True` are tracked and deleted again by
    `cleanup()`.
    """

    def __init__(self):
        # ids of created rows, one list per model type, used by cleanup().
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        """
        Insert and return a `RepoRhodeCodeUi` row for `repo`.

        Without an explicit `key`, a sha1 of section, value and repo id
        is used so the key is unique per repository.
        """
        key = key or hashlib.sha1(
            '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()

        setting = RepoRhodeCodeUi()
        setting.repository_id = repo.repo_id
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        """
        Insert and return a global `RhodeCodeUi` row; the key defaults to
        a sha1 of section and value.
        """
        key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()

        setting = RhodeCodeUi()
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        """Insert and return a `RepoRhodeCodeSetting` row for `repo`."""
        setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
        return setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        """Insert and return a global `RhodeCodeSetting` row."""
        setting = RhodeCodeSetting(key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_setting_ids.append(setting.app_settings_id)

        return setting

    def cleanup(self):
        """Delete every tracked row and commit once at the end."""
        for id_ in self.rhodecode_ui_ids:
            setting = RhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.rhodecode_setting_ids:
            setting = RhodeCodeSetting.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_ui_ids:
            setting = RepoRhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_setting_ids:
            setting = RepoRhodeCodeSetting.get(id_)
            Session().delete(setting)

        Session().commit()
1666 1667
1667 1668
@pytest.fixture
def no_notifications(request):
    """Silence notification creation for the duration of a test."""
    patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    patcher.start()
    request.addfinalizer(patcher.stop)
1674 1675
1675 1676
@pytest.fixture(scope='session')
def repeat(request):
    """
    Number of iterations for repetition-based tests.

    Taken from the ``--repeat`` command line option; slower tests may
    divide it by 10 or 100 so the default suite stays fast.
    """
    return request.config.getoption('--repeat')
1685 1686
1686 1687
@pytest.fixture
def rhodecode_fixtures():
    """A plain `Fixture` helper instance."""
    return Fixture()
1690 1691
1691 1692
@pytest.fixture
def context_stub():
    """
    Stub context object.
    """
    return pyramid.testing.DummyResource()
1699 1700
1700 1701
@pytest.fixture
def request_stub():
    """
    Stub request object bootstrapped with the https scheme.
    """
    from rhodecode.lib.base import bootstrap_request
    return bootstrap_request(scheme='https')
1709 1710
1710 1711
@pytest.fixture
def config_stub(request, request_stub):
    """
    Set up pyramid.testing and return the Configurator; tears pyramid
    testing down again as a finalizer.
    """
    from rhodecode.lib.base import bootstrap_config
    config = bootstrap_config(request=request_stub)

    def teardown():
        pyramid.testing.tearDown()
    request.addfinalizer(teardown)

    return config
1724 1725
1725 1726
@pytest.fixture
def StubIntegrationType():
    """
    Register and return a minimal integration type for tests.

    NOTE(review): this mutates the module-global
    `integration_type_registry` and is not undone afterwards — confirm
    that is intended.
    """
    class _StubIntegrationType(IntegrationTypeBase):
        """ Test integration type class """

        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'

        @classmethod
        def icon(cls):
            return 'test_icon_html_image'

        def __init__(self, settings):
            super(_StubIntegrationType, self).__init__(settings)
            self.sent_events = []  # for testing

        def send_event(self, event):
            # Record instead of delivering so tests can inspect events.
            self.sent_events.append(event)

        def settings_schema(self):
            # Schema with one required string field and one int field.
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )
            return SettingsSchema()

    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType
1762 1763
@pytest.fixture
def stub_integration_settings():
    """Settings payload matching the stub integration type's schema."""
    return {
        'test_int_field': 100,
        'test_string_field': 'some data',
    }
1769 1770
1770 1771
@pytest.fixture
def repo_integration_stub(request, repo_stub, StubIntegrationType,
                          stub_integration_settings):
    """A repository-scoped stub integration, deleted again at teardown."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repo integration',
        repo=repo_stub, repo_group=None, child_repos_only=None)

    def teardown():
        IntegrationModel().delete(integration)
    request.addfinalizer(teardown)

    return integration
1784 1785
1785 1786
@pytest.fixture
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    """Create a repo-group integration limited to direct child repos."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=True)

    # Remove the integration again once the requesting test is done.
    request.addfinalizer(lambda: IntegrationModel().delete(integration))
    return integration
1799 1800
1800 1801
@pytest.fixture
def repogroup_recursive_integration_stub(request, test_repo_group,
        StubIntegrationType, stub_integration_settings):
    """Create a repo-group integration that also covers nested repos."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test recursive repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=False)

    # Remove the integration again once the requesting test is done.
    request.addfinalizer(lambda: IntegrationModel().delete(integration))
    return integration
1814 1815
1815 1816
@pytest.fixture
def global_integration_stub(request, StubIntegrationType,
                            stub_integration_settings):
    """Create a global (unscoped) stub integration, deleted on teardown."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=None)

    # Remove the integration again once the requesting test is done.
    request.addfinalizer(lambda: IntegrationModel().delete(integration))
    return integration
1829 1830
1830 1831
@pytest.fixture
def root_repos_integration_stub(request, StubIntegrationType,
                                stub_integration_settings):
    """Create an integration scoped to root-level repositories only.

    Fix: the integration was named ``'test global integration'`` — a
    copy-paste duplicate of the name used by ``global_integration_stub`` —
    which made the two stubs indistinguishable by name. Renamed to
    ``'test root repos integration'``.
    """
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test root repos integration',
        repo=None, repo_group=None, child_repos_only=True)

    @request.addfinalizer
    def cleanup():
        # Remove the integration again once the requesting test is done.
        IntegrationModel().delete(integration)

    return integration
1844 1845
1845 1846
@pytest.fixture
def local_dt_to_utc():
    """Return a converter from naive local datetimes to naive UTC datetimes."""
    local_zone = dateutil.tz.tzlocal()
    utc_zone = dateutil.tz.tzutc()

    def _convert(dt):
        # Attach the local zone, shift into UTC, then strip tzinfo again
        # so the result stays naive like the input.
        aware = dt.replace(tzinfo=local_zone)
        return aware.astimezone(utc_zone).replace(tzinfo=None)

    return _convert
1852 1853
1853 1854
@pytest.fixture
def disable_anonymous_user(request, baseapp):
    """Turn off anonymous access for one test and restore it afterwards."""
    set_anonymous_access(False)
    request.addfinalizer(lambda: set_anonymous_access(True))
1861 1862
1862 1863
@pytest.fixture(scope='module')
def rc_fixture(request):
    """Return a ``Fixture`` helper shared by all tests in a module."""
    return Fixture()
1866 1867
1867 1868
@pytest.fixture
def repo_groups(request):
    """Create three repo groups — zombie, parent and parent/child.

    Sanity-checks that exactly those three groups exist in the database
    and that the child is linked to its parent, then yields the trio.
    All three groups are destroyed again on teardown.
    """
    fixture = Fixture()
    session = Session()

    zombie_group = fixture.create_repo_group('zombie')
    parent_group = fixture.create_repo_group('parent')
    child_group = fixture.create_repo_group('parent/child')

    persisted = session.query(RepoGroup).all()
    assert len(persisted) == 3
    assert child_group.group_parent_id == parent_group.group_id

    def cleanup():
        # Destroy the child before its parent (reverse creation order).
        fixture.destroy_repo_group(zombie_group)
        fixture.destroy_repo_group(child_group)
        fixture.destroy_repo_group(parent_group)

    request.addfinalizer(cleanup)
    return zombie_group, parent_group, child_group
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
This diff has been collapsed as it changes many lines, (823 lines changed) Show them Hide them
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
This diff has been collapsed as it changes many lines, (521 lines changed) Show them Hide them
General Comments 0
You need to be logged in to leave comments. Login now