tests: fix cache problems after empty repo check change.
Author: marcink
Commit: r3738:b8214661 (new-ui)

The requested changes are too big and content was truncated.
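The core of the fix, visible in both diffs below, is to re-read repository state after an operation that may invalidate cached objects, instead of trusting the instance already in hand. As a minimal standalone sketch of that re-fetch-by-id pattern (a hypothetical SQLAlchemy model and names, not RhodeCode's actual classes):

# Minimal sketch of the re-fetch-by-id pattern (hypothetical model, not
# RhodeCode code); requires SQLAlchemy 1.4+.
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Repo(Base):
    __tablename__ = 'repos'
    repo_id = Column(Integer, primary_key=True)
    repo_name = Column(String)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    repo = Repo(repo_name='vcs_test')
    session.add(repo)
    session.commit()
    repo_id = repo.repo_id             # remember the primary key up front

    # ... an API call or cache invalidation happens in between ...

    session.expire_all()               # drop any cached attribute state
    repo = session.get(Repo, repo_id)  # re-fetch a fresh instance by id
    assert repo.repo_name == 'vcs_test'

In the test diff below, the same idea appears as repo_id = repo.repo_id before the API call, then repo = RepoModel().get(repo_id) before iterating repo.followers.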

@@ -1,137 +1,143 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import pytest
23 23
24 24 from rhodecode.model.meta import Session
25 25 from rhodecode.model.repo import RepoModel
26 26 from rhodecode.model.user import UserModel
27 27 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
28 28 from rhodecode.api.tests.utils import (
29 29 build_data, api_call, assert_ok, assert_error, expected_permissions)
30 30
31 31
32 32 @pytest.mark.usefixtures("testuser_api", "app")
33 33 class TestGetRepo(object):
34 34 @pytest.mark.parametrize("apikey_attr, expect_secrets", [
35 35 ('apikey', True),
36 36 ('apikey_regular', False),
37 37 ])
38 38 @pytest.mark.parametrize("cache_param", [
39 39 True,
40 40 False,
41 41 None,
42 42 ])
43 43 def test_api_get_repo(
44 44 self, apikey_attr, expect_secrets, cache_param, backend,
45 45 user_util):
46 46 repo = backend.create_repo()
47 repo_id = repo.repo_id
47 48 usr = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
48 49 group = user_util.create_user_group(members=[usr])
49 50 user_util.grant_user_group_permission_to_repo(
50 51 repo=repo, user_group=group, permission_name='repository.read')
51 52 Session().commit()
52 53 kwargs = {
53 54 'repoid': repo.repo_name,
54 55 }
55 56 if cache_param is not None:
56 57 kwargs['cache'] = cache_param
57 58
58 59 apikey = getattr(self, apikey_attr)
59 60 id_, params = build_data(apikey, 'get_repo', **kwargs)
60 61 response = api_call(self.app, params)
61 62
62 63 ret = repo.get_api_data()
63 64
64 65 permissions = expected_permissions(repo)
65 66
66 67 followers = []
68
69 repo = RepoModel().get(repo_id)
67 70 for user in repo.followers:
68 71 followers.append(user.user.get_api_data(
69 72 include_secrets=expect_secrets))
70 73
71 74 ret['permissions'] = permissions
72 75 ret['followers'] = followers
73 76
74 77 expected = ret
75 78
76 79 assert_ok(id_, expected, given=response.body)
77 80
78 81 @pytest.mark.parametrize("grant_perm", [
79 82 'repository.admin',
80 83 'repository.write',
81 84 'repository.read',
82 85 ])
83 86 def test_api_get_repo_by_non_admin(self, grant_perm, backend):
84 87 # TODO: Depending on which tests are running before this one, we
85 88 # start with a different number of permissions in the database.
86 89 repo = RepoModel().get_by_repo_name(backend.repo_name)
90 repo_id = repo.repo_id
87 91 permission_count = len(repo.repo_to_perm)
88 92
89 93 RepoModel().grant_user_permission(repo=backend.repo_name,
90 94 user=self.TEST_USER_LOGIN,
91 95 perm=grant_perm)
92 96 Session().commit()
93 97 id_, params = build_data(
94 98 self.apikey_regular, 'get_repo', repoid=backend.repo_name)
95 99 response = api_call(self.app, params)
96 100
97 101 repo = RepoModel().get_by_repo_name(backend.repo_name)
98 102 ret = repo.get_api_data()
99 103
100 104 assert permission_count + 1 == len(repo.repo_to_perm)
101 105
102 106 permissions = expected_permissions(repo)
103 107
104 108 followers = []
109
110 repo = RepoModel().get(repo_id)
105 111 for user in repo.followers:
106 112 followers.append(user.user.get_api_data())
107 113
108 114 ret['permissions'] = permissions
109 115 ret['followers'] = followers
110 116
111 117 expected = ret
112 118 try:
113 119 assert_ok(id_, expected, given=response.body)
114 120 finally:
115 121 RepoModel().revoke_user_permission(
116 122 backend.repo_name, self.TEST_USER_LOGIN)
117 123
118 124 def test_api_get_repo_by_non_admin_no_permission_to_repo(self, backend):
119 125 RepoModel().grant_user_permission(repo=backend.repo_name,
120 126 user=self.TEST_USER_LOGIN,
121 127 perm='repository.none')
122 128
123 129 id_, params = build_data(
124 130 self.apikey_regular, 'get_repo', repoid=backend.repo_name)
125 131 response = api_call(self.app, params)
126 132
127 133 expected = 'repository `%s` does not exist' % (backend.repo_name)
128 134 assert_error(id_, expected, given=response.body)
129 135
130 136 def test_api_get_repo_not_existing(self):
131 137 id_, params = build_data(
132 138 self.apikey, 'get_repo', repoid='no-such-repo')
133 139 response = api_call(self.app, params)
134 140
135 141 ret = 'repository `%s` does not exist' % 'no-such-repo'
136 142 expected = ret
137 143 assert_error(id_, expected, given=response.body)
NO CONTENT: modified file
The requested commit or file is too big and content was truncated.
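In the pull_request.py diff that follows, the fix is the pair of count() calls ("cache rebuild") added in update_commits() before the get_commit() lookups, so a stale cached commit count left over from the empty-repo check cannot misreport the repository state. A standalone sketch of why rebuilding such a cache before the lookup matters (hypothetical toy classes, not RhodeCode's API):

# Toy illustration of a stale commit-count cache (hypothetical classes).
class CachedRepo:
    def __init__(self, commits):
        self._commits = commits
        self._cached_count = 0       # stale value, e.g. from an earlier empty-repo check

    def count(self):
        # recompute and cache the commit count; the diff calls count()
        # for exactly this side effect
        self._cached_count = len(self._commits)
        return self._cached_count

    def get_commit(self, idx):
        if self._cached_count == 0:  # a stale 0 would wrongly report "empty repository"
            raise LookupError('empty repository')
        return self._commits[idx]

repo = CachedRepo(['c1', 'c2'])
repo.count()                         # cache rebuild, mirroring the diff
print(repo.get_commit(0))            # -> 'c1'; without count() this would raise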
@@ -1,1739 +1,1745 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31 import collections
32 32
33 33 from pyramid import compat
34 34 from pyramid.threadlocal import get_current_request
35 35
36 36 from rhodecode import events
37 37 from rhodecode.translation import lazy_ugettext
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from rhodecode.lib.compat import OrderedDict
41 41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 42 from rhodecode.lib.markup_renderer import (
43 43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 45 from rhodecode.lib.vcs.backends.base import (
46 46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 48 from rhodecode.lib.vcs.exceptions import (
49 49 CommitDoesNotExistError, EmptyRepositoryError)
50 50 from rhodecode.model import BaseModel
51 51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 52 from rhodecode.model.comment import CommentsModel
53 53 from rhodecode.model.db import (
54 54 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
55 55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
56 56 from rhodecode.model.meta import Session
57 57 from rhodecode.model.notification import NotificationModel, \
58 58 EmailNotificationModel
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.settings import VcsSettingsModel
61 61
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
66 66 # Data structure to hold the response data when updating commits during a pull
67 67 # request update.
68 68 UpdateResponse = collections.namedtuple('UpdateResponse', [
69 69 'executed', 'reason', 'new', 'old', 'changes',
70 70 'source_changed', 'target_changed'])
71 71
72 72
73 73 class PullRequestModel(BaseModel):
74 74
75 75 cls = PullRequest
76 76
77 77 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
78 78
79 79 UPDATE_STATUS_MESSAGES = {
80 80 UpdateFailureReason.NONE: lazy_ugettext(
81 81 'Pull request update successful.'),
82 82 UpdateFailureReason.UNKNOWN: lazy_ugettext(
83 83 'Pull request update failed because of an unknown error.'),
84 84 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
85 85 'No update needed because the source and target have not changed.'),
86 86 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
87 87 'Pull request cannot be updated because the reference type is '
88 88 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
89 89 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
90 90 'This pull request cannot be updated because the target '
91 91 'reference is missing.'),
92 92 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
93 93 'This pull request cannot be updated because the source '
94 94 'reference is missing.'),
95 95 }
96 96 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
97 97 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
98 98
99 99 def __get_pull_request(self, pull_request):
100 100 return self._get_instance((
101 101 PullRequest, PullRequestVersion), pull_request)
102 102
103 103 def _check_perms(self, perms, pull_request, user, api=False):
104 104 if not api:
105 105 return h.HasRepoPermissionAny(*perms)(
106 106 user=user, repo_name=pull_request.target_repo.repo_name)
107 107 else:
108 108 return h.HasRepoPermissionAnyApi(*perms)(
109 109 user=user, repo_name=pull_request.target_repo.repo_name)
110 110
111 111 def check_user_read(self, pull_request, user, api=False):
112 112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
113 113 return self._check_perms(_perms, pull_request, user, api)
114 114
115 115 def check_user_merge(self, pull_request, user, api=False):
116 116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
117 117 return self._check_perms(_perms, pull_request, user, api)
118 118
119 119 def check_user_update(self, pull_request, user, api=False):
120 120 owner = user.user_id == pull_request.user_id
121 121 return self.check_user_merge(pull_request, user, api) or owner
122 122
123 123 def check_user_delete(self, pull_request, user):
124 124 owner = user.user_id == pull_request.user_id
125 125 _perms = ('repository.admin',)
126 126 return self._check_perms(_perms, pull_request, user) or owner
127 127
128 128 def check_user_change_status(self, pull_request, user, api=False):
129 129 reviewer = user.user_id in [x.user_id for x in
130 130 pull_request.reviewers]
131 131 return self.check_user_update(pull_request, user, api) or reviewer
132 132
133 133 def check_user_comment(self, pull_request, user):
134 134 owner = user.user_id == pull_request.user_id
135 135 return self.check_user_read(pull_request, user) or owner
136 136
137 137 def get(self, pull_request):
138 138 return self.__get_pull_request(pull_request)
139 139
140 140 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
141 141 opened_by=None, order_by=None,
142 142 order_dir='desc', only_created=True):
143 143 repo = None
144 144 if repo_name:
145 145 repo = self._get_repo(repo_name)
146 146
147 147 q = PullRequest.query()
148 148
149 149 # source or target
150 150 if repo and source:
151 151 q = q.filter(PullRequest.source_repo == repo)
152 152 elif repo:
153 153 q = q.filter(PullRequest.target_repo == repo)
154 154
155 155 # closed,opened
156 156 if statuses:
157 157 q = q.filter(PullRequest.status.in_(statuses))
158 158
159 159 # opened by filter
160 160 if opened_by:
161 161 q = q.filter(PullRequest.user_id.in_(opened_by))
162 162
163 163 # only get those that are in "created" state
164 164 if only_created:
165 165 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
166 166
167 167 if order_by:
168 168 order_map = {
169 169 'name_raw': PullRequest.pull_request_id,
170 170 'id': PullRequest.pull_request_id,
171 171 'title': PullRequest.title,
172 172 'updated_on_raw': PullRequest.updated_on,
173 173 'target_repo': PullRequest.target_repo_id
174 174 }
175 175 if order_dir == 'asc':
176 176 q = q.order_by(order_map[order_by].asc())
177 177 else:
178 178 q = q.order_by(order_map[order_by].desc())
179 179
180 180 return q
181 181
182 182 def count_all(self, repo_name, source=False, statuses=None,
183 183 opened_by=None):
184 184 """
185 185 Count the number of pull requests for a specific repository.
186 186
187 187 :param repo_name: target or source repo
188 188 :param source: boolean flag to specify if repo_name refers to source
189 189 :param statuses: list of pull request statuses
190 190 :param opened_by: author user of the pull request
191 191 :returns: int number of pull requests
192 192 """
193 193 q = self._prepare_get_all_query(
194 194 repo_name, source=source, statuses=statuses, opened_by=opened_by)
195 195
196 196 return q.count()
197 197
198 198 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
199 199 offset=0, length=None, order_by=None, order_dir='desc'):
200 200 """
201 201 Get all pull requests for a specific repository.
202 202
203 203 :param repo_name: target or source repo
204 204 :param source: boolean flag to specify if repo_name refers to source
205 205 :param statuses: list of pull request statuses
206 206 :param opened_by: author user of the pull request
207 207 :param offset: pagination offset
208 208 :param length: length of returned list
209 209 :param order_by: order of the returned list
210 210 :param order_dir: 'asc' or 'desc' ordering direction
211 211 :returns: list of pull requests
212 212 """
213 213 q = self._prepare_get_all_query(
214 214 repo_name, source=source, statuses=statuses, opened_by=opened_by,
215 215 order_by=order_by, order_dir=order_dir)
216 216
217 217 if length:
218 218 pull_requests = q.limit(length).offset(offset).all()
219 219 else:
220 220 pull_requests = q.all()
221 221
222 222 return pull_requests
223 223
224 224 def count_awaiting_review(self, repo_name, source=False, statuses=None,
225 225 opened_by=None):
226 226 """
227 227 Count the number of pull requests for a specific repository that are
228 228 awaiting review.
229 229
230 230 :param repo_name: target or source repo
231 231 :param source: boolean flag to specify if repo_name refers to source
232 232 :param statuses: list of pull request statuses
233 233 :param opened_by: author user of the pull request
234 234 :returns: int number of pull requests
235 235 """
236 236 pull_requests = self.get_awaiting_review(
237 237 repo_name, source=source, statuses=statuses, opened_by=opened_by)
238 238
239 239 return len(pull_requests)
240 240
241 241 def get_awaiting_review(self, repo_name, source=False, statuses=None,
242 242 opened_by=None, offset=0, length=None,
243 243 order_by=None, order_dir='desc'):
244 244 """
245 245 Get all pull requests for a specific repository that are awaiting
246 246 review.
247 247
248 248 :param repo_name: target or source repo
249 249 :param source: boolean flag to specify if repo_name refers to source
250 250 :param statuses: list of pull request statuses
251 251 :param opened_by: author user of the pull request
252 252 :param offset: pagination offset
253 253 :param length: length of returned list
254 254 :param order_by: order of the returned list
255 255 :param order_dir: 'asc' or 'desc' ordering direction
256 256 :returns: list of pull requests
257 257 """
258 258 pull_requests = self.get_all(
259 259 repo_name, source=source, statuses=statuses, opened_by=opened_by,
260 260 order_by=order_by, order_dir=order_dir)
261 261
262 262 _filtered_pull_requests = []
263 263 for pr in pull_requests:
264 264 status = pr.calculated_review_status()
265 265 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
266 266 ChangesetStatus.STATUS_UNDER_REVIEW]:
267 267 _filtered_pull_requests.append(pr)
268 268 if length:
269 269 return _filtered_pull_requests[offset:offset+length]
270 270 else:
271 271 return _filtered_pull_requests
272 272
273 273 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
274 274 opened_by=None, user_id=None):
275 275 """
276 276 Count the number of pull requests for a specific repository that are
277 277 awaiting review from a specific user.
278 278
279 279 :param repo_name: target or source repo
280 280 :param source: boolean flag to specify if repo_name refers to source
281 281 :param statuses: list of pull request statuses
282 282 :param opened_by: author user of the pull request
283 283 :param user_id: reviewer user of the pull request
284 284 :returns: int number of pull requests
285 285 """
286 286 pull_requests = self.get_awaiting_my_review(
287 287 repo_name, source=source, statuses=statuses, opened_by=opened_by,
288 288 user_id=user_id)
289 289
290 290 return len(pull_requests)
291 291
292 292 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
293 293 opened_by=None, user_id=None, offset=0,
294 294 length=None, order_by=None, order_dir='desc'):
295 295 """
296 296 Get all pull requests for a specific repository that are awaiting
297 297 review from a specific user.
298 298
299 299 :param repo_name: target or source repo
300 300 :param source: boolean flag to specify if repo_name refers to source
301 301 :param statuses: list of pull request statuses
302 302 :param opened_by: author user of the pull request
303 303 :param user_id: reviewer user of the pull request
304 304 :param offset: pagination offset
305 305 :param length: length of returned list
306 306 :param order_by: order of the returned list
307 307 :param order_dir: 'asc' or 'desc' ordering direction
308 308 :returns: list of pull requests
309 309 """
310 310 pull_requests = self.get_all(
311 311 repo_name, source=source, statuses=statuses, opened_by=opened_by,
312 312 order_by=order_by, order_dir=order_dir)
313 313
314 314 _my = PullRequestModel().get_not_reviewed(user_id)
315 315 my_participation = []
316 316 for pr in pull_requests:
317 317 if pr in _my:
318 318 my_participation.append(pr)
319 319 _filtered_pull_requests = my_participation
320 320 if length:
321 321 return _filtered_pull_requests[offset:offset+length]
322 322 else:
323 323 return _filtered_pull_requests
324 324
325 325 def get_not_reviewed(self, user_id):
326 326 return [
327 327 x.pull_request for x in PullRequestReviewers.query().filter(
328 328 PullRequestReviewers.user_id == user_id).all()
329 329 ]
330 330
331 331 def _prepare_participating_query(self, user_id=None, statuses=None,
332 332 order_by=None, order_dir='desc'):
333 333 q = PullRequest.query()
334 334 if user_id:
335 335 reviewers_subquery = Session().query(
336 336 PullRequestReviewers.pull_request_id).filter(
337 337 PullRequestReviewers.user_id == user_id).subquery()
338 338 user_filter = or_(
339 339 PullRequest.user_id == user_id,
340 340 PullRequest.pull_request_id.in_(reviewers_subquery)
341 341 )
342 342 q = PullRequest.query().filter(user_filter)
343 343
344 344 # closed,opened
345 345 if statuses:
346 346 q = q.filter(PullRequest.status.in_(statuses))
347 347
348 348 if order_by:
349 349 order_map = {
350 350 'name_raw': PullRequest.pull_request_id,
351 351 'title': PullRequest.title,
352 352 'updated_on_raw': PullRequest.updated_on,
353 353 'target_repo': PullRequest.target_repo_id
354 354 }
355 355 if order_dir == 'asc':
356 356 q = q.order_by(order_map[order_by].asc())
357 357 else:
358 358 q = q.order_by(order_map[order_by].desc())
359 359
360 360 return q
361 361
362 362 def count_im_participating_in(self, user_id=None, statuses=None):
363 363 q = self._prepare_participating_query(user_id, statuses=statuses)
364 364 return q.count()
365 365
366 366 def get_im_participating_in(
367 367 self, user_id=None, statuses=None, offset=0,
368 368 length=None, order_by=None, order_dir='desc'):
369 369 """
370 370 Get all pull requests that I'm participating in, or I have opened
371 371 """
372 372
373 373 q = self._prepare_participating_query(
374 374 user_id, statuses=statuses, order_by=order_by,
375 375 order_dir=order_dir)
376 376
377 377 if length:
378 378 pull_requests = q.limit(length).offset(offset).all()
379 379 else:
380 380 pull_requests = q.all()
381 381
382 382 return pull_requests
383 383
384 384 def get_versions(self, pull_request):
385 385 """
386 386 returns versions of the pull request sorted by ID ascending
387 387 """
388 388 return PullRequestVersion.query()\
389 389 .filter(PullRequestVersion.pull_request == pull_request)\
390 390 .order_by(PullRequestVersion.pull_request_version_id.asc())\
391 391 .all()
392 392
393 393 def get_pr_version(self, pull_request_id, version=None):
394 394 at_version = None
395 395
396 396 if version and version == 'latest':
397 397 pull_request_ver = PullRequest.get(pull_request_id)
398 398 pull_request_obj = pull_request_ver
399 399 _org_pull_request_obj = pull_request_obj
400 400 at_version = 'latest'
401 401 elif version:
402 402 pull_request_ver = PullRequestVersion.get_or_404(version)
403 403 pull_request_obj = pull_request_ver
404 404 _org_pull_request_obj = pull_request_ver.pull_request
405 405 at_version = pull_request_ver.pull_request_version_id
406 406 else:
407 407 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
408 408 pull_request_id)
409 409
410 410 pull_request_display_obj = PullRequest.get_pr_display_object(
411 411 pull_request_obj, _org_pull_request_obj)
412 412
413 413 return _org_pull_request_obj, pull_request_obj, \
414 414 pull_request_display_obj, at_version
415 415
416 416 def create(self, created_by, source_repo, source_ref, target_repo,
417 417 target_ref, revisions, reviewers, title, description=None,
418 418 description_renderer=None,
419 419 reviewer_data=None, translator=None, auth_user=None):
420 420 translator = translator or get_current_request().translate
421 421
422 422 created_by_user = self._get_user(created_by)
423 423 auth_user = auth_user or created_by_user.AuthUser()
424 424 source_repo = self._get_repo(source_repo)
425 425 target_repo = self._get_repo(target_repo)
426 426
427 427 pull_request = PullRequest()
428 428 pull_request.source_repo = source_repo
429 429 pull_request.source_ref = source_ref
430 430 pull_request.target_repo = target_repo
431 431 pull_request.target_ref = target_ref
432 432 pull_request.revisions = revisions
433 433 pull_request.title = title
434 434 pull_request.description = description
435 435 pull_request.description_renderer = description_renderer
436 436 pull_request.author = created_by_user
437 437 pull_request.reviewer_data = reviewer_data
438 438 pull_request.pull_request_state = pull_request.STATE_CREATING
439 439 Session().add(pull_request)
440 440 Session().flush()
441 441
442 442 reviewer_ids = set()
443 443 # members / reviewers
444 444 for reviewer_object in reviewers:
445 445 user_id, reasons, mandatory, rules = reviewer_object
446 446 user = self._get_user(user_id)
447 447
448 448 # skip duplicates
449 449 if user.user_id in reviewer_ids:
450 450 continue
451 451
452 452 reviewer_ids.add(user.user_id)
453 453
454 454 reviewer = PullRequestReviewers()
455 455 reviewer.user = user
456 456 reviewer.pull_request = pull_request
457 457 reviewer.reasons = reasons
458 458 reviewer.mandatory = mandatory
459 459
460 460 # NOTE(marcink): pick only first rule for now
461 461 rule_id = list(rules)[0] if rules else None
462 462 rule = RepoReviewRule.get(rule_id) if rule_id else None
463 463 if rule:
464 464 review_group = rule.user_group_vote_rule(user_id)
465 465 # we check if this particular reviewer is member of a voting group
466 466 if review_group:
467 467 # NOTE(marcink):
468 468 # can be that the user is a member of more groups, but we pick the first,
469 469 # same as the default reviewers algorithm
470 470 review_group = review_group[0]
471 471
472 472 rule_data = {
473 473 'rule_name':
474 474 rule.review_rule_name,
475 475 'rule_user_group_entry_id':
476 476 review_group.repo_review_rule_users_group_id,
477 477 'rule_user_group_name':
478 478 review_group.users_group.users_group_name,
479 479 'rule_user_group_members':
480 480 [x.user.username for x in review_group.users_group.members],
481 481 'rule_user_group_members_id':
482 482 [x.user.user_id for x in review_group.users_group.members],
483 483 }
484 484 # e.g {'vote_rule': -1, 'mandatory': True}
485 485 rule_data.update(review_group.rule_data())
486 486
487 487 reviewer.rule_data = rule_data
488 488
489 489 Session().add(reviewer)
490 490 Session().flush()
491 491
492 492 # Set approval status to "Under Review" for all commits which are
493 493 # part of this pull request.
494 494 ChangesetStatusModel().set_status(
495 495 repo=target_repo,
496 496 status=ChangesetStatus.STATUS_UNDER_REVIEW,
497 497 user=created_by_user,
498 498 pull_request=pull_request
499 499 )
500 500 # we commit early at this point. This has to do with the fact
501 501 # that the queries before do some row-locking. And because of that
502 502 # we need to commit and finish the transaction before the validate call
503 503 # below, which for large repos could be long, resulting in long row locks
504 504 Session().commit()
505 505
506 506 # prepare workspace, and run initial merge simulation. Set state during that
507 507 # operation
508 508 pull_request = PullRequest.get(pull_request.pull_request_id)
509 509
510 510 # set as merging, for simulation, and if finished to created so we mark
511 511 # simulation is working fine
512 512 with pull_request.set_state(PullRequest.STATE_MERGING,
513 513 final_state=PullRequest.STATE_CREATED):
514 514 MergeCheck.validate(
515 515 pull_request, auth_user=auth_user, translator=translator)
516 516
517 517 self.notify_reviewers(pull_request, reviewer_ids)
518 518 self.trigger_pull_request_hook(
519 519 pull_request, created_by_user, 'create')
520 520
521 521 creation_data = pull_request.get_api_data(with_merge_state=False)
522 522 self._log_audit_action(
523 523 'repo.pull_request.create', {'data': creation_data},
524 524 auth_user, pull_request)
525 525
526 526 return pull_request
527 527
528 528 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
529 529 pull_request = self.__get_pull_request(pull_request)
530 530 target_scm = pull_request.target_repo.scm_instance()
531 531 if action == 'create':
532 532 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
533 533 elif action == 'merge':
534 534 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
535 535 elif action == 'close':
536 536 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
537 537 elif action == 'review_status_change':
538 538 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
539 539 elif action == 'update':
540 540 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
541 541 elif action == 'comment':
542 542 # dummy hook for 'comment'. We want this function to handle all cases
543 543 def trigger_hook(*args, **kwargs):
544 544 pass
545 545 comment = data['comment']
546 546 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
547 547 else:
548 548 return
549 549
550 550 trigger_hook(
551 551 username=user.username,
552 552 repo_name=pull_request.target_repo.repo_name,
553 553 repo_alias=target_scm.alias,
554 554 pull_request=pull_request,
555 555 data=data)
556 556
557 557 def _get_commit_ids(self, pull_request):
558 558 """
559 559 Return the commit ids of the merged pull request.
560 560
561 561 This method does not yet deal correctly with the lack of autoupdates,
562 562 nor with implicit target updates.
563 563 For example: if a commit in the source repo is already in the target, it
564 564 will be reported anyway.
565 565 """
566 566 merge_rev = pull_request.merge_rev
567 567 if merge_rev is None:
568 568 raise ValueError('This pull request was not merged yet')
569 569
570 570 commit_ids = list(pull_request.revisions)
571 571 if merge_rev not in commit_ids:
572 572 commit_ids.append(merge_rev)
573 573
574 574 return commit_ids
575 575
576 576 def merge_repo(self, pull_request, user, extras):
577 577 log.debug("Merging pull request %s", pull_request.pull_request_id)
578 578 extras['user_agent'] = 'internal-merge'
579 579 merge_state = self._merge_pull_request(pull_request, user, extras)
580 580 if merge_state.executed:
581 581 log.debug("Merge was successful, updating the pull request comments.")
582 582 self._comment_and_close_pr(pull_request, user, merge_state)
583 583
584 584 self._log_audit_action(
585 585 'repo.pull_request.merge',
586 586 {'merge_state': merge_state.__dict__},
587 587 user, pull_request)
588 588
589 589 else:
590 590 log.warn("Merge failed, not updating the pull request.")
591 591 return merge_state
592 592
593 593 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
594 594 target_vcs = pull_request.target_repo.scm_instance()
595 595 source_vcs = pull_request.source_repo.scm_instance()
596 596
597 597 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
598 598 pr_id=pull_request.pull_request_id,
599 599 pr_title=pull_request.title,
600 600 source_repo=source_vcs.name,
601 601 source_ref_name=pull_request.source_ref_parts.name,
602 602 target_repo=target_vcs.name,
603 603 target_ref_name=pull_request.target_ref_parts.name,
604 604 )
605 605
606 606 workspace_id = self._workspace_id(pull_request)
607 607 repo_id = pull_request.target_repo.repo_id
608 608 use_rebase = self._use_rebase_for_merging(pull_request)
609 609 close_branch = self._close_branch_before_merging(pull_request)
610 610
611 611 target_ref = self._refresh_reference(
612 612 pull_request.target_ref_parts, target_vcs)
613 613
614 614 callback_daemon, extras = prepare_callback_daemon(
615 615 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
616 616 host=vcs_settings.HOOKS_HOST,
617 617 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
618 618
619 619 with callback_daemon:
620 620 # TODO: johbo: Implement a clean way to run a config_override
621 621 # for a single call.
622 622 target_vcs.config.set(
623 623 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
624 624
625 625 user_name = user.short_contact
626 626 merge_state = target_vcs.merge(
627 627 repo_id, workspace_id, target_ref, source_vcs,
628 628 pull_request.source_ref_parts,
629 629 user_name=user_name, user_email=user.email,
630 630 message=message, use_rebase=use_rebase,
631 631 close_branch=close_branch)
632 632 return merge_state
633 633
634 634 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
635 635 pull_request.merge_rev = merge_state.merge_ref.commit_id
636 636 pull_request.updated_on = datetime.datetime.now()
637 637 close_msg = close_msg or 'Pull request merged and closed'
638 638
639 639 CommentsModel().create(
640 640 text=safe_unicode(close_msg),
641 641 repo=pull_request.target_repo.repo_id,
642 642 user=user.user_id,
643 643 pull_request=pull_request.pull_request_id,
644 644 f_path=None,
645 645 line_no=None,
646 646 closing_pr=True
647 647 )
648 648
649 649 Session().add(pull_request)
650 650 Session().flush()
651 651 # TODO: paris: replace invalidation with less radical solution
652 652 ScmModel().mark_for_invalidation(
653 653 pull_request.target_repo.repo_name)
654 654 self.trigger_pull_request_hook(pull_request, user, 'merge')
655 655
656 656 def has_valid_update_type(self, pull_request):
657 657 source_ref_type = pull_request.source_ref_parts.type
658 658 return source_ref_type in self.REF_TYPES
659 659
660 660 def update_commits(self, pull_request):
661 661 """
662 662 Get the updated list of commits for the pull request
663 663 and return the new pull request version and the list
664 664 of commits processed by this update action
665 665 """
666 666 pull_request = self.__get_pull_request(pull_request)
667 667 source_ref_type = pull_request.source_ref_parts.type
668 668 source_ref_name = pull_request.source_ref_parts.name
669 669 source_ref_id = pull_request.source_ref_parts.commit_id
670 670
671 671 target_ref_type = pull_request.target_ref_parts.type
672 672 target_ref_name = pull_request.target_ref_parts.name
673 673 target_ref_id = pull_request.target_ref_parts.commit_id
674 674
675 675 if not self.has_valid_update_type(pull_request):
676 676 log.debug("Skipping update of pull request %s due to ref type: %s",
677 677 pull_request, source_ref_type)
678 678 return UpdateResponse(
679 679 executed=False,
680 680 reason=UpdateFailureReason.WRONG_REF_TYPE,
681 681 old=pull_request, new=None, changes=None,
682 682 source_changed=False, target_changed=False)
683 683
684 684 # source repo
685 685 source_repo = pull_request.source_repo.scm_instance()
686 source_repo.count() # cache rebuild
687
686 688 try:
687 689 source_commit = source_repo.get_commit(commit_id=source_ref_name)
688 690 except CommitDoesNotExistError:
689 691 return UpdateResponse(
690 692 executed=False,
691 693 reason=UpdateFailureReason.MISSING_SOURCE_REF,
692 694 old=pull_request, new=None, changes=None,
693 695 source_changed=False, target_changed=False)
694 696
695 697 source_changed = source_ref_id != source_commit.raw_id
696 698
697 699 # target repo
698 700 target_repo = pull_request.target_repo.scm_instance()
701 target_repo.count() # cache rebuild
702
699 703 try:
700 704 target_commit = target_repo.get_commit(commit_id=target_ref_name)
701 705 except CommitDoesNotExistError:
702 706 return UpdateResponse(
703 707 executed=False,
704 708 reason=UpdateFailureReason.MISSING_TARGET_REF,
705 709 old=pull_request, new=None, changes=None,
706 710 source_changed=False, target_changed=False)
707 711 target_changed = target_ref_id != target_commit.raw_id
708 712
709 713 if not (source_changed or target_changed):
710 714 log.debug("Nothing changed in pull request %s", pull_request)
711 715 return UpdateResponse(
712 716 executed=False,
713 717 reason=UpdateFailureReason.NO_CHANGE,
714 718 old=pull_request, new=None, changes=None,
715 719 source_changed=source_changed, target_changed=target_changed)
716 720
717 721 change_in_found = 'target repo' if target_changed else 'source repo'
718 722 log.debug('Updating pull request because of change in %s detected',
719 723 change_in_found)
720 724
721 725 # Finally an update is needed: in case of a source change
722 726 # we create a new version, else just an update
723 727 if source_changed:
724 728 pull_request_version = self._create_version_from_snapshot(pull_request)
725 729 self._link_comments_to_version(pull_request_version)
726 730 else:
727 731 try:
728 732 ver = pull_request.versions[-1]
729 733 except IndexError:
730 734 ver = None
731 735
732 736 pull_request.pull_request_version_id = \
733 737 ver.pull_request_version_id if ver else None
734 738 pull_request_version = pull_request
735 739
736 740 try:
737 741 if target_ref_type in self.REF_TYPES:
738 742 target_commit = target_repo.get_commit(target_ref_name)
739 743 else:
740 744 target_commit = target_repo.get_commit(target_ref_id)
741 745 except CommitDoesNotExistError:
742 746 return UpdateResponse(
743 747 executed=False,
744 748 reason=UpdateFailureReason.MISSING_TARGET_REF,
745 749 old=pull_request, new=None, changes=None,
746 750 source_changed=source_changed, target_changed=target_changed)
747 751
748 752 # re-compute commit ids
749 753 old_commit_ids = pull_request.revisions
750 754 pre_load = ["author", "branch", "date", "message"]
751 755 commit_ranges = target_repo.compare(
752 756 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
753 757 pre_load=pre_load)
754 758
755 759 ancestor = source_repo.get_common_ancestor(
756 760 source_commit.raw_id, target_commit.raw_id, target_repo)
757 761
758 762 pull_request.source_ref = '%s:%s:%s' % (
759 763 source_ref_type, source_ref_name, source_commit.raw_id)
760 764 pull_request.target_ref = '%s:%s:%s' % (
761 765 target_ref_type, target_ref_name, ancestor)
762 766
763 767 pull_request.revisions = [
764 768 commit.raw_id for commit in reversed(commit_ranges)]
765 769 pull_request.updated_on = datetime.datetime.now()
766 770 Session().add(pull_request)
767 771 new_commit_ids = pull_request.revisions
768 772
769 773 old_diff_data, new_diff_data = self._generate_update_diffs(
770 774 pull_request, pull_request_version)
771 775
772 776 # calculate commit and file changes
773 777 changes = self._calculate_commit_id_changes(
774 778 old_commit_ids, new_commit_ids)
775 779 file_changes = self._calculate_file_changes(
776 780 old_diff_data, new_diff_data)
777 781
778 782 # set comments as outdated if DIFFS changed
779 783 CommentsModel().outdate_comments(
780 784 pull_request, old_diff_data=old_diff_data,
781 785 new_diff_data=new_diff_data)
782 786
783 787 commit_changes = (changes.added or changes.removed)
784 788 file_node_changes = (
785 789 file_changes.added or file_changes.modified or file_changes.removed)
786 790 pr_has_changes = commit_changes or file_node_changes
787 791
788 792 # Add an automatic comment to the pull request, in case
789 793 # anything has changed
790 794 if pr_has_changes:
791 795 update_comment = CommentsModel().create(
792 796 text=self._render_update_message(changes, file_changes),
793 797 repo=pull_request.target_repo,
794 798 user=pull_request.author,
795 799 pull_request=pull_request,
796 800 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
797 801
798 802 # Update status to "Under Review" for added commits
799 803 for commit_id in changes.added:
800 804 ChangesetStatusModel().set_status(
801 805 repo=pull_request.source_repo,
802 806 status=ChangesetStatus.STATUS_UNDER_REVIEW,
803 807 comment=update_comment,
804 808 user=pull_request.author,
805 809 pull_request=pull_request,
806 810 revision=commit_id)
807 811
808 812 log.debug(
809 813 'Updated pull request %s, added_ids: %s, common_ids: %s, '
810 814 'removed_ids: %s', pull_request.pull_request_id,
811 815 changes.added, changes.common, changes.removed)
812 816 log.debug(
813 817 'Updated pull request with the following file changes: %s',
814 818 file_changes)
815 819
816 820 log.info(
817 821 "Updated pull request %s from commit %s to commit %s, "
818 822 "stored new version %s of this pull request.",
819 823 pull_request.pull_request_id, source_ref_id,
820 824 pull_request.source_ref_parts.commit_id,
821 825 pull_request_version.pull_request_version_id)
822 826 Session().commit()
823 827 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
824 828
825 829 return UpdateResponse(
826 830 executed=True, reason=UpdateFailureReason.NONE,
827 831 old=pull_request, new=pull_request_version, changes=changes,
828 832 source_changed=source_changed, target_changed=target_changed)
829 833
830 834 def _create_version_from_snapshot(self, pull_request):
831 835 version = PullRequestVersion()
832 836 version.title = pull_request.title
833 837 version.description = pull_request.description
834 838 version.status = pull_request.status
835 839 version.pull_request_state = pull_request.pull_request_state
836 840 version.created_on = datetime.datetime.now()
837 841 version.updated_on = pull_request.updated_on
838 842 version.user_id = pull_request.user_id
839 843 version.source_repo = pull_request.source_repo
840 844 version.source_ref = pull_request.source_ref
841 845 version.target_repo = pull_request.target_repo
842 846 version.target_ref = pull_request.target_ref
843 847
844 848 version._last_merge_source_rev = pull_request._last_merge_source_rev
845 849 version._last_merge_target_rev = pull_request._last_merge_target_rev
846 850 version.last_merge_status = pull_request.last_merge_status
847 851 version.shadow_merge_ref = pull_request.shadow_merge_ref
848 852 version.merge_rev = pull_request.merge_rev
849 853 version.reviewer_data = pull_request.reviewer_data
850 854
851 855 version.revisions = pull_request.revisions
852 856 version.pull_request = pull_request
853 857 Session().add(version)
854 858 Session().flush()
855 859
856 860 return version
857 861
858 862 def _generate_update_diffs(self, pull_request, pull_request_version):
859 863
860 864 diff_context = (
861 865 self.DIFF_CONTEXT +
862 866 CommentsModel.needed_extra_diff_context())
863 867 hide_whitespace_changes = False
864 868 source_repo = pull_request_version.source_repo
865 869 source_ref_id = pull_request_version.source_ref_parts.commit_id
866 870 target_ref_id = pull_request_version.target_ref_parts.commit_id
867 871 old_diff = self._get_diff_from_pr_or_version(
868 872 source_repo, source_ref_id, target_ref_id,
869 873 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
870 874
871 875 source_repo = pull_request.source_repo
872 876 source_ref_id = pull_request.source_ref_parts.commit_id
873 877 target_ref_id = pull_request.target_ref_parts.commit_id
874 878
875 879 new_diff = self._get_diff_from_pr_or_version(
876 880 source_repo, source_ref_id, target_ref_id,
877 881 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
878 882
879 883 old_diff_data = diffs.DiffProcessor(old_diff)
880 884 old_diff_data.prepare()
881 885 new_diff_data = diffs.DiffProcessor(new_diff)
882 886 new_diff_data.prepare()
883 887
884 888 return old_diff_data, new_diff_data
885 889
886 890 def _link_comments_to_version(self, pull_request_version):
887 891 """
888 892 Link all unlinked comments of this pull request to the given version.
889 893
890 894 :param pull_request_version: The `PullRequestVersion` to which
891 895 the comments shall be linked.
892 896
893 897 """
894 898 pull_request = pull_request_version.pull_request
895 899 comments = ChangesetComment.query()\
896 900 .filter(
897 901 # TODO: johbo: Should we query for the repo at all here?
898 902 # Pending decision on how comments of PRs are to be related
899 903 # to either the source repo, the target repo or no repo at all.
900 904 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
901 905 ChangesetComment.pull_request == pull_request,
902 906 ChangesetComment.pull_request_version == None)\
903 907 .order_by(ChangesetComment.comment_id.asc())
904 908
905 909 # TODO: johbo: Find out why this breaks if it is done in a bulk
906 910 # operation.
907 911 for comment in comments:
908 912 comment.pull_request_version_id = (
909 913 pull_request_version.pull_request_version_id)
910 914 Session().add(comment)
911 915
912 916 def _calculate_commit_id_changes(self, old_ids, new_ids):
913 917 added = [x for x in new_ids if x not in old_ids]
914 918 common = [x for x in new_ids if x in old_ids]
915 919 removed = [x for x in old_ids if x not in new_ids]
916 920 total = new_ids
917 921 return ChangeTuple(added, common, removed, total)
918 922
919 923 def _calculate_file_changes(self, old_diff_data, new_diff_data):
920 924
921 925 old_files = OrderedDict()
922 926 for diff_data in old_diff_data.parsed_diff:
923 927 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
924 928
925 929 added_files = []
926 930 modified_files = []
927 931 removed_files = []
928 932 for diff_data in new_diff_data.parsed_diff:
929 933 new_filename = diff_data['filename']
930 934 new_hash = md5_safe(diff_data['raw_diff'])
931 935
932 936 old_hash = old_files.get(new_filename)
933 937 if not old_hash:
934 938 # file is not present in old diff, means it's added
935 939 added_files.append(new_filename)
936 940 else:
937 941 if new_hash != old_hash:
938 942 modified_files.append(new_filename)
939 943 # now remove a file from old, since we have seen it already
940 944 del old_files[new_filename]
941 945
942 946 # removed files are those present in old but not in NEW;
943 947 # since we remove old files that are present in the new diff, the
944 948 # left-overs, if any, should be the removed files
945 949 removed_files.extend(old_files.keys())
946 950
947 951 return FileChangeTuple(added_files, modified_files, removed_files)
948 952
949 953 def _render_update_message(self, changes, file_changes):
950 954 """
951 955 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
952 956 so it always looks the same regardless of which default
953 957 renderer the system is using.
954 958
955 959 :param changes: changes named tuple
956 960 :param file_changes: file changes named tuple
957 961
958 962 """
959 963 new_status = ChangesetStatus.get_status_lbl(
960 964 ChangesetStatus.STATUS_UNDER_REVIEW)
961 965
962 966 changed_files = (
963 967 file_changes.added + file_changes.modified + file_changes.removed)
964 968
965 969 params = {
966 970 'under_review_label': new_status,
967 971 'added_commits': changes.added,
968 972 'removed_commits': changes.removed,
969 973 'changed_files': changed_files,
970 974 'added_files': file_changes.added,
971 975 'modified_files': file_changes.modified,
972 976 'removed_files': file_changes.removed,
973 977 }
974 978 renderer = RstTemplateRenderer()
975 979 return renderer.render('pull_request_update.mako', **params)
976 980
977 981 def edit(self, pull_request, title, description, description_renderer, user):
978 982 pull_request = self.__get_pull_request(pull_request)
979 983 old_data = pull_request.get_api_data(with_merge_state=False)
980 984 if pull_request.is_closed():
981 985 raise ValueError('This pull request is closed')
982 986 if title:
983 987 pull_request.title = title
984 988 pull_request.description = description
985 989 pull_request.updated_on = datetime.datetime.now()
986 990 pull_request.description_renderer = description_renderer
987 991 Session().add(pull_request)
988 992 self._log_audit_action(
989 993 'repo.pull_request.edit', {'old_data': old_data},
990 994 user, pull_request)
991 995
992 996 def update_reviewers(self, pull_request, reviewer_data, user):
993 997 """
994 998 Update the reviewers in the pull request
995 999
996 1000 :param pull_request: the pr to update
997 1001 :param reviewer_data: list of tuples
998 1002 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
999 1003 """
1000 1004 pull_request = self.__get_pull_request(pull_request)
1001 1005 if pull_request.is_closed():
1002 1006 raise ValueError('This pull request is closed')
1003 1007
1004 1008 reviewers = {}
1005 1009 for user_id, reasons, mandatory, rules in reviewer_data:
1006 1010 if isinstance(user_id, (int, compat.string_types)):
1007 1011 user_id = self._get_user(user_id).user_id
1008 1012 reviewers[user_id] = {
1009 1013 'reasons': reasons, 'mandatory': mandatory}
1010 1014
1011 1015 reviewers_ids = set(reviewers.keys())
1012 1016 current_reviewers = PullRequestReviewers.query()\
1013 1017 .filter(PullRequestReviewers.pull_request ==
1014 1018 pull_request).all()
1015 1019 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1016 1020
1017 1021 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1018 1022 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1019 1023
1020 1024 log.debug("Adding %s reviewers", ids_to_add)
1021 1025 log.debug("Removing %s reviewers", ids_to_remove)
1022 1026 changed = False
1023 1027 added_audit_reviewers = []
1024 1028 removed_audit_reviewers = []
1025 1029
1026 1030 for uid in ids_to_add:
1027 1031 changed = True
1028 1032 _usr = self._get_user(uid)
1029 1033 reviewer = PullRequestReviewers()
1030 1034 reviewer.user = _usr
1031 1035 reviewer.pull_request = pull_request
1032 1036 reviewer.reasons = reviewers[uid]['reasons']
1033 1037 # NOTE(marcink): mandatory shouldn't be changed now
1034 1038 # reviewer.mandatory = reviewers[uid]['reasons']
1035 1039 Session().add(reviewer)
1036 1040 added_audit_reviewers.append(reviewer.get_dict())
1037 1041
1038 1042 for uid in ids_to_remove:
1039 1043 changed = True
1040 1044 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1041 1045 # that prevents and fixes cases where we added the same reviewer twice.
1042 1046 # this CAN happen due to the lack of DB checks
1043 1047 reviewers = PullRequestReviewers.query()\
1044 1048 .filter(PullRequestReviewers.user_id == uid,
1045 1049 PullRequestReviewers.pull_request == pull_request)\
1046 1050 .all()
1047 1051
1048 1052 for obj in reviewers:
1049 1053 removed_audit_reviewers.append(obj.get_dict())
1050 1054 Session().delete(obj)
1051 1055
1052 1056 if changed:
1053 1057 Session().expire_all()
1054 1058 pull_request.updated_on = datetime.datetime.now()
1055 1059 Session().add(pull_request)
1056 1060
1057 1061 # finally store audit logs
1058 1062 for user_data in added_audit_reviewers:
1059 1063 self._log_audit_action(
1060 1064 'repo.pull_request.reviewer.add', {'data': user_data},
1061 1065 user, pull_request)
1062 1066 for user_data in removed_audit_reviewers:
1063 1067 self._log_audit_action(
1064 1068 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1065 1069 user, pull_request)
1066 1070
1067 1071 self.notify_reviewers(pull_request, ids_to_add)
1068 1072 return ids_to_add, ids_to_remove
1069 1073
1070 1074 def get_url(self, pull_request, request=None, permalink=False):
1071 1075 if not request:
1072 1076 request = get_current_request()
1073 1077
1074 1078 if permalink:
1075 1079 return request.route_url(
1076 1080 'pull_requests_global',
1077 1081 pull_request_id=pull_request.pull_request_id,)
1078 1082 else:
1079 1083 return request.route_url('pullrequest_show',
1080 1084 repo_name=safe_str(pull_request.target_repo.repo_name),
1081 1085 pull_request_id=pull_request.pull_request_id,)
1082 1086
1083 1087 def get_shadow_clone_url(self, pull_request, request=None):
1084 1088 """
1085 1089 Returns qualified url pointing to the shadow repository. If this pull
1086 1090 request is closed there is no shadow repository and ``None`` will be
1087 1091 returned.
1088 1092 """
1089 1093 if pull_request.is_closed():
1090 1094 return None
1091 1095 else:
1092 1096 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1093 1097 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1094 1098
1095 1099 def notify_reviewers(self, pull_request, reviewers_ids):
1096 1100 # notification to reviewers
1097 1101 if not reviewers_ids:
1098 1102 return
1099 1103
1100 1104 pull_request_obj = pull_request
1101 1105 # get the current participants of this pull request
1102 1106 recipients = reviewers_ids
1103 1107 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1104 1108
1105 1109 pr_source_repo = pull_request_obj.source_repo
1106 1110 pr_target_repo = pull_request_obj.target_repo
1107 1111
1108 1112 pr_url = h.route_url('pullrequest_show',
1109 1113 repo_name=pr_target_repo.repo_name,
1110 1114 pull_request_id=pull_request_obj.pull_request_id,)
1111 1115
1112 1116 # set some variables for email notification
1113 1117 pr_target_repo_url = h.route_url(
1114 1118 'repo_summary', repo_name=pr_target_repo.repo_name)
1115 1119
1116 1120 pr_source_repo_url = h.route_url(
1117 1121 'repo_summary', repo_name=pr_source_repo.repo_name)
1118 1122
1119 1123 # pull request specifics
1120 1124 pull_request_commits = [
1121 1125 (x.raw_id, x.message)
1122 1126 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1123 1127
1124 1128 kwargs = {
1125 1129 'user': pull_request.author,
1126 1130 'pull_request': pull_request_obj,
1127 1131 'pull_request_commits': pull_request_commits,
1128 1132
1129 1133 'pull_request_target_repo': pr_target_repo,
1130 1134 'pull_request_target_repo_url': pr_target_repo_url,
1131 1135
1132 1136 'pull_request_source_repo': pr_source_repo,
1133 1137 'pull_request_source_repo_url': pr_source_repo_url,
1134 1138
1135 1139 'pull_request_url': pr_url,
1136 1140 }
1137 1141
1138 1142 # pre-generate the subject for notification itself
1139 1143 (subject,
1140 1144 _h, _e, # we don't care about those
1141 1145 body_plaintext) = EmailNotificationModel().render_email(
1142 1146 notification_type, **kwargs)
1143 1147
1144 1148 # create notification objects, and emails
1145 1149 NotificationModel().create(
1146 1150 created_by=pull_request.author,
1147 1151 notification_subject=subject,
1148 1152 notification_body=body_plaintext,
1149 1153 notification_type=notification_type,
1150 1154 recipients=recipients,
1151 1155 email_kwargs=kwargs,
1152 1156 )
1153 1157
1154 1158 def delete(self, pull_request, user):
1155 1159 pull_request = self.__get_pull_request(pull_request)
1156 1160 old_data = pull_request.get_api_data(with_merge_state=False)
1157 1161 self._cleanup_merge_workspace(pull_request)
1158 1162 self._log_audit_action(
1159 1163 'repo.pull_request.delete', {'old_data': old_data},
1160 1164 user, pull_request)
1161 1165 Session().delete(pull_request)
1162 1166
1163 1167 def close_pull_request(self, pull_request, user):
1164 1168 pull_request = self.__get_pull_request(pull_request)
1165 1169 self._cleanup_merge_workspace(pull_request)
1166 1170 pull_request.status = PullRequest.STATUS_CLOSED
1167 1171 pull_request.updated_on = datetime.datetime.now()
1168 1172 Session().add(pull_request)
1169 1173 self.trigger_pull_request_hook(
1170 1174 pull_request, pull_request.author, 'close')
1171 1175
1172 1176 pr_data = pull_request.get_api_data(with_merge_state=False)
1173 1177 self._log_audit_action(
1174 1178 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1175 1179
1176 1180 def close_pull_request_with_comment(
1177 1181 self, pull_request, user, repo, message=None, auth_user=None):
1178 1182
1179 1183 pull_request_review_status = pull_request.calculated_review_status()
1180 1184
1181 1185 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1182 1186 # approved only if we have voting consent
1183 1187 status = ChangesetStatus.STATUS_APPROVED
1184 1188 else:
1185 1189 status = ChangesetStatus.STATUS_REJECTED
1186 1190 status_lbl = ChangesetStatus.get_status_lbl(status)
1187 1191
1188 1192 default_message = (
1189 1193 'Closing with status change {transition_icon} {status}.'
1190 1194 ).format(transition_icon='>', status=status_lbl)
1191 1195 text = message or default_message
1192 1196
1193 1197 # create a comment, and link it to new status
1194 1198 comment = CommentsModel().create(
1195 1199 text=text,
1196 1200 repo=repo.repo_id,
1197 1201 user=user.user_id,
1198 1202 pull_request=pull_request.pull_request_id,
1199 1203 status_change=status_lbl,
1200 1204 status_change_type=status,
1201 1205 closing_pr=True,
1202 1206 auth_user=auth_user,
1203 1207 )
1204 1208
1205 1209 # calculate old status before we change it
1206 1210 old_calculated_status = pull_request.calculated_review_status()
1207 1211 ChangesetStatusModel().set_status(
1208 1212 repo.repo_id,
1209 1213 status,
1210 1214 user.user_id,
1211 1215 comment=comment,
1212 1216 pull_request=pull_request.pull_request_id
1213 1217 )
1214 1218
1215 1219 Session().flush()
1216 1220 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1217 1221 # we now calculate the status of the pull request again, and based on
1218 1222 # that calculation trigger a status change. This might happen in cases
1219 1223 # where a non-reviewer admin closes a pr, which means their vote doesn't
1220 1224 # change the status, while if they're a reviewer this might change it.
1221 1225 calculated_status = pull_request.calculated_review_status()
1222 1226 if old_calculated_status != calculated_status:
1223 1227 self.trigger_pull_request_hook(
1224 1228 pull_request, user, 'review_status_change',
1225 1229 data={'status': calculated_status})
1226 1230
1227 1231 # finally close the PR
1228 1232 PullRequestModel().close_pull_request(
1229 1233 pull_request.pull_request_id, user)
1230 1234
1231 1235 return comment, status
1232 1236
1233 1237 def merge_status(self, pull_request, translator=None,
1234 1238 force_shadow_repo_refresh=False):
1235 1239 _ = translator or get_current_request().translate
1236 1240
1237 1241 if not self._is_merge_enabled(pull_request):
1238 1242 return False, _('Server-side pull request merging is disabled.')
1239 1243 if pull_request.is_closed():
1240 1244 return False, _('This pull request is closed.')
1241 1245 merge_possible, msg = self._check_repo_requirements(
1242 1246 target=pull_request.target_repo, source=pull_request.source_repo,
1243 1247 translator=_)
1244 1248 if not merge_possible:
1245 1249 return merge_possible, msg
1246 1250
1247 1251 try:
1248 1252 resp = self._try_merge(
1249 1253 pull_request,
1250 1254 force_shadow_repo_refresh=force_shadow_repo_refresh)
1251 1255 log.debug("Merge response: %s", resp)
1252 1256 status = resp.possible, resp.merge_status_message
1253 1257 except NotImplementedError:
1254 1258 status = False, _('Pull request merging is not supported.')
1255 1259
1256 1260 return status
1257 1261
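    # Usage sketch: `merge_status` returns a (bool, message) pair, e.g.
    #
    #   possible, msg = PullRequestModel().merge_status(pull_request)
    #   if not possible:
    #       log.debug('Merge not possible: %s', msg)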
1258 1262 def _check_repo_requirements(self, target, source, translator):
1259 1263 """
1260 1264 Check if `target` and `source` have compatible requirements.
1261 1265
1262 1266 Currently this is just checking for largefiles.
1263 1267 """
1264 1268 _ = translator
1265 1269 target_has_largefiles = self._has_largefiles(target)
1266 1270 source_has_largefiles = self._has_largefiles(source)
1267 1271 merge_possible = True
1268 1272 message = u''
1269 1273
1270 1274 if target_has_largefiles != source_has_largefiles:
1271 1275 merge_possible = False
1272 1276 if source_has_largefiles:
1273 1277 message = _(
1274 1278 'Target repository large files support is disabled.')
1275 1279 else:
1276 1280 message = _(
1277 1281 'Source repository large files support is disabled.')
1278 1282
1279 1283 return merge_possible, message
1280 1284
1281 1285 def _has_largefiles(self, repo):
1282 1286 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1283 1287 'extensions', 'largefiles')
1284 1288 return largefiles_ui and largefiles_ui[0].active
1285 1289
1286 1290 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1287 1291 """
1288 1292 Try to merge the pull request and return the merge status.
1289 1293 """
1290 1294 log.debug(
1291 1295 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1292 1296 pull_request.pull_request_id, force_shadow_repo_refresh)
1293 1297 target_vcs = pull_request.target_repo.scm_instance()
1294 1298 # Refresh the target reference.
1295 1299 try:
1296 1300 target_ref = self._refresh_reference(
1297 1301 pull_request.target_ref_parts, target_vcs)
1298 1302 except CommitDoesNotExistError:
1299 1303 merge_state = MergeResponse(
1300 1304 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1301 1305 metadata={'target_ref': pull_request.target_ref_parts})
1302 1306 return merge_state
1303 1307
1304 1308 target_locked = pull_request.target_repo.locked
1305 1309 if target_locked and target_locked[0]:
1306 1310 locked_by = 'user:{}'.format(target_locked[0])
1307 1311 log.debug("The target repository is locked by %s.", locked_by)
1308 1312 merge_state = MergeResponse(
1309 1313 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1310 1314 metadata={'locked_by': locked_by})
1311 1315 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1312 1316 pull_request, target_ref):
1313 1317 log.debug("Refreshing the merge status of the repository.")
1314 1318 merge_state = self._refresh_merge_state(
1315 1319 pull_request, target_vcs, target_ref)
1316 1320 else:
1317 1321 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1318 1322 metadata = {
1319 1323 'target_ref': pull_request.target_ref_parts,
1320 1324 'source_ref': pull_request.source_ref_parts,
1321 1325 }
1322 1326 if not possible and target_ref.type == 'branch':
1323 1327 # NOTE(marcink): case for mercurial multiple heads on branch
1324 1328 heads = target_vcs._heads(target_ref.name)
1325 1329 if len(heads) != 1:
1326 1330                 heads = ',\n'.join(heads)
1327 1331 metadata.update({
1328 1332 'heads': heads
1329 1333 })
1330 1334 merge_state = MergeResponse(
1331 1335 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1332 1336
1333 1337 return merge_state
1334 1338
1335 1339 def _refresh_reference(self, reference, vcs_repository):
1336 1340 if reference.type in self.UPDATABLE_REF_TYPES:
1337 1341 name_or_id = reference.name
1338 1342 else:
1339 1343 name_or_id = reference.commit_id
1344
1345 vcs_repository.count() # cache rebuild
1340 1346 refreshed_commit = vcs_repository.get_commit(name_or_id)
1341 1347 refreshed_reference = Reference(
1342 1348 reference.type, reference.name, refreshed_commit.raw_id)
1343 1349 return refreshed_reference
1344 1350
1345 1351 def _needs_merge_state_refresh(self, pull_request, target_reference):
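        # A refresh is needed unless the first entry of `revisions` (the
        # source tip) and the current target ref both still match the revs
        # recorded by the last merge simulation.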
1346 1352         return not (
1347 1353 pull_request.revisions and
1348 1354 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1349 1355 target_reference.commit_id == pull_request._last_merge_target_rev)
1350 1356
1351 1357 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1352 1358 workspace_id = self._workspace_id(pull_request)
1353 1359 source_vcs = pull_request.source_repo.scm_instance()
1354 1360 repo_id = pull_request.target_repo.repo_id
1355 1361 use_rebase = self._use_rebase_for_merging(pull_request)
1356 1362 close_branch = self._close_branch_before_merging(pull_request)
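        # dry_run=True simulates the merge in the shadow repository without
        # publishing the result; only the resulting merge state is persisted
        # below.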
1357 1363 merge_state = target_vcs.merge(
1358 1364 repo_id, workspace_id,
1359 1365 target_reference, source_vcs, pull_request.source_ref_parts,
1360 1366 dry_run=True, use_rebase=use_rebase,
1361 1367 close_branch=close_branch)
1362 1368
1363 1369 # Do not store the response if there was an unknown error.
1364 1370 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1365 1371 pull_request._last_merge_source_rev = \
1366 1372 pull_request.source_ref_parts.commit_id
1367 1373 pull_request._last_merge_target_rev = target_reference.commit_id
1368 1374 pull_request.last_merge_status = merge_state.failure_reason
1369 1375 pull_request.shadow_merge_ref = merge_state.merge_ref
1370 1376 Session().add(pull_request)
1371 1377 Session().commit()
1372 1378
1373 1379 return merge_state
1374 1380
1375 1381 def _workspace_id(self, pull_request):
1376 1382 workspace_id = 'pr-%s' % pull_request.pull_request_id
1377 1383 return workspace_id
1378 1384
1379 1385 def generate_repo_data(self, repo, commit_id=None, branch=None,
1380 1386 bookmark=None, translator=None):
1381 1387 from rhodecode.model.repo import RepoModel
1382 1388
1383 1389 all_refs, selected_ref = \
1384 1390 self._get_repo_pullrequest_sources(
1385 1391 repo.scm_instance(), commit_id=commit_id,
1386 1392 branch=branch, bookmark=bookmark, translator=translator)
1387 1393
1388 1394 refs_select2 = []
1389 1395 for element in all_refs:
1390 1396 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1391 1397 refs_select2.append({'text': element[1], 'children': children})
1392 1398
1393 1399 return {
1394 1400 'user': {
1395 1401 'user_id': repo.user.user_id,
1396 1402 'username': repo.user.username,
1397 1403 'firstname': repo.user.first_name,
1398 1404 'lastname': repo.user.last_name,
1399 1405 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1400 1406 },
1401 1407 'name': repo.repo_name,
1402 1408 'link': RepoModel().get_url(repo),
1403 1409 'description': h.chop_at_smart(repo.description_safe, '\n'),
1404 1410 'refs': {
1405 1411 'all_refs': all_refs,
1406 1412 'selected_ref': selected_ref,
1407 1413 'select2_refs': refs_select2
1408 1414 }
1409 1415 }
1410 1416
1411 1417 def generate_pullrequest_title(self, source, source_ref, target):
1412 1418 return u'{source}#{at_ref} to {target}'.format(
1413 1419 source=source,
1414 1420 at_ref=source_ref,
1415 1421 target=target,
1416 1422 )
1417 1423
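    # Example: generate_pullrequest_title(u'repo-a', u'feat-1', u'repo-b')
    # returns u'repo-a#feat-1 to repo-b'.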
1418 1424 def _cleanup_merge_workspace(self, pull_request):
1419 1425 # Merging related cleanup
1420 1426 repo_id = pull_request.target_repo.repo_id
1421 1427 target_scm = pull_request.target_repo.scm_instance()
1422 1428 workspace_id = self._workspace_id(pull_request)
1423 1429
1424 1430 try:
1425 1431 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1426 1432 except NotImplementedError:
1427 1433 pass
1428 1434
1429 1435 def _get_repo_pullrequest_sources(
1430 1436 self, repo, commit_id=None, branch=None, bookmark=None,
1431 1437 translator=None):
1432 1438 """
1433 1439 Return a structure with repo's interesting commits, suitable for
1434 1440 the selectors in pullrequest controller
1435 1441
1436 1442 :param commit_id: a commit that must be in the list somehow
1437 1443 and selected by default
1438 1444 :param branch: a branch that must be in the list and selected
1439 1445 by default - even if closed
1440 1446 :param bookmark: a bookmark that must be in the list and selected
1441 1447 """
1442 1448 _ = translator or get_current_request().translate
1443 1449
1444 1450 commit_id = safe_str(commit_id) if commit_id else None
1445 1451 branch = safe_unicode(branch) if branch else None
1446 1452 bookmark = safe_unicode(bookmark) if bookmark else None
1447 1453
1448 1454 selected = None
1449 1455
1450 1456 # order matters: first source that has commit_id in it will be selected
1451 1457 sources = []
1452 1458 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1453 1459 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1454 1460
1455 1461 if commit_id:
1456 1462 ref_commit = (h.short_id(commit_id), commit_id)
1457 1463 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1458 1464
1459 1465 sources.append(
1460 1466 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1461 1467 )
1462 1468
1463 1469 groups = []
1464 1470
1465 1471 for group_key, ref_list, group_name, match in sources:
1466 1472 group_refs = []
1467 1473 for ref_name, ref_id in ref_list:
1468 1474 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1469 1475 group_refs.append((ref_key, ref_name))
1470 1476
1471 1477 if not selected:
1472 1478 if set([commit_id, match]) & set([ref_id, ref_name]):
1473 1479 selected = ref_key
1474 1480
1475 1481 if group_refs:
1476 1482 groups.append((group_refs, group_name))
1477 1483
1478 1484 if not selected:
1479 1485 ref = commit_id or branch or bookmark
1480 1486 if ref:
1481 1487 raise CommitDoesNotExistError(
1482 1488 u'No commit refs could be found matching: {}'.format(ref))
1483 1489 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1484 1490 selected = u'branch:{}:{}'.format(
1485 1491 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1486 1492 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1487 1493 )
1488 1494 elif repo.commit_ids:
1489 1495 # make the user select in this case
1490 1496 selected = None
1491 1497 else:
1492 1498 raise EmptyRepositoryError()
1493 1499 return groups, selected
1494 1500
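    # Sketch of the returned structure (values illustrative): `groups` is a
    # list of (group_refs, group_name) pairs, `selected` is a ref key:
    #
    #   groups = [([(u'branch:default:deadbeef', u'default')], u'Branches')]
    #   selected = u'branch:default:deadbeef'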
1495 1501 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1496 1502 hide_whitespace_changes, diff_context):
1497 1503
1498 1504 return self._get_diff_from_pr_or_version(
1499 1505 source_repo, source_ref_id, target_ref_id,
1500 1506 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1501 1507
1502 1508 def _get_diff_from_pr_or_version(
1503 1509 self, source_repo, source_ref_id, target_ref_id,
1504 1510 hide_whitespace_changes, diff_context):
1505 1511
1506 1512 target_commit = source_repo.get_commit(
1507 1513 commit_id=safe_str(target_ref_id))
1508 1514 source_commit = source_repo.get_commit(
1509 1515 commit_id=safe_str(source_ref_id))
1510 1516 if isinstance(source_repo, Repository):
1511 1517 vcs_repo = source_repo.scm_instance()
1512 1518 else:
1513 1519 vcs_repo = source_repo
1514 1520
1515 1521 # TODO: johbo: In the context of an update, we cannot reach
1516 1522 # the old commit anymore with our normal mechanisms. It needs
1517 1523 # some sort of special support in the vcs layer to avoid this
1518 1524 # workaround.
1519 1525 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1520 1526 vcs_repo.alias == 'git'):
1521 1527 source_commit.raw_id = safe_str(source_ref_id)
1522 1528
1523 1529 log.debug('calculating diff between '
1524 1530 'source_ref:%s and target_ref:%s for repo `%s`',
1525 1531 target_ref_id, source_ref_id,
1526 1532 safe_unicode(vcs_repo.path))
1527 1533
1528 1534 vcs_diff = vcs_repo.get_diff(
1529 1535 commit1=target_commit, commit2=source_commit,
1530 1536 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1531 1537 return vcs_diff
1532 1538
1533 1539 def _is_merge_enabled(self, pull_request):
1534 1540 return self._get_general_setting(
1535 1541 pull_request, 'rhodecode_pr_merge_enabled')
1536 1542
1537 1543 def _use_rebase_for_merging(self, pull_request):
1538 1544 repo_type = pull_request.target_repo.repo_type
1539 1545 if repo_type == 'hg':
1540 1546 return self._get_general_setting(
1541 1547 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1542 1548 elif repo_type == 'git':
1543 1549 return self._get_general_setting(
1544 1550 pull_request, 'rhodecode_git_use_rebase_for_merging')
1545 1551
1546 1552 return False
1547 1553
1548 1554 def _close_branch_before_merging(self, pull_request):
1549 1555 repo_type = pull_request.target_repo.repo_type
1550 1556 if repo_type == 'hg':
1551 1557 return self._get_general_setting(
1552 1558 pull_request, 'rhodecode_hg_close_branch_before_merging')
1553 1559 elif repo_type == 'git':
1554 1560 return self._get_general_setting(
1555 1561 pull_request, 'rhodecode_git_close_branch_before_merging')
1556 1562
1557 1563 return False
1558 1564
1559 1565 def _get_general_setting(self, pull_request, settings_key, default=False):
1560 1566 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1561 1567 settings = settings_model.get_general_settings()
1562 1568 return settings.get(settings_key, default)
1563 1569
1564 1570 def _log_audit_action(self, action, action_data, user, pull_request):
1565 1571 audit_logger.store(
1566 1572 action=action,
1567 1573 action_data=action_data,
1568 1574 user=user,
1569 1575 repo=pull_request.target_repo)
1570 1576
1571 1577 def get_reviewer_functions(self):
1572 1578 """
1573 1579         Fetches the functions for validating and fetching default reviewers.
1574 1580         If available we use the EE package, else we fall back to the CE
1575 1581         package functions.
1576 1582 """
1577 1583 try:
1578 1584 from rc_reviewers.utils import get_default_reviewers_data
1579 1585 from rc_reviewers.utils import validate_default_reviewers
1580 1586 except ImportError:
1581 1587 from rhodecode.apps.repository.utils import get_default_reviewers_data
1582 1588 from rhodecode.apps.repository.utils import validate_default_reviewers
1583 1589
1584 1590 return get_default_reviewers_data, validate_default_reviewers
1585 1591
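    # Usage sketch: callers unpack the pair without caring whether the EE or
    # CE implementation was imported:
    #
    #   get_default_reviewers_data, validate_default_reviewers = \
    #       PullRequestModel().get_reviewer_functions()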
1586 1592
1587 1593 class MergeCheck(object):
1588 1594 """
1589 1595     Performs merge checks and returns a check object which stores
1590 1596     information about merge errors and merge conditions.
1591 1597 """
1592 1598 TODO_CHECK = 'todo'
1593 1599 PERM_CHECK = 'perm'
1594 1600 REVIEW_CHECK = 'review'
1595 1601 MERGE_CHECK = 'merge'
1596 1602
1597 1603 def __init__(self):
1598 1604 self.review_status = None
1599 1605 self.merge_possible = None
1600 1606 self.merge_msg = ''
1601 1607 self.failed = None
1602 1608 self.errors = []
1603 1609 self.error_details = OrderedDict()
1604 1610
1605 1611 def push_error(self, error_type, message, error_key, details):
1606 1612 self.failed = True
1607 1613 self.errors.append([error_type, message])
1608 1614 self.error_details[error_key] = dict(
1609 1615 details=details,
1610 1616 error_type=error_type,
1611 1617 message=message
1612 1618 )
1613 1619
1614 1620 @classmethod
1615 1621 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1616 1622 force_shadow_repo_refresh=False):
1617 1623 _ = translator
1618 1624 merge_check = cls()
1619 1625
1620 1626 # permissions to merge
1621 1627 user_allowed_to_merge = PullRequestModel().check_user_merge(
1622 1628 pull_request, auth_user)
1623 1629 if not user_allowed_to_merge:
1624 1630             log.debug("MergeCheck: cannot merge, user not allowed to merge.")
1625 1631
1626 1632 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1627 1633 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1628 1634 if fail_early:
1629 1635 return merge_check
1630 1636
1631 1637 # permission to merge into the target branch
1632 1638 target_commit_id = pull_request.target_ref_parts.commit_id
1633 1639 if pull_request.target_ref_parts.type == 'branch':
1634 1640 branch_name = pull_request.target_ref_parts.name
1635 1641 else:
1636 1642 # for mercurial we can always figure out the branch from the commit
1637 1643 # in case of bookmark
1638 1644 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1639 1645 branch_name = target_commit.branch
1640 1646
1641 1647 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1642 1648 pull_request.target_repo.repo_name, branch_name)
1643 1649 if branch_perm and branch_perm == 'branch.none':
1644 1650 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1645 1651 branch_name, rule)
1646 1652 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1647 1653 if fail_early:
1648 1654 return merge_check
1649 1655
1650 1656 # review status, must be always present
1651 1657 review_status = pull_request.calculated_review_status()
1652 1658 merge_check.review_status = review_status
1653 1659
1654 1660 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1655 1661 if not status_approved:
1656 1662 log.debug("MergeCheck: cannot merge, approval is pending.")
1657 1663
1658 1664 msg = _('Pull request reviewer approval is pending.')
1659 1665
1660 1666 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
1661 1667
1662 1668 if fail_early:
1663 1669 return merge_check
1664 1670
1665 1671 # left over TODOs
1666 1672 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
1667 1673 if todos:
1668 1674             log.debug(
1669 1675                 "MergeCheck: cannot merge, %s unresolved TODOs left.", len(todos))
1670 1676
1671 1677 if len(todos) == 1:
1672 1678 msg = _('Cannot merge, {} TODO still not resolved.').format(
1673 1679 len(todos))
1674 1680 else:
1675 1681 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1676 1682 len(todos))
1677 1683
1678 1684 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1679 1685
1680 1686 if fail_early:
1681 1687 return merge_check
1682 1688
1683 1689 # merge possible, here is the filesystem simulation + shadow repo
1684 1690 merge_status, msg = PullRequestModel().merge_status(
1685 1691 pull_request, translator=translator,
1686 1692 force_shadow_repo_refresh=force_shadow_repo_refresh)
1687 1693 merge_check.merge_possible = merge_status
1688 1694 merge_check.merge_msg = msg
1689 1695 if not merge_status:
1690 1696 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
1691 1697 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1692 1698
1693 1699 if fail_early:
1694 1700 return merge_check
1695 1701
1696 1702 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1697 1703 return merge_check
1698 1704
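    # Usage sketch: run all checks and inspect the accumulated errors;
    # `fail_early=True` returns after the first failed check.
    #
    #   merge_check = MergeCheck.validate(
    #       pull_request, auth_user, translator=_, fail_early=True)
    #   if merge_check.failed:
    #       for error_type, message in merge_check.errors:
    #           log.debug('%s: %s', error_type, message)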
1699 1705 @classmethod
1700 1706 def get_merge_conditions(cls, pull_request, translator):
1701 1707 _ = translator
1702 1708 merge_details = {}
1703 1709
1704 1710 model = PullRequestModel()
1705 1711 use_rebase = model._use_rebase_for_merging(pull_request)
1706 1712
1707 1713 if use_rebase:
1708 1714 merge_details['merge_strategy'] = dict(
1709 1715 details={},
1710 1716 message=_('Merge strategy: rebase')
1711 1717 )
1712 1718 else:
1713 1719 merge_details['merge_strategy'] = dict(
1714 1720 details={},
1715 1721 message=_('Merge strategy: explicit merge commit')
1716 1722 )
1717 1723
1718 1724 close_branch = model._close_branch_before_merging(pull_request)
1719 1725 if close_branch:
1720 1726 repo_type = pull_request.target_repo.repo_type
1721 1727 close_msg = ''
1722 1728 if repo_type == 'hg':
1723 1729 close_msg = _('Source branch will be closed after merge.')
1724 1730 elif repo_type == 'git':
1725 1731 close_msg = _('Source branch will be deleted after merge.')
1726 1732
1727 1733 merge_details['close_branch'] = dict(
1728 1734 details={},
1729 1735 message=close_msg
1730 1736 )
1731 1737
1732 1738 return merge_details
1733 1739
1734 1740
1735 1741 ChangeTuple = collections.namedtuple(
1736 1742 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1737 1743
1738 1744 FileChangeTuple = collections.namedtuple(
1739 1745 'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,1886 +1,1888 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 23 import hashlib
24 24 import os
25 25 import re
26 26 import pprint
27 27 import shutil
28 28 import socket
29 29 import subprocess32
30 30 import time
31 31 import uuid
32 32 import dateutil.tz
33 33 import functools
34 34
35 35 import mock
36 36 import pyramid.testing
37 37 import pytest
38 38 import colander
39 39 import requests
40 40 import pyramid.paster
41 41
42 42 import rhodecode
43 43 from rhodecode.lib.utils2 import AttributeDict
44 44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 45 from rhodecode.model.comment import CommentsModel
46 46 from rhodecode.model.db import (
47 47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
48 48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 49 from rhodecode.model.meta import Session
50 50 from rhodecode.model.pull_request import PullRequestModel
51 51 from rhodecode.model.repo import RepoModel
52 52 from rhodecode.model.repo_group import RepoGroupModel
53 53 from rhodecode.model.user import UserModel
54 54 from rhodecode.model.settings import VcsSettingsModel
55 55 from rhodecode.model.user_group import UserGroupModel
56 56 from rhodecode.model.integration import IntegrationModel
57 57 from rhodecode.integrations import integration_type_registry
58 58 from rhodecode.integrations.types.base import IntegrationTypeBase
59 59 from rhodecode.lib.utils import repo2db_mapper
60 60 from rhodecode.lib.vcs import create_vcsserver_proxy
61 61 from rhodecode.lib.vcs.backends import get_backend
62 62 from rhodecode.lib.vcs.nodes import FileNode
63 63 from rhodecode.tests import (
64 64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
65 65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
66 66 TEST_USER_REGULAR_PASS)
67 67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
68 68 from rhodecode.tests.fixture import Fixture
69 69 from rhodecode.config import utils as config_utils
70 70
71 71 def _split_comma(value):
72 72 return value.split(',')
73 73
74 74
75 75 def pytest_addoption(parser):
76 76 parser.addoption(
77 77 '--keep-tmp-path', action='store_true',
78 78 help="Keep the test temporary directories")
79 79 parser.addoption(
80 80 '--backends', action='store', type=_split_comma,
81 81 default=['git', 'hg', 'svn'],
82 82 help="Select which backends to test for backend specific tests.")
83 83 parser.addoption(
84 84 '--dbs', action='store', type=_split_comma,
85 85 default=['sqlite'],
86 86 help="Select which database to test for database specific tests. "
87 87 "Possible options are sqlite,postgres,mysql")
88 88 parser.addoption(
89 89 '--appenlight', '--ae', action='store_true',
90 90 help="Track statistics in appenlight.")
91 91 parser.addoption(
92 92 '--appenlight-api-key', '--ae-key',
93 93 help="API key for Appenlight.")
94 94 parser.addoption(
95 95 '--appenlight-url', '--ae-url',
96 96 default="https://ae.rhodecode.com",
97 97 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
98 98 parser.addoption(
99 99 '--sqlite-connection-string', action='store',
100 100 default='', help="Connection string for the dbs tests with SQLite")
101 101 parser.addoption(
102 102 '--postgres-connection-string', action='store',
103 103 default='', help="Connection string for the dbs tests with Postgres")
104 104 parser.addoption(
105 105 '--mysql-connection-string', action='store',
106 106 default='', help="Connection string for the dbs tests with MySQL")
107 107 parser.addoption(
108 108 '--repeat', type=int, default=100,
109 109 help="Number of repetitions in performance tests.")
110 110
111 111
112 112 def pytest_configure(config):
113 113 from rhodecode.config import patches
114 114
115 115
116 116 def pytest_collection_modifyitems(session, config, items):
117 117     # skip items flagged `__test__ = False` (nose's nottest); used during the nose-to-pytest transition
118 118 remaining = [
119 119 i for i in items if getattr(i.obj, '__test__', True)]
120 120 items[:] = remaining
121 121
122 122
123 123 def pytest_generate_tests(metafunc):
124 124 # Support test generation based on --backend parameter
125 125 if 'backend_alias' in metafunc.fixturenames:
126 126 backends = get_backends_from_metafunc(metafunc)
127 127 scope = None
128 128 if not backends:
129 129 pytest.skip("Not enabled for any of selected backends")
130 130 metafunc.parametrize('backend_alias', backends, scope=scope)
131 131 elif hasattr(metafunc.function, 'backends'):
132 132 backends = get_backends_from_metafunc(metafunc)
133 133 if not backends:
134 134 pytest.skip("Not enabled for any of selected backends")
135 135
136 136
137 137 def get_backends_from_metafunc(metafunc):
138 138 requested_backends = set(metafunc.config.getoption('--backends'))
139 139 if hasattr(metafunc.function, 'backends'):
140 140 # Supported backends by this test function, created from
141 141 # pytest.mark.backends
142 142 backends = metafunc.definition.get_closest_marker('backends').args
143 143 elif hasattr(metafunc.cls, 'backend_alias'):
144 144 # Support class attribute "backend_alias", this is mainly
145 145 # for legacy reasons for tests not yet using pytest.mark.backends
146 146 backends = [metafunc.cls.backend_alias]
147 147 else:
148 148 backends = metafunc.config.getoption('--backends')
149 149 return requested_backends.intersection(backends)
150 150
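# Usage sketch: a test limits itself to specific backends with the marker
# consumed above, e.g.
#
#   @pytest.mark.backends('git', 'hg')
#   def test_something(backend):
#       ...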
151 151
152 152 @pytest.fixture(scope='session', autouse=True)
153 153 def activate_example_rcextensions(request):
154 154 """
155 155 Patch in an example rcextensions module which verifies passed in kwargs.
156 156 """
157 157 from rhodecode.config import rcextensions
158 158
159 159 old_extensions = rhodecode.EXTENSIONS
160 160 rhodecode.EXTENSIONS = rcextensions
161 161 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
162 162
163 163 @request.addfinalizer
164 164 def cleanup():
165 165 rhodecode.EXTENSIONS = old_extensions
166 166
167 167
168 168 @pytest.fixture
169 169 def capture_rcextensions():
170 170 """
171 171 Returns the recorded calls to entry points in rcextensions.
172 172 """
173 173 calls = rhodecode.EXTENSIONS.calls
174 174 calls.clear()
175 175 # Note: At this moment, it is still the empty dict, but that will
176 176 # be filled during the test run and since it is a reference this
177 177 # is enough to make it work.
178 178 return calls
179 179
180 180
181 181 @pytest.fixture(scope='session')
182 182 def http_environ_session():
183 183 """
184 184     Allows using "http_environ" in session scope.
185 185 """
186 186 return plain_http_environ()
187 187
188 188
189 189 def plain_http_host_stub():
190 190 """
191 191 Value of HTTP_HOST in the test run.
192 192 """
193 193 return 'example.com:80'
194 194
195 195
196 196 @pytest.fixture
197 197 def http_host_stub():
198 198 """
199 199 Value of HTTP_HOST in the test run.
200 200 """
201 201 return plain_http_host_stub()
202 202
203 203
204 204 def plain_http_host_only_stub():
205 205 """
206 206     Value of HTTP_HOST (host only, without the port) in the test run.
207 207 """
208 208 return plain_http_host_stub().split(':')[0]
209 209
210 210
211 211 @pytest.fixture
212 212 def http_host_only_stub():
213 213 """
214 214     Value of HTTP_HOST (host only, without the port) in the test run.
215 215 """
216 216 return plain_http_host_only_stub()
217 217
218 218
219 219 def plain_http_environ():
220 220 """
221 221 HTTP extra environ keys.
222 222
223 223     Used by the test application as well as for setting up the pylons
224 224 environment. In the case of the fixture "app" it should be possible
225 225 to override this for a specific test case.
226 226 """
227 227 return {
228 228 'SERVER_NAME': plain_http_host_only_stub(),
229 229 'SERVER_PORT': plain_http_host_stub().split(':')[1],
230 230 'HTTP_HOST': plain_http_host_stub(),
231 231 'HTTP_USER_AGENT': 'rc-test-agent',
232 232 'REQUEST_METHOD': 'GET'
233 233 }
234 234
235 235
236 236 @pytest.fixture
237 237 def http_environ():
238 238 """
239 239 HTTP extra environ keys.
240 240
241 241     Used by the test application as well as for setting up the pylons
242 242 environment. In the case of the fixture "app" it should be possible
243 243 to override this for a specific test case.
244 244 """
245 245 return plain_http_environ()
246 246
247 247
248 248 @pytest.fixture(scope='session')
249 249 def baseapp(ini_config, vcsserver, http_environ_session):
250 250 from rhodecode.lib.pyramid_utils import get_app_config
251 251 from rhodecode.config.middleware import make_pyramid_app
252 252
253 253 print("Using the RhodeCode configuration:{}".format(ini_config))
254 254 pyramid.paster.setup_logging(ini_config)
255 255
256 256 settings = get_app_config(ini_config)
257 257 app = make_pyramid_app({'__file__': ini_config}, **settings)
258 258
259 259 return app
260 260
261 261
262 262 @pytest.fixture(scope='function')
263 263 def app(request, config_stub, baseapp, http_environ):
264 264 app = CustomTestApp(
265 265 baseapp,
266 266 extra_environ=http_environ)
267 267 if request.cls:
268 268 request.cls.app = app
269 269 return app
270 270
271 271
272 272 @pytest.fixture(scope='session')
273 273 def app_settings(baseapp, ini_config):
274 274 """
275 275 Settings dictionary used to create the app.
276 276
277 277 Parses the ini file and passes the result through the sanitize and apply
278 278 defaults mechanism in `rhodecode.config.middleware`.
279 279 """
280 280 return baseapp.config.get_settings()
281 281
282 282
283 283 @pytest.fixture(scope='session')
284 284 def db_connection(ini_settings):
285 285 # Initialize the database connection.
286 286 config_utils.initialize_database(ini_settings)
287 287
288 288
289 289 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
290 290
291 291
292 292 def _autologin_user(app, *args):
293 293 session = login_user_session(app, *args)
294 294 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
295 295 return LoginData(csrf_token, session['rhodecode_user'])
296 296
297 297
298 298 @pytest.fixture
299 299 def autologin_user(app):
300 300 """
301 301 Utility fixture which makes sure that the admin user is logged in
302 302 """
303 303 return _autologin_user(app)
304 304
305 305
306 306 @pytest.fixture
307 307 def autologin_regular_user(app):
308 308 """
309 309 Utility fixture which makes sure that the regular user is logged in
310 310 """
311 311 return _autologin_user(
312 312 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
313 313
314 314
315 315 @pytest.fixture(scope='function')
316 316 def csrf_token(request, autologin_user):
317 317 return autologin_user.csrf_token
318 318
319 319
320 320 @pytest.fixture(scope='function')
321 321 def xhr_header(request):
322 322 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
323 323
324 324
325 325 @pytest.fixture
326 326 def real_crypto_backend(monkeypatch):
327 327 """
328 328 Switch the production crypto backend on for this test.
329 329
330 330 During the test run the crypto backend is replaced with a faster
331 331 implementation based on the MD5 algorithm.
332 332 """
333 333 monkeypatch.setattr(rhodecode, 'is_test', False)
334 334
335 335
336 336 @pytest.fixture(scope='class')
337 337 def index_location(request, baseapp):
338 338 index_location = baseapp.config.get_settings()['search.location']
339 339 if request.cls:
340 340 request.cls.index_location = index_location
341 341 return index_location
342 342
343 343
344 344 @pytest.fixture(scope='session', autouse=True)
345 345 def tests_tmp_path(request):
346 346 """
347 347 Create temporary directory to be used during the test session.
348 348 """
349 349 if not os.path.exists(TESTS_TMP_PATH):
350 350 os.makedirs(TESTS_TMP_PATH)
351 351
352 352 if not request.config.getoption('--keep-tmp-path'):
353 353 @request.addfinalizer
354 354 def remove_tmp_path():
355 355 shutil.rmtree(TESTS_TMP_PATH)
356 356
357 357 return TESTS_TMP_PATH
358 358
359 359
360 360 @pytest.fixture
361 361 def test_repo_group(request):
362 362 """
363 363     Create a temporary repository group, and destroy it automatically
364 364     after usage
365 365 """
366 366 fixture = Fixture()
367 367 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
368 368 repo_group = fixture.create_repo_group(repogroupid)
369 369
370 370 def _cleanup():
371 371 fixture.destroy_repo_group(repogroupid)
372 372
373 373 request.addfinalizer(_cleanup)
374 374 return repo_group
375 375
376 376
377 377 @pytest.fixture
378 378 def test_user_group(request):
379 379 """
380 380     Create a temporary user group, and destroy it automatically
381 381     after usage
382 382 """
383 383 fixture = Fixture()
384 384 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
385 385 user_group = fixture.create_user_group(usergroupid)
386 386
387 387 def _cleanup():
388 388 fixture.destroy_user_group(user_group)
389 389
390 390 request.addfinalizer(_cleanup)
391 391 return user_group
392 392
393 393
394 394 @pytest.fixture(scope='session')
395 395 def test_repo(request):
396 396 container = TestRepoContainer()
397 397 request.addfinalizer(container._cleanup)
398 398 return container
399 399
400 400
401 401 class TestRepoContainer(object):
402 402 """
403 403 Container for test repositories which are used read only.
404 404
405 405 Repositories will be created on demand and re-used during the lifetime
406 406 of this object.
407 407
408 408 Usage to get the svn test repository "minimal"::
409 409
410 410         test_repo = TestRepoContainer()
411 411 repo = test_repo('minimal', 'svn')
412 412
413 413 """
414 414
415 415 dump_extractors = {
416 416 'git': utils.extract_git_repo_from_dump,
417 417 'hg': utils.extract_hg_repo_from_dump,
418 418 'svn': utils.extract_svn_repo_from_dump,
419 419 }
420 420
421 421 def __init__(self):
422 422 self._cleanup_repos = []
423 423 self._fixture = Fixture()
424 424 self._repos = {}
425 425
426 426 def __call__(self, dump_name, backend_alias, config=None):
427 427 key = (dump_name, backend_alias)
428 428 if key not in self._repos:
429 429 repo = self._create_repo(dump_name, backend_alias, config)
430 430 self._repos[key] = repo.repo_id
431 431 return Repository.get(self._repos[key])
432 432
433 433 def _create_repo(self, dump_name, backend_alias, config):
434 434 repo_name = '%s-%s' % (backend_alias, dump_name)
435 435 backend_class = get_backend(backend_alias)
436 436 dump_extractor = self.dump_extractors[backend_alias]
437 437 repo_path = dump_extractor(dump_name, repo_name)
438 438
439 439 vcs_repo = backend_class(repo_path, config=config)
440 440 repo2db_mapper({repo_name: vcs_repo})
441 441
442 442 repo = RepoModel().get_by_repo_name(repo_name)
443 443 self._cleanup_repos.append(repo_name)
444 444 return repo
445 445
446 446 def _cleanup(self):
447 447 for repo_name in reversed(self._cleanup_repos):
448 448 self._fixture.destroy_repo(repo_name)
449 449
450 450
451 451 def backend_base(request, backend_alias, baseapp, test_repo):
452 452 if backend_alias not in request.config.getoption('--backends'):
453 453 pytest.skip("Backend %s not selected." % (backend_alias, ))
454 454
455 455 utils.check_xfail_backends(request.node, backend_alias)
456 456 utils.check_skip_backends(request.node, backend_alias)
457 457
458 458 repo_name = 'vcs_test_%s' % (backend_alias, )
459 459 backend = Backend(
460 460 alias=backend_alias,
461 461 repo_name=repo_name,
462 462 test_name=request.node.name,
463 463 test_repo_container=test_repo)
464 464 request.addfinalizer(backend.cleanup)
465 465 return backend
466 466
467 467
468 468 @pytest.fixture
469 469 def backend(request, backend_alias, baseapp, test_repo):
470 470 """
471 471 Parametrized fixture which represents a single backend implementation.
472 472
473 473 It respects the option `--backends` to focus the test run on specific
474 474 backend implementations.
475 475
476 476 It also supports `pytest.mark.xfail_backends` to mark tests as failing
477 477 for specific backends. This is intended as a utility for incremental
478 478 development of a new backend implementation.
479 479 """
480 480 return backend_base(request, backend_alias, baseapp, test_repo)
481 481
482 482
483 483 @pytest.fixture
484 484 def backend_git(request, baseapp, test_repo):
485 485 return backend_base(request, 'git', baseapp, test_repo)
486 486
487 487
488 488 @pytest.fixture
489 489 def backend_hg(request, baseapp, test_repo):
490 490 return backend_base(request, 'hg', baseapp, test_repo)
491 491
492 492
493 493 @pytest.fixture
494 494 def backend_svn(request, baseapp, test_repo):
495 495 return backend_base(request, 'svn', baseapp, test_repo)
496 496
497 497
498 498 @pytest.fixture
499 499 def backend_random(backend_git):
500 500 """
501 501     Use this to express that your tests need "a backend".
502 502
503 503 A few of our tests need a backend, so that we can run the code. This
504 504 fixture is intended to be used for such cases. It will pick one of the
505 505 backends and run the tests.
506 506
507 507 The fixture `backend` would run the test multiple times for each
508 508 available backend which is a pure waste of time if the test is
509 509 independent of the backend type.
510 510 """
511 511 # TODO: johbo: Change this to pick a random backend
512 512 return backend_git
513 513
514 514
515 515 @pytest.fixture
516 516 def backend_stub(backend_git):
517 517 """
518 518 Use this to express that your tests need a backend stub
519 519
520 520 TODO: mikhail: Implement a real stub logic instead of returning
521 521 a git backend
522 522 """
523 523 return backend_git
524 524
525 525
526 526 @pytest.fixture
527 527 def repo_stub(backend_stub):
528 528 """
529 529 Use this to express that your tests need a repository stub
530 530 """
531 531 return backend_stub.create_repo()
532 532
533 533
534 534 class Backend(object):
535 535 """
536 536 Represents the test configuration for one supported backend
537 537
538 538 Provides easy access to different test repositories based on
539 539 `__getitem__`. Such repositories will only be created once per test
540 540 session.
541 541 """
542 542
543 543 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
544 544 _master_repo = None
545 545 _commit_ids = {}
546 546
547 547 def __init__(self, alias, repo_name, test_name, test_repo_container):
548 548 self.alias = alias
549 549 self.repo_name = repo_name
550 550 self._cleanup_repos = []
551 551 self._test_name = test_name
552 552 self._test_repo_container = test_repo_container
553 553 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
554 554 # Fixture will survive in the end.
555 555 self._fixture = Fixture()
556 556
557 557 def __getitem__(self, key):
558 558 return self._test_repo_container(key, self.alias)
559 559
560 560 def create_test_repo(self, key, config=None):
561 561 return self._test_repo_container(key, self.alias, config)
562 562
563 563 @property
564 564 def repo(self):
565 565 """
566 566 Returns the "current" repository. This is the vcs_test repo or the
567 567 last repo which has been created with `create_repo`.
568 568 """
569 569 from rhodecode.model.db import Repository
570 570 return Repository.get_by_repo_name(self.repo_name)
571 571
572 572 @property
573 573 def default_branch_name(self):
574 574 VcsRepository = get_backend(self.alias)
575 575 return VcsRepository.DEFAULT_BRANCH_NAME
576 576
577 577 @property
578 578 def default_head_id(self):
579 579 """
580 580 Returns the default head id of the underlying backend.
581 581
582 582 This will be the default branch name in case the backend does have a
583 583 default branch. In the other cases it will point to a valid head
584 584 which can serve as the base to create a new commit on top of it.
585 585 """
586 586 vcsrepo = self.repo.scm_instance()
587 587 head_id = (
588 588 vcsrepo.DEFAULT_BRANCH_NAME or
589 589 vcsrepo.commit_ids[-1])
590 590 return head_id
591 591
592 592 @property
593 593 def commit_ids(self):
594 594 """
595 595 Returns the list of commits for the last created repository
596 596 """
597 597 return self._commit_ids
598 598
599 599 def create_master_repo(self, commits):
600 600 """
601 601 Create a repository and remember it as a template.
602 602
603 603         This makes it easy to create derived repositories to construct
604 604 more complex scenarios for diff, compare and pull requests.
605 605
606 606 Returns a commit map which maps from commit message to raw_id.
607 607 """
608 608 self._master_repo = self.create_repo(commits=commits)
609 609 return self._commit_ids
610 610
611 611 def create_repo(
612 612 self, commits=None, number_of_commits=0, heads=None,
613 613 name_suffix=u'', bare=False, **kwargs):
614 614 """
615 615 Create a repository and record it for later cleanup.
616 616
617 617 :param commits: Optional. A sequence of dict instances.
618 618 Will add a commit per entry to the new repository.
619 619 :param number_of_commits: Optional. If set to a number, this number of
620 620 commits will be added to the new repository.
621 621         :param heads: Optional. Can be set to a sequence of commit
622 622 names which shall be pulled in from the master repository.
623 623 :param name_suffix: adds special suffix to generated repo name
624 624 :param bare: set a repo as bare (no checkout)
625 625 """
626 626 self.repo_name = self._next_repo_name() + name_suffix
627 627 repo = self._fixture.create_repo(
628 628 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
629 629 self._cleanup_repos.append(repo.repo_name)
630 630
631 631 commits = commits or [
632 632 {'message': 'Commit %s of %s' % (x, self.repo_name)}
633 633 for x in range(number_of_commits)]
634 self._add_commits_to_repo(repo.scm_instance(), commits)
634 vcs_repo = repo.scm_instance()
635 vcs_repo.count()
636 self._add_commits_to_repo(vcs_repo, commits)
635 637 if heads:
636 638 self.pull_heads(repo, heads)
637 639
638 640 return repo
639 641
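    # Usage sketch: `commits` entries are plain dicts; the keys handled by
    # _add_commits_to_repo (defined later in this module) are 'message',
    # 'author', 'date', 'branch', 'parents' and the node lists
    # 'added'/'changed'/'removed', e.g.
    #
    #   repo = backend.create_repo(commits=[
    #       {'message': 'init', 'added': [FileNode('a.txt', content='a\n')]},
    #       {'message': 'edit', 'changed': [FileNode('a.txt', content='b\n')]},
    #   ])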
640 642 def pull_heads(self, repo, heads):
641 643 """
642 644 Make sure that repo contains all commits mentioned in `heads`
643 645 """
644 646 vcsmaster = self._master_repo.scm_instance()
645 647 vcsrepo = repo.scm_instance()
646 648 vcsrepo.config.clear_section('hooks')
647 649 commit_ids = [self._commit_ids[h] for h in heads]
648 650 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
649 651
650 652 def create_fork(self):
651 653 repo_to_fork = self.repo_name
652 654 self.repo_name = self._next_repo_name()
653 655 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
654 656 self._cleanup_repos.append(self.repo_name)
655 657 return repo
656 658
657 659 def new_repo_name(self, suffix=u''):
658 660 self.repo_name = self._next_repo_name() + suffix
659 661 self._cleanup_repos.append(self.repo_name)
660 662 return self.repo_name
661 663
662 664 def _next_repo_name(self):
663 665 return u"%s_%s" % (
664 666 self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))
665 667
666 668 def ensure_file(self, filename, content='Test content\n'):
667 669 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
668 670 commits = [
669 671 {'added': [
670 672 FileNode(filename, content=content),
671 673 ]},
672 674 ]
673 675 self._add_commits_to_repo(self.repo.scm_instance(), commits)
674 676
675 677 def enable_downloads(self):
676 678 repo = self.repo
677 679 repo.enable_downloads = True
678 680 Session().add(repo)
679 681 Session().commit()
680 682
681 683 def cleanup(self):
682 684 for repo_name in reversed(self._cleanup_repos):
683 685 self._fixture.destroy_repo(repo_name)
684 686
685 687 def _add_commits_to_repo(self, repo, commits):
686 688 commit_ids = _add_commits_to_repo(repo, commits)
687 689 if not commit_ids:
688 690 return
689 691 self._commit_ids = commit_ids
690 692
691 693 # Creating refs for Git to allow fetching them from remote repository
692 694 if self.alias == 'git':
693 695 refs = {}
694 696 for message in self._commit_ids:
695 697 # TODO: mikhail: do more special chars replacements
696 698 ref_name = 'refs/test-refs/{}'.format(
697 699 message.replace(' ', ''))
698 700 refs[ref_name] = self._commit_ids[message]
699 701 self._create_refs(repo, refs)
700 702
701 703 def _create_refs(self, repo, refs):
702 704 for ref_name in refs:
703 705 repo.set_refs(ref_name, refs[ref_name])
704 706
705 707
706 708 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
707 709 if backend_alias not in request.config.getoption('--backends'):
708 710 pytest.skip("Backend %s not selected." % (backend_alias, ))
709 711
710 712 utils.check_xfail_backends(request.node, backend_alias)
711 713 utils.check_skip_backends(request.node, backend_alias)
712 714
713 715 repo_name = 'vcs_test_%s' % (backend_alias, )
714 716 repo_path = os.path.join(tests_tmp_path, repo_name)
715 717 backend = VcsBackend(
716 718 alias=backend_alias,
717 719 repo_path=repo_path,
718 720 test_name=request.node.name,
719 721 test_repo_container=test_repo)
720 722 request.addfinalizer(backend.cleanup)
721 723 return backend
722 724
723 725
724 726 @pytest.fixture
725 727 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
726 728 """
727 729 Parametrized fixture which represents a single vcs backend implementation.
728 730
729 731 See the fixture `backend` for more details. This one implements the same
730 732 concept, but on vcs level. So it does not provide model instances etc.
731 733
732 734 Parameters are generated dynamically, see :func:`pytest_generate_tests`
733 735 for how this works.
734 736 """
735 737 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
736 738
737 739
738 740 @pytest.fixture
739 741 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
740 742 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
741 743
742 744
743 745 @pytest.fixture
744 746 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
745 747 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
746 748
747 749
748 750 @pytest.fixture
749 751 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
750 752 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
751 753
752 754
753 755 @pytest.fixture
754 756 def vcsbackend_stub(vcsbackend_git):
755 757 """
756 758 Use this to express that your test just needs a stub of a vcsbackend.
757 759
758 760 Plan is to eventually implement an in-memory stub to speed tests up.
759 761 """
760 762 return vcsbackend_git
761 763
762 764
763 765 class VcsBackend(object):
764 766 """
765 767 Represents the test configuration for one supported vcs backend.
766 768 """
767 769
768 770 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
769 771
770 772 def __init__(self, alias, repo_path, test_name, test_repo_container):
771 773 self.alias = alias
772 774 self._repo_path = repo_path
773 775 self._cleanup_repos = []
774 776 self._test_name = test_name
775 777 self._test_repo_container = test_repo_container
776 778
777 779 def __getitem__(self, key):
778 780 return self._test_repo_container(key, self.alias).scm_instance()
779 781
780 782 @property
781 783 def repo(self):
782 784 """
783 785         Returns the "current" repository. This is the vcs_test repo or the last
784 786 repo which has been created.
785 787 """
786 788 Repository = get_backend(self.alias)
787 789 return Repository(self._repo_path)
788 790
789 791 @property
790 792 def backend(self):
791 793 """
792 794 Returns the backend implementation class.
793 795 """
794 796 return get_backend(self.alias)
795 797
796 798 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
797 799 bare=False):
798 800 repo_name = self._next_repo_name()
799 801 self._repo_path = get_new_dir(repo_name)
800 802 repo_class = get_backend(self.alias)
801 803 src_url = None
802 804 if _clone_repo:
803 805 src_url = _clone_repo.path
804 806 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
805 807 self._cleanup_repos.append(repo)
806 808
807 809 commits = commits or [
808 810 {'message': 'Commit %s of %s' % (x, repo_name)}
809 811 for x in xrange(number_of_commits)]
810 812 _add_commits_to_repo(repo, commits)
811 813 return repo
812 814
813 815 def clone_repo(self, repo):
814 816 return self.create_repo(_clone_repo=repo)
815 817
816 818 def cleanup(self):
817 819 for repo in self._cleanup_repos:
818 820 shutil.rmtree(repo.path)
819 821
820 822 def new_repo_path(self):
821 823 repo_name = self._next_repo_name()
822 824 self._repo_path = get_new_dir(repo_name)
823 825 return self._repo_path
824 826
825 827 def _next_repo_name(self):
826 828 return "%s_%s" % (
827 829 self.invalid_repo_name.sub('_', self._test_name),
828 830 len(self._cleanup_repos))
829 831
830 832 def add_file(self, repo, filename, content='Test content\n'):
831 833 imc = repo.in_memory_commit
832 834 imc.add(FileNode(filename, content=content))
833 835 imc.commit(
834 836 message=u'Automatic commit from vcsbackend fixture',
835 837 author=u'Automatic')
836 838
837 839 def ensure_file(self, filename, content='Test content\n'):
838 840 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
839 841 self.add_file(self.repo, filename, content)
840 842
841 843
842 844 def _add_commits_to_repo(vcs_repo, commits):
843 845 commit_ids = {}
844 846 if not commits:
845 847 return commit_ids
846 848
847 849 imc = vcs_repo.in_memory_commit
848 850 commit = None
849 851
850 852 for idx, commit in enumerate(commits):
851 853 message = unicode(commit.get('message', 'Commit %s' % idx))
852 854
853 855 for node in commit.get('added', []):
854 856 imc.add(FileNode(node.path, content=node.content))
855 857 for node in commit.get('changed', []):
856 858 imc.change(FileNode(node.path, content=node.content))
857 859 for node in commit.get('removed', []):
858 860 imc.remove(FileNode(node.path))
859 861
860 862 parents = [
861 863 vcs_repo.get_commit(commit_id=commit_ids[p])
862 864 for p in commit.get('parents', [])]
863 865
864 866 operations = ('added', 'changed', 'removed')
865 867 if not any((commit.get(o) for o in operations)):
866 868 imc.add(FileNode('file_%s' % idx, content=message))
867 869
868 870 commit = imc.commit(
869 871 message=message,
870 872 author=unicode(commit.get('author', 'Automatic')),
871 873 date=commit.get('date'),
872 874 branch=commit.get('branch'),
873 875 parents=parents)
874 876
875 877 commit_ids[commit.message] = commit.raw_id
876 878
877 879 return commit_ids
878 880
879 881
880 882 @pytest.fixture
881 883 def reposerver(request):
882 884 """
883 885     Allows serving a backend repository
884 886 """
885 887
886 888 repo_server = RepoServer()
887 889 request.addfinalizer(repo_server.cleanup)
888 890 return repo_server
889 891
890 892
891 893 class RepoServer(object):
892 894 """
893 895 Utility to serve a local repository for the duration of a test case.
894 896
895 897 Supports only Subversion so far.
896 898 """
897 899
898 900 url = None
899 901
900 902 def __init__(self):
901 903 self._cleanup_servers = []
902 904
903 905 def serve(self, vcsrepo):
904 906 if vcsrepo.alias != 'svn':
905 907 raise TypeError("Backend %s not supported" % vcsrepo.alias)
906 908
907 909 proc = subprocess32.Popen(
908 910 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
909 911 '--root', vcsrepo.path])
910 912 self._cleanup_servers.append(proc)
911 913 self.url = 'svn://localhost'
912 914
913 915 def cleanup(self):
914 916 for proc in self._cleanup_servers:
915 917 proc.terminate()
916 918
917 919
918 920 @pytest.fixture
919 921 def pr_util(backend, request, config_stub):
920 922 """
921 923 Utility for tests of models and for functional tests around pull requests.
922 924
923 925 It gives an instance of :class:`PRTestUtility` which provides various
924 926 utility methods around one pull request.
925 927
926 928 This fixture uses `backend` and inherits its parameterization.
927 929 """
928 930
929 931 util = PRTestUtility(backend)
930 932 request.addfinalizer(util.cleanup)
931 933
932 934 return util
933 935
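# Usage sketch for the fixture above: a typical test creates a pull request,
# approves it and adds a comment through the utility object:
#
#   def test_example(pr_util):
#       pull_request = pr_util.create_pull_request(mergeable=True)
#       pr_util.approve()
#       comment = pr_util.create_comment()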
934 936
935 937 class PRTestUtility(object):
936 938
937 939 pull_request = None
938 940 pull_request_id = None
939 941 mergeable_patcher = None
940 942 mergeable_mock = None
941 943 notification_patcher = None
942 944
943 945 def __init__(self, backend):
944 946 self.backend = backend
945 947
946 948 def create_pull_request(
947 949 self, commits=None, target_head=None, source_head=None,
948 950 revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None,
            title=u"Test", description=u"Description"):
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            assert model.get_versions(self.pull_request) == []

        self.pull_request_id = self.pull_request.pull_request_id

        if approved:
            self.approve()

        Session().add(self.pull_request)
        Session().commit()

        return self.pull_request

    def approve(self):
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        reference = '%s:%s:%s' % (
            'branch',
            self.backend.default_branch_name,
            self.commit_ids[commit_message])
        return reference

    def _get_reviewers(self):
        return [
            (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
            (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
        ]

    def update_source_repository(self, head=None):
        heads = [head or 'c3']
        self.backend.pull_heads(self.source_repository, heads=heads)

    def add_one_commit(self, head=None):
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(
            pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request, pull_request.author)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()


@pytest.fixture
def user_admin(baseapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
    return user


@pytest.fixture
def user_regular(baseapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
    return user


@pytest.fixture
def user_util(request, db_connection):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    utility = UserUtility(test_name=request.node.name)
    request.addfinalizer(utility.cleanup)
    return utility
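
# A minimal sketch of how a test can consume `user_util` (the test name and
# the chosen permission below are illustrative, not taken from the suite):
#
#     def test_user_can_read_repo(user_util):
#         user = user_util.create_user()
#         repo = user_util.create_repo()
#         user_util.grant_user_permission_to_repo(
#             repo, user, 'repository.read')
#         # ... assertions ...; the finalizer revokes the permission and
#         # destroys the created user and repository automatically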


# TODO: johbo: Split this up into utilities per domain or something similar
class UserUtility(object):

    def __init__(self, test_name="test"):
        self._test_name = self._sanitize_name(test_name)
        self.fixture = Fixture()
        self.repo_group_ids = []
        self.repos_ids = []
        self.user_ids = []
        self.user_group_ids = []
        self.user_repo_permission_ids = []
        self.user_group_repo_permission_ids = []
        self.user_repo_group_permission_ids = []
        self.user_group_repo_group_permission_ids = []
        self.user_user_group_permission_ids = []
        self.user_group_user_group_permission_ids = []
        self.user_permissions = []

    def _sanitize_name(self, name):
        for char in ['[', ']']:
            name = name.replace(char, '_')
        return name

    def create_repo_group(
            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        group_name = "{prefix}_repogroup_{count}".format(
            prefix=self._test_name,
            count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(
            group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
                    auto_cleanup=True, repo_type='hg', bare=False):
        repo_name = "{prefix}_repository_{count}".format(
            prefix=self._test_name,
            count=len(self.repos_ids))

        repository = self.fixture.create_repo(
            repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
        if auto_cleanup:
            self.repos_ids.append(repository.repo_id)
        return repository

    def create_user(self, auto_cleanup=True, **kwargs):
        user_name = "{prefix}_user_{count}".format(
            prefix=self._test_name,
            count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_additional_user_email(self, user, email):
        uem = self.fixture.create_additional_user_email(user=user, email=email)
        return uem

    def create_user_with_group(self):
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
                          auto_cleanup=True, **kwargs):
        group_name = "{prefix}_usergroup_{count}".format(
            prefix=self._test_name,
            count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(
            group_name, cur_user=owner, **kwargs)

        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        self.inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(
            self, repo_group, user, permission_name):
        permission = RepoGroupModel().grant_user_permission(
            repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append(
            (repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(
            self, repo_group, user_group, permission_name):
        permission = RepoGroupModel().grant_user_group_permission(
            repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append(
            (repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(
            self, repo, user, permission_name):
        permission = RepoModel().grant_user_permission(
            repo, user, permission_name)
        self.user_repo_permission_ids.append(
            (repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(
            self, repo, user_group, permission_name):
        permission = RepoModel().grant_user_group_permission(
            repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append(
            (repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(
            self, target_user_group, user, permission_name):
        permission = UserGroupModel().grant_user_permission(
            target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append(
            (target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(
            self, target_user_group, user_group, permission_name):
        permission = UserGroupModel().grant_user_group_permission(
            target_user_group, user_group, permission_name)
        self.user_group_user_group_permission_ids.append(
            (target_user_group.users_group_id, user_group.users_group_id))
        return permission

    def revoke_user_permission(self, user_name, permission_name):
        self.inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)

    def inherit_default_user_permissions(self, user_name, value):
        user = UserModel().get_by_username(user_name)
        user.inherit_default_permissions = value
        Session().add(user)
        Session().commit()

    def cleanup(self):
        self._cleanup_permissions()
        self._cleanup_repos()
        self._cleanup_repo_groups()
        self._cleanup_user_groups()
        self._cleanup_users()

    def _cleanup_permissions(self):
        if self.user_permissions:
            for user_name, permission_name in self.user_permissions:
                self.revoke_user_permission(user_name, permission_name)

        for permission in self.user_repo_permission_ids:
            RepoModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_permission_ids:
            RepoModel().revoke_user_group_permission(*permission)

        for permission in self.user_repo_group_permission_ids:
            RepoGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_group_permission_ids:
            RepoGroupModel().revoke_user_group_permission(*permission)

        for permission in self.user_user_group_permission_ids:
            UserGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_user_group_permission_ids:
            UserGroupModel().revoke_user_group_permission(*permission)

    def _cleanup_repo_groups(self):
        def _repo_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = RepoGroup.get(first_group_id)
            second_group = RepoGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.group_name.split('/')) if first_group else 0)
            second_group_parts = (
                len(second_group.group_name.split('/')) if second_group else 0)
            return cmp(second_group_parts, first_group_parts)
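
        # Ordering example (illustrative): 'parent/child/grandchild' has
        # three path parts, so it sorts before 'parent/child' (two parts)
        # and 'parent' (one part); deeper groups are therefore destroyed
        # before their parents.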

        sorted_repo_group_ids = sorted(
            self.repo_group_ids, cmp=_repo_group_compare)
        for repo_group_id in sorted_repo_group_ids:
            self.fixture.destroy_repo_group(repo_group_id)

    def _cleanup_repos(self):
        sorted_repos_ids = sorted(self.repos_ids)
        for repo_id in sorted_repos_ids:
            self.fixture.destroy_repo(repo_id)

    def _cleanup_user_groups(self):
        def _user_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = UserGroup.get(first_group_id)
            second_group = UserGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.users_group_name.split('/'))
                if first_group else 0)
            second_group_parts = (
                len(second_group.users_group_name.split('/'))
                if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        sorted_user_group_ids = sorted(
            self.user_group_ids, cmp=_user_group_compare)
        for user_group_id in sorted_user_group_ids:
            self.fixture.destroy_user_group(user_group_id)

    def _cleanup_users(self):
        for user_id in self.user_ids:
            self.fixture.destroy_user(user_id)


# TODO: Think about moving this into a pytest-pyro package and making it a
# pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """
    Adds the remote traceback if the exception has this information.

    VCSServer attaches this information as the attribute
    `_vcs_server_traceback` to the exception instance.
    """
    outcome = yield
    report = outcome.get_result()
    if call.excinfo:
        _add_vcsserver_remote_traceback(report, call.excinfo.value)


def _add_vcsserver_remote_traceback(report, exc):
    vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)

    if vcsserver_traceback:
        section = 'VCSServer remote traceback ' + report.when
        report.sections.append((section, vcsserver_traceback))


@pytest.fixture(scope='session')
def testrun():
    return {
        'uuid': uuid.uuid4(),
        'start': datetime.datetime.utcnow().isoformat(),
        'timestamp': int(time.time()),
    }


@pytest.fixture(autouse=True)
def collect_appenlight_stats(request, testrun):
    """
    This fixture reports memory consumption of single tests.

    It gathers data based on `psutil` and sends them to Appenlight. The option
    ``--appenlight`` has to be used to enable this fixture and the API key for
    your application has to be provided in ``--appenlight-api-key``.
    """
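    # Assumed invocation (sketch; the flags are the plugin options read
    # below, the key and URL values are placeholders):
    #   py.test --appenlight --appenlight-api-key=<key> \
    #       --appenlight-url=<your appenlight instance> rhodecode/tests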
    try:
        # psutil is not yet supported on cygwin.
        import psutil
    except ImportError:
        return

    if not request.config.getoption('--appenlight'):
        return
    else:
        # Only request the baseapp fixture if appenlight tracking is
        # enabled. This will speed up a test run of unit tests by 2 to 3
        # seconds if appenlight is not enabled.
        baseapp = request.getfuncargvalue("baseapp")
    url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
    client = AppenlightClient(
        url=url,
        api_key=request.config.getoption('--appenlight-api-key'),
        namespace=request.node.nodeid,
        request=str(testrun['uuid']),
        testrun=testrun)

    client.collect({
        'message': "Starting",
    })

    server_and_port = baseapp.config.get_settings()['vcs.server']
    protocol = baseapp.config.get_settings()['vcs.server.protocol']
    server = create_vcsserver_proxy(server_and_port, protocol)
    with server:
        vcs_pid = server.get_pid()
        server.run_gc()
        vcs_process = psutil.Process(vcs_pid)
        mem = vcs_process.memory_info()
        client.tag_before('vcsserver.rss', mem.rss)
        client.tag_before('vcsserver.vms', mem.vms)

        test_process = psutil.Process()
        mem = test_process.memory_info()
        client.tag_before('test.rss', mem.rss)
        client.tag_before('test.vms', mem.vms)

        client.tag_before('time', time.time())

    @request.addfinalizer
    def send_stats():
        client.tag_after('time', time.time())
        with server:
            gc_stats = server.run_gc()
            for tag, value in gc_stats.items():
                client.tag_after(tag, value)
            mem = vcs_process.memory_info()
            client.tag_after('vcsserver.rss', mem.rss)
            client.tag_after('vcsserver.vms', mem.vms)

            mem = test_process.memory_info()
            client.tag_after('test.rss', mem.rss)
            client.tag_after('test.vms', mem.vms)

        client.collect({
            'message': "Finished",
        })
        client.send_stats()

    return client


class AppenlightClient(object):

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        self.tags_after[tag] = value

    def collect(self, data):
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                delta = self.tags_after[key] - value
                tags.append((key + '.delta', delta))
            except Exception:
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if response.status_code != 200:
            pprint.pprint(self.stats)
            print(response.headers)
            print(response.text)
            raise Exception('Sending to appenlight failed')
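
# A minimal usage sketch for `AppenlightClient` outside of the autouse
# fixture above (the URL and API key are placeholders, not real endpoints):
#
#     client = AppenlightClient(
#         url='https://appenlight.example.com/api/logs',
#         api_key='SECRET_API_KEY')
#     client.tag_before('time', time.time())
#     # ... measured work ...
#     client.tag_after('time', time.time())
#     client.send_stats()  # also reports a computed 'time.delta' tag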


@pytest.fixture
def gist_util(request, db_connection):
    """
    Provides a wired instance of `GistUtility` with integrated cleanup.
    """
    utility = GistUtility()
    request.addfinalizer(utility.cleanup)
    return utility


class GistUtility(object):
    def __init__(self):
        self.fixture = Fixture()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        gist = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(gist.gist_id)
        return gist

    def cleanup(self):
        for id_ in self.gist_ids:
            self.fixture.destroy_gists(str(id_))


@pytest.fixture
def enabled_backends(request):
    backends = request.config.option.backends
    return backends[:]


@pytest.fixture
def settings_util(request, db_connection):
    """
    Provides a wired instance of `SettingsUtility` with integrated cleanup.
    """
    utility = SettingsUtility()
    request.addfinalizer(utility.cleanup)
    return utility


class SettingsUtility(object):
    def __init__(self):
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        key = key or hashlib.sha1(
            '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()

        setting = RepoRhodeCodeUi()
        setting.repository_id = repo.repo_id
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()

        setting = RhodeCodeUi()
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
        return setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        setting = RhodeCodeSetting(key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_setting_ids.append(setting.app_settings_id)

        return setting

    def cleanup(self):
        for id_ in self.rhodecode_ui_ids:
            setting = RhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.rhodecode_setting_ids:
            setting = RhodeCodeSetting.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_ui_ids:
            setting = RepoRhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_setting_ids:
            setting = RepoRhodeCodeSetting.get(id_)
            Session().delete(setting)

        Session().commit()


@pytest.fixture
def no_notifications(request):
    notification_patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    notification_patcher.start()
    request.addfinalizer(notification_patcher.stop)


@pytest.fixture(scope='session')
def repeat(request):
    """
    The number of test repetitions is based on this fixture.

    Slower calls may divide it by 10 or 100. It is chosen so that the tests
    are not too slow in our default test suite.
    """
    return request.config.getoption('--repeat')
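
# Hypothetical consumer of `repeat` (sketch; `run_once` is a stand-in for
# whatever the test exercises, and the // 10 follows the convention above):
#
#     def test_something_slow(repeat):
#         for _ in xrange(repeat // 10):
#             run_once()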


@pytest.fixture
def rhodecode_fixtures():
    return Fixture()


@pytest.fixture
def context_stub():
    """
    Stub context object.
    """
    context = pyramid.testing.DummyResource()
    return context


@pytest.fixture
def request_stub():
    """
    Stub request object.
    """
    from rhodecode.lib.base import bootstrap_request
    request = bootstrap_request(scheme='https')
    return request


@pytest.fixture
def config_stub(request, request_stub):
    """
    Set up pyramid.testing and return the Configurator.
    """
    from rhodecode.lib.base import bootstrap_config
    config = bootstrap_config(request=request_stub)

    @request.addfinalizer
    def cleanup():
        pyramid.testing.tearDown()

    return config


@pytest.fixture
def StubIntegrationType():
    class _StubIntegrationType(IntegrationTypeBase):
        """ Test integration type class """

        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'

        @classmethod
        def icon(cls):
            return 'test_icon_html_image'

        def __init__(self, settings):
            super(_StubIntegrationType, self).__init__(settings)
            self.sent_events = []  # for testing

        def send_event(self, event):
            self.sent_events.append(event)

        def settings_schema(self):
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )
            return SettingsSchema()

    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType
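
# Illustrative use of the stub (sketch): the type records events in memory,
# so a test can assert on dispatch without touching the network:
#
#     def test_stub_records_events(StubIntegrationType,
#                                  stub_integration_settings):
#         integration = StubIntegrationType(stub_integration_settings)
#         integration.send_event('some-event')
#         assert integration.sent_events == ['some-event']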


@pytest.fixture
def stub_integration_settings():
    return {
        'test_string_field': 'some data',
        'test_int_field': 100,
    }


@pytest.fixture
def repo_integration_stub(request, repo_stub, StubIntegrationType,
                          stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repo integration',
        repo=repo_stub, repo_group=None, child_repos_only=None)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=True)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def repogroup_recursive_integration_stub(request, test_repo_group,
                                         StubIntegrationType,
                                         stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test recursive repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=False)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def global_integration_stub(request, StubIntegrationType,
                            stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=None)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def root_repos_integration_stub(request, StubIntegrationType,
                                stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test root repos integration',
        repo=None, repo_group=None, child_repos_only=True)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def local_dt_to_utc():
    def _factory(dt):
        return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
            dateutil.tz.tzutc()).replace(tzinfo=None)
    return _factory
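
# For example (assuming the machine's local timezone is UTC+2), the factory
# maps a naive local datetime to its naive UTC equivalent:
#
#     local_dt_to_utc(datetime.datetime(2019, 1, 1, 14, 0))
#     # -> datetime.datetime(2019, 1, 1, 12, 0)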


@pytest.fixture
def disable_anonymous_user(request, baseapp):
    set_anonymous_access(False)

    @request.addfinalizer
    def cleanup():
        set_anonymous_access(True)


@pytest.fixture(scope='module')
def rc_fixture(request):
    return Fixture()


@pytest.fixture
def repo_groups(request):
    fixture = Fixture()

    session = Session()
    zombie_group = fixture.create_repo_group('zombie')
    parent_group = fixture.create_repo_group('parent')
    child_group = fixture.create_repo_group('parent/child')
    groups_in_db = session.query(RepoGroup).all()
    assert len(groups_in_db) == 3
    assert child_group.group_parent_id == parent_group.group_id

    @request.addfinalizer
    def cleanup():
        fixture.destroy_repo_group(zombie_group)
        fixture.destroy_repo_group(child_group)
        fixture.destroy_repo_group(parent_group)

    return zombie_group, parent_group, child_group
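
# Typical consumption (sketch): unpack the three groups and rely on the
# finalizer above for teardown:
#
#     def test_child_points_to_parent(repo_groups):
#         zombie_group, parent_group, child_group = repo_groups
#         assert child_group.group_parent_id == parent_group.group_id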