caches: enable no-cache repos for certain scenarios where we explicitly don't need to cache things, such as gists.
marcink
r3868:707232ae default
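The key change in this commit threads with_wire={"cache": False} into the VCS backend constructors (see the hunks in _create_filesystem_repo and repo_scan below), so repositories created or scanned this way skip wire-call caching. A minimal sketch of the pattern, using only names that appear in this diff; the repo_type and repo_path values are illustrative:

    from rhodecode.lib.utils import make_db_config
    from rhodecode.lib.vcs.backends import get_backend

    def open_repo_without_cache(repo_type, repo_path):
        # with_wire={"cache": False} asks this backend instance to skip
        # caching of wire calls -- useful for short-lived repos such as
        # gists, where cached data would never be reused.
        config = make_db_config(clear_session=False)
        backend = get_backend(repo_type)
        return backend(repo_path, config=config, with_wire={"cache": False})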
@@ -1,1072 +1,1073 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import re
23 23 import shutil
24 24 import time
25 25 import logging
26 26 import traceback
27 27 import datetime
28 28
29 29 from pyramid.threadlocal import get_current_request
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from rhodecode import events
33 33 from rhodecode.lib.auth import HasUserGroupPermissionAny
34 34 from rhodecode.lib.caching_query import FromCache
35 35 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
36 36 from rhodecode.lib.hooks_base import log_delete_repository
37 37 from rhodecode.lib.user_log_filter import user_log_filter
38 38 from rhodecode.lib.utils import make_db_config
39 39 from rhodecode.lib.utils2 import (
40 40 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
41 41 get_current_rhodecode_user, safe_int, datetime_to_time,
42 42 action_logger_generic)
43 43 from rhodecode.lib.vcs.backends import get_backend
44 44 from rhodecode.model import BaseModel
45 45 from rhodecode.model.db import (
46 46 _hash_key, joinedload, or_, Repository, UserRepoToPerm, UserGroupRepoToPerm,
47 47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
48 48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
49 49
50 50 from rhodecode.model.settings import VcsSettingsModel
51 51
52 52
53 53 log = logging.getLogger(__name__)
54 54
55 55
56 56 class RepoModel(BaseModel):
57 57
58 58 cls = Repository
59 59
60 60 def _get_user_group(self, users_group):
61 61 return self._get_instance(UserGroup, users_group,
62 62 callback=UserGroup.get_by_group_name)
63 63
64 64 def _get_repo_group(self, repo_group):
65 65 return self._get_instance(RepoGroup, repo_group,
66 66 callback=RepoGroup.get_by_group_name)
67 67
68 68 def _create_default_perms(self, repository, private):
69 69 # create default permission
70 70 default = 'repository.read'
71 71 def_user = User.get_default_user()
72 72 for p in def_user.user_perms:
73 73 if p.permission.permission_name.startswith('repository.'):
74 74 default = p.permission.permission_name
75 75 break
76 76
77 77 default_perm = 'repository.none' if private else default
78 78
79 79 repo_to_perm = UserRepoToPerm()
80 80 repo_to_perm.permission = Permission.get_by_key(default_perm)
81 81
82 82 repo_to_perm.repository = repository
83 83 repo_to_perm.user_id = def_user.user_id
84 84
85 85 return repo_to_perm
86 86
87 87 @LazyProperty
88 88 def repos_path(self):
89 89 """
90 90 Gets the repositories root path from database
91 91 """
92 92 settings_model = VcsSettingsModel(sa=self.sa)
93 93 return settings_model.get_repos_location()
94 94
95 95 def get(self, repo_id):
96 96 repo = self.sa.query(Repository) \
97 97 .filter(Repository.repo_id == repo_id)
98 98
99 99 return repo.scalar()
100 100
101 101 def get_repo(self, repository):
102 102 return self._get_repo(repository)
103 103
104 104 def get_by_repo_name(self, repo_name, cache=False):
105 105 repo = self.sa.query(Repository) \
106 106 .filter(Repository.repo_name == repo_name)
107 107
108 108 if cache:
109 109 name_key = _hash_key(repo_name)
110 110 repo = repo.options(
111 111 FromCache("sql_cache_short", "get_repo_%s" % name_key))
112 112 return repo.scalar()
113 113
114 114 def _extract_id_from_repo_name(self, repo_name):
115 115 if repo_name.startswith('/'):
116 116 repo_name = repo_name.lstrip('/')
117 117 by_id_match = re.match(r'^_(\d+)', repo_name)
118 118 if by_id_match:
119 119 return by_id_match.groups()[0]
120 120
121 121 def get_repo_by_id(self, repo_name):
122 122 """
123 123 Extracts a repository by its id from special URLs.
124 124 Example URL: _11/repo_name
125 125
126 126 :param repo_name:
127 127 :return: repo object if matched else None
128 128 """
129 129
130 130 try:
131 131 _repo_id = self._extract_id_from_repo_name(repo_name)
132 132 if _repo_id:
133 133 return self.get(_repo_id)
134 134 except Exception:
135 135 log.exception('Failed to extract repo_name from URL')
136 136
137 137 return None
138 138
139 139 def get_repos_for_root(self, root, traverse=False):
140 140 if traverse:
141 141 like_expression = u'{}%'.format(safe_unicode(root))
142 142 repos = Repository.query().filter(
143 143 Repository.repo_name.like(like_expression)).all()
144 144 else:
145 145 if root and not isinstance(root, RepoGroup):
146 146 raise ValueError(
147 147 'Root must be an instance '
148 148 'of RepoGroup, got:{} instead'.format(type(root)))
149 149 repos = Repository.query().filter(Repository.group == root).all()
150 150 return repos
151 151
152 152 def get_url(self, repo, request=None, permalink=False):
153 153 if not request:
154 154 request = get_current_request()
155 155
156 156 if not request:
157 157 return
158 158
159 159 if permalink:
160 160 return request.route_url(
161 161 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
162 162 else:
163 163 return request.route_url(
164 164 'repo_summary', repo_name=safe_str(repo.repo_name))
165 165
166 166 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
167 167 if not request:
168 168 request = get_current_request()
169 169
170 170 if not request:
171 171 return
172 172
173 173 if permalink:
174 174 return request.route_url(
175 175 'repo_commit', repo_name=safe_str(repo.repo_id),
176 176 commit_id=commit_id)
177 177
178 178 else:
179 179 return request.route_url(
180 180 'repo_commit', repo_name=safe_str(repo.repo_name),
181 181 commit_id=commit_id)
182 182
183 183 def get_repo_log(self, repo, filter_term):
184 184 repo_log = UserLog.query()\
185 185 .filter(or_(UserLog.repository_id == repo.repo_id,
186 186 UserLog.repository_name == repo.repo_name))\
187 187 .options(joinedload(UserLog.user))\
188 188 .options(joinedload(UserLog.repository))\
189 189 .order_by(UserLog.action_date.desc())
190 190
191 191 repo_log = user_log_filter(repo_log, filter_term)
192 192 return repo_log
193 193
194 194 @classmethod
195 195 def update_commit_cache(cls, repositories=None):
196 196 if not repositories:
197 197 repositories = Repository.getAll()
198 198 for repo in repositories:
199 199 repo.update_commit_cache()
200 200
201 201 def get_repos_as_dict(self, repo_list=None, admin=False,
202 202 super_user_actions=False, short_name=None):
203 203 _render = get_current_request().get_partial_renderer(
204 204 'rhodecode:templates/data_table/_dt_elements.mako')
205 205 c = _render.get_call_context()
206 206
207 207 def quick_menu(repo_name):
208 208 return _render('quick_menu', repo_name)
209 209
210 210 def repo_lnk(name, rtype, rstate, private, archived, fork_of):
211 211 if short_name is not None:
212 212 short_name_var = short_name
213 213 else:
214 214 short_name_var = not admin
215 215 return _render('repo_name', name, rtype, rstate, private, archived, fork_of,
216 216 short_name=short_name_var, admin=False)
217 217
218 218 def last_change(last_change):
219 219 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
220 220 last_change = last_change + datetime.timedelta(
221 221 seconds=(datetime.datetime.now() - datetime.datetime.utcnow()).seconds)
222 222 return _render("last_change", last_change)
223 223
224 224 def rss_lnk(repo_name):
225 225 return _render("rss", repo_name)
226 226
227 227 def atom_lnk(repo_name):
228 228 return _render("atom", repo_name)
229 229
230 230 def last_rev(repo_name, cs_cache):
231 231 return _render('revision', repo_name, cs_cache.get('revision'),
232 232 cs_cache.get('raw_id'), cs_cache.get('author'),
233 233 cs_cache.get('message'), cs_cache.get('date'))
234 234
235 235 def desc(desc):
236 236 return _render('repo_desc', desc, c.visual.stylify_metatags)
237 237
238 238 def state(repo_state):
239 239 return _render("repo_state", repo_state)
240 240
241 241 def repo_actions(repo_name):
242 242 return _render('repo_actions', repo_name, super_user_actions)
243 243
244 244 def user_profile(username):
245 245 return _render('user_profile', username)
246 246
247 247 repos_data = []
248 248 for repo in repo_list:
249 249 cs_cache = repo.changeset_cache
250 250 row = {
251 251 "menu": quick_menu(repo.repo_name),
252 252
253 253 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
254 254 repo.private, repo.archived, repo.fork),
255 255 "name_raw": repo.repo_name.lower(),
256 256
257 257 "last_change": last_change(repo.last_commit_change),
258 258 "last_change_raw": datetime_to_time(repo.last_commit_change),
259 259
260 260 "last_changeset": last_rev(repo.repo_name, cs_cache),
261 261 "last_changeset_raw": cs_cache.get('revision'),
262 262
263 263 "desc": desc(repo.description_safe),
264 264 "owner": user_profile(repo.user.username),
265 265
266 266 "state": state(repo.repo_state),
267 267 "rss": rss_lnk(repo.repo_name),
268 268
269 269 "atom": atom_lnk(repo.repo_name),
270 270 }
271 271 if admin:
272 272 row.update({
273 273 "action": repo_actions(repo.repo_name),
274 274 })
275 275 repos_data.append(row)
276 276
277 277 return repos_data
278 278
279 279 def _get_defaults(self, repo_name):
280 280 """
281 281 Gets information about repository, and returns a dict for
282 282 usage in forms
283 283
284 284 :param repo_name:
285 285 """
286 286
287 287 repo_info = Repository.get_by_repo_name(repo_name)
288 288
289 289 if repo_info is None:
290 290 return None
291 291
292 292 defaults = repo_info.get_dict()
293 293 defaults['repo_name'] = repo_info.just_name
294 294
295 295 groups = repo_info.groups_with_parents
296 296 parent_group = groups[-1] if groups else None
297 297
298 298 # we use -1 as this is how we mark an empty group in HTML
299 299 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
300 300
301 301 keys_to_process = (
302 302 {'k': 'repo_type', 'strip': False},
303 303 {'k': 'repo_enable_downloads', 'strip': True},
304 304 {'k': 'repo_description', 'strip': True},
305 305 {'k': 'repo_enable_locking', 'strip': True},
306 306 {'k': 'repo_landing_rev', 'strip': True},
307 307 {'k': 'clone_uri', 'strip': False},
308 308 {'k': 'push_uri', 'strip': False},
309 309 {'k': 'repo_private', 'strip': True},
310 310 {'k': 'repo_enable_statistics', 'strip': True}
311 311 )
312 312
313 313 for item in keys_to_process:
314 314 attr = item['k']
315 315 if item['strip']:
316 316 attr = remove_prefix(item['k'], 'repo_')
317 317
318 318 val = defaults[attr]
319 319 if item['k'] == 'repo_landing_rev':
320 320 val = ':'.join(defaults[attr])
321 321 defaults[item['k']] = val
322 322 if item['k'] == 'clone_uri':
323 323 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
324 324 if item['k'] == 'push_uri':
325 325 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
326 326
327 327 # fill owner
328 328 if repo_info.user:
329 329 defaults.update({'user': repo_info.user.username})
330 330 else:
331 331 replacement_user = User.get_first_super_admin().username
332 332 defaults.update({'user': replacement_user})
333 333
334 334 return defaults
335 335
336 336 def update(self, repo, **kwargs):
337 337 try:
338 338 cur_repo = self._get_repo(repo)
339 339 source_repo_name = cur_repo.repo_name
340 340 if 'user' in kwargs:
341 341 cur_repo.user = User.get_by_username(kwargs['user'])
342 342
343 343 if 'repo_group' in kwargs:
344 344 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
345 345 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
346 346
347 347 update_keys = [
348 348 (1, 'repo_description'),
349 349 (1, 'repo_landing_rev'),
350 350 (1, 'repo_private'),
351 351 (1, 'repo_enable_downloads'),
352 352 (1, 'repo_enable_locking'),
353 353 (1, 'repo_enable_statistics'),
354 354 (0, 'clone_uri'),
355 355 (0, 'push_uri'),
356 356 (0, 'fork_id')
357 357 ]
358 358 for strip, k in update_keys:
359 359 if k in kwargs:
360 360 val = kwargs[k]
361 361 if strip:
362 362 k = remove_prefix(k, 'repo_')
363 363
364 364 setattr(cur_repo, k, val)
365 365
366 366 new_name = cur_repo.get_new_name(kwargs['repo_name'])
367 367 cur_repo.repo_name = new_name
368 368
369 369 # if private flag is set, reset default permission to NONE
370 370 if kwargs.get('repo_private'):
371 371 EMPTY_PERM = 'repository.none'
372 372 RepoModel().grant_user_permission(
373 373 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
374 374 )
375 375
376 376 # handle extra fields
377 377 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
378 378 k = RepositoryField.un_prefix_key(field)
379 379 ex_field = RepositoryField.get_by_key_name(
380 380 key=k, repo=cur_repo)
381 381 if ex_field:
382 382 ex_field.field_value = kwargs[field]
383 383 self.sa.add(ex_field)
384 384 cur_repo.updated_on = datetime.datetime.now()
385 385 self.sa.add(cur_repo)
386 386
387 387 if source_repo_name != new_name:
388 388 # rename repository
389 389 self._rename_filesystem_repo(
390 390 old=source_repo_name, new=new_name)
391 391
392 392 return cur_repo
393 393 except Exception:
394 394 log.error(traceback.format_exc())
395 395 raise
396 396
397 397 def _create_repo(self, repo_name, repo_type, description, owner,
398 398 private=False, clone_uri=None, repo_group=None,
399 399 landing_rev='rev:tip', fork_of=None,
400 400 copy_fork_permissions=False, enable_statistics=False,
401 401 enable_locking=False, enable_downloads=False,
402 402 copy_group_permissions=False,
403 403 state=Repository.STATE_PENDING):
404 404 """
405 405 Create a repository inside the database with PENDING state. This
406 406 should only be executed by create(), with the exception of importing
407 407 existing repos.
408 408 """
409 409 from rhodecode.model.scm import ScmModel
410 410
411 411 owner = self._get_user(owner)
412 412 fork_of = self._get_repo(fork_of)
413 413 repo_group = self._get_repo_group(safe_int(repo_group))
414 414
415 415 try:
416 416 repo_name = safe_unicode(repo_name)
417 417 description = safe_unicode(description)
418 418 # repo name is just the name of the repository,
419 419 # while repo_name_full is a fully qualified name that combines
420 420 # the name with the path of the group
421 421 repo_name_full = repo_name
422 422 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
423 423
424 424 new_repo = Repository()
425 425 new_repo.repo_state = state
426 426 new_repo.enable_statistics = False
427 427 new_repo.repo_name = repo_name_full
428 428 new_repo.repo_type = repo_type
429 429 new_repo.user = owner
430 430 new_repo.group = repo_group
431 431 new_repo.description = description or repo_name
432 432 new_repo.private = private
433 433 new_repo.archived = False
434 434 new_repo.clone_uri = clone_uri
435 435 new_repo.landing_rev = landing_rev
436 436
437 437 new_repo.enable_statistics = enable_statistics
438 438 new_repo.enable_locking = enable_locking
439 439 new_repo.enable_downloads = enable_downloads
440 440
441 441 if repo_group:
442 442 new_repo.enable_locking = repo_group.enable_locking
443 443
444 444 if fork_of:
445 445 parent_repo = fork_of
446 446 new_repo.fork = parent_repo
447 447
448 448 events.trigger(events.RepoPreCreateEvent(new_repo))
449 449
450 450 self.sa.add(new_repo)
451 451
452 452 EMPTY_PERM = 'repository.none'
453 453 if fork_of and copy_fork_permissions:
454 454 repo = fork_of
455 455 user_perms = UserRepoToPerm.query() \
456 456 .filter(UserRepoToPerm.repository == repo).all()
457 457 group_perms = UserGroupRepoToPerm.query() \
458 458 .filter(UserGroupRepoToPerm.repository == repo).all()
459 459
460 460 for perm in user_perms:
461 461 UserRepoToPerm.create(
462 462 perm.user, new_repo, perm.permission)
463 463
464 464 for perm in group_perms:
465 465 UserGroupRepoToPerm.create(
466 466 perm.users_group, new_repo, perm.permission)
467 467 # in case we copy permissions and also set this repo to private
468 468 # override the default user permission to make it a private repo
469 469 if private:
470 470 RepoModel(self.sa).grant_user_permission(
471 471 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
472 472
473 473 elif repo_group and copy_group_permissions:
474 474 user_perms = UserRepoGroupToPerm.query() \
475 475 .filter(UserRepoGroupToPerm.group == repo_group).all()
476 476
477 477 group_perms = UserGroupRepoGroupToPerm.query() \
478 478 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
479 479
480 480 for perm in user_perms:
481 481 perm_name = perm.permission.permission_name.replace(
482 482 'group.', 'repository.')
483 483 perm_obj = Permission.get_by_key(perm_name)
484 484 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
485 485
486 486 for perm in group_perms:
487 487 perm_name = perm.permission.permission_name.replace(
488 488 'group.', 'repository.')
489 489 perm_obj = Permission.get_by_key(perm_name)
490 490 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
491 491
492 492 if private:
493 493 RepoModel(self.sa).grant_user_permission(
494 494 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
495 495
496 496 else:
497 497 perm_obj = self._create_default_perms(new_repo, private)
498 498 self.sa.add(perm_obj)
499 499
500 500 # now automatically start following this repository as owner
501 501 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
502 502
503 503 # we need to flush here in order to check that the database won't
504 504 # throw any exceptions; create filesystem dirs at the very end
505 505 self.sa.flush()
506 506 events.trigger(events.RepoCreateEvent(new_repo))
507 507 return new_repo
508 508
509 509 except Exception:
510 510 log.error(traceback.format_exc())
511 511 raise
512 512
513 513 def create(self, form_data, cur_user):
514 514 """
515 515 Create repository using celery tasks
516 516
517 517 :param form_data:
518 518 :param cur_user:
519 519 """
520 520 from rhodecode.lib.celerylib import tasks, run_task
521 521 return run_task(tasks.create_repo, form_data, cur_user)
522 522
523 523 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
524 524 perm_deletions=None, check_perms=True,
525 525 cur_user=None):
526 526 if not perm_additions:
527 527 perm_additions = []
528 528 if not perm_updates:
529 529 perm_updates = []
530 530 if not perm_deletions:
531 531 perm_deletions = []
532 532
533 533 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
534 534
535 535 changes = {
536 536 'added': [],
537 537 'updated': [],
538 538 'deleted': []
539 539 }
540 540 # update permissions
541 541 for member_id, perm, member_type in perm_updates:
542 542 member_id = int(member_id)
543 543 if member_type == 'user':
544 544 member_name = User.get(member_id).username
545 545 # this also updates the current one if found
546 546 self.grant_user_permission(
547 547 repo=repo, user=member_id, perm=perm)
548 548 elif member_type == 'user_group':
549 549 # check if we have permissions to alter this usergroup
550 550 member_name = UserGroup.get(member_id).users_group_name
551 551 if not check_perms or HasUserGroupPermissionAny(
552 552 *req_perms)(member_name, user=cur_user):
553 553 self.grant_user_group_permission(
554 554 repo=repo, group_name=member_id, perm=perm)
555 555 else:
556 556 raise ValueError("member_type must be 'user' or 'user_group' "
557 557 "got {} instead".format(member_type))
558 558 changes['updated'].append({'type': member_type, 'id': member_id,
559 559 'name': member_name, 'new_perm': perm})
560 560
561 561 # set new permissions
562 562 for member_id, perm, member_type in perm_additions:
563 563 member_id = int(member_id)
564 564 if member_type == 'user':
565 565 member_name = User.get(member_id).username
566 566 self.grant_user_permission(
567 567 repo=repo, user=member_id, perm=perm)
568 568 elif member_type == 'user_group':
569 569 # check if we have permissions to alter this usergroup
570 570 member_name = UserGroup.get(member_id).users_group_name
571 571 if not check_perms or HasUserGroupPermissionAny(
572 572 *req_perms)(member_name, user=cur_user):
573 573 self.grant_user_group_permission(
574 574 repo=repo, group_name=member_id, perm=perm)
575 575 else:
576 576 raise ValueError("member_type must be 'user' or 'user_group' "
577 577 "got {} instead".format(member_type))
578 578
579 579 changes['added'].append({'type': member_type, 'id': member_id,
580 580 'name': member_name, 'new_perm': perm})
581 581 # delete permissions
582 582 for member_id, perm, member_type in perm_deletions:
583 583 member_id = int(member_id)
584 584 if member_type == 'user':
585 585 member_name = User.get(member_id).username
586 586 self.revoke_user_permission(repo=repo, user=member_id)
587 587 elif member_type == 'user_group':
588 588 # check if we have permissions to alter this usergroup
589 589 member_name = UserGroup.get(member_id).users_group_name
590 590 if not check_perms or HasUserGroupPermissionAny(
591 591 *req_perms)(member_name, user=cur_user):
592 592 self.revoke_user_group_permission(
593 593 repo=repo, group_name=member_id)
594 594 else:
595 595 raise ValueError("member_type must be 'user' or 'user_group' "
596 596 "got {} instead".format(member_type))
597 597
598 598 changes['deleted'].append({'type': member_type, 'id': member_id,
599 599 'name': member_name, 'new_perm': perm})
600 600 return changes
601 601
602 602 def create_fork(self, form_data, cur_user):
603 603 """
604 604 Simple wrapper into executing celery task for fork creation
605 605
606 606 :param form_data:
607 607 :param cur_user:
608 608 """
609 609 from rhodecode.lib.celerylib import tasks, run_task
610 610 return run_task(tasks.create_repo_fork, form_data, cur_user)
611 611
612 612 def archive(self, repo):
613 613 """
614 614 Archive given repository. Set archive flag.
615 615
616 616 :param repo:
617 617 """
618 618 repo = self._get_repo(repo)
619 619 if repo:
620 620
621 621 try:
622 622 repo.archived = True
623 623 self.sa.add(repo)
624 624 self.sa.commit()
625 625 except Exception:
626 626 log.error(traceback.format_exc())
627 627 raise
628 628
629 629 def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
630 630 """
631 631 Delete given repository; the forks parameter defines what to do with
632 632 attached forks. Throws AttachedForksError if the deleted repo has
633 633 attached forks.
634 634
635 635 :param repo:
636 636 :param forks: str 'delete' or 'detach'
637 637 :param pull_requests: str 'delete' or None
638 638 :param fs_remove: remove (archive) the repo from the filesystem
639 639 """
640 640 if not cur_user:
641 641 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
642 642 repo = self._get_repo(repo)
643 643 if repo:
644 644 if forks == 'detach':
645 645 for r in repo.forks:
646 646 r.fork = None
647 647 self.sa.add(r)
648 648 elif forks == 'delete':
649 649 for r in repo.forks:
650 650 self.delete(r, forks='delete')
651 651 elif [f for f in repo.forks]:
652 652 raise AttachedForksError()
653 653
654 654 # check for pull requests
655 655 pr_sources = repo.pull_requests_source
656 656 pr_targets = repo.pull_requests_target
657 657 if pull_requests != 'delete' and (pr_sources or pr_targets):
658 658 raise AttachedPullRequestsError()
659 659
660 660 old_repo_dict = repo.get_dict()
661 661 events.trigger(events.RepoPreDeleteEvent(repo))
662 662 try:
663 663 self.sa.delete(repo)
664 664 if fs_remove:
665 665 self._delete_filesystem_repo(repo)
666 666 else:
667 667 log.debug('skipping removal from filesystem')
668 668 old_repo_dict.update({
669 669 'deleted_by': cur_user,
670 670 'deleted_on': time.time(),
671 671 })
672 672 log_delete_repository(**old_repo_dict)
673 673 events.trigger(events.RepoDeleteEvent(repo))
674 674 except Exception:
675 675 log.error(traceback.format_exc())
676 676 raise
677 677
678 678 def grant_user_permission(self, repo, user, perm):
679 679 """
680 680 Grant permission for user on given repository, or update existing one
681 681 if found
682 682
683 683 :param repo: Instance of Repository, repository_id, or repository name
684 684 :param user: Instance of User, user_id or username
685 685 :param perm: Instance of Permission, or permission_name
686 686 """
687 687 user = self._get_user(user)
688 688 repo = self._get_repo(repo)
689 689 permission = self._get_perm(perm)
690 690
691 691 # check if we have that permission already
692 692 obj = self.sa.query(UserRepoToPerm) \
693 693 .filter(UserRepoToPerm.user == user) \
694 694 .filter(UserRepoToPerm.repository == repo) \
695 695 .scalar()
696 696 if obj is None:
697 697 # create new !
698 698 obj = UserRepoToPerm()
699 699 obj.repository = repo
700 700 obj.user = user
701 701 obj.permission = permission
702 702 self.sa.add(obj)
703 703 log.debug('Granted perm %s to %s on %s', perm, user, repo)
704 704 action_logger_generic(
705 705 'granted permission: {} to user: {} on repo: {}'.format(
706 706 perm, user, repo), namespace='security.repo')
707 707 return obj
708 708
709 709 def revoke_user_permission(self, repo, user):
710 710 """
711 711 Revoke permission for user on given repository
712 712
713 713 :param repo: Instance of Repository, repository_id, or repository name
714 714 :param user: Instance of User, user_id or username
715 715 """
716 716
717 717 user = self._get_user(user)
718 718 repo = self._get_repo(repo)
719 719
720 720 obj = self.sa.query(UserRepoToPerm) \
721 721 .filter(UserRepoToPerm.repository == repo) \
722 722 .filter(UserRepoToPerm.user == user) \
723 723 .scalar()
724 724 if obj:
725 725 self.sa.delete(obj)
726 726 log.debug('Revoked perm on %s from %s', repo, user)
727 727 action_logger_generic(
728 728 'revoked permission from user: {} on repo: {}'.format(
729 729 user, repo), namespace='security.repo')
730 730
731 731 def grant_user_group_permission(self, repo, group_name, perm):
732 732 """
733 733 Grant permission for user group on given repository, or update
734 734 existing one if found
735 735
736 736 :param repo: Instance of Repository, repository_id, or repository name
737 737 :param group_name: Instance of UserGroup, users_group_id,
738 738 or user group name
739 739 :param perm: Instance of Permission, or permission_name
740 740 """
741 741 repo = self._get_repo(repo)
742 742 group_name = self._get_user_group(group_name)
743 743 permission = self._get_perm(perm)
744 744
745 745 # check if we have that permission already
746 746 obj = self.sa.query(UserGroupRepoToPerm) \
747 747 .filter(UserGroupRepoToPerm.users_group == group_name) \
748 748 .filter(UserGroupRepoToPerm.repository == repo) \
749 749 .scalar()
750 750
751 751 if obj is None:
752 752 # create new
753 753 obj = UserGroupRepoToPerm()
754 754
755 755 obj.repository = repo
756 756 obj.users_group = group_name
757 757 obj.permission = permission
758 758 self.sa.add(obj)
759 759 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
760 760 action_logger_generic(
761 761 'granted permission: {} to usergroup: {} on repo: {}'.format(
762 762 perm, group_name, repo), namespace='security.repo')
763 763
764 764 return obj
765 765
766 766 def revoke_user_group_permission(self, repo, group_name):
767 767 """
768 768 Revoke permission for user group on given repository
769 769
770 770 :param repo: Instance of Repository, repository_id, or repository name
771 771 :param group_name: Instance of UserGroup, users_group_id,
772 772 or user group name
773 773 """
774 774 repo = self._get_repo(repo)
775 775 group_name = self._get_user_group(group_name)
776 776
777 777 obj = self.sa.query(UserGroupRepoToPerm) \
778 778 .filter(UserGroupRepoToPerm.repository == repo) \
779 779 .filter(UserGroupRepoToPerm.users_group == group_name) \
780 780 .scalar()
781 781 if obj:
782 782 self.sa.delete(obj)
783 783 log.debug('Revoked perm on %s from %s', repo, group_name)
784 784 action_logger_generic(
785 785 'revoked permission from usergroup: {} on repo: {}'.format(
786 786 group_name, repo), namespace='security.repo')
787 787
788 788 def delete_stats(self, repo_name):
789 789 """
790 790 removes stats for given repo
791 791
792 792 :param repo_name:
793 793 """
794 794 repo = self._get_repo(repo_name)
795 795 try:
796 796 obj = self.sa.query(Statistics) \
797 797 .filter(Statistics.repository == repo).scalar()
798 798 if obj:
799 799 self.sa.delete(obj)
800 800 except Exception:
801 801 log.error(traceback.format_exc())
802 802 raise
803 803
804 804 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
805 805 field_type='str', field_desc=''):
806 806
807 807 repo = self._get_repo(repo_name)
808 808
809 809 new_field = RepositoryField()
810 810 new_field.repository = repo
811 811 new_field.field_key = field_key
812 812 new_field.field_type = field_type # python type
813 813 new_field.field_value = field_value
814 814 new_field.field_desc = field_desc
815 815 new_field.field_label = field_label
816 816 self.sa.add(new_field)
817 817 return new_field
818 818
819 819 def delete_repo_field(self, repo_name, field_key):
820 820 repo = self._get_repo(repo_name)
821 821 field = RepositoryField.get_by_key_name(field_key, repo)
822 822 if field:
823 823 self.sa.delete(field)
824 824
825 825 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
826 826 clone_uri=None, repo_store_location=None,
827 827 use_global_config=False):
828 828 """
829 829 makes a repository on the filesystem. It's group aware, meaning it'll
830 830 create a repository within a group and alter the paths according to
831 831 the group location
832 832
833 833 :param repo_name:
834 834 :param alias:
835 835 :param parent:
836 836 :param clone_uri:
837 837 :param repo_store_location:
838 838 """
839 839 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
840 840 from rhodecode.model.scm import ScmModel
841 841
842 842 if Repository.NAME_SEP in repo_name:
843 843 raise ValueError(
844 844 'repo_name must not contain groups, got `%s`' % repo_name)
845 845
846 846 if isinstance(repo_group, RepoGroup):
847 847 new_parent_path = os.sep.join(repo_group.full_path_splitted)
848 848 else:
849 849 new_parent_path = repo_group or ''
850 850
851 851 if repo_store_location:
852 852 _paths = [repo_store_location]
853 853 else:
854 854 _paths = [self.repos_path, new_parent_path, repo_name]
855 855 # we need to make it str for mercurial
856 856 repo_path = os.path.join(*map(safe_str, _paths))
857 857
858 858 # check that this path is not already a repository
859 859 if is_valid_repo(repo_path, self.repos_path):
860 860 raise Exception('This path %s is a valid repository' % repo_path)
861 861
862 862 # check if this path is a group
863 863 if is_valid_repo_group(repo_path, self.repos_path):
864 864 raise Exception('This path %s is a valid group' % repo_path)
865 865
866 866 log.info('creating repo %s in %s from url: `%s`',
867 867 repo_name, safe_unicode(repo_path),
868 868 obfuscate_url_pw(clone_uri))
869 869
870 870 backend = get_backend(repo_type)
871 871
872 872 config_repo = None if use_global_config else repo_name
873 873 if config_repo and new_parent_path:
874 874 config_repo = Repository.NAME_SEP.join(
875 875 (new_parent_path, config_repo))
876 876 config = make_db_config(clear_session=False, repo=config_repo)
877 877 config.set('extensions', 'largefiles', '')
878 878
879 879 # patch and reset hooks section of UI config to not run any
880 880 # hooks on creating remote repo
881 881 config.clear_section('hooks')
882 882
883 883 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
884 884 if repo_type == 'git':
885 885 repo = backend(
886 repo_path, config=config, create=True, src_url=clone_uri,
887 bare=True)
886 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
887 with_wire={"cache": False})
888 888 else:
889 889 repo = backend(
890 repo_path, config=config, create=True, src_url=clone_uri)
890 repo_path, config=config, create=True, src_url=clone_uri,
891 with_wire={"cache": False})
891 892
892 893 repo.install_hooks()
893 894
894 895 log.debug('Created repo %s with %s backend',
895 896 safe_unicode(repo_name), safe_unicode(repo_type))
896 897 return repo
897 898
898 899 def _rename_filesystem_repo(self, old, new):
899 900 """
900 901 renames repository on filesystem
901 902
902 903 :param old: old name
903 904 :param new: new name
904 905 """
905 906 log.info('renaming repo from %s to %s', old, new)
906 907
907 908 old_path = os.path.join(self.repos_path, old)
908 909 new_path = os.path.join(self.repos_path, new)
909 910 if os.path.isdir(new_path):
910 911 raise Exception(
911 912 'Was trying to rename to already existing dir %s' % new_path
912 913 )
913 914 shutil.move(old_path, new_path)
914 915
915 916 def _delete_filesystem_repo(self, repo):
916 917 """
917 918 removes repo from filesystem; the removal is actually made by
918 919 adding a rm__ prefix to the dir and renaming the internal .hg/.git
919 920 dirs so the repository is no longer valid for rhodecode. It can be
920 921 undeleted later on by reverting the renames on this repository.
921 922
922 923 :param repo: repo object
923 924 """
924 925 rm_path = os.path.join(self.repos_path, repo.repo_name)
925 926 repo_group = repo.group
926 927 log.info("Removing repository %s", rm_path)
927 928 # disable hg/git internals so it doesn't get detected as a repo
928 929 alias = repo.repo_type
929 930
930 931 config = make_db_config(clear_session=False)
931 932 config.set('extensions', 'largefiles', '')
932 933 bare = getattr(repo.scm_instance(config=config), 'bare', False)
933 934
934 935 # skip this for bare git repos
935 936 if not bare:
936 937 # disable VCS repo
937 938 vcs_path = os.path.join(rm_path, '.%s' % alias)
938 939 if os.path.exists(vcs_path):
939 940 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
940 941
941 942 _now = datetime.datetime.now()
942 943 _ms = str(_now.microsecond).rjust(6, '0')
943 944 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
944 945 repo.just_name)
945 946 if repo_group:
946 947 # if repository is in group, prefix the removal path with the group
947 948 args = repo_group.full_path_splitted + [_d]
948 949 _d = os.path.join(*args)
949 950
950 951 if os.path.isdir(rm_path):
951 952 shutil.move(rm_path, os.path.join(self.repos_path, _d))
952 953
953 954 # finally cleanup diff-cache if it exists
954 955 cached_diffs_dir = repo.cached_diffs_dir
955 956 if os.path.isdir(cached_diffs_dir):
956 957 shutil.rmtree(cached_diffs_dir)
957 958
958 959
959 960 class ReadmeFinder:
960 961 """
961 962 Utility which knows how to find a readme for a specific commit.
962 963
963 964 The main idea is that this is a configurable algorithm. When creating an
964 965 instance you can define parameters, currently only the `default_renderer`.
965 966 Based on this configuration the method :meth:`search` behaves slightly
966 967 differently.
967 968 """
968 969
969 970 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
970 971 path_re = re.compile(r'^docs?', re.IGNORECASE)
971 972
972 973 default_priorities = {
973 974 None: 0,
974 975 '.text': 2,
975 976 '.txt': 3,
976 977 '.rst': 1,
977 978 '.rest': 2,
978 979 '.md': 1,
979 980 '.mkdn': 2,
980 981 '.mdown': 3,
981 982 '.markdown': 4,
982 983 }
983 984
984 985 path_priority = {
985 986 'doc': 0,
986 987 'docs': 1,
987 988 }
988 989
989 990 FALLBACK_PRIORITY = 99
990 991
991 992 RENDERER_TO_EXTENSION = {
992 993 'rst': ['.rst', '.rest'],
993 994 'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
994 995 }
995 996
996 997 def __init__(self, default_renderer=None):
997 998 self._default_renderer = default_renderer
998 999 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
999 1000 default_renderer, [])
1000 1001
1001 1002 def search(self, commit, path='/'):
1002 1003 """
1003 1004 Find a readme in the given `commit`.
1004 1005 """
1005 1006 nodes = commit.get_nodes(path)
1006 1007 matches = self._match_readmes(nodes)
1007 1008 matches = self._sort_according_to_priority(matches)
1008 1009 if matches:
1009 1010 return matches[0].node
1010 1011
1011 1012 paths = self._match_paths(nodes)
1012 1013 paths = self._sort_paths_according_to_priority(paths)
1013 1014 for path in paths:
1014 1015 match = self.search(commit, path=path)
1015 1016 if match:
1016 1017 return match
1017 1018
1018 1019 return None
1019 1020
1020 1021 def _match_readmes(self, nodes):
1021 1022 for node in nodes:
1022 1023 if not node.is_file():
1023 1024 continue
1024 1025 path = node.path.rsplit('/', 1)[-1]
1025 1026 match = self.readme_re.match(path)
1026 1027 if match:
1027 1028 extension = match.group(1)
1028 1029 yield ReadmeMatch(node, match, self._priority(extension))
1029 1030
1030 1031 def _match_paths(self, nodes):
1031 1032 for node in nodes:
1032 1033 if not node.is_dir():
1033 1034 continue
1034 1035 match = self.path_re.match(node.path)
1035 1036 if match:
1036 1037 yield node.path
1037 1038
1038 1039 def _priority(self, extension):
1039 1040 renderer_priority = (
1040 1041 0 if extension in self._renderer_extensions else 1)
1041 1042 extension_priority = self.default_priorities.get(
1042 1043 extension, self.FALLBACK_PRIORITY)
1043 1044 return (renderer_priority, extension_priority)
1044 1045
1045 1046 def _sort_according_to_priority(self, matches):
1046 1047
1047 1048 def priority_and_path(match):
1048 1049 return (match.priority, match.path)
1049 1050
1050 1051 return sorted(matches, key=priority_and_path)
1051 1052
1052 1053 def _sort_paths_according_to_priority(self, paths):
1053 1054
1054 1055 def priority_and_path(path):
1055 1056 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1056 1057
1057 1058 return sorted(paths, key=priority_and_path)
1058 1059
1059 1060
1060 1061 class ReadmeMatch:
1061 1062
1062 1063 def __init__(self, node, match, priority):
1063 1064 self.node = node
1064 1065 self._match = match
1065 1066 self.priority = priority
1066 1067
1067 1068 @property
1068 1069 def path(self):
1069 1070 return self.node.path
1070 1071
1071 1072 def __repr__(self):
1072 1073 return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority)
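For context, a hedged usage sketch of the ReadmeFinder above; `commit` is assumed to be a commit object obtained elsewhere (e.g. via a repo's scm_instance().get_commit(...), as in the second file of this diff):

    # prefer markdown readmes; other extensions fall back to the priority table
    finder = ReadmeFinder(default_renderer='markdown')
    readme_node = finder.search(commit)  # scans '/' first, then doc/docs subdirs
    if readme_node:
        print(readme_node.path)  # e.g. 'README.md'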
@@ -1,941 +1,942 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Scm model for RhodeCode
23 23 """
24 24
25 25 import os.path
26 26 import traceback
27 27 import logging
28 28 import cStringIO
29 29
30 30 from sqlalchemy import func
31 31 from zope.cachedescriptors.property import Lazy as LazyProperty
32 32
33 33 import rhodecode
34 34 from rhodecode.lib.vcs import get_backend
35 35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
36 36 from rhodecode.lib.vcs.nodes import FileNode
37 37 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 38 from rhodecode.lib import helpers as h, rc_cache
39 39 from rhodecode.lib.auth import (
40 40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
41 41 HasUserGroupPermissionAny)
42 42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
43 43 from rhodecode.lib import hooks_utils
44 44 from rhodecode.lib.utils import (
45 45 get_filesystem_repos, make_db_config)
46 46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
47 47 from rhodecode.lib.system_info import get_system_info
48 48 from rhodecode.model import BaseModel
49 49 from rhodecode.model.db import (
50 50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 51 PullRequest)
52 52 from rhodecode.model.settings import VcsSettingsModel
53 53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
58 58 class UserTemp(object):
59 59 def __init__(self, user_id):
60 60 self.user_id = user_id
61 61
62 62 def __repr__(self):
63 63 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
64 64
65 65
66 66 class RepoTemp(object):
67 67 def __init__(self, repo_id):
68 68 self.repo_id = repo_id
69 69
70 70 def __repr__(self):
71 71 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
72 72
73 73
74 74 class SimpleCachedRepoList(object):
75 75 """
76 76 Lighter version of repo iteration, without the scm initialisation
77 77 and with cache usage
78 78 """
79 79 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
80 80 self.db_repo_list = db_repo_list
81 81 self.repos_path = repos_path
82 82 self.order_by = order_by
83 83 self.reversed = (order_by or '').startswith('-')
84 84 if not perm_set:
85 85 perm_set = ['repository.read', 'repository.write',
86 86 'repository.admin']
87 87 self.perm_set = perm_set
88 88
89 89 def __len__(self):
90 90 return len(self.db_repo_list)
91 91
92 92 def __repr__(self):
93 93 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
94 94
95 95 def __iter__(self):
96 96 for dbr in self.db_repo_list:
97 97 # check permission at this level
98 98 has_perm = HasRepoPermissionAny(*self.perm_set)(
99 99 dbr.repo_name, 'SimpleCachedRepoList check')
100 100 if not has_perm:
101 101 continue
102 102
103 103 tmp_d = {
104 104 'name': dbr.repo_name,
105 105 'dbrepo': dbr.get_dict(),
106 106 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
107 107 }
108 108 yield tmp_d
109 109
110 110
111 111 class _PermCheckIterator(object):
112 112
113 113 def __init__(
114 114 self, obj_list, obj_attr, perm_set, perm_checker,
115 115 extra_kwargs=None):
116 116 """
117 117 Creates an iterator from a given list of objects, additionally
118 118 checking permissions for them against the perm_set var
119 119
120 120 :param obj_list: list of db objects
121 121 :param obj_attr: attribute of object to pass into perm_checker
122 122 :param perm_set: list of permissions to check
123 123 :param perm_checker: callable to check permissions against
124 124 """
125 125 self.obj_list = obj_list
126 126 self.obj_attr = obj_attr
127 127 self.perm_set = perm_set
128 128 self.perm_checker = perm_checker
129 129 self.extra_kwargs = extra_kwargs or {}
130 130
131 131 def __len__(self):
132 132 return len(self.obj_list)
133 133
134 134 def __repr__(self):
135 135 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
136 136
137 137 def __iter__(self):
138 138 checker = self.perm_checker(*self.perm_set)
139 139 for db_obj in self.obj_list:
140 140 # check permission at this level
141 141 name = getattr(db_obj, self.obj_attr, None)
142 142 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
143 143 continue
144 144
145 145 yield db_obj
146 146
147 147
148 148 class RepoList(_PermCheckIterator):
149 149
150 150 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
151 151 if not perm_set:
152 152 perm_set = [
153 153 'repository.read', 'repository.write', 'repository.admin']
154 154
155 155 super(RepoList, self).__init__(
156 156 obj_list=db_repo_list,
157 157 obj_attr='repo_name', perm_set=perm_set,
158 158 perm_checker=HasRepoPermissionAny,
159 159 extra_kwargs=extra_kwargs)
160 160
161 161
162 162 class RepoGroupList(_PermCheckIterator):
163 163
164 164 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
165 165 if not perm_set:
166 166 perm_set = ['group.read', 'group.write', 'group.admin']
167 167
168 168 super(RepoGroupList, self).__init__(
169 169 obj_list=db_repo_group_list,
170 170 obj_attr='group_name', perm_set=perm_set,
171 171 perm_checker=HasRepoGroupPermissionAny,
172 172 extra_kwargs=extra_kwargs)
173 173
174 174
175 175 class UserGroupList(_PermCheckIterator):
176 176
177 177 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
178 178 if not perm_set:
179 179 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
180 180
181 181 super(UserGroupList, self).__init__(
182 182 obj_list=db_user_group_list,
183 183 obj_attr='users_group_name', perm_set=perm_set,
184 184 perm_checker=HasUserGroupPermissionAny,
185 185 extra_kwargs=extra_kwargs)
186 186
187 187
188 188 class ScmModel(BaseModel):
189 189 """
190 190 Generic Scm Model
191 191 """
192 192
193 193 @LazyProperty
194 194 def repos_path(self):
195 195 """
196 196 Gets the repositories root path from database
197 197 """
198 198
199 199 settings_model = VcsSettingsModel(sa=self.sa)
200 200 return settings_model.get_repos_location()
201 201
202 202 def repo_scan(self, repos_path=None):
203 203 """
204 204 Listing of repositories in given path. This path should not be a
205 205 repository itself. Return a dictionary of repository objects
206 206
207 207 :param repos_path: path to directory containing repositories
208 208 """
209 209
210 210 if repos_path is None:
211 211 repos_path = self.repos_path
212 212
213 213 log.info('scanning for repositories in %s', repos_path)
214 214
215 215 config = make_db_config()
216 216 config.set('extensions', 'largefiles', '')
217 217 repos = {}
218 218
219 219 for name, path in get_filesystem_repos(repos_path, recursive=True):
220 220 # names need to be decomposed and put back together using the /
221 221 # since this is the internal storage separator for rhodecode
222 222 name = Repository.normalize_repo_name(name)
223 223
224 224 try:
225 225 if name in repos:
226 226 raise RepositoryError('Duplicate repository name %s '
227 227 'found in %s' % (name, path))
228 228 elif path[0] in rhodecode.BACKENDS:
229 klass = get_backend(path[0])
230 repos[name] = klass(path[1], config=config)
229 backend = get_backend(path[0])
230 repos[name] = backend(path[1], config=config,
231 with_wire={"cache": False})
231 232 except OSError:
232 233 continue
233 234 log.debug('found %s paths with repositories', len(repos))
234 235 return repos
235 236
236 237 def get_repos(self, all_repos=None, sort_key=None):
237 238 """
238 239 Get all repositories from the db, create a backend instance for each
239 240 repo, and fill that backend with information from the database
240 241
241 242 :param all_repos: list of repository names as strings
242 243 give specific repositories list, good for filtering
243 244
244 245 :param sort_key: initial sorting of repositories
245 246 """
246 247 if all_repos is None:
247 248 all_repos = self.sa.query(Repository)\
248 249 .filter(Repository.group_id == None)\
249 250 .order_by(func.lower(Repository.repo_name)).all()
250 251 repo_iter = SimpleCachedRepoList(
251 252 all_repos, repos_path=self.repos_path, order_by=sort_key)
252 253 return repo_iter
253 254
254 255 def get_repo_groups(self, all_groups=None):
255 256 if all_groups is None:
256 257 all_groups = RepoGroup.query()\
257 258 .filter(RepoGroup.group_parent_id == None).all()
258 259 return [x for x in RepoGroupList(all_groups)]
259 260
260 261 def mark_for_invalidation(self, repo_name, delete=False):
261 262 """
262 263 Mark caches of this repo invalid in the database. `delete` flag
263 264 removes the cache entries
264 265
265 266 :param repo_name: the repo_name for which caches should be marked
266 267 invalid, or deleted
267 268 :param delete: delete the entry keys instead of setting bool
268 269 flag on them, and also purge caches used by the dogpile
269 270 """
270 271 repo = Repository.get_by_repo_name(repo_name)
271 272
272 273 if repo:
273 274 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
274 275 repo_id=repo.repo_id)
275 276 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
276 277
277 278 repo_id = repo.repo_id
278 279 config = repo._config
279 280 config.set('extensions', 'largefiles', '')
280 281 repo.update_commit_cache(config=config, cs_cache=None)
281 282 if delete:
282 283 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
283 284 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
284 285
285 286 def toggle_following_repo(self, follow_repo_id, user_id):
286 287
287 288 f = self.sa.query(UserFollowing)\
288 289 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
289 290 .filter(UserFollowing.user_id == user_id).scalar()
290 291
291 292 if f is not None:
292 293 try:
293 294 self.sa.delete(f)
294 295 return
295 296 except Exception:
296 297 log.error(traceback.format_exc())
297 298 raise
298 299
299 300 try:
300 301 f = UserFollowing()
301 302 f.user_id = user_id
302 303 f.follows_repo_id = follow_repo_id
303 304 self.sa.add(f)
304 305 except Exception:
305 306 log.error(traceback.format_exc())
306 307 raise
307 308
308 309 def toggle_following_user(self, follow_user_id, user_id):
309 310 f = self.sa.query(UserFollowing)\
310 311 .filter(UserFollowing.follows_user_id == follow_user_id)\
311 312 .filter(UserFollowing.user_id == user_id).scalar()
312 313
313 314 if f is not None:
314 315 try:
315 316 self.sa.delete(f)
316 317 return
317 318 except Exception:
318 319 log.error(traceback.format_exc())
319 320 raise
320 321
321 322 try:
322 323 f = UserFollowing()
323 324 f.user_id = user_id
324 325 f.follows_user_id = follow_user_id
325 326 self.sa.add(f)
326 327 except Exception:
327 328 log.error(traceback.format_exc())
328 329 raise
329 330
330 331 def is_following_repo(self, repo_name, user_id, cache=False):
331 332 r = self.sa.query(Repository)\
332 333 .filter(Repository.repo_name == repo_name).scalar()
333 334
334 335 f = self.sa.query(UserFollowing)\
335 336 .filter(UserFollowing.follows_repository == r)\
336 337 .filter(UserFollowing.user_id == user_id).scalar()
337 338
338 339 return f is not None
339 340
340 341 def is_following_user(self, username, user_id, cache=False):
341 342 u = User.get_by_username(username)
342 343
343 344 f = self.sa.query(UserFollowing)\
344 345 .filter(UserFollowing.follows_user == u)\
345 346 .filter(UserFollowing.user_id == user_id).scalar()
346 347
347 348 return f is not None
348 349
349 350 def get_followers(self, repo):
350 351 repo = self._get_repo(repo)
351 352
352 353 return self.sa.query(UserFollowing)\
353 354 .filter(UserFollowing.follows_repository == repo).count()
354 355
355 356 def get_forks(self, repo):
356 357 repo = self._get_repo(repo)
357 358 return self.sa.query(Repository)\
358 359 .filter(Repository.fork == repo).count()
359 360
360 361 def get_pull_requests(self, repo):
361 362 repo = self._get_repo(repo)
362 363 return self.sa.query(PullRequest)\
363 364 .filter(PullRequest.target_repo == repo)\
364 365 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
365 366
366 367 def mark_as_fork(self, repo, fork, user):
367 368 repo = self._get_repo(repo)
368 369 fork = self._get_repo(fork)
369 370 if fork and repo.repo_id == fork.repo_id:
370 371 raise Exception("Cannot set repository as fork of itself")
371 372
372 373 if fork and repo.repo_type != fork.repo_type:
373 374 raise RepositoryError(
374 375 "Cannot set repository as fork of repository with other type")
375 376
376 377 repo.fork = fork
377 378 self.sa.add(repo)
378 379 return repo
379 380
380 381 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
381 382 dbrepo = self._get_repo(repo)
382 383 remote_uri = remote_uri or dbrepo.clone_uri
383 384 if not remote_uri:
384 385 raise Exception("This repository doesn't have a clone uri")
385 386
386 387 repo = dbrepo.scm_instance(cache=False)
387 388 repo.config.clear_section('hooks')
388 389
389 390 try:
390 391 # NOTE(marcink): add extra validation so we skip invalid urls
391 392 # this is because these tasks can be executed via the scheduler
392 393 # without proper validation of remote_uri
393 394 if validate_uri:
394 395 config = make_db_config(clear_session=False)
395 396 url_validator(remote_uri, dbrepo.repo_type, config)
396 397 except InvalidCloneUrl:
397 398 raise
398 399
399 400 repo_name = dbrepo.repo_name
400 401 try:
401 402 # TODO: we need to make sure those operations call proper hooks !
402 403 repo.fetch(remote_uri)
403 404
404 405 self.mark_for_invalidation(repo_name)
405 406 except Exception:
406 407 log.error(traceback.format_exc())
407 408 raise
408 409
409 410 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
410 411 dbrepo = self._get_repo(repo)
411 412 remote_uri = remote_uri or dbrepo.push_uri
412 413 if not remote_uri:
413 414 raise Exception("This repository doesn't have a clone uri")
414 415
415 416 repo = dbrepo.scm_instance(cache=False)
416 417 repo.config.clear_section('hooks')
417 418
418 419 try:
419 420 # NOTE(marcink): add extra validation so we skip invalid urls
420 421 # this is because these tasks can be executed via the scheduler
421 422 # without proper validation of remote_uri
422 423 if validate_uri:
423 424 config = make_db_config(clear_session=False)
424 425 url_validator(remote_uri, dbrepo.repo_type, config)
425 426 except InvalidCloneUrl:
426 427 raise
427 428
428 429 try:
429 430 repo.push(remote_uri)
430 431 except Exception:
431 432 log.error(traceback.format_exc())
432 433 raise
433 434
434 435 def commit_change(self, repo, repo_name, commit, user, author, message,
435 436 content, f_path):
436 437 """
437 438 Commits changes
438 439
439 440 :param repo: SCM instance
440 441
441 442 """
442 443 user = self._get_user(user)
443 444
444 445 # decoding here will ensure that we have properly encoded values;
445 446 # in any other case this will throw exceptions and deny the commit
446 447 content = safe_str(content)
447 448 path = safe_str(f_path)
448 449 # message and author need to be unicode;
449 450 # the proper backend should then translate that into the required type
450 451 message = safe_unicode(message)
451 452 author = safe_unicode(author)
452 453 imc = repo.in_memory_commit
453 454 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
454 455 try:
455 456 # TODO: handle pre-push action !
456 457 tip = imc.commit(
457 458 message=message, author=author, parents=[commit],
458 459 branch=commit.branch)
459 460 except Exception as e:
460 461 log.error(traceback.format_exc())
461 462 raise IMCCommitError(str(e))
462 463 finally:
463 464 # always clear caches, if commit fails we want fresh object also
464 465 self.mark_for_invalidation(repo_name)
465 466
466 467 # We trigger the post-push action
467 468 hooks_utils.trigger_post_push_hook(
468 469 username=user.username, action='push_local', hook_type='post_push',
469 470 repo_name=repo_name, repo_alias=repo.alias, commit_ids=[tip.raw_id])
470 471 return tip
471 472
472 473 def _sanitize_path(self, f_path):
473 474 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
473 474 raise NonRelativePathError('%s is not a relative path' % f_path)
475 476 if f_path:
476 477 f_path = os.path.normpath(f_path)
477 478 return f_path
478 479
479 480 def get_dirnode_metadata(self, request, commit, dir_node):
480 481 if not dir_node.is_dir():
481 482 return []
482 483
483 484 data = []
484 485 for node in dir_node:
485 486 if not node.is_file():
486 487 # we skip file-nodes
487 488 continue
488 489
489 490 last_commit = node.last_commit
490 491 last_commit_date = last_commit.date
491 492 data.append({
492 493 'name': node.name,
493 494 'size': h.format_byte_size_binary(node.size),
494 495 'modified_at': h.format_date(last_commit_date),
495 496 'modified_ts': last_commit_date.isoformat(),
496 497 'revision': last_commit.revision,
497 498 'short_id': last_commit.short_id,
498 499 'message': h.escape(last_commit.message),
499 500 'author': h.escape(last_commit.author),
500 501 'user_profile': h.gravatar_with_user(
501 502 request, last_commit.author),
502 503 })
503 504
504 505 return data
505 506
506 507 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
507 508 extended_info=False, content=False, max_file_bytes=None):
508 509 """
509 510 recursively walk the root dir and return all paths found in it,
510 511 based on the repository walk function
511 512
512 513 :param repo_name: name of repository
513 514 :param commit_id: commit id for which to list nodes
514 515 :param root_path: root path to list
515 516 :param flat: return paths as a plain list; if False, return dicts with descriptions
516 517 :param extended_info: show additional info such as md5, binary, size etc
517 518 :param content: add nodes content to the return data
518 519 :param max_file_bytes: will not return file contents over this limit
519 520
520 521 """
521 522 _files = list()
522 523 _dirs = list()
523 524 try:
524 525 _repo = self._get_repo(repo_name)
525 526 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
526 527 root_path = root_path.lstrip('/')
527 528 for __, dirs, files in commit.walk(root_path):
528 529
529 530 for f in files:
530 531 _content = None
531 532 _data = f_name = f.unicode_path
532 533
533 534 if not flat:
534 535 _data = {
535 536 "name": h.escape(f_name),
536 537 "type": "file",
537 538 }
538 539 if extended_info:
539 540 _data.update({
540 541 "md5": f.md5,
541 542 "binary": f.is_binary,
542 543 "size": f.size,
543 544 "extension": f.extension,
544 545 "mimetype": f.mimetype,
545 546 "lines": f.lines()[0]
546 547 })
547 548
548 549 if content:
549 550 over_size_limit = (max_file_bytes is not None
550 551 and f.size > max_file_bytes)
551 552 full_content = None
552 553 if not f.is_binary and not over_size_limit:
553 554 full_content = safe_str(f.content)
554 555
555 556 _data.update({
556 557 "content": full_content,
557 558 })
558 559 _files.append(_data)
559 560
560 561 for d in dirs:
561 562 _data = d_name = d.unicode_path
562 563 if not flat:
563 564 _data = {
564 565 "name": h.escape(d_name),
565 566 "type": "dir",
566 567 }
567 568 if extended_info:
568 569 _data.update({
569 570 "md5": None,
570 571 "binary": None,
571 572 "size": None,
572 573 "extension": None,
573 574 })
574 575 if content:
575 576 _data.update({
576 577 "content": None
577 578 })
578 579 _dirs.append(_data)
579 580 except RepositoryError:
580 581 log.exception("Exception in get_nodes")
581 582 raise
582 583
583 584 return _dirs, _files
584 585
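The two return shapes, sketched with illustrative values; `model` and `commit_id` are placeholders:

    # flat=True (default): plain unicode paths
    dirs, files = model.get_nodes('some/repo', commit_id)
    # files -> [u'docs/index.rst', u'setup.py', ...]

    # flat=False with extended_info: one dict per node
    dirs, files = model.get_nodes(
        'some/repo', commit_id, flat=False, extended_info=True)
    # files[0] -> {'name': 'setup.py', 'type': 'file', 'md5': '...',
    #              'binary': False, 'size': 1024, 'extension': 'py',
    #              'mimetype': 'text/x-python', 'lines': 42}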
585 586 def get_node(self, repo_name, commit_id, file_path,
586 587 extended_info=False, content=False, max_file_bytes=None, cache=True):
587 588 """
588 589 retrieve a single file node from the given commit
589 590 """
590 591 try:
591 592
592 593 _repo = self._get_repo(repo_name)
593 594 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
594 595
595 596 file_node = commit.get_node(file_path)
596 597 if file_node.is_dir():
597 598 raise RepositoryError('The given path is a directory')
598 599
599 600 _content = None
600 601 f_name = file_node.unicode_path
601 602
602 603 file_data = {
603 604 "name": h.escape(f_name),
604 605 "type": "file",
605 606 }
606 607
607 608 if extended_info:
608 609 file_data.update({
609 610 "extension": file_node.extension,
610 611 "mimetype": file_node.mimetype,
611 612 })
612 613
613 614 if cache:
614 615 md5 = file_node.md5
615 616 is_binary = file_node.is_binary
616 617 size = file_node.size
617 618 else:
618 619 is_binary, md5, size, _content = file_node.metadata_uncached()
619 620
620 621 file_data.update({
621 622 "md5": md5,
622 623 "binary": is_binary,
623 624 "size": size,
624 625 })
625 626
626 627 if content and cache:
627 628 # get content + cache
628 629 size = file_node.size
629 630 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
630 631 full_content = None
631 632 if not file_node.is_binary and not over_size_limit:
632 633 full_content = safe_unicode(file_node.content)
633 634
634 635 file_data.update({
635 636 "content": full_content,
636 637 })
637 638 elif content:
638 639 # get content *without* cache
639 640 if _content is None:
640 641 is_binary, md5, size, _content = file_node.metadata_uncached()
641 642
642 643 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
643 644 full_content = None
644 645 if not is_binary and not over_size_limit:
645 646 full_content = safe_unicode(_content)
646 647
647 648 file_data.update({
648 649 "content": full_content,
649 650 })
650 651
651 652 except RepositoryError:
652 653 log.exception("Exception in get_node")
653 654 raise
654 655
655 656 return file_data
656 657
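A sketch of both cache modes; the repo name, commit id and path are placeholders:

    # cached metadata: md5/size/binary come from the node cache
    data = model.get_node('some/repo', commit_id, 'setup.py',
                          extended_info=True, content=True)

    # cache=False: metadata and content are read once via
    # metadata_uncached(), useful for one-off jobs like indexing
    data = model.get_node('some/repo', commit_id, 'setup.py',
                          content=True, cache=False)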
657 658 def get_fts_data(self, repo_name, commit_id, root_path='/'):
658 659 """
659 660 Fetch the node tree for use in full-text search
660 661 """
661 662
662 663 tree_info = list()
663 664
664 665 try:
665 666 _repo = self._get_repo(repo_name)
666 667 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
667 668 root_path = root_path.lstrip('/')
668 669 for __, dirs, files in commit.walk(root_path):
669 670
670 671 for f in files:
671 672 is_binary, md5, size, _content = f.metadata_uncached()
672 673 _data = {
673 674 "name": f.unicode_path,
674 675 "md5": md5,
675 676 "extension": f.extension,
676 677 "binary": is_binary,
677 678 "size": size
678 679 }
679 680
680 681 tree_info.append(_data)
681 682
682 683 except RepositoryError:
683 684 log.exception("Exception in get_nodes")
684 685 raise
685 686
686 687 return tree_info
687 688
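Each entry handed to the full-text indexer looks roughly like this (values are illustrative):

    entry = {
        "name": u'docs/index.rst',
        "md5": '1b2cf535f27731c974343645a3985328',
        "extension": 'rst',
        "binary": False,
        "size": 452,
    }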
688 689 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
689 690 author=None, trigger_push_hook=True):
690 691 """
691 692 Commits the given nodes into the repo
692 693
693 694 :param user: RhodeCode User object or user_id, the committer
694 695 :param repo: RhodeCode Repository object
695 696 :param message: commit message
696 697 :param nodes: mapping {filename:{'content':content},...}
697 698 :param parent_commit: parent commit; if empty, this is the
698 699 initial commit
699 700 :param author: author of the commit; can differ from the
700 701 committer, but only for git
701 702 :param trigger_push_hook: trigger push hooks
702 703
703 704 :returns: the newly committed commit
704 705 """
705 706
706 707 user = self._get_user(user)
707 708 scm_instance = repo.scm_instance(cache=False)
708 709
709 710 processed_nodes = []
710 711 for f_path in nodes:
711 712 f_path = self._sanitize_path(f_path)
712 713 content = nodes[f_path]['content']
713 714 f_path = safe_str(f_path)
714 715 # decoding here forces properly encoded values;
715 716 # in any other case this will raise and deny the commit
716 717 if isinstance(content, (basestring,)):
717 718 content = safe_str(content)
718 719 elif isinstance(content, (file, cStringIO.OutputType,)):
719 720 content = content.read()
720 721 else:
721 722 raise Exception('Content is of unrecognized type %s' % (
722 723 type(content)
723 724 ))
724 725 processed_nodes.append((f_path, content))
725 726
726 727 message = safe_unicode(message)
727 728 commiter = user.full_contact
728 729 author = safe_unicode(author) if author else commiter
729 730
730 731 imc = scm_instance.in_memory_commit
731 732
732 733 if not parent_commit:
733 734 parent_commit = EmptyCommit(alias=scm_instance.alias)
734 735
735 736 if isinstance(parent_commit, EmptyCommit):
736 737 # EmptyCommit means we're editing an empty repository
737 738 parents = None
738 739 else:
739 740 parents = [parent_commit]
740 741 # add multiple nodes
741 742 for path, content in processed_nodes:
742 743 imc.add(FileNode(path, content=content))
743 744 # TODO: handle pre push scenario
744 745 tip = imc.commit(message=message,
745 746 author=author,
746 747 parents=parents,
747 748 branch=parent_commit.branch)
748 749
749 750 self.mark_for_invalidation(repo.repo_name)
750 751 if trigger_push_hook:
751 752 hooks_utils.trigger_post_push_hook(
752 753 username=user.username, action='push_local',
753 754 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
754 755 hook_type='post_push',
755 756 commit_ids=[tip.raw_id])
756 757 return tip
757 758
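A usage sketch committing two new files; `cur_user` and `db_repo` are placeholder objects:

    tip = model.create_nodes(
        user=cur_user, repo=db_repo, message=u'Add docs skeleton',
        nodes={
            'docs/index.rst': {'content': 'Index\n=====\n'},
            'docs/api.rst': {'content': 'API\n===\n'},
        })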
758 759 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
759 760 author=None, trigger_push_hook=True):
760 761 user = self._get_user(user)
761 762 scm_instance = repo.scm_instance(cache=False)
762 763
763 764 message = safe_unicode(message)
764 765 commiter = user.full_contact
765 766 author = safe_unicode(author) if author else commiter
766 767
767 768 imc = scm_instance.in_memory_commit
768 769
769 770 if not parent_commit:
770 771 parent_commit = EmptyCommit(alias=scm_instance.alias)
771 772
772 773 if isinstance(parent_commit, EmptyCommit):
773 774 # EmptyCommit means we're editing an empty repository
774 775 parents = None
775 776 else:
776 777 parents = [parent_commit]
777 778
778 779 # add multiple nodes
779 780 for _filename, data in nodes.items():
780 781 # new filename, possibly renamed from the old one; also sanitize
781 782 # the path against tricks with relative paths like ../../ etc.
782 783 filename = self._sanitize_path(data['filename'])
783 784 old_filename = self._sanitize_path(_filename)
784 785 content = data['content']
785 786 file_mode = data.get('mode')
786 787 filenode = FileNode(old_filename, content=content, mode=file_mode)
787 788 op = data['op']
788 789 if op == 'add':
789 790 imc.add(filenode)
790 791 elif op == 'del':
791 792 imc.remove(filenode)
792 793 elif op == 'mod':
793 794 if filename != old_filename:
794 795 # TODO: handle renames more efficiently, needs vcs lib changes
795 796 imc.remove(filenode)
796 797 imc.add(FileNode(filename, content=content, mode=file_mode))
797 798 else:
798 799 imc.change(filenode)
799 800
800 801 try:
801 802 # TODO: handle pre push scenario commit changes
802 803 tip = imc.commit(message=message,
803 804 author=author,
804 805 parents=parents,
805 806 branch=parent_commit.branch)
806 807 except NodeNotChangedError:
807 808 raise
808 809 except Exception as e:
809 810 log.exception("Unexpected exception during call to imc.commit")
810 811 raise IMCCommitError(str(e))
811 812 finally:
812 813 # always clear caches; if the commit fails we still want a fresh object
813 814 self.mark_for_invalidation(repo.repo_name)
814 815
815 816 if trigger_push_hook:
816 817 hooks_utils.trigger_post_push_hook(
817 818 username=user.username, action='push_local', hook_type='post_push',
818 819 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
819 820 commit_ids=[tip.raw_id])
820 821
821 822 return tip
822 823
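The `nodes` mapping is keyed by the old filename; each value carries the (possibly new) filename, content, optional mode and an `op` of 'add', 'del' or 'mod'. A 'mod' with a changed filename becomes a remove plus add. A sketch:

    tip = model.update_nodes(
        user=cur_user, repo=db_repo, message=u'Rename and edit',
        nodes={
            'old_name.py': {'filename': 'new_name.py',  # mod + rename
                            'content': 'print("hello")\n', 'op': 'mod'},
            'obsolete.txt': {'filename': 'obsolete.txt',
                             'content': '', 'op': 'del'},
        })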
823 824 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
824 825 author=None, trigger_push_hook=True):
825 826 """
826 827 Deletes the given nodes from `repo`
827 828
828 829 :param user: RhodeCode User object or user_id, the committer
829 830 :param repo: RhodeCode Repository object
830 831 :param message: commit message
831 832 :param nodes: mapping {filename:{'content':content},...}
832 833 :param parent_commit: parent commit; if empty, this is the initial
833 834 commit
834 835 :param author: author of the commit; can differ from the committer,
835 836 but only for git
836 837 :param trigger_push_hook: trigger push hooks
837 838
838 839 :returns: new commit after deletion
839 840 """
840 841
841 842 user = self._get_user(user)
842 843 scm_instance = repo.scm_instance(cache=False)
843 844
844 845 processed_nodes = []
845 846 for f_path in nodes:
846 847 f_path = self._sanitize_path(f_path)
847 848 # content can be empty, but for compatibility the same dict
848 849 # structure as in add_nodes is allowed
849 850 content = nodes[f_path].get('content')
850 851 processed_nodes.append((f_path, content))
851 852
852 853 message = safe_unicode(message)
853 854 commiter = user.full_contact
854 855 author = safe_unicode(author) if author else commiter
855 856
856 857 imc = scm_instance.in_memory_commit
857 858
858 859 if not parent_commit:
859 860 parent_commit = EmptyCommit(alias=scm_instance.alias)
860 861
861 862 if isinstance(parent_commit, EmptyCommit):
862 863 # EmptyCommit means we're editing an empty repository
863 864 parents = None
864 865 else:
865 866 parents = [parent_commit]
866 867 # add multiple nodes
867 868 for path, content in processed_nodes:
868 869 imc.remove(FileNode(path, content=content))
869 870
870 871 # TODO: handle pre push scenario
871 872 tip = imc.commit(message=message,
872 873 author=author,
873 874 parents=parents,
874 875 branch=parent_commit.branch)
875 876
876 877 self.mark_for_invalidation(repo.repo_name)
877 878 if trigger_push_hook:
878 879 hooks_utils.trigger_post_push_hook(
879 880 username=user.username, action='push_local', hook_type='post_push',
880 881 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
881 882 commit_ids=[tip.raw_id])
882 883 return tip
883 884
884 885 def strip(self, repo, commit_id, branch):
885 886 scm_instance = repo.scm_instance(cache=False)
886 887 scm_instance.config.clear_section('hooks')
887 888 scm_instance.strip(commit_id, branch)
888 889 self.mark_for_invalidation(repo.repo_name)
889 890
890 891 def get_unread_journal(self):
891 892 return self.sa.query(UserLog).count()
892 893
893 894 def get_repo_landing_revs(self, translator, repo=None):
894 895 """
895 896 Generates select options with tags, branches and bookmarks
896 897 (bookmarks are hg only), grouped by type
897 898
898 899 :param repo: repository to generate the options for
899 900 """
900 901 _ = translator
901 902 repo = self._get_repo(repo)
902 903
903 904 hist_l = [
904 905 ['rev:tip', _('latest tip')]
905 906 ]
906 907 choices = [
907 908 'rev:tip'
908 909 ]
909 910
910 911 if not repo:
911 912 return choices, hist_l
912 913
913 914 repo = repo.scm_instance()
914 915
915 916 branches_group = (
916 917 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
917 918 for b in repo.branches],
918 919 _("Branches"))
919 920 hist_l.append(branches_group)
920 921 choices.extend([x[0] for x in branches_group[0]])
921 922
922 923 if repo.alias == 'hg':
923 924 bookmarks_group = (
924 925 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
925 926 for b in repo.bookmarks],
926 927 _("Bookmarks"))
927 928 hist_l.append(bookmarks_group)
928 929 choices.extend([x[0] for x in bookmarks_group[0]])
929 930
930 931 tags_group = (
931 932 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
932 933 for t in repo.tags],
933 934 _("Tags"))
934 935 hist_l.append(tags_group)
935 936 choices.extend([x[0] for x in tags_group[0]])
936 937
937 938 return choices, hist_l
938 939
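For a Mercurial repository the result could look like this (branch, bookmark and tag names are illustrative):

    choices = ['rev:tip', u'branch:default', u'book:main', u'tag:v1.0']
    hist_l = [
        ['rev:tip', 'latest tip'],
        ([(u'branch:default', u'default')], 'Branches'),
        ([(u'book:main', u'main')], 'Bookmarks'),
        ([(u'tag:v1.0', u'v1.0')], 'Tags'),
    ]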
939 940 def get_server_info(self, environ=None):
940 941 server_info = get_system_info(environ)
941 942 return server_info
@@ -1,1902 +1,1902 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 23 import hashlib
24 24 import os
25 25 import re
26 26 import pprint
27 27 import shutil
28 28 import socket
29 29 import subprocess32
30 30 import time
31 31 import uuid
32 32 import dateutil.tz
33 33 import functools
34 34
35 35 import mock
36 36 import pyramid.testing
37 37 import pytest
38 38 import colander
39 39 import requests
40 40 import pyramid.paster
41 41
42 42 import rhodecode
43 43 from rhodecode.lib.utils2 import AttributeDict
44 44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 45 from rhodecode.model.comment import CommentsModel
46 46 from rhodecode.model.db import (
47 47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
48 48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 49 from rhodecode.model.meta import Session
50 50 from rhodecode.model.pull_request import PullRequestModel
51 51 from rhodecode.model.repo import RepoModel
52 52 from rhodecode.model.repo_group import RepoGroupModel
53 53 from rhodecode.model.user import UserModel
54 54 from rhodecode.model.settings import VcsSettingsModel
55 55 from rhodecode.model.user_group import UserGroupModel
56 56 from rhodecode.model.integration import IntegrationModel
57 57 from rhodecode.integrations import integration_type_registry
58 58 from rhodecode.integrations.types.base import IntegrationTypeBase
59 59 from rhodecode.lib.utils import repo2db_mapper
60 60 from rhodecode.lib.vcs import create_vcsserver_proxy
61 61 from rhodecode.lib.vcs.backends import get_backend
62 62 from rhodecode.lib.vcs.nodes import FileNode
63 63 from rhodecode.tests import (
64 64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
65 65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
66 66 TEST_USER_REGULAR_PASS)
67 67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
68 68 from rhodecode.tests.fixture import Fixture
69 69 from rhodecode.config import utils as config_utils
70 70
71 71 def _split_comma(value):
72 72 return value.split(',')
73 73
74 74
75 75 def pytest_addoption(parser):
76 76 parser.addoption(
77 77 '--keep-tmp-path', action='store_true',
78 78 help="Keep the test temporary directories")
79 79 parser.addoption(
80 80 '--backends', action='store', type=_split_comma,
81 81 default=['git', 'hg', 'svn'],
82 82 help="Select which backends to test for backend specific tests.")
83 83 parser.addoption(
84 84 '--dbs', action='store', type=_split_comma,
85 85 default=['sqlite'],
86 86 help="Select which database to test for database specific tests. "
87 87 "Possible options are sqlite,postgres,mysql")
88 88 parser.addoption(
89 89 '--appenlight', '--ae', action='store_true',
90 90 help="Track statistics in appenlight.")
91 91 parser.addoption(
92 92 '--appenlight-api-key', '--ae-key',
93 93 help="API key for Appenlight.")
94 94 parser.addoption(
95 95 '--appenlight-url', '--ae-url',
96 96 default="https://ae.rhodecode.com",
97 97 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
98 98 parser.addoption(
99 99 '--sqlite-connection-string', action='store',
100 100 default='', help="Connection string for the dbs tests with SQLite")
101 101 parser.addoption(
102 102 '--postgres-connection-string', action='store',
103 103 default='', help="Connection string for the dbs tests with Postgres")
104 104 parser.addoption(
105 105 '--mysql-connection-string', action='store',
106 106 default='', help="Connection string for the dbs tests with MySQL")
107 107 parser.addoption(
108 108 '--repeat', type=int, default=100,
109 109 help="Number of repetitions in performance tests.")
110 110
111 111
112 112 def pytest_configure(config):
113 113 from rhodecode.config import patches
114 114
115 115
116 116 def pytest_collection_modifyitems(session, config, items):
117 117 # skip items marked as non-tests (__test__ attribute, a nose convention)
118 118 remaining = [
119 119 i for i in items if getattr(i.obj, '__test__', True)]
120 120 items[:] = remaining
121 121
122 122
123 123 def pytest_generate_tests(metafunc):
124 124 # Support test generation based on --backend parameter
125 125 if 'backend_alias' in metafunc.fixturenames:
126 126 backends = get_backends_from_metafunc(metafunc)
127 127 scope = None
128 128 if not backends:
129 129 pytest.skip("Not enabled for any of selected backends")
130 130 metafunc.parametrize('backend_alias', backends, scope=scope)
131 131 elif hasattr(metafunc.function, 'backends'):
132 132 backends = get_backends_from_metafunc(metafunc)
133 133 if not backends:
134 134 pytest.skip("Not enabled for any of selected backends")
135 135
136 136
137 137 def get_backends_from_metafunc(metafunc):
138 138 requested_backends = set(metafunc.config.getoption('--backends'))
139 139 if hasattr(metafunc.function, 'backends'):
140 140 # Supported backends by this test function, created from
141 141 # pytest.mark.backends
142 142 backends = metafunc.definition.get_closest_marker('backends').args
143 143 elif hasattr(metafunc.cls, 'backend_alias'):
144 144 # Support class attribute "backend_alias", this is mainly
145 145 # for legacy reasons for tests not yet using pytest.mark.backends
146 146 backends = [metafunc.cls.backend_alias]
147 147 else:
148 148 backends = metafunc.config.getoption('--backends')
149 149 return requested_backends.intersection(backends)
150 150
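How a test opts into specific backends via the marker consumed above (a sketch; the test body is a placeholder):

    @pytest.mark.backends('git', 'hg')
    def test_only_for_dvcs(backend):
        # runs once per backend that is both marked here and enabled
        # via --backends on the command line
        assert backend.alias in ('git', 'hg')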
151 151
152 152 @pytest.fixture(scope='session', autouse=True)
153 153 def activate_example_rcextensions(request):
154 154 """
155 155 Patch in an example rcextensions module which verifies passed in kwargs.
156 156 """
157 157 from rhodecode.config import rcextensions
158 158
159 159 old_extensions = rhodecode.EXTENSIONS
160 160 rhodecode.EXTENSIONS = rcextensions
161 161 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
162 162
163 163 @request.addfinalizer
164 164 def cleanup():
165 165 rhodecode.EXTENSIONS = old_extensions
166 166
167 167
168 168 @pytest.fixture
169 169 def capture_rcextensions():
170 170 """
171 171 Returns the recorded calls to entry points in rcextensions.
172 172 """
173 173 calls = rhodecode.EXTENSIONS.calls
174 174 calls.clear()
175 175 # Note: at this moment it is still an empty dict, but it will be
176 176 # filled during the test run; since it is a reference, this is
177 177 # enough to make it work.
178 178 return calls
179 179
180 180
181 181 @pytest.fixture(scope='session')
182 182 def http_environ_session():
183 183 """
184 184 Allow to use "http_environ" in session scope.
185 185 """
186 186 return plain_http_environ()
187 187
188 188
189 189 def plain_http_host_stub():
190 190 """
191 191 Value of HTTP_HOST in the test run.
192 192 """
193 193 return 'example.com:80'
194 194
195 195
196 196 @pytest.fixture
197 197 def http_host_stub():
198 198 """
199 199 Value of HTTP_HOST in the test run.
200 200 """
201 201 return plain_http_host_stub()
202 202
203 203
204 204 def plain_http_host_only_stub():
205 205 """
206 206 Value of HTTP_HOST in the test run.
207 207 """
208 208 return plain_http_host_stub().split(':')[0]
209 209
210 210
211 211 @pytest.fixture
212 212 def http_host_only_stub():
213 213 """
214 214 Value of HTTP_HOST in the test run.
215 215 """
216 216 return plain_http_host_only_stub()
217 217
218 218
219 219 def plain_http_environ():
220 220 """
221 221 HTTP extra environ keys.
222 222
223 223 Used by the test application as well as for setting up the pylons
224 224 environment. In the case of the fixture "app" it should be possible
225 225 to override this for a specific test case.
226 226 """
227 227 return {
228 228 'SERVER_NAME': plain_http_host_only_stub(),
229 229 'SERVER_PORT': plain_http_host_stub().split(':')[1],
230 230 'HTTP_HOST': plain_http_host_stub(),
231 231 'HTTP_USER_AGENT': 'rc-test-agent',
232 232 'REQUEST_METHOD': 'GET'
233 233 }
234 234
235 235
236 236 @pytest.fixture
237 237 def http_environ():
238 238 """
239 239 HTTP extra environ keys.
240 240
241 241 Used by the test application as well as for setting up the pylons
242 242 environment. In the case of the fixture "app" it should be possible
243 243 to override this for a specific test case.
244 244 """
245 245 return plain_http_environ()
246 246
247 247
248 248 @pytest.fixture(scope='session')
249 249 def baseapp(ini_config, vcsserver, http_environ_session):
250 250 from rhodecode.lib.pyramid_utils import get_app_config
251 251 from rhodecode.config.middleware import make_pyramid_app
252 252
253 253 print("Using the RhodeCode configuration:{}".format(ini_config))
254 254 pyramid.paster.setup_logging(ini_config)
255 255
256 256 settings = get_app_config(ini_config)
257 257 app = make_pyramid_app({'__file__': ini_config}, **settings)
258 258
259 259 return app
260 260
261 261
262 262 @pytest.fixture(scope='function')
263 263 def app(request, config_stub, baseapp, http_environ):
264 264 app = CustomTestApp(
265 265 baseapp,
266 266 extra_environ=http_environ)
267 267 if request.cls:
268 268 request.cls.app = app
269 269 return app
270 270
271 271
272 272 @pytest.fixture(scope='session')
273 273 def app_settings(baseapp, ini_config):
274 274 """
275 275 Settings dictionary used to create the app.
276 276
277 277 Parses the ini file and passes the result through the sanitize and apply
278 278 defaults mechanism in `rhodecode.config.middleware`.
279 279 """
280 280 return baseapp.config.get_settings()
281 281
282 282
283 283 @pytest.fixture(scope='session')
284 284 def db_connection(ini_settings):
285 285 # Initialize the database connection.
286 286 config_utils.initialize_database(ini_settings)
287 287
288 288
289 289 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
290 290
291 291
292 292 def _autologin_user(app, *args):
293 293 session = login_user_session(app, *args)
294 294 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
295 295 return LoginData(csrf_token, session['rhodecode_user'])
296 296
297 297
298 298 @pytest.fixture
299 299 def autologin_user(app):
300 300 """
301 301 Utility fixture which makes sure that the admin user is logged in
302 302 """
303 303 return _autologin_user(app)
304 304
305 305
306 306 @pytest.fixture
307 307 def autologin_regular_user(app):
308 308 """
309 309 Utility fixture which makes sure that the regular user is logged in
310 310 """
311 311 return _autologin_user(
312 312 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
313 313
314 314
315 315 @pytest.fixture(scope='function')
316 316 def csrf_token(request, autologin_user):
317 317 return autologin_user.csrf_token
318 318
319 319
320 320 @pytest.fixture(scope='function')
321 321 def xhr_header(request):
322 322 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
323 323
324 324
325 325 @pytest.fixture
326 326 def real_crypto_backend(monkeypatch):
327 327 """
328 328 Switch the production crypto backend on for this test.
329 329
330 330 During the test run the crypto backend is replaced with a faster
331 331 implementation based on the MD5 algorithm.
332 332 """
333 333 monkeypatch.setattr(rhodecode, 'is_test', False)
334 334
335 335
336 336 @pytest.fixture(scope='class')
337 337 def index_location(request, baseapp):
338 338 index_location = baseapp.config.get_settings()['search.location']
339 339 if request.cls:
340 340 request.cls.index_location = index_location
341 341 return index_location
342 342
343 343
344 344 @pytest.fixture(scope='session', autouse=True)
345 345 def tests_tmp_path(request):
346 346 """
347 347 Create temporary directory to be used during the test session.
348 348 """
349 349 if not os.path.exists(TESTS_TMP_PATH):
350 350 os.makedirs(TESTS_TMP_PATH)
351 351
352 352 if not request.config.getoption('--keep-tmp-path'):
353 353 @request.addfinalizer
354 354 def remove_tmp_path():
355 355 shutil.rmtree(TESTS_TMP_PATH)
356 356
357 357 return TESTS_TMP_PATH
358 358
359 359
360 360 @pytest.fixture
361 361 def test_repo_group(request):
362 362 """
363 363 Create a temporary repository group and destroy it automatically
364 364 after use
365 365 """
366 366 fixture = Fixture()
367 367 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
368 368 repo_group = fixture.create_repo_group(repogroupid)
369 369
370 370 def _cleanup():
371 371 fixture.destroy_repo_group(repogroupid)
372 372
373 373 request.addfinalizer(_cleanup)
374 374 return repo_group
375 375
376 376
377 377 @pytest.fixture
378 378 def test_user_group(request):
379 379 """
380 380 Create a temporary user group and destroy it automatically
381 381 after use
382 382 """
383 383 fixture = Fixture()
384 384 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
385 385 user_group = fixture.create_user_group(usergroupid)
386 386
387 387 def _cleanup():
388 388 fixture.destroy_user_group(user_group)
389 389
390 390 request.addfinalizer(_cleanup)
391 391 return user_group
392 392
393 393
394 394 @pytest.fixture(scope='session')
395 395 def test_repo(request):
396 396 container = TestRepoContainer()
397 397 request.addfinalizer(container._cleanup)
398 398 return container
399 399
400 400
401 401 class TestRepoContainer(object):
402 402 """
403 403 Container for test repositories which are used read only.
404 404
405 405 Repositories will be created on demand and re-used during the lifetime
406 406 of this object.
407 407
408 408 Usage to get the svn test repository "minimal"::
409 409
410 410 test_repo = TestRepoContainer()
411 411 repo = test_repo('minimal', 'svn')
412 412
413 413 """
414 414
415 415 dump_extractors = {
416 416 'git': utils.extract_git_repo_from_dump,
417 417 'hg': utils.extract_hg_repo_from_dump,
418 418 'svn': utils.extract_svn_repo_from_dump,
419 419 }
420 420
421 421 def __init__(self):
422 422 self._cleanup_repos = []
423 423 self._fixture = Fixture()
424 424 self._repos = {}
425 425
426 426 def __call__(self, dump_name, backend_alias, config=None):
427 427 key = (dump_name, backend_alias)
428 428 if key not in self._repos:
429 429 repo = self._create_repo(dump_name, backend_alias, config)
430 430 self._repos[key] = repo.repo_id
431 431 return Repository.get(self._repos[key])
432 432
433 433 def _create_repo(self, dump_name, backend_alias, config):
434 434 repo_name = '%s-%s' % (backend_alias, dump_name)
435 backend_class = get_backend(backend_alias)
435 backend = get_backend(backend_alias)
436 436 dump_extractor = self.dump_extractors[backend_alias]
437 437 repo_path = dump_extractor(dump_name, repo_name)
438 438
439 vcs_repo = backend_class(repo_path, config=config)
439 vcs_repo = backend(repo_path, config=config)
440 440 repo2db_mapper({repo_name: vcs_repo})
441 441
442 442 repo = RepoModel().get_by_repo_name(repo_name)
443 443 self._cleanup_repos.append(repo_name)
444 444 return repo
445 445
446 446 def _cleanup(self):
447 447 for repo_name in reversed(self._cleanup_repos):
448 448 self._fixture.destroy_repo(repo_name)
449 449
450 450
451 451 def backend_base(request, backend_alias, baseapp, test_repo):
452 452 if backend_alias not in request.config.getoption('--backends'):
453 453 pytest.skip("Backend %s not selected." % (backend_alias, ))
454 454
455 455 utils.check_xfail_backends(request.node, backend_alias)
456 456 utils.check_skip_backends(request.node, backend_alias)
457 457
458 458 repo_name = 'vcs_test_%s' % (backend_alias, )
459 459 backend = Backend(
460 460 alias=backend_alias,
461 461 repo_name=repo_name,
462 462 test_name=request.node.name,
463 463 test_repo_container=test_repo)
464 464 request.addfinalizer(backend.cleanup)
465 465 return backend
466 466
467 467
468 468 @pytest.fixture
469 469 def backend(request, backend_alias, baseapp, test_repo):
470 470 """
471 471 Parametrized fixture which represents a single backend implementation.
472 472
473 473 It respects the option `--backends` to focus the test run on specific
474 474 backend implementations.
475 475
476 476 It also supports `pytest.mark.xfail_backends` to mark tests as failing
477 477 for specific backends. This is intended as a utility for incremental
478 478 development of a new backend implementation.
479 479 """
480 480 return backend_base(request, backend_alias, baseapp, test_repo)
481 481
482 482
483 483 @pytest.fixture
484 484 def backend_git(request, baseapp, test_repo):
485 485 return backend_base(request, 'git', baseapp, test_repo)
486 486
487 487
488 488 @pytest.fixture
489 489 def backend_hg(request, baseapp, test_repo):
490 490 return backend_base(request, 'hg', baseapp, test_repo)
491 491
492 492
493 493 @pytest.fixture
494 494 def backend_svn(request, baseapp, test_repo):
495 495 return backend_base(request, 'svn', baseapp, test_repo)
496 496
497 497
498 498 @pytest.fixture
499 499 def backend_random(backend_git):
500 500 """
501 501 Use this to express that your tests need "a backend".
502 502
503 503 A few of our tests need a backend, so that we can run the code. This
504 504 fixture is intended to be used for such cases. It will pick one of the
505 505 backends and run the tests.
506 506
507 507 The fixture `backend` would run the test multiple times for each
508 508 available backend which is a pure waste of time if the test is
509 509 independent of the backend type.
510 510 """
511 511 # TODO: johbo: Change this to pick a random backend
512 512 return backend_git
513 513
514 514
515 515 @pytest.fixture
516 516 def backend_stub(backend_git):
517 517 """
518 518 Use this to express that your tests need a backend stub
519 519
520 520 TODO: mikhail: Implement a real stub logic instead of returning
521 521 a git backend
522 522 """
523 523 return backend_git
524 524
525 525
526 526 @pytest.fixture
527 527 def repo_stub(backend_stub):
528 528 """
529 529 Use this to express that your tests need a repository stub
530 530 """
531 531 return backend_stub.create_repo()
532 532
533 533
534 534 class Backend(object):
535 535 """
536 536 Represents the test configuration for one supported backend
537 537
538 538 Provides easy access to different test repositories based on
539 539 `__getitem__`. Such repositories will only be created once per test
540 540 session.
541 541 """
542 542
543 543 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
544 544 _master_repo = None
545 545 _commit_ids = {}
546 546
547 547 def __init__(self, alias, repo_name, test_name, test_repo_container):
548 548 self.alias = alias
549 549 self.repo_name = repo_name
550 550 self._cleanup_repos = []
551 551 self._test_name = test_name
552 552 self._test_repo_container = test_repo_container
553 553 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
554 554 # Fixture will survive in the end.
555 555 self._fixture = Fixture()
556 556
557 557 def __getitem__(self, key):
558 558 return self._test_repo_container(key, self.alias)
559 559
560 560 def create_test_repo(self, key, config=None):
561 561 return self._test_repo_container(key, self.alias, config)
562 562
563 563 @property
564 564 def repo(self):
565 565 """
566 566 Returns the "current" repository. This is the vcs_test repo or the
567 567 last repo which has been created with `create_repo`.
568 568 """
569 569 from rhodecode.model.db import Repository
570 570 return Repository.get_by_repo_name(self.repo_name)
571 571
572 572 @property
573 573 def default_branch_name(self):
574 574 VcsRepository = get_backend(self.alias)
575 575 return VcsRepository.DEFAULT_BRANCH_NAME
576 576
577 577 @property
578 578 def default_head_id(self):
579 579 """
580 580 Returns the default head id of the underlying backend.
581 581
582 582 This will be the default branch name in case the backend does have a
583 583 default branch. In the other cases it will point to a valid head
584 584 which can serve as the base to create a new commit on top of it.
585 585 """
586 586 vcsrepo = self.repo.scm_instance()
587 587 head_id = (
588 588 vcsrepo.DEFAULT_BRANCH_NAME or
589 589 vcsrepo.commit_ids[-1])
590 590 return head_id
591 591
592 592 @property
593 593 def commit_ids(self):
594 594 """
595 595 Returns the list of commits for the last created repository
596 596 """
597 597 return self._commit_ids
598 598
599 599 def create_master_repo(self, commits):
600 600 """
601 601 Create a repository and remember it as a template.
602 602
603 603 This allows to easily create derived repositories to construct
604 604 more complex scenarios for diff, compare and pull requests.
605 605
606 606 Returns a commit map which maps from commit message to raw_id.
607 607 """
608 608 self._master_repo = self.create_repo(commits=commits)
609 609 return self._commit_ids
610 610
611 611 def create_repo(
612 612 self, commits=None, number_of_commits=0, heads=None,
613 613 name_suffix=u'', bare=False, **kwargs):
614 614 """
615 615 Create a repository and record it for later cleanup.
616 616
617 617 :param commits: Optional. A sequence of dict instances.
618 618 Will add a commit per entry to the new repository.
619 619 :param number_of_commits: Optional. If set to a number, this number of
620 620 commits will be added to the new repository.
621 621 :param heads: Optional. Can be set to a sequence of commit
622 622 names which shall be pulled in from the master repository.
623 623 :param name_suffix: adds special suffix to generated repo name
624 624 :param bare: set a repo as bare (no checkout)
625 625 """
626 626 self.repo_name = self._next_repo_name() + name_suffix
627 627 repo = self._fixture.create_repo(
628 628 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
629 629 self._cleanup_repos.append(repo.repo_name)
630 630
631 631 commits = commits or [
632 632 {'message': 'Commit %s of %s' % (x, self.repo_name)}
633 633 for x in range(number_of_commits)]
634 634 vcs_repo = repo.scm_instance()
635 635 vcs_repo.count()
636 636 self._add_commits_to_repo(vcs_repo, commits)
637 637 if heads:
638 638 self.pull_heads(repo, heads)
639 639
640 640 return repo
641 641
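Typical use from a test via the parametrized `backend` fixture; the commit messages are placeholders:

    def test_with_fresh_repo(backend):
        repo = backend.create_repo(commits=[
            {'message': 'first'},
            {'message': 'second'},
        ])
        # commit messages map to raw ids on the Backend instance
        assert backend.commit_ids['first']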
642 642 def pull_heads(self, repo, heads):
643 643 """
644 644 Make sure that repo contains all commits mentioned in `heads`
645 645 """
646 646 vcsmaster = self._master_repo.scm_instance()
647 647 vcsrepo = repo.scm_instance()
648 648 vcsrepo.config.clear_section('hooks')
649 649 commit_ids = [self._commit_ids[h] for h in heads]
650 650 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
651 651
652 652 def create_fork(self):
653 653 repo_to_fork = self.repo_name
654 654 self.repo_name = self._next_repo_name()
655 655 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
656 656 self._cleanup_repos.append(self.repo_name)
657 657 return repo
658 658
659 659 def new_repo_name(self, suffix=u''):
660 660 self.repo_name = self._next_repo_name() + suffix
661 661 self._cleanup_repos.append(self.repo_name)
662 662 return self.repo_name
663 663
664 664 def _next_repo_name(self):
665 665 return u"%s_%s" % (
666 666 self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))
667 667
668 668 def ensure_file(self, filename, content='Test content\n'):
669 669 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
670 670 commits = [
671 671 {'added': [
672 672 FileNode(filename, content=content),
673 673 ]},
674 674 ]
675 675 self._add_commits_to_repo(self.repo.scm_instance(), commits)
676 676
677 677 def enable_downloads(self):
678 678 repo = self.repo
679 679 repo.enable_downloads = True
680 680 Session().add(repo)
681 681 Session().commit()
682 682
683 683 def cleanup(self):
684 684 for repo_name in reversed(self._cleanup_repos):
685 685 self._fixture.destroy_repo(repo_name)
686 686
687 687 def _add_commits_to_repo(self, repo, commits):
688 688 commit_ids = _add_commits_to_repo(repo, commits)
689 689 if not commit_ids:
690 690 return
691 691 self._commit_ids = commit_ids
692 692
693 693 # Creating refs for Git to allow fetching them from remote repository
694 694 if self.alias == 'git':
695 695 refs = {}
696 696 for message in self._commit_ids:
697 697 # TODO: mikhail: do more special chars replacements
698 698 ref_name = 'refs/test-refs/{}'.format(
699 699 message.replace(' ', ''))
700 700 refs[ref_name] = self._commit_ids[message]
701 701 self._create_refs(repo, refs)
702 702
703 703 def _create_refs(self, repo, refs):
704 704 for ref_name in refs:
705 705 repo.set_refs(ref_name, refs[ref_name])
706 706
707 707
708 708 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
709 709 if backend_alias not in request.config.getoption('--backends'):
710 710 pytest.skip("Backend %s not selected." % (backend_alias, ))
711 711
712 712 utils.check_xfail_backends(request.node, backend_alias)
713 713 utils.check_skip_backends(request.node, backend_alias)
714 714
715 715 repo_name = 'vcs_test_%s' % (backend_alias, )
716 716 repo_path = os.path.join(tests_tmp_path, repo_name)
717 717 backend = VcsBackend(
718 718 alias=backend_alias,
719 719 repo_path=repo_path,
720 720 test_name=request.node.name,
721 721 test_repo_container=test_repo)
722 722 request.addfinalizer(backend.cleanup)
723 723 return backend
724 724
725 725
726 726 @pytest.fixture
727 727 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
728 728 """
729 729 Parametrized fixture which represents a single vcs backend implementation.
730 730
731 731 See the fixture `backend` for more details. This one implements the same
732 732 concept, but on vcs level. So it does not provide model instances etc.
733 733
734 734 Parameters are generated dynamically, see :func:`pytest_generate_tests`
735 735 for how this works.
736 736 """
737 737 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
738 738
739 739
740 740 @pytest.fixture
741 741 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
742 742 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
743 743
744 744
745 745 @pytest.fixture
746 746 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
747 747 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
748 748
749 749
750 750 @pytest.fixture
751 751 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
752 752 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
753 753
754 754
755 755 @pytest.fixture
756 756 def vcsbackend_stub(vcsbackend_git):
757 757 """
758 758 Use this to express that your test just needs a stub of a vcsbackend.
759 759
760 760 Plan is to eventually implement an in-memory stub to speed tests up.
761 761 """
762 762 return vcsbackend_git
763 763
764 764
765 765 class VcsBackend(object):
766 766 """
767 767 Represents the test configuration for one supported vcs backend.
768 768 """
769 769
770 770 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
771 771
772 772 def __init__(self, alias, repo_path, test_name, test_repo_container):
773 773 self.alias = alias
774 774 self._repo_path = repo_path
775 775 self._cleanup_repos = []
776 776 self._test_name = test_name
777 777 self._test_repo_container = test_repo_container
778 778
779 779 def __getitem__(self, key):
780 780 return self._test_repo_container(key, self.alias).scm_instance()
781 781
782 782 @property
783 783 def repo(self):
784 784 """
785 785 Returns the "current" repository. This is the vcs_test repo of the last
786 786 repo which has been created.
787 787 """
788 788 Repository = get_backend(self.alias)
789 789 return Repository(self._repo_path)
790 790
791 791 @property
792 792 def backend(self):
793 793 """
794 794 Returns the backend implementation class.
795 795 """
796 796 return get_backend(self.alias)
797 797
798 798 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
799 799 bare=False):
800 800 repo_name = self._next_repo_name()
801 801 self._repo_path = get_new_dir(repo_name)
802 802 repo_class = get_backend(self.alias)
803 803 src_url = None
804 804 if _clone_repo:
805 805 src_url = _clone_repo.path
806 806 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
807 807 self._cleanup_repos.append(repo)
808 808
809 809 commits = commits or [
810 810 {'message': 'Commit %s of %s' % (x, repo_name)}
811 811 for x in xrange(number_of_commits)]
812 812 _add_commits_to_repo(repo, commits)
813 813 return repo
814 814
815 815 def clone_repo(self, repo):
816 816 return self.create_repo(_clone_repo=repo)
817 817
818 818 def cleanup(self):
819 819 for repo in self._cleanup_repos:
820 820 shutil.rmtree(repo.path)
821 821
822 822 def new_repo_path(self):
823 823 repo_name = self._next_repo_name()
824 824 self._repo_path = get_new_dir(repo_name)
825 825 return self._repo_path
826 826
827 827 def _next_repo_name(self):
828 828 return "%s_%s" % (
829 829 self.invalid_repo_name.sub('_', self._test_name),
830 830 len(self._cleanup_repos))
831 831
832 832 def add_file(self, repo, filename, content='Test content\n'):
833 833 imc = repo.in_memory_commit
834 834 imc.add(FileNode(filename, content=content))
835 835 imc.commit(
836 836 message=u'Automatic commit from vcsbackend fixture',
837 837 author=u'Automatic <automatic@rhodecode.com>')
838 838
839 839 def ensure_file(self, filename, content='Test content\n'):
840 840 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
841 841 self.add_file(self.repo, filename, content)
842 842
843 843
844 844 def _add_commits_to_repo(vcs_repo, commits):
845 845 commit_ids = {}
846 846 if not commits:
847 847 return commit_ids
848 848
849 849 imc = vcs_repo.in_memory_commit
850 850 commit = None
851 851
852 852 for idx, commit in enumerate(commits):
853 853 message = unicode(commit.get('message', 'Commit %s' % idx))
854 854
855 855 for node in commit.get('added', []):
856 856 imc.add(FileNode(node.path, content=node.content))
857 857 for node in commit.get('changed', []):
858 858 imc.change(FileNode(node.path, content=node.content))
859 859 for node in commit.get('removed', []):
860 860 imc.remove(FileNode(node.path))
861 861
862 862 parents = [
863 863 vcs_repo.get_commit(commit_id=commit_ids[p])
864 864 for p in commit.get('parents', [])]
865 865
866 866 operations = ('added', 'changed', 'removed')
867 867 if not any((commit.get(o) for o in operations)):
868 868 imc.add(FileNode('file_%s' % idx, content=message))
869 869
870 870 commit = imc.commit(
871 871 message=message,
872 872 author=unicode(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
873 873 date=commit.get('date'),
874 874 branch=commit.get('branch'),
875 875 parents=parents)
876 876
877 877 commit_ids[commit.message] = commit.raw_id
878 878
879 879 return commit_ids
880 880
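The `commits` sequence accepts per-commit dicts like the following sketch; paths and contents are placeholders:

    commits = [
        {'message': 'init',
         'added': [FileNode('README', content='hi\n')]},
        {'message': 'follow-up',
         'changed': [FileNode('README', content='hi there\n')],
         'removed': [FileNode('obsolete.txt')],
         'parents': ['init']},  # parents are looked up by message
    ]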
881 881
882 882 @pytest.fixture
883 883 def reposerver(request):
884 884 """
885 885 Allows serving a backend repository
886 886 """
887 887
888 888 repo_server = RepoServer()
889 889 request.addfinalizer(repo_server.cleanup)
890 890 return repo_server
891 891
892 892
893 893 class RepoServer(object):
894 894 """
895 895 Utility to serve a local repository for the duration of a test case.
896 896
897 897 Supports only Subversion so far.
898 898 """
899 899
900 900 url = None
901 901
902 902 def __init__(self):
903 903 self._cleanup_servers = []
904 904
905 905 def serve(self, vcsrepo):
906 906 if vcsrepo.alias != 'svn':
907 907 raise TypeError("Backend %s not supported" % vcsrepo.alias)
908 908
909 909 proc = subprocess32.Popen(
910 910 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
911 911 '--root', vcsrepo.path])
912 912 self._cleanup_servers.append(proc)
913 913 self.url = 'svn://localhost'
914 914
915 915 def cleanup(self):
916 916 for proc in self._cleanup_servers:
917 917 proc.terminate()
918 918
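A sketch of serving an svn test repository through this fixture:

    def test_clone_over_svnserve(reposerver, vcsbackend_svn):
        reposerver.serve(vcsbackend_svn.repo)
        # the repository is now reachable below reposerver.url
        assert reposerver.url == 'svn://localhost'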
919 919
920 920 @pytest.fixture
921 921 def pr_util(backend, request, config_stub):
922 922 """
923 923 Utility for tests of models and for functional tests around pull requests.
924 924
925 925 It gives an instance of :class:`PRTestUtility` which provides various
926 926 utility methods around one pull request.
927 927
928 928 This fixture uses `backend` and inherits its parameterization.
929 929 """
930 930
931 931 util = PRTestUtility(backend)
932 932 request.addfinalizer(util.cleanup)
933 933
934 934 return util
935 935
936 936
937 937 class PRTestUtility(object):
938 938
939 939 pull_request = None
940 940 pull_request_id = None
941 941 mergeable_patcher = None
942 942 mergeable_mock = None
943 943 notification_patcher = None
944 944
945 945 def __init__(self, backend):
946 946 self.backend = backend
947 947
948 948 def create_pull_request(
949 949 self, commits=None, target_head=None, source_head=None,
950 950 revisions=None, approved=False, author=None, mergeable=False,
951 951 enable_notifications=True, name_suffix=u'', reviewers=None,
952 952 title=u"Test", description=u"Description"):
953 953 self.set_mergeable(mergeable)
954 954 if not enable_notifications:
955 955 # mock notification side effect
956 956 self.notification_patcher = mock.patch(
957 957 'rhodecode.model.notification.NotificationModel.create')
958 958 self.notification_patcher.start()
959 959
960 960 if not self.pull_request:
961 961 if not commits:
962 962 commits = [
963 963 {'message': 'c1'},
964 964 {'message': 'c2'},
965 965 {'message': 'c3'},
966 966 ]
967 967 target_head = 'c1'
968 968 source_head = 'c2'
969 969 revisions = ['c2']
970 970
971 971 self.commit_ids = self.backend.create_master_repo(commits)
972 972 self.target_repository = self.backend.create_repo(
973 973 heads=[target_head], name_suffix=name_suffix)
974 974 self.source_repository = self.backend.create_repo(
975 975 heads=[source_head], name_suffix=name_suffix)
976 976 self.author = author or UserModel().get_by_username(
977 977 TEST_USER_ADMIN_LOGIN)
978 978
979 979 model = PullRequestModel()
980 980 self.create_parameters = {
981 981 'created_by': self.author,
982 982 'source_repo': self.source_repository.repo_name,
983 983 'source_ref': self._default_branch_reference(source_head),
984 984 'target_repo': self.target_repository.repo_name,
985 985 'target_ref': self._default_branch_reference(target_head),
986 986 'revisions': [self.commit_ids[r] for r in revisions],
987 987 'reviewers': reviewers or self._get_reviewers(),
988 988 'title': title,
989 989 'description': description,
990 990 }
991 991 self.pull_request = model.create(**self.create_parameters)
992 992 assert model.get_versions(self.pull_request) == []
993 993
994 994 self.pull_request_id = self.pull_request.pull_request_id
995 995
996 996 if approved:
997 997 self.approve()
998 998
999 999 Session().add(self.pull_request)
1000 1000 Session().commit()
1001 1001
1002 1002 return self.pull_request
1003 1003
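Driving the utility from a test, sketched:

    def test_pr_lifecycle(pr_util):
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True, name_suffix=u'lifecycle')
        pr_util.add_one_commit()  # pulls 'c3' and updates the PR
        pr_util.close()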
1004 1004 def approve(self):
1005 1005 self.create_status_votes(
1006 1006 ChangesetStatus.STATUS_APPROVED,
1007 1007 *self.pull_request.reviewers)
1008 1008
1009 1009 def close(self):
1010 1010 PullRequestModel().close_pull_request(self.pull_request, self.author)
1011 1011
1012 1012 def _default_branch_reference(self, commit_message):
1013 1013 reference = '%s:%s:%s' % (
1014 1014 'branch',
1015 1015 self.backend.default_branch_name,
1016 1016 self.commit_ids[commit_message])
1017 1017 return reference
1018 1018
1019 1019 def _get_reviewers(self):
1020 1020 return [
1021 1021 (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
1022 1022 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
1023 1023 ]
1024 1024
1025 1025 def update_source_repository(self, head=None):
1026 1026 heads = [head or 'c3']
1027 1027 self.backend.pull_heads(self.source_repository, heads=heads)
1028 1028
1029 1029 def add_one_commit(self, head=None):
1030 1030 self.update_source_repository(head=head)
1031 1031 old_commit_ids = set(self.pull_request.revisions)
1032 1032 PullRequestModel().update_commits(self.pull_request)
1033 1033 commit_ids = set(self.pull_request.revisions)
1034 1034 new_commit_ids = commit_ids - old_commit_ids
1035 1035 assert len(new_commit_ids) == 1
1036 1036 return new_commit_ids.pop()
1037 1037
1038 1038 def remove_one_commit(self):
1039 1039 assert len(self.pull_request.revisions) == 2
1040 1040 source_vcs = self.source_repository.scm_instance()
1041 1041 removed_commit_id = source_vcs.commit_ids[-1]
1042 1042
1043 1043 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1044 1044 # remove the if once that's sorted out.
1045 1045 if self.backend.alias == "git":
1046 1046 kwargs = {'branch_name': self.backend.default_branch_name}
1047 1047 else:
1048 1048 kwargs = {}
1049 1049 source_vcs.strip(removed_commit_id, **kwargs)
1050 1050
1051 1051 PullRequestModel().update_commits(self.pull_request)
1052 1052 assert len(self.pull_request.revisions) == 1
1053 1053 return removed_commit_id
1054 1054
1055 1055 def create_comment(self, linked_to=None):
1056 1056 comment = CommentsModel().create(
1057 1057 text=u"Test comment",
1058 1058 repo=self.target_repository.repo_name,
1059 1059 user=self.author,
1060 1060 pull_request=self.pull_request)
1061 1061 assert comment.pull_request_version_id is None
1062 1062
1063 1063 if linked_to:
1064 1064 PullRequestModel()._link_comments_to_version(linked_to)
1065 1065
1066 1066 return comment
1067 1067
1068 1068 def create_inline_comment(
1069 1069 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1070 1070 comment = CommentsModel().create(
1071 1071 text=u"Test comment",
1072 1072 repo=self.target_repository.repo_name,
1073 1073 user=self.author,
1074 1074 line_no=line_no,
1075 1075 f_path=file_path,
1076 1076 pull_request=self.pull_request)
1077 1077 assert comment.pull_request_version_id is None
1078 1078
1079 1079 if linked_to:
1080 1080 PullRequestModel()._link_comments_to_version(linked_to)
1081 1081
1082 1082 return comment
1083 1083
1084 1084 def create_version_of_pull_request(self):
1085 1085 pull_request = self.create_pull_request()
1086 1086 version = PullRequestModel()._create_version_from_snapshot(
1087 1087 pull_request)
1088 1088 return version
1089 1089
1090 1090 def create_status_votes(self, status, *reviewers):
1091 1091 for reviewer in reviewers:
1092 1092 ChangesetStatusModel().set_status(
1093 1093 repo=self.pull_request.target_repo,
1094 1094 status=status,
1095 1095 user=reviewer.user_id,
1096 1096 pull_request=self.pull_request)
1097 1097
1098 1098 def set_mergeable(self, value):
1099 1099 if not self.mergeable_patcher:
1100 1100 self.mergeable_patcher = mock.patch.object(
1101 1101 VcsSettingsModel, 'get_general_settings')
1102 1102 self.mergeable_mock = self.mergeable_patcher.start()
1103 1103 self.mergeable_mock.return_value = {
1104 1104 'rhodecode_pr_merge_enabled': value}
1105 1105
1106 1106 def cleanup(self):
1107 1107 # In case the source repository is already cleaned up, the pull
1108 1108 # request will already be deleted.
1109 1109 pull_request = PullRequest().get(self.pull_request_id)
1110 1110 if pull_request:
1111 1111 PullRequestModel().delete(pull_request, pull_request.author)
1112 1112 Session().commit()
1113 1113
1114 1114 if self.notification_patcher:
1115 1115 self.notification_patcher.stop()
1116 1116
1117 1117 if self.mergeable_patcher:
1118 1118 self.mergeable_patcher.stop()
1119 1119
1120 1120
1121 1121 @pytest.fixture
1122 1122 def user_admin(baseapp):
1123 1123 """
1124 1124 Provides the default admin test user as an instance of `db.User`.
1125 1125 """
1126 1126 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1127 1127 return user
1128 1128
1129 1129
1130 1130 @pytest.fixture
1131 1131 def user_regular(baseapp):
1132 1132 """
1133 1133 Provides the default regular test user as an instance of `db.User`.
1134 1134 """
1135 1135 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1136 1136 return user
1137 1137
1138 1138
1139 1139 @pytest.fixture
1140 1140 def user_util(request, db_connection):
1141 1141 """
1142 1142 Provides a wired instance of `UserUtility` with integrated cleanup.
1143 1143 """
1144 1144 utility = UserUtility(test_name=request.node.name)
1145 1145 request.addfinalizer(utility.cleanup)
1146 1146 return utility
1147 1147
1148 1148
1149 1149 # TODO: johbo: Split this up into utilities per domain or something similar
1150 1150 class UserUtility(object):
1151 1151
1152 1152 def __init__(self, test_name="test"):
1153 1153 self._test_name = self._sanitize_name(test_name)
1154 1154 self.fixture = Fixture()
1155 1155 self.repo_group_ids = []
1156 1156 self.repos_ids = []
1157 1157 self.user_ids = []
1158 1158 self.user_group_ids = []
1159 1159 self.user_repo_permission_ids = []
1160 1160 self.user_group_repo_permission_ids = []
1161 1161 self.user_repo_group_permission_ids = []
1162 1162 self.user_group_repo_group_permission_ids = []
1163 1163 self.user_user_group_permission_ids = []
1164 1164 self.user_group_user_group_permission_ids = []
1165 1165 self.user_permissions = []
1166 1166
1167 1167 def _sanitize_name(self, name):
1168 1168 for char in ['[', ']']:
1169 1169 name = name.replace(char, '_')
1170 1170 return name
1171 1171
    def create_repo_group(
            self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
        group_name = "{prefix}_repogroup_{count}".format(
            prefix=self._test_name,
            count=len(self.repo_group_ids))
        repo_group = self.fixture.create_repo_group(
            group_name, cur_user=owner)
        if auto_cleanup:
            self.repo_group_ids.append(repo_group.group_id)
        return repo_group

    def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
                    auto_cleanup=True, repo_type='hg', bare=False):
        repo_name = "{prefix}_repository_{count}".format(
            prefix=self._test_name,
            count=len(self.repos_ids))

        repository = self.fixture.create_repo(
            repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
        if auto_cleanup:
            self.repos_ids.append(repository.repo_id)
        return repository

    def create_user(self, auto_cleanup=True, **kwargs):
        user_name = "{prefix}_user_{count}".format(
            prefix=self._test_name,
            count=len(self.user_ids))
        user = self.fixture.create_user(user_name, **kwargs)
        if auto_cleanup:
            self.user_ids.append(user.user_id)
        return user

    def create_additional_user_email(self, user, email):
        uem = self.fixture.create_additional_user_email(user=user, email=email)
        return uem

    def create_user_with_group(self):
        user = self.create_user()
        user_group = self.create_user_group(members=[user])
        return user, user_group

    def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
                          auto_cleanup=True, **kwargs):
        group_name = "{prefix}_usergroup_{count}".format(
            prefix=self._test_name,
            count=len(self.user_group_ids))
        user_group = self.fixture.create_user_group(
            group_name, cur_user=owner, **kwargs)

        if auto_cleanup:
            self.user_group_ids.append(user_group.users_group_id)
        if members:
            for user in members:
                UserGroupModel().add_user_to_group(user_group, user)
        return user_group

    def grant_user_permission(self, user_name, permission_name):
        self.inherit_default_user_permissions(user_name, False)
        self.user_permissions.append((user_name, permission_name))

    def grant_user_permission_to_repo_group(
            self, repo_group, user, permission_name):
        permission = RepoGroupModel().grant_user_permission(
            repo_group, user, permission_name)
        self.user_repo_group_permission_ids.append(
            (repo_group.group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo_group(
            self, repo_group, user_group, permission_name):
        permission = RepoGroupModel().grant_user_group_permission(
            repo_group, user_group, permission_name)
        self.user_group_repo_group_permission_ids.append(
            (repo_group.group_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_repo(
            self, repo, user, permission_name):
        permission = RepoModel().grant_user_permission(
            repo, user, permission_name)
        self.user_repo_permission_ids.append(
            (repo.repo_id, user.user_id))
        return permission

    def grant_user_group_permission_to_repo(
            self, repo, user_group, permission_name):
        permission = RepoModel().grant_user_group_permission(
            repo, user_group, permission_name)
        self.user_group_repo_permission_ids.append(
            (repo.repo_id, user_group.users_group_id))
        return permission

    def grant_user_permission_to_user_group(
            self, target_user_group, user, permission_name):
        permission = UserGroupModel().grant_user_permission(
            target_user_group, user, permission_name)
        self.user_user_group_permission_ids.append(
            (target_user_group.users_group_id, user.user_id))
        return permission

    def grant_user_group_permission_to_user_group(
            self, target_user_group, user_group, permission_name):
        permission = UserGroupModel().grant_user_group_permission(
            target_user_group, user_group, permission_name)
        self.user_group_user_group_permission_ids.append(
            (target_user_group.users_group_id, user_group.users_group_id))
        return permission

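    # Bookkeeping sketch (illustrative): each grant_* helper above stores an
    # id pair so that cleanup() can call the matching revoke_* API later:
    #
    #   util.grant_user_permission_to_repo(repo, user, 'repository.write')
    #   # records (repo.repo_id, user.user_id) in user_repo_permission_ids;
    #   # _cleanup_permissions() then calls
    #   # RepoModel().revoke_user_permission(repo.repo_id, user.user_id)
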
    def revoke_user_permission(self, user_name, permission_name):
        self.inherit_default_user_permissions(user_name, True)
        UserModel().revoke_perm(user_name, permission_name)

    def inherit_default_user_permissions(self, user_name, value):
        user = UserModel().get_by_username(user_name)
        user.inherit_default_permissions = value
        Session().add(user)
        Session().commit()

    def cleanup(self):
        self._cleanup_permissions()
        self._cleanup_repos()
        self._cleanup_repo_groups()
        self._cleanup_user_groups()
        self._cleanup_users()

    def _cleanup_permissions(self):
        if self.user_permissions:
            for user_name, permission_name in self.user_permissions:
                self.revoke_user_permission(user_name, permission_name)

        for permission in self.user_repo_permission_ids:
            RepoModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_permission_ids:
            RepoModel().revoke_user_group_permission(*permission)

        for permission in self.user_repo_group_permission_ids:
            RepoGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_repo_group_permission_ids:
            RepoGroupModel().revoke_user_group_permission(*permission)

        for permission in self.user_user_group_permission_ids:
            UserGroupModel().revoke_user_permission(*permission)

        for permission in self.user_group_user_group_permission_ids:
            UserGroupModel().revoke_user_group_permission(*permission)

    def _cleanup_repo_groups(self):
        def _repo_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = RepoGroup.get(first_group_id)
            second_group = RepoGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.group_name.split('/')) if first_group else 0)
            second_group_parts = (
                len(second_group.group_name.split('/')) if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        sorted_repo_group_ids = sorted(
            self.repo_group_ids, cmp=_repo_group_compare)
        for repo_group_id in sorted_repo_group_ids:
            self.fixture.destroy_repo_group(repo_group_id)

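    # Ordering note (illustrative): the Python 2 cmp above sorts deeper
    # paths first, so e.g. 'parent/child' is destroyed before 'parent' and
    # no group is removed while it still contains children.
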
    def _cleanup_repos(self):
        sorted_repos_ids = sorted(self.repos_ids)
        for repo_id in sorted_repos_ids:
            self.fixture.destroy_repo(repo_id)

    def _cleanup_user_groups(self):
        def _user_group_compare(first_group_id, second_group_id):
            """
            Gives higher priority to the groups with the most complex paths
            """
            first_group = UserGroup.get(first_group_id)
            second_group = UserGroup.get(second_group_id)
            first_group_parts = (
                len(first_group.users_group_name.split('/'))
                if first_group else 0)
            second_group_parts = (
                len(second_group.users_group_name.split('/'))
                if second_group else 0)
            return cmp(second_group_parts, first_group_parts)

        sorted_user_group_ids = sorted(
            self.user_group_ids, cmp=_user_group_compare)
        for user_group_id in sorted_user_group_ids:
            self.fixture.destroy_user_group(user_group_id)

    def _cleanup_users(self):
        for user_id in self.user_ids:
            self.fixture.destroy_user(user_id)


# TODO: Think about moving this into a pytest-pyro package and make it a
# pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """
    Add the remote traceback to the report if the exception carries this
    information.

    VCSServer attaches this information as the attribute
    `_vcs_server_traceback` to the exception instance.
    """
    outcome = yield
    report = outcome.get_result()
    if call.excinfo:
        _add_vcsserver_remote_traceback(report, call.excinfo.value)


def _add_vcsserver_remote_traceback(report, exc):
    vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)

    if vcsserver_traceback:
        section = 'VCSServer remote traceback ' + report.when
        report.sections.append((section, vcsserver_traceback))


@pytest.fixture(scope='session')
def testrun():
    return {
        'uuid': uuid.uuid4(),
        'start': datetime.datetime.utcnow().isoformat(),
        'timestamp': int(time.time()),
    }


@pytest.fixture(autouse=True)
def collect_appenlight_stats(request, testrun):
    """
    This fixture reports the memory consumption of individual tests.

    It gathers data based on `psutil` and sends them to Appenlight. The
    option ``--appenlight`` has to be used to enable this fixture and the
    API key for your application has to be provided in
    ``--appenlight-api-key``.
    """
    try:
        # cygwin does not yet have psutil support.
        import psutil
    except ImportError:
        return

    if not request.config.getoption('--appenlight'):
        return
    else:
        # Only request the baseapp fixture if appenlight tracking is
        # enabled. This will speed up a test run of unit tests by 2 to 3
        # seconds if appenlight is not enabled.
        baseapp = request.getfuncargvalue("baseapp")
    url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
    client = AppenlightClient(
        url=url,
        api_key=request.config.getoption('--appenlight-api-key'),
        namespace=request.node.nodeid,
        request=str(testrun['uuid']),
        testrun=testrun)

    client.collect({
        'message': "Starting",
    })

    server_and_port = baseapp.config.get_settings()['vcs.server']
    protocol = baseapp.config.get_settings()['vcs.server.protocol']
    server = create_vcsserver_proxy(server_and_port, protocol)
    with server:
        vcs_pid = server.get_pid()
        server.run_gc()
        vcs_process = psutil.Process(vcs_pid)
        mem = vcs_process.memory_info()
        client.tag_before('vcsserver.rss', mem.rss)
        client.tag_before('vcsserver.vms', mem.vms)

    test_process = psutil.Process()
    mem = test_process.memory_info()
    client.tag_before('test.rss', mem.rss)
    client.tag_before('test.vms', mem.vms)

    client.tag_before('time', time.time())

    @request.addfinalizer
    def send_stats():
        client.tag_after('time', time.time())
        with server:
            gc_stats = server.run_gc()
            for tag, value in gc_stats.items():
                client.tag_after(tag, value)
            mem = vcs_process.memory_info()
            client.tag_after('vcsserver.rss', mem.rss)
            client.tag_after('vcsserver.vms', mem.vms)

        mem = test_process.memory_info()
        client.tag_after('test.rss', mem.rss)
        client.tag_after('test.vms', mem.vms)

        client.collect({
            'message': "Finished",
        })
        client.send_stats()

    return client


class AppenlightClient(object):

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        self.tags_after[tag] = value

    def collect(self, data):
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                delta = self.tags_after[key] - value
                tags.append((key + '.delta', delta))
            except Exception:
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if response.status_code != 200:
            pprint.pprint(self.stats)
            print(response.headers)
            print(response.text)
            raise Exception('Sending to appenlight failed')

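# A minimal usage sketch of the client (url, key and values hypothetical):
# pairing tag_before/tag_after lets send_stats() derive a numeric delta per
# tag:
#
#   client = AppenlightClient(
#       url='http://appenlight.example', api_key='KEY',
#       testrun={'start': '2019-01-01T00:00:00', 'timestamp': 0})
#   client.tag_before('test.rss', 1000)
#   client.tag_after('test.rss', 1500)
#   client.send_stats()  # posts 'test.rss.before', '.after', '.delta' == 500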

@pytest.fixture
def gist_util(request, db_connection):
    """
    Provides a wired instance of `GistUtility` with integrated cleanup.
    """
    utility = GistUtility()
    request.addfinalizer(utility.cleanup)
    return utility


class GistUtility(object):
    def __init__(self):
        self.fixture = Fixture()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        gist = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(gist.gist_id)
        return gist

    def cleanup(self):
        for id_ in self.gist_ids:
            self.fixture.destroy_gists(str(id_))


@pytest.fixture
def enabled_backends(request):
    backends = request.config.option.backends
    return backends[:]


@pytest.fixture
def settings_util(request, db_connection):
    """
    Provides a wired instance of `SettingsUtility` with integrated cleanup.
    """
    utility = SettingsUtility()
    request.addfinalizer(utility.cleanup)
    return utility


class SettingsUtility(object):
    def __init__(self):
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        key = key or hashlib.sha1(
            '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()

        setting = RepoRhodeCodeUi()
        setting.repository_id = repo.repo_id
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_ui_ids.append(setting.ui_id)
        return setting

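    # Key derivation note (values illustrative): when no explicit key is
    # given, the sha1 of section + value (+ repo_id) produces a stable
    # ui_key, so repeated calls with the same arguments map to the same
    # setting, e.g. in Python 2:
    #
    #   hashlib.sha1('{}{}{}'.format('hooks', 'some_value', 3)).hexdigest()
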
    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()

        setting = RhodeCodeUi()
        setting.ui_section = section
        setting.ui_value = value
        setting.ui_key = key
        setting.ui_active = active
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_ui_ids.append(setting.ui_id)
        return setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
        return setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        setting = RhodeCodeSetting(key=name, val=value, type=type_)
        Session().add(setting)
        Session().commit()

        if cleanup:
            self.rhodecode_setting_ids.append(setting.app_settings_id)

        return setting

    def cleanup(self):
        for id_ in self.rhodecode_ui_ids:
            setting = RhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.rhodecode_setting_ids:
            setting = RhodeCodeSetting.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_ui_ids:
            setting = RepoRhodeCodeUi.get(id_)
            Session().delete(setting)

        for id_ in self.repo_rhodecode_setting_ids:
            setting = RepoRhodeCodeSetting.get(id_)
            Session().delete(setting)

        Session().commit()


@pytest.fixture
def no_notifications(request):
    notification_patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    notification_patcher.start()
    request.addfinalizer(notification_patcher.stop)


@pytest.fixture(scope='session')
def repeat(request):
    """
    The number of repetitions is based on this fixture.

    Slower calls may divide it by 10 or 100. The value is chosen so that the
    tests are not too slow in our default test suite.
    """
    return request.config.getoption('--repeat')


@pytest.fixture
def rhodecode_fixtures():
    return Fixture()


@pytest.fixture
def context_stub():
    """
    Stub context object.
    """
    context = pyramid.testing.DummyResource()
    return context


@pytest.fixture
def request_stub():
    """
    Stub request object.
    """
    from rhodecode.lib.base import bootstrap_request
    request = bootstrap_request(scheme='https')
    return request


@pytest.fixture
def config_stub(request, request_stub):
    """
    Set up pyramid.testing and return the Configurator.
    """
    from rhodecode.lib.base import bootstrap_config
    config = bootstrap_config(request=request_stub)

    @request.addfinalizer
    def cleanup():
        pyramid.testing.tearDown()

    return config


@pytest.fixture
def StubIntegrationType():
    class _StubIntegrationType(IntegrationTypeBase):
        """ Test integration type class """

        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'

        @classmethod
        def icon(cls):
            return 'test_icon_html_image'

        def __init__(self, settings):
            super(_StubIntegrationType, self).__init__(settings)
            self.sent_events = []  # for testing

        def send_event(self, event):
            self.sent_events.append(event)

        def settings_schema(self):
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )
            return SettingsSchema()

    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType

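# Illustrative sketch (test name and event object hypothetical): the stub
# records events instead of delivering them, so assertions can inspect
# `sent_events` directly:
#
#   def test_stub_records_events(StubIntegrationType,
#                                stub_integration_settings):
#       integration = StubIntegrationType(stub_integration_settings)
#       integration.send_event('fake-event')
#       assert integration.sent_events == ['fake-event']
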
@pytest.fixture
def stub_integration_settings():
    return {
        'test_string_field': 'some data',
        'test_int_field': 100,
    }


@pytest.fixture
def repo_integration_stub(request, repo_stub, StubIntegrationType,
                          stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repo integration',
        repo=repo_stub, repo_group=None, child_repos_only=None)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=True)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def repogroup_recursive_integration_stub(request, test_repo_group,
                                         StubIntegrationType,
                                         stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test recursive repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=False)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def global_integration_stub(request, StubIntegrationType,
                            stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test global integration',
        repo=None, repo_group=None, child_repos_only=None)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def root_repos_integration_stub(request, StubIntegrationType,
                                stub_integration_settings):
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test root repos integration',
        repo=None, repo_group=None, child_repos_only=True)

    @request.addfinalizer
    def cleanup():
        IntegrationModel().delete(integration)

    return integration


@pytest.fixture
def local_dt_to_utc():
    def _factory(dt):
        return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
            dateutil.tz.tzutc()).replace(tzinfo=None)
    return _factory

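# For example (illustrative, on a machine at UTC+2): a naive local noon
# comes back as the naive UTC datetime two hours earlier:
#
#   def test_conversion(local_dt_to_utc):
#       utc = local_dt_to_utc(datetime.datetime(2019, 1, 1, 12, 0))
#       assert utc == datetime.datetime(2019, 1, 1, 10, 0)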

@pytest.fixture
def disable_anonymous_user(request, baseapp):
    set_anonymous_access(False)

    @request.addfinalizer
    def cleanup():
        set_anonymous_access(True)


@pytest.fixture(scope='module')
def rc_fixture(request):
    return Fixture()


@pytest.fixture
def repo_groups(request):
    fixture = Fixture()

    session = Session()
    zombie_group = fixture.create_repo_group('zombie')
    parent_group = fixture.create_repo_group('parent')
    child_group = fixture.create_repo_group('parent/child')
    groups_in_db = session.query(RepoGroup).all()
    assert len(groups_in_db) == 3
    assert child_group.group_parent_id == parent_group.group_id

    @request.addfinalizer
    def cleanup():
        fixture.destroy_repo_group(zombie_group)
        fixture.destroy_repo_group(child_group)
        fixture.destroy_repo_group(parent_group)

    return zombie_group, parent_group, child_group


@pytest.fixture(scope="session")
def tmp_path_factory(request):
    """Return a minimal :class:`_pytest.tmpdir.TempPathFactory`-like stub
    for the test session.
    """

    class TempPathFactory:

        def mktemp(self, basename):
            import tempfile
            return tempfile.mktemp(basename)

    return TempPathFactory()
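# Usage note (illustrative): unlike pytest's real TempPathFactory, this
# stub's mktemp() only returns a candidate path from tempfile.mktemp() and
# does not create the directory, so callers have to create it themselves:
#
#   path = tmp_path_factory.mktemp('example')
#   os.makedirs(path)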