simplevcs: handle a case of damaged filesystem repo without 500 exception.
marcink
r2363:4c6f9023 default
@@ -1,609 +1,610 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 SimpleVCS middleware for handling protocol requests (push/clone etc.)
23 23 It's implemented with a basic auth function
24 24 """
25 25
26 26 import os
27 27 import re
28 28 import logging
29 29 import importlib
30 30 from functools import wraps
31 31
32 32 import time
33 33 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
34 34 # TODO(marcink): check if we should use webob.exc here ?
35 35 from pyramid.httpexceptions import (
36 36 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
37 37
38 38 import rhodecode
39 39 from rhodecode.authentication.base import (
40 40 authenticate, get_perms_cache_manager, VCS_TYPE)
41 41 from rhodecode.lib import caches
42 42 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
43 43 from rhodecode.lib.base import (
44 44 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
45 45 from rhodecode.lib.exceptions import (
46 46 HTTPLockedRC, HTTPRequirementError, UserCreationError,
47 47 NotAllowedToCreateUserError)
48 48 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
49 49 from rhodecode.lib.middleware import appenlight
50 50 from rhodecode.lib.middleware.utils import scm_app_http
51 51 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
52 52 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool, safe_unicode
53 53 from rhodecode.lib.vcs.conf import settings as vcs_settings
54 54 from rhodecode.lib.vcs.backends import base
55 55 from rhodecode.model import meta
56 56 from rhodecode.model.db import User, Repository, PullRequest
57 57 from rhodecode.model.scm import ScmModel
58 58 from rhodecode.model.pull_request import PullRequestModel
59 59 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
60 60
61 61 log = logging.getLogger(__name__)
62 62
63 63
64 64 def initialize_generator(factory):
65 65 """
66 66 Initializes the returned generator by draining its first element.
67 67
68 68 This can be used to give a generator an initializer, which is the code
69 69 up to the first yield statement. This decorator enforces that the first
70 70 produced element has the value ``"__init__"`` to make its special
71 71 purpose very explicit in the using code.
72 72 """
73 73
74 74 @wraps(factory)
75 75 def wrapper(*args, **kwargs):
76 76 gen = factory(*args, **kwargs)
77 77 try:
78 78 init = gen.next()
79 79 except StopIteration:
80 80 raise ValueError('Generator must yield at least one element.')
81 81 if init != "__init__":
82 82 raise ValueError('First yielded element must be "__init__".')
83 83 return gen
84 84 return wrapper
85 85
86 86
87 87 class SimpleVCS(object):
88 88 """Common functionality for SCM HTTP handlers."""
89 89
90 90 SCM = 'unknown'
91 91
92 92 acl_repo_name = None
93 93 url_repo_name = None
94 94 vcs_repo_name = None
95 95
96 96 # We have to handle requests to shadow repositories differently from requests
97 97 # to normal repositories. Therefore we have to distinguish them. To do this
98 98 # we use this regex which will match only on URLs pointing to shadow
99 99 # repositories.
100 100 shadow_repo_re = re.compile(
101 101 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
102 102 '(?P<target>{slug_pat})/' # target repo
103 103 'pull-request/(?P<pr_id>\d+)/' # pull request
104 104 'repository$' # shadow repo
105 105 .format(slug_pat=SLUG_RE.pattern))
106 106
107 107 def __init__(self, config, registry):
108 108 self.registry = registry
109 109 self.config = config
110 110 # re-populated by specialized middleware
111 111 self.repo_vcs_config = base.Config()
112 112 self.rhodecode_settings = SettingsModel().get_all_settings(cache=True)
113 113
114 114 registry.rhodecode_settings = self.rhodecode_settings
115 115 # authenticate this VCS request using authfunc
116 116 auth_ret_code_detection = \
117 117 str2bool(self.config.get('auth_ret_code_detection', False))
118 118 self.authenticate = BasicAuth(
119 119 '', authenticate, registry, config.get('auth_ret_code'),
120 120 auth_ret_code_detection)
121 121 self.ip_addr = '0.0.0.0'
122 122
123 123 @property
124 124 def base_path(self):
125 125 settings_path = self.repo_vcs_config.get(*VcsSettingsModel.PATH_SETTING)
126 126 if not settings_path:
127 127 # try, maybe we passed it in explicitly as a config option
128 128 settings_path = self.config.get('base_path')
129 129 return settings_path
130 130
131 131 def set_repo_names(self, environ):
132 132 """
133 133 This will populate the attributes acl_repo_name, url_repo_name,
134 134 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
135 135 shadow) repositories all names are equal. In case of requests to a
136 136 shadow repository the acl-name points to the target repo of the pull
137 137 request and the vcs-name points to the shadow repo file system path.
138 138 The url-name is always the URL used by the vcs client program.
139 139
140 140 Example in case of a shadow repo:
141 141 acl_repo_name = RepoGroup/MyRepo
142 142 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
143 143 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3
144 144 """
145 145 # First we set the repo name from URL for all attributes. This is the
146 146 # default if handling normal (non shadow) repo requests.
147 147 self.url_repo_name = self._get_repository_name(environ)
148 148 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
149 149 self.is_shadow_repo = False
150 150
151 151 # Check if this is a request to a shadow repository.
152 152 match = self.shadow_repo_re.match(self.url_repo_name)
153 153 if match:
154 154 match_dict = match.groupdict()
155 155
156 156 # Build acl repo name from regex match.
157 157 acl_repo_name = safe_unicode('{groups}{target}'.format(
158 158 groups=match_dict['groups'] or '',
159 159 target=match_dict['target']))
160 160
161 161 # Retrieve pull request instance by ID from regex match.
162 162 pull_request = PullRequest.get(match_dict['pr_id'])
163 163
164 164 # Only proceed if we got a pull request and if acl repo name from
165 165 # URL equals the target repo name of the pull request.
166 166 if pull_request and (acl_repo_name ==
167 167 pull_request.target_repo.repo_name):
168 168 # Get file system path to shadow repository.
169 169 workspace_id = PullRequestModel()._workspace_id(pull_request)
170 170 target_vcs = pull_request.target_repo.scm_instance()
171 171 vcs_repo_name = target_vcs._get_shadow_repository_path(
172 172 workspace_id)
173 173
174 174 # Store names for later usage.
175 175 self.vcs_repo_name = vcs_repo_name
176 176 self.acl_repo_name = acl_repo_name
177 177 self.is_shadow_repo = True
178 178
179 179 log.debug('Setting all VCS repository names: %s', {
180 180 'acl_repo_name': self.acl_repo_name,
181 181 'url_repo_name': self.url_repo_name,
182 182 'vcs_repo_name': self.vcs_repo_name,
183 183 })
184 184
185 185 @property
186 186 def scm_app(self):
187 187 custom_implementation = self.config['vcs.scm_app_implementation']
188 188 if custom_implementation == 'http':
189 189 log.info('Using HTTP implementation of scm app.')
190 190 scm_app_impl = scm_app_http
191 191 else:
192 192 log.info('Using custom implementation of scm_app: "{}"'.format(
193 193 custom_implementation))
194 194 scm_app_impl = importlib.import_module(custom_implementation)
195 195 return scm_app_impl
196 196
197 197 def _get_by_id(self, repo_name):
198 198 """
199 199 Gets the special pattern _<ID> from the clone URL and tries to replace it
200 200 with a repository_name, to support _<ID> non-changeable URLs
201 201 """
202 202
203 203 data = repo_name.split('/')
204 204 if len(data) >= 2:
205 205 from rhodecode.model.repo import RepoModel
206 206 by_id_match = RepoModel().get_repo_by_id(repo_name)
207 207 if by_id_match:
208 208 data[1] = by_id_match.repo_name
209 209
210 210 return safe_str('/'.join(data))
211 211
212 212 def _invalidate_cache(self, repo_name):
213 213 """
214 214 Sets the cache for this repository for invalidation on next access
215 215
216 216 :param repo_name: full repo name, also a cache key
217 217 """
218 218 ScmModel().mark_for_invalidation(repo_name)
219 219
220 220 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
221 221 db_repo = Repository.get_by_repo_name(repo_name)
222 222 if not db_repo:
223 223 log.debug('Repository `%s` not found inside the database.',
224 224 repo_name)
225 225 return False
226 226
227 227 if db_repo.repo_type != scm_type:
228 228 log.warning(
229 229 'Repository `%s` has incorrect scm_type, expected %s got %s',
230 230 repo_name, db_repo.repo_type, scm_type)
231 231 return False
232 232
233 return is_valid_repo(repo_name, base_path, explicit_scm=scm_type)
233 return is_valid_repo(repo_name, base_path,
234 explicit_scm=scm_type, expect_scm=scm_type)
234 235
235 236 def valid_and_active_user(self, user):
236 237 """
237 238 Checks that the user is not empty and, if it is an actual object, checks
238 239 whether it is active.
239 240
240 241 :param user: user object or None
241 242 :return: boolean
242 243 """
243 244 if user is None:
244 245 return False
245 246
246 247 elif user.active:
247 248 return True
248 249
249 250 return False
250 251
251 252 @property
252 253 def is_shadow_repo_dir(self):
253 254 return os.path.isdir(self.vcs_repo_name)
254 255
255 256 def _check_permission(self, action, user, repo_name, ip_addr=None,
256 257 plugin_id='', plugin_cache_active=False, cache_ttl=0):
257 258 """
258 259 Checks permissions using the action (push/pull), user and repository
259 260 name. If plugin_cache_active and cache_ttl are set, it will use the plugin
260 261 which authenticated the user to store the cached permissions result for
261 262 the number of seconds given in cache_ttl
262 263
263 264 :param action: push or pull action
264 265 :param user: user instance
265 266 :param repo_name: repository name
266 267 """
267 268
268 269 # get instance of cache manager configured for a namespace
269 270 cache_manager = get_perms_cache_manager(custom_ttl=cache_ttl)
270 271 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
271 272 plugin_id, plugin_cache_active, cache_ttl)
272 273
273 274 # for environ based auth the password can be empty, but then the validation
274 275 # is done on the server that fills in the env data needed for authentication
275 276 _perm_calc_hash = caches.compute_key_from_params(
276 277 plugin_id, action, user.user_id, repo_name, ip_addr)
277 278
278 279 # _authenticate is a wrapper for .auth() method of plugin.
279 280 # it checks if .auth() sends proper data.
280 281 # For RhodeCodeExternalAuthPlugin it also maps users to
281 282 # Database and maps the attributes returned from .auth()
282 283 # to RhodeCode database. If this function returns data
283 284 # then auth is correct.
284 285 start = time.time()
285 286 log.debug('Running plugin `%s` permissions check', plugin_id)
286 287
287 288 def perm_func():
288 289 """
289 290 This function is used internally by the Beaker cache to calculate
290 291 the results
291 292 """
292 293 log.debug('auth: calculating permission access now...')
293 294 # check IP
294 295 inherit = user.inherit_default_permissions
295 296 ip_allowed = AuthUser.check_ip_allowed(
296 297 user.user_id, ip_addr, inherit_from_default=inherit)
297 298 if ip_allowed:
298 299 log.info('Access for IP:%s allowed', ip_addr)
299 300 else:
300 301 return False
301 302
302 303 if action == 'push':
303 304 perms = ('repository.write', 'repository.admin')
304 305 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
305 306 return False
306 307
307 308 else:
308 309 # any other action need at least read permission
309 310 perms = (
310 311 'repository.read', 'repository.write', 'repository.admin')
311 312 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
312 313 return False
313 314
314 315 return True
315 316
316 317 if plugin_cache_active:
317 318 log.debug('Trying to fetch cached perms by %s', _perm_calc_hash[:6])
318 319 perm_result = cache_manager.get(
319 320 _perm_calc_hash, createfunc=perm_func)
320 321 else:
321 322 perm_result = perm_func()
322 323
323 324 auth_time = time.time() - start
324 325 log.debug('Permissions for plugin `%s` completed in %.3fs, '
325 326 'expiration time of fetched cache %.1fs.',
326 327 plugin_id, auth_time, cache_ttl)
327 328
328 329 return perm_result
329 330
330 331 def _check_ssl(self, environ, start_response):
331 332 """
332 333 Checks the SSL flag and returns False if SSL is required but not
333 334 present, True otherwise
334 335 """
335 336 org_proto = environ['wsgi._org_proto']
336 337 # check if we have SSL required ! if not it's a bad request !
337 338 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
338 339 if require_ssl and org_proto == 'http':
339 340 log.debug('proto is %s and SSL is required BAD REQUEST !',
340 341 org_proto)
341 342 return False
342 343 return True
343 344
344 345 def __call__(self, environ, start_response):
345 346 try:
346 347 return self._handle_request(environ, start_response)
347 348 except Exception:
348 349 log.exception("Exception while handling request")
349 350 appenlight.track_exception(environ)
350 351 return HTTPInternalServerError()(environ, start_response)
351 352 finally:
352 353 meta.Session.remove()
353 354
354 355 def _handle_request(self, environ, start_response):
355 356
356 357 if not self._check_ssl(environ, start_response):
357 358 reason = ('SSL required, while RhodeCode was unable '
358 359 'to detect this as SSL request')
359 360 log.debug('User not allowed to proceed, %s', reason)
360 361 return HTTPNotAcceptable(reason)(environ, start_response)
361 362
362 363 if not self.url_repo_name:
363 364 log.warning('Repository name is empty: %s', self.url_repo_name)
364 365 # failed to get repo name, we fail now
365 366 return HTTPNotFound()(environ, start_response)
366 367 log.debug('Extracted repo name is %s', self.url_repo_name)
367 368
368 369 ip_addr = get_ip_addr(environ)
369 370 user_agent = get_user_agent(environ)
370 371 username = None
371 372
372 373 # skip passing error to error controller
373 374 environ['pylons.status_code_redirect'] = True
374 375
375 376 # ======================================================================
376 377 # GET ACTION PULL or PUSH
377 378 # ======================================================================
378 379 action = self._get_action(environ)
379 380
380 381 # ======================================================================
381 382 # Check if this is a request to a shadow repository of a pull request.
382 383 # In this case only pull action is allowed.
383 384 # ======================================================================
384 385 if self.is_shadow_repo and action != 'pull':
385 386 reason = 'Only pull action is allowed for shadow repositories.'
386 387 log.debug('User not allowed to proceed, %s', reason)
387 388 return HTTPNotAcceptable(reason)(environ, start_response)
388 389
389 390 # Check if the shadow repo actually exists, in case someone refers
390 391 # to it after it has been deleted because of a successful merge.
391 392 if self.is_shadow_repo and not self.is_shadow_repo_dir:
392 393 log.debug('Shadow repo detected, and shadow repo dir `%s` is missing',
393 394 self.is_shadow_repo_dir)
394 395 return HTTPNotFound()(environ, start_response)
395 396
396 397 # ======================================================================
397 398 # CHECK ANONYMOUS PERMISSION
398 399 # ======================================================================
399 400 if action in ['pull', 'push']:
400 401 anonymous_user = User.get_default_user()
401 402 username = anonymous_user.username
402 403 if anonymous_user.active:
403 404 # ONLY check permissions if the user is activated
404 405 anonymous_perm = self._check_permission(
405 406 action, anonymous_user, self.acl_repo_name, ip_addr)
406 407 else:
407 408 anonymous_perm = False
408 409
409 410 if not anonymous_user.active or not anonymous_perm:
410 411 if not anonymous_user.active:
411 412 log.debug('Anonymous access is disabled, running '
412 413 'authentication')
413 414
414 415 if not anonymous_perm:
415 416 log.debug('Not enough credentials to access this '
416 417 'repository as anonymous user')
417 418
418 419 username = None
419 420 # ==============================================================
420 421 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
421 422 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
422 423 # ==============================================================
423 424
424 425 # try to auth based on environ, container auth methods
425 426 log.debug('Running PRE-AUTH for container based authentication')
426 427 pre_auth = authenticate(
427 428 '', '', environ, VCS_TYPE, registry=self.registry,
428 429 acl_repo_name=self.acl_repo_name)
429 430 if pre_auth and pre_auth.get('username'):
430 431 username = pre_auth['username']
431 432 log.debug('PRE-AUTH got %s as username', username)
432 433 if pre_auth:
433 434 log.debug('PRE-AUTH successful from %s',
434 435 pre_auth.get('auth_data', {}).get('_plugin'))
435 436
436 437 # If not authenticated by the container, run basic auth;
437 438 # before that, inject the calling repo_name for special scope checks
438 439 self.authenticate.acl_repo_name = self.acl_repo_name
439 440
440 441 plugin_cache_active, cache_ttl = False, 0
441 442 plugin = None
442 443 if not username:
443 444 self.authenticate.realm = self.authenticate.get_rc_realm()
444 445
445 446 try:
446 447 auth_result = self.authenticate(environ)
447 448 except (UserCreationError, NotAllowedToCreateUserError) as e:
448 449 log.error(e)
449 450 reason = safe_str(e)
450 451 return HTTPNotAcceptable(reason)(environ, start_response)
451 452
452 453 if isinstance(auth_result, dict):
453 454 AUTH_TYPE.update(environ, 'basic')
454 455 REMOTE_USER.update(environ, auth_result['username'])
455 456 username = auth_result['username']
456 457 plugin = auth_result.get('auth_data', {}).get('_plugin')
457 458 log.info(
458 459 'MAIN-AUTH successful for user `%s` from %s plugin',
459 460 username, plugin)
460 461
461 462 plugin_cache_active, cache_ttl = auth_result.get(
462 463 'auth_data', {}).get('_ttl_cache') or (False, 0)
463 464 else:
464 465 return auth_result.wsgi_application(
465 466 environ, start_response)
466 467
467 468
468 469 # ==============================================================
469 470 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
470 471 # ==============================================================
471 472 user = User.get_by_username(username)
472 473 if not self.valid_and_active_user(user):
473 474 return HTTPForbidden()(environ, start_response)
474 475 username = user.username
475 476 user.update_lastactivity()
476 477 meta.Session().commit()
477 478
478 479 # check user attributes for password change flag
479 480 user_obj = user
480 481 if user_obj and user_obj.username != User.DEFAULT_USER and \
481 482 user_obj.user_data.get('force_password_change'):
482 483 reason = 'password change required'
483 484 log.debug('User not allowed to authenticate, %s', reason)
484 485 return HTTPNotAcceptable(reason)(environ, start_response)
485 486
486 487 # check permissions for this repository
487 488 perm = self._check_permission(
488 489 action, user, self.acl_repo_name, ip_addr,
489 490 plugin, plugin_cache_active, cache_ttl)
490 491 if not perm:
491 492 return HTTPForbidden()(environ, start_response)
492 493
493 494 # extras are injected into UI object and later available
494 495 # in hooks executed by RhodeCode
495 496 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
496 497 extras = vcs_operation_context(
497 498 environ, repo_name=self.acl_repo_name, username=username,
498 499 action=action, scm=self.SCM, check_locking=check_locking,
499 500 is_shadow_repo=self.is_shadow_repo
500 501 )
501 502
502 503 # ======================================================================
503 504 # REQUEST HANDLING
504 505 # ======================================================================
505 506 repo_path = os.path.join(
506 507 safe_str(self.base_path), safe_str(self.vcs_repo_name))
507 508 log.debug('Repository path is %s', repo_path)
508 509
509 510 fix_PATH()
510 511
511 512 log.info(
512 513 '%s action on %s repo "%s" by "%s" from %s %s',
513 514 action, self.SCM, safe_str(self.url_repo_name),
514 515 safe_str(username), ip_addr, user_agent)
515 516
516 517 return self._generate_vcs_response(
517 518 environ, start_response, repo_path, extras, action)
518 519
519 520 @initialize_generator
520 521 def _generate_vcs_response(
521 522 self, environ, start_response, repo_path, extras, action):
522 523 """
523 524 Returns a generator for the response content.
524 525
525 526 This method is implemented as a generator, so that it can trigger
526 527 the cache validation after all content is sent back to the client. It
527 528 also handles the locking exceptions which will be triggered when
528 529 the first chunk is produced by the underlying WSGI application.
529 530 """
530 531 callback_daemon, extras = self._prepare_callback_daemon(extras)
531 532 config = self._create_config(extras, self.acl_repo_name)
532 533 log.debug('HOOKS extras is %s', extras)
533 534 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
534 535
535 536 try:
536 537 with callback_daemon:
537 538 try:
538 539 response = app(environ, start_response)
539 540 finally:
540 541 # This statement works together with the decorator
541 542 # "initialize_generator" above. The decorator ensures that
542 543 # we hit the first yield statement before the generator is
543 544 # returned back to the WSGI server. This is needed to
544 545 # ensure that the call to "app" above triggers the
545 546 # needed callback to "start_response" before the
546 547 # generator is actually used.
547 548 yield "__init__"
548 549
549 550 for chunk in response:
550 551 yield chunk
551 552 except Exception as exc:
552 553 # TODO: martinb: Exceptions are only raised in case of the Pyro4
553 554 # backend. Refactor this except block after dropping Pyro4 support.
554 555 # TODO: johbo: Improve "translating" back the exception.
555 556 if getattr(exc, '_vcs_kind', None) == 'repo_locked':
556 557 exc = HTTPLockedRC(*exc.args)
557 558 _code = rhodecode.CONFIG.get('lock_ret_code')
558 559 log.debug('Repository LOCKED ret code %s!', (_code,))
559 560 elif getattr(exc, '_vcs_kind', None) == 'requirement':
560 561 log.debug(
561 562 'Repository requires features unknown to this Mercurial')
562 563 exc = HTTPRequirementError(*exc.args)
563 564 else:
564 565 raise
565 566
566 567 for chunk in exc(environ, start_response):
567 568 yield chunk
568 569 finally:
569 570 # invalidate cache on push
570 571 try:
571 572 if action == 'push':
572 573 self._invalidate_cache(self.url_repo_name)
573 574 finally:
574 575 meta.Session.remove()
575 576
576 577 def _get_repository_name(self, environ):
577 578 """Get repository name out of the environmnent
578 579
579 580 :param environ: WSGI environment
580 581 """
581 582 raise NotImplementedError()
582 583
583 584 def _get_action(self, environ):
584 585 """Map request commands into a pull or push command.
585 586
586 587 :param environ: WSGI environment
587 588 """
588 589 raise NotImplementedError()
589 590
590 591 def _create_wsgi_app(self, repo_path, repo_name, config):
591 592 """Return the WSGI app that will finally handle the request."""
592 593 raise NotImplementedError()
593 594
594 595 def _create_config(self, extras, repo_name):
595 596 """Create a safe config representation."""
596 597 raise NotImplementedError()
597 598
598 599 def _prepare_callback_daemon(self, extras):
599 600 return prepare_callback_daemon(
600 601 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
601 602 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
602 603
603 604
604 605 def _should_check_locking(query_string):
605 606 # this is kind of hacky, but due to how mercurial handles client-server
606 607 # communication, the server sees commit operations (bookmarks, phases and
607 608 # obsolescence markers) in separate transactions, so we don't want to check
608 609 # locking on those
609 610 return query_string not in ['cmd=listkeys']
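
The effect of the change above is that is_valid_and_existing_repo() now asks is_valid_repo() both to use the expected backend (explicit_scm) and to verify it (expect_scm), so a repository whose on-disk data is damaged or missing yields False instead of letting a backend error escape and surface as a 500. A minimal sketch of how a caller can rely on that return value; the dispatch function below is illustrative and not part of this changeset (the real handling lives in the SimpleVCS subclasses):

    # Illustrative sketch only -- `dispatch_vcs_request` and `handler` are
    # assumed names, not RhodeCode API. It shows that a False result can be
    # turned into a 404 instead of an internal server error.
    from pyramid.httpexceptions import HTTPNotFound

    def dispatch_vcs_request(handler, environ, start_response):
        # `handler` is assumed to be a SimpleVCS subclass instance whose
        # url_repo_name / base_path / SCM are already set for this request.
        if not handler.is_valid_and_existing_repo(
                handler.url_repo_name, handler.base_path, handler.SCM):
            # damaged or missing filesystem repo: answer "not found"
            # rather than failing with a 500
            return HTTPNotFound()(environ, start_response)
        return handler._handle_request(environ, start_response)
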
@@ -1,802 +1,802 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Utilities library for RhodeCode
23 23 """
24 24
25 25 import datetime
26 26 import decorator
27 27 import json
28 28 import logging
29 29 import os
30 30 import re
31 31 import shutil
32 32 import tempfile
33 33 import traceback
34 34 import tarfile
35 35 import warnings
36 36 import hashlib
37 37 from os.path import join as jn
38 38
39 39 import paste
40 40 import pkg_resources
41 41 from webhelpers.text import collapse, remove_formatting, strip_tags
42 42 from mako import exceptions
43 43 from pyramid.threadlocal import get_current_registry
44 44 from pyramid.request import Request
45 45
46 46 from rhodecode.lib.fakemod import create_module
47 47 from rhodecode.lib.vcs.backends.base import Config
48 48 from rhodecode.lib.vcs.exceptions import VCSError
49 49 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
50 50 from rhodecode.lib.utils2 import (
51 51 safe_str, safe_unicode, get_current_rhodecode_user, md5)
52 52 from rhodecode.model import meta
53 53 from rhodecode.model.db import (
54 54 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
55 55 from rhodecode.model.meta import Session
56 56
57 57
58 58 log = logging.getLogger(__name__)
59 59
60 60 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
61 61
62 62 # String which contains characters that are not allowed in slug names for
63 63 # repositories or repository groups. It is properly escaped to use it in
64 64 # regular expressions.
65 65 SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
66 66
67 67 # Regex that matches forbidden characters in repo/group slugs.
68 68 SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS))
69 69
70 70 # Regex that matches allowed characters in repo/group slugs.
71 71 SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS))
72 72
73 73 # Regex that matches whole repo/group slugs.
74 74 SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS))
75 75
76 76 _license_cache = None
77 77
78 78
79 79 def repo_name_slug(value):
80 80 """
81 81 Return the slug of a repository name.
82 82 This function is called on each creation/modification
83 83 of a repository to prevent bad names.
84 84 """
85 85 replacement_char = '-'
86 86
87 87 slug = remove_formatting(value)
88 88 slug = SLUG_BAD_CHAR_RE.sub('', slug)
89 89 slug = re.sub('[\s]+', '-', slug)
90 90 slug = collapse(slug, replacement_char)
91 91 return slug
92 92
93 93
94 94 #==============================================================================
95 95 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
96 96 #==============================================================================
97 97 def get_repo_slug(request):
98 98 _repo = ''
99 99
100 100 if hasattr(request, 'db_repo'):
101 101 # if our request has a db reference set, use it for the name; this
102 102 # translates example.com/_<id> into proper repo names
103 103 _repo = request.db_repo.repo_name
104 104 elif getattr(request, 'matchdict', None):
105 105 # pyramid
106 106 _repo = request.matchdict.get('repo_name')
107 107
108 108 if _repo:
109 109 _repo = _repo.rstrip('/')
110 110 return _repo
111 111
112 112
113 113 def get_repo_group_slug(request):
114 114 _group = ''
115 115 if hasattr(request, 'db_repo_group'):
116 116 # if our request has a db reference set, use it for the name; this
117 117 # translates example.com/_<id> into proper repo group names
118 118 _group = request.db_repo_group.group_name
119 119 elif getattr(request, 'matchdict', None):
120 120 # pyramid
121 121 _group = request.matchdict.get('repo_group_name')
122 122
123 123
124 124 if _group:
125 125 _group = _group.rstrip('/')
126 126 return _group
127 127
128 128
129 129 def get_user_group_slug(request):
130 130 _user_group = ''
131 131
132 132 if hasattr(request, 'db_user_group'):
133 133 _user_group = request.db_user_group.users_group_name
134 134 elif getattr(request, 'matchdict', None):
135 135 # pyramid
136 136 _user_group = request.matchdict.get('user_group_id')
137 137
138 138 try:
139 139 _user_group = UserGroup.get(_user_group)
140 140 if _user_group:
141 141 _user_group = _user_group.users_group_name
142 142 except Exception:
143 143 log.exception('Failed to get user group by id')
144 144 # catch all failures here
145 145 return None
146 146
147 147 return _user_group
148 148
149 149
150 150 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
151 151 """
152 152 Scans the given path for repos and returns (name, (type, path)) tuples
153 153
154 154 :param path: path to scan for repositories
155 155 :param recursive: recursive search and return names with subdirs in front
156 156 """
157 157
158 158 # remove ending slash for better results
159 159 path = path.rstrip(os.sep)
160 160 log.debug('now scanning in %s location recursive:%s...', path, recursive)
161 161
162 162 def _get_repos(p):
163 163 dirpaths = _get_dirpaths(p)
164 164 if not _is_dir_writable(p):
165 165 log.warning('repo path without write access: %s', p)
166 166
167 167 for dirpath in dirpaths:
168 168 if os.path.isfile(os.path.join(p, dirpath)):
169 169 continue
170 170 cur_path = os.path.join(p, dirpath)
171 171
172 172 # skip removed repos
173 173 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
174 174 continue
175 175
176 176 # skip .<something> dirs
177 177 if dirpath.startswith('.'):
178 178 continue
179 179
180 180 try:
181 181 scm_info = get_scm(cur_path)
182 182 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
183 183 except VCSError:
184 184 if not recursive:
185 185 continue
186 186 # check if this dir contains other repos for recursive scan
187 187 rec_path = os.path.join(p, dirpath)
188 188 if os.path.isdir(rec_path):
189 189 for inner_scm in _get_repos(rec_path):
190 190 yield inner_scm
191 191
192 192 return _get_repos(path)
193 193
194 194
195 195 def _get_dirpaths(p):
196 196 try:
197 197 # OS-independent way of checking whether we have at least read-only
198 198 # access or not.
199 199 dirpaths = os.listdir(p)
200 200 except OSError:
201 201 log.warning('ignoring repo path without read access: %s', p)
202 202 return []
203 203
204 204 # os.listdir has a tweak: if a unicode path is passed into it, then it tries to
205 205 # decode paths and suddenly returns unicode objects itself. The items it
206 206 # cannot decode are returned as strings and cause issues.
207 207 #
208 208 # Those paths are ignored here until a solid solution for path handling has
209 209 # been built.
210 210 expected_type = type(p)
211 211
212 212 def _has_correct_type(item):
213 213 if type(item) is not expected_type:
214 214 log.error(
215 215 u"Ignoring path %s since it cannot be decoded into unicode.",
216 216 # Using "repr" to make sure that we see the byte value in case
217 217 # of support.
218 218 repr(item))
219 219 return False
220 220 return True
221 221
222 222 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
223 223
224 224 return dirpaths
225 225
226 226
227 227 def _is_dir_writable(path):
228 228 """
229 229 Probe if `path` is writable.
230 230
231 231 Due to trouble on Cygwin / Windows, this is actually probing if it is
232 232 possible to create a file inside of `path`, stat does not produce reliable
233 233 results in this case.
234 234 """
235 235 try:
236 236 with tempfile.TemporaryFile(dir=path):
237 237 pass
238 238 except OSError:
239 239 return False
240 240 return True
241 241
242 242
243 243 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None):
244 244 """
245 245 Returns True if the given path is a valid repository, False otherwise.
246 246 If the expect_scm param is given, also compare whether the detected scm
247 247 matches the expected one. If explicit_scm is given, don't try to
248 248 detect the scm; just use the given backend to check if the repo is valid
249 249
250 250 :param repo_name:
251 251 :param base_path:
252 252 :param expect_scm:
253 253 :param explicit_scm:
254 254
255 255 :return True: if given path is a valid repository
256 256 """
257 257 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
258 258 log.debug('Checking if `%s` is a valid path for repository. '
259 259 'Explicit type: %s', repo_name, explicit_scm)
260 260
261 261 try:
262 262 if explicit_scm:
263 detected_scms = [get_scm_backend(explicit_scm)]
263 detected_scms = [get_scm_backend(explicit_scm)(full_path).alias]
264 264 else:
265 265 detected_scms = get_scm(full_path)
266 266
267 267 if expect_scm:
268 268 return detected_scms[0] == expect_scm
269 269 log.debug('path: %s is a vcs object:%s', full_path, detected_scms)
270 270 return True
271 271 except VCSError:
272 272 log.debug('path: %s is not a valid repo !', full_path)
273 273 return False
274 274
275 275
276 276 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
277 277 """
278 278 Returns True if given path is a repository group, False otherwise
279 279
280 280 :param repo_name:
281 281 :param base_path:
282 282 """
283 283 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
284 284 log.debug('Checking if `%s` is a valid path for repository group',
285 285 repo_group_name)
286 286
287 287 # check if it's not a repo
288 288 if is_valid_repo(repo_group_name, base_path):
289 289 log.debug('Repo called %s exist, it is not a valid '
290 290 'repo group' % repo_group_name)
291 291 return False
292 292
293 293 try:
294 294 # we need to check bare git repos at higher level
295 295 # since we might match branches/hooks/info/objects or possibly
296 296 # other things inside a bare git repo
297 297 scm_ = get_scm(os.path.dirname(full_path))
298 298 log.debug('path: %s is a vcs object:%s, not valid '
299 299 'repo group' % (full_path, scm_))
300 300 return False
301 301 except VCSError:
302 302 pass
303 303
304 304 # check if it's a valid path
305 305 if skip_path_check or os.path.isdir(full_path):
306 306 log.debug('path: %s is a valid repo group !', full_path)
307 307 return True
308 308
309 309 log.debug('path: %s is not a valid repo group !', full_path)
310 310 return False
311 311
312 312
313 313 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
314 314 while True:
315 315 ok = raw_input(prompt)
316 316 if ok.lower() in ('y', 'ye', 'yes'):
317 317 return True
318 318 if ok.lower() in ('n', 'no', 'nop', 'nope'):
319 319 return False
320 320 retries = retries - 1
321 321 if retries < 0:
322 322 raise IOError
323 323 print(complaint)
324 324
325 325 # propagated from mercurial documentation
326 326 ui_sections = [
327 327 'alias', 'auth',
328 328 'decode/encode', 'defaults',
329 329 'diff', 'email',
330 330 'extensions', 'format',
331 331 'merge-patterns', 'merge-tools',
332 332 'hooks', 'http_proxy',
333 333 'smtp', 'patch',
334 334 'paths', 'profiling',
335 335 'server', 'trusted',
336 336 'ui', 'web', ]
337 337
338 338
339 339 def config_data_from_db(clear_session=True, repo=None):
340 340 """
341 341 Read the configuration data from the database and return configuration
342 342 tuples.
343 343 """
344 344 from rhodecode.model.settings import VcsSettingsModel
345 345
346 346 config = []
347 347
348 348 sa = meta.Session()
349 349 settings_model = VcsSettingsModel(repo=repo, sa=sa)
350 350
351 351 ui_settings = settings_model.get_ui_settings()
352 352
353 353 for setting in ui_settings:
354 354 if setting.active:
355 355 log.debug(
356 356 'settings ui from db: [%s] %s=%s',
357 357 setting.section, setting.key, setting.value)
358 358 config.append((
359 359 safe_str(setting.section), safe_str(setting.key),
360 360 safe_str(setting.value)))
361 361 if setting.key == 'push_ssl':
362 362 # force set push_ssl requirement to False, rhodecode
363 363 # handles that
364 364 config.append((
365 365 safe_str(setting.section), safe_str(setting.key), False))
366 366 if clear_session:
367 367 meta.Session.remove()
368 368
369 369 # TODO: mikhail: probably it makes no sense to re-read hooks information.
370 370 # It's already there and activated/deactivated
371 371 skip_entries = []
372 372 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
373 373 if 'pull' not in enabled_hook_classes:
374 374 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
375 375 if 'push' not in enabled_hook_classes:
376 376 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
377 377 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
378 378 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
379 379
380 380 config = [entry for entry in config if entry[:2] not in skip_entries]
381 381
382 382 return config
383 383
384 384
385 385 def make_db_config(clear_session=True, repo=None):
386 386 """
387 387 Create a :class:`Config` instance based on the values in the database.
388 388 """
389 389 config = Config()
390 390 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
391 391 for section, option, value in config_data:
392 392 config.set(section, option, value)
393 393 return config
394 394
395 395
396 396 def get_enabled_hook_classes(ui_settings):
397 397 """
398 398 Return the enabled hook classes.
399 399
400 400 :param ui_settings: List of ui_settings as returned
401 401 by :meth:`VcsSettingsModel.get_ui_settings`
402 402
403 403 :return: a list with the enabled hook classes. The order is not guaranteed.
404 404 :rtype: list
405 405 """
406 406 enabled_hooks = []
407 407 active_hook_keys = [
408 408 key for section, key, value, active in ui_settings
409 409 if section == 'hooks' and active]
410 410
411 411 hook_names = {
412 412 RhodeCodeUi.HOOK_PUSH: 'push',
413 413 RhodeCodeUi.HOOK_PULL: 'pull',
414 414 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
415 415 }
416 416
417 417 for key in active_hook_keys:
418 418 hook = hook_names.get(key)
419 419 if hook:
420 420 enabled_hooks.append(hook)
421 421
422 422 return enabled_hooks
423 423
424 424
425 425 def set_rhodecode_config(config):
426 426 """
427 427 Updates pyramid config with new settings from database
428 428
429 429 :param config:
430 430 """
431 431 from rhodecode.model.settings import SettingsModel
432 432 app_settings = SettingsModel().get_all_settings()
433 433
434 434 for k, v in app_settings.items():
435 435 config[k] = v
436 436
437 437
438 438 def get_rhodecode_realm():
439 439 """
440 440 Return the rhodecode realm from database.
441 441 """
442 442 from rhodecode.model.settings import SettingsModel
443 443 realm = SettingsModel().get_setting_by_name('realm')
444 444 return safe_str(realm.app_settings_value)
445 445
446 446
447 447 def get_rhodecode_base_path():
448 448 """
449 449 Returns the base path. The base path is the filesystem path which points
450 450 to the repository store.
451 451 """
452 452 from rhodecode.model.settings import SettingsModel
453 453 paths_ui = SettingsModel().get_ui_by_section_and_key('paths', '/')
454 454 return safe_str(paths_ui.ui_value)
455 455
456 456
457 457 def map_groups(path):
458 458 """
459 459 Given a full path to a repository, create all nested groups that this
460 460 repo is inside. This function creates parent-child relationships between
461 461 groups and creates default perms for all new groups.
462 462
463 463 :param path: full path to repository
464 464 """
465 465 from rhodecode.model.repo_group import RepoGroupModel
466 466 sa = meta.Session()
467 467 groups = path.split(Repository.NAME_SEP)
468 468 parent = None
469 469 group = None
470 470
471 471 # last element is repo in nested groups structure
472 472 groups = groups[:-1]
473 473 rgm = RepoGroupModel(sa)
474 474 owner = User.get_first_super_admin()
475 475 for lvl, group_name in enumerate(groups):
476 476 group_name = '/'.join(groups[:lvl] + [group_name])
477 477 group = RepoGroup.get_by_group_name(group_name)
478 478 desc = '%s group' % group_name
479 479
480 480 # skip folders that are now removed repos
481 481 if REMOVED_REPO_PAT.match(group_name):
482 482 break
483 483
484 484 if group is None:
485 485 log.debug('creating group level: %s group_name: %s',
486 486 lvl, group_name)
487 487 group = RepoGroup(group_name, parent)
488 488 group.group_description = desc
489 489 group.user = owner
490 490 sa.add(group)
491 491 perm_obj = rgm._create_default_perms(group)
492 492 sa.add(perm_obj)
493 493 sa.flush()
494 494
495 495 parent = group
496 496 return group
497 497
498 498
499 499 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
500 500 """
501 501 Maps all repos given in initial_repo_list; non-existing repositories
502 502 are created. If remove_obsolete is True it also checks for db entries
503 503 that are not in initial_repo_list and removes them.
504 504
505 505 :param initial_repo_list: list of repositories found by scanning methods
506 506 :param remove_obsolete: check for obsolete entries in database
507 507 """
508 508 from rhodecode.model.repo import RepoModel
509 509 from rhodecode.model.scm import ScmModel
510 510 from rhodecode.model.repo_group import RepoGroupModel
511 511 from rhodecode.model.settings import SettingsModel
512 512
513 513 sa = meta.Session()
514 514 repo_model = RepoModel()
515 515 user = User.get_first_super_admin()
516 516 added = []
517 517
518 518 # creation defaults
519 519 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
520 520 enable_statistics = defs.get('repo_enable_statistics')
521 521 enable_locking = defs.get('repo_enable_locking')
522 522 enable_downloads = defs.get('repo_enable_downloads')
523 523 private = defs.get('repo_private')
524 524
525 525 for name, repo in initial_repo_list.items():
526 526 group = map_groups(name)
527 527 unicode_name = safe_unicode(name)
528 528 db_repo = repo_model.get_by_repo_name(unicode_name)
529 529 # found a repo that is on the filesystem but not in the RhodeCode database
530 530 if not db_repo:
531 531 log.info('repository %s not found, creating now', name)
532 532 added.append(name)
533 533 desc = (repo.description
534 534 if repo.description != 'unknown'
535 535 else '%s repository' % name)
536 536
537 537 db_repo = repo_model._create_repo(
538 538 repo_name=name,
539 539 repo_type=repo.alias,
540 540 description=desc,
541 541 repo_group=getattr(group, 'group_id', None),
542 542 owner=user,
543 543 enable_locking=enable_locking,
544 544 enable_downloads=enable_downloads,
545 545 enable_statistics=enable_statistics,
546 546 private=private,
547 547 state=Repository.STATE_CREATED
548 548 )
549 549 sa.commit()
550 550 # we added that repo just now; make sure we update the server info
551 551 if db_repo.repo_type == 'git':
552 552 git_repo = db_repo.scm_instance()
553 553 # update repository server-info
554 554 log.debug('Running update server info')
555 555 git_repo._update_server_info()
556 556
557 557 db_repo.update_commit_cache()
558 558
559 559 config = db_repo._config
560 560 config.set('extensions', 'largefiles', '')
561 561 ScmModel().install_hooks(
562 562 db_repo.scm_instance(config=config),
563 563 repo_type=db_repo.repo_type)
564 564
565 565 removed = []
566 566 if remove_obsolete:
567 567 # remove from database those repositories that are not in the filesystem
568 568 for repo in sa.query(Repository).all():
569 569 if repo.repo_name not in initial_repo_list.keys():
570 570 log.debug("Removing non-existing repository found in db `%s`",
571 571 repo.repo_name)
572 572 try:
573 573 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
574 574 sa.commit()
575 575 removed.append(repo.repo_name)
576 576 except Exception:
577 577 # don't hold further removals on error
578 578 log.error(traceback.format_exc())
579 579 sa.rollback()
580 580
581 581 def splitter(full_repo_name):
582 582 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
583 583 gr_name = None
584 584 if len(_parts) == 2:
585 585 gr_name = _parts[0]
586 586 return gr_name
587 587
588 588 initial_repo_group_list = [splitter(x) for x in
589 589 initial_repo_list.keys() if splitter(x)]
590 590
591 591 # remove from database those repository groups that are not in the
592 592 # filesystem; due to parent-child relationships we need to delete them
593 593 # in a specific order, most nested first
594 594 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
595 595 nested_sort = lambda gr: len(gr.split('/'))
596 596 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
597 597 if group_name not in initial_repo_group_list:
598 598 repo_group = RepoGroup.get_by_group_name(group_name)
599 599 if (repo_group.children.all() or
600 600 not RepoGroupModel().check_exist_filesystem(
601 601 group_name=group_name, exc_on_failure=False)):
602 602 continue
603 603
604 604 log.info(
605 605 'Removing non-existing repository group found in db `%s`',
606 606 group_name)
607 607 try:
608 608 RepoGroupModel(sa).delete(group_name, fs_remove=False)
609 609 sa.commit()
610 610 removed.append(group_name)
611 611 except Exception:
612 612 # don't hold further removals on error
613 613 log.exception(
614 614 'Unable to remove repository group `%s`',
615 615 group_name)
616 616 sa.rollback()
617 617 raise
618 618
619 619 return added, removed
620 620
621 621
622 622 def get_default_cache_settings(settings):
623 623 cache_settings = {}
624 624 for key in settings.keys():
625 625 for prefix in ['beaker.cache.', 'cache.']:
626 626 if key.startswith(prefix):
627 627 name = key.split(prefix)[1].strip()
628 628 cache_settings[name] = settings[key].strip()
629 629 return cache_settings
630 630
631 631
632 632 # set cache regions for beaker so celery can utilise it
633 633 def add_cache(settings):
634 634 from rhodecode.lib import caches
635 635 cache_settings = {'regions': None}
636 636 # main cache settings used as default ...
637 637 cache_settings.update(get_default_cache_settings(settings))
638 638
639 639 if cache_settings['regions']:
640 640 for region in cache_settings['regions'].split(','):
641 641 region = region.strip()
642 642 region_settings = {}
643 643 for key, value in cache_settings.items():
644 644 if key.startswith(region):
645 645 region_settings[key.split('.')[1]] = value
646 646
647 647 caches.configure_cache_region(
648 648 region, region_settings, cache_settings)
649 649
650 650
651 651 def load_rcextensions(root_path):
652 652 import rhodecode
653 653 from rhodecode.config import conf
654 654
655 655 path = os.path.join(root_path, 'rcextensions', '__init__.py')
656 656 if os.path.isfile(path):
657 657 rcext = create_module('rc', path)
658 658 EXT = rhodecode.EXTENSIONS = rcext
659 659 log.debug('Found rcextensions now loading %s...', rcext)
660 660
661 661 # Additional mappings that are not present in the pygments lexers
662 662 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
663 663
664 664 # auto-check if the module is not missing any data, set to default if it is;
665 665 # this will help auto-update new features of the rcext module
666 666 #from rhodecode.config import rcextensions
667 667 #for k in dir(rcextensions):
668 668 # if not k.startswith('_') and not hasattr(EXT, k):
669 669 # setattr(EXT, k, getattr(rcextensions, k))
670 670
671 671
672 672 def get_custom_lexer(extension):
673 673 """
674 674 returns a custom lexer if it is defined in rcextensions module, or None
675 675 if there's no custom lexer defined
676 676 """
677 677 import rhodecode
678 678 from pygments import lexers
679 679
680 680 # custom override made by RhodeCode
681 681 if extension in ['mako']:
682 682 return lexers.get_lexer_by_name('html+mako')
683 683
684 684 # check if we defined this extension as another lexer
685 685 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
686 686 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
687 687 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
688 688 return lexers.get_lexer_by_name(_lexer_name)
689 689
690 690
691 691 #==============================================================================
692 692 # TEST FUNCTIONS AND CREATORS
693 693 #==============================================================================
694 694 def create_test_index(repo_location, config):
695 695 """
696 696 Makes default test index.
697 697 """
698 698 import rc_testdata
699 699
700 700 rc_testdata.extract_search_index(
701 701 'vcs_search_index', os.path.dirname(config['search.location']))
702 702
703 703
704 704 def create_test_directory(test_path):
705 705 """
706 706 Create test directory if it doesn't exist.
707 707 """
708 708 if not os.path.isdir(test_path):
709 709 log.debug('Creating testdir %s', test_path)
710 710 os.makedirs(test_path)
711 711
712 712
713 713 def create_test_database(test_path, config):
714 714 """
715 715 Makes a fresh database.
716 716 """
717 717 from rhodecode.lib.db_manage import DbManage
718 718
719 719 # PART ONE create db
720 720 dbconf = config['sqlalchemy.db1.url']
721 721 log.debug('making test db %s', dbconf)
722 722
723 723 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
724 724 tests=True, cli_args={'force_ask': True})
725 725 dbmanage.create_tables(override=True)
726 726 dbmanage.set_db_version()
727 727 # for tests dynamically set new root paths based on generated content
728 728 dbmanage.create_settings(dbmanage.config_prompt(test_path))
729 729 dbmanage.create_default_user()
730 730 dbmanage.create_test_admin_and_users()
731 731 dbmanage.create_permissions()
732 732 dbmanage.populate_default_permissions()
733 733 Session().commit()
734 734
735 735
736 736 def create_test_repositories(test_path, config):
737 737 """
738 738 Creates test repositories in the temporary directory. Repositories are
739 739 extracted from archives within the rc_testdata package.
740 740 """
741 741 import rc_testdata
742 742 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
743 743
744 744 log.debug('making test vcs repositories')
745 745
746 746 idx_path = config['search.location']
747 747 data_path = config['cache_dir']
748 748
749 749 # clean index and data
750 750 if idx_path and os.path.exists(idx_path):
751 751 log.debug('remove %s', idx_path)
752 752 shutil.rmtree(idx_path)
753 753
754 754 if data_path and os.path.exists(data_path):
755 755 log.debug('remove %s', data_path)
756 756 shutil.rmtree(data_path)
757 757
758 758 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
759 759 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
760 760
761 761 # Note: Subversion is in the process of being integrated with the system,
762 762 # until we have a properly packed version of the test svn repository, this
763 763 # tries to copy over the repo from a package "rc_testdata"
764 764 svn_repo_path = rc_testdata.get_svn_repo_archive()
765 765 with tarfile.open(svn_repo_path) as tar:
766 766 tar.extractall(jn(test_path, SVN_REPO))
767 767
768 768
769 769 def password_changed(auth_user, session):
770 770 # Never report password change in case of default user or anonymous user.
771 771 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
772 772 return False
773 773
774 774 password_hash = md5(auth_user.password) if auth_user.password else None
775 775 rhodecode_user = session.get('rhodecode_user', {})
776 776 session_password_hash = rhodecode_user.get('password', '')
777 777 return password_hash != session_password_hash
778 778
779 779
780 780 def read_opensource_licenses():
781 781 global _license_cache
782 782
783 783 if not _license_cache:
784 784 licenses = pkg_resources.resource_string(
785 785 'rhodecode', 'config/licenses.json')
786 786 _license_cache = json.loads(licenses)
787 787
788 788 return _license_cache
789 789
790 790
791 791 def generate_platform_uuid():
792 792 """
793 793 Generates a platform UUID based on its name
794 794 """
795 795 import platform
796 796
797 797 try:
798 798 uuid_list = [platform.platform()]
799 799 return hashlib.sha256(':'.join(uuid_list)).hexdigest()
800 800 except Exception as e:
801 801 log.error('Failed to generate host uuid: %s' % e)
802 802 return 'UNDEFINED'
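
The one-line change to is_valid_repo() above is the core of this commit: with explicit_scm given, the backend class returned by get_scm_backend() is now instantiated against full_path and its alias is compared with expect_scm, so opening a damaged or missing repository raises VCSError inside the try block and the function simply answers False. A small usage sketch; the store path and repository name are assumptions for illustration, not values from this changeset:

    # Sketch of the new explicit_scm / expect_scm behaviour of is_valid_repo().
    from rhodecode.lib.utils import is_valid_repo

    base_path = '/srv/rhodecode/repos'   # assumed repository store location
    repo_name = 'group/damaged-repo'     # assumed repo whose on-disk data is broken

    # Instantiating the Mercurial backend on the full path is expected to raise
    # VCSError for the damaged repo; is_valid_repo() catches that and returns
    # False, so callers can report 404 instead of crashing later with a 500.
    if not is_valid_repo(repo_name, base_path, explicit_scm='hg', expect_scm='hg'):
        print('repository is missing or damaged -> treat as not found')
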
@@ -1,655 +1,678 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Test suite for push/pull operations on specially modified INI files
23 23
24 24 .. important::
25 25
26 26 You must have git >= 1.8.5 for tests to work fine. With 68b939b git started
27 27 to redirect things to stderr instead of stdout.
28 28 """
29 29
30 30
31 31 import os
32 32 import time
33 33
34 34 import pytest
35 35
36 36 from rhodecode.lib.vcs.backends.git.repository import GitRepository
37 37 from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository
38 38 from rhodecode.lib.vcs.nodes import FileNode
39 39 from rhodecode.model.auth_token import AuthTokenModel
40 40 from rhodecode.model.db import Repository, UserIpMap, CacheKey
41 41 from rhodecode.model.meta import Session
42 from rhodecode.model.repo import RepoModel
42 43 from rhodecode.model.user import UserModel
43 44 from rhodecode.tests import (GIT_REPO, HG_REPO, TEST_USER_ADMIN_LOGIN)
44 45
45 46 from rhodecode.tests.other.vcs_operations import (
46 47 Command, _check_proper_clone, _check_proper_git_push,
47 48 _check_proper_hg_push, _add_files_and_push,
48 49 HG_REPO_WITH_GROUP, GIT_REPO_WITH_GROUP)
49 50
50 51
51 52 @pytest.mark.usefixtures("disable_locking", "disable_anonymous_user")
52 53 class TestVCSOperations(object):
53 54
54 55 def test_clone_hg_repo_by_admin(self, rc_web_server, tmpdir):
55 56 clone_url = rc_web_server.repo_clone_url(HG_REPO)
56 57 stdout, stderr = Command('/tmp').execute(
57 58 'hg clone', clone_url, tmpdir.strpath)
58 59 _check_proper_clone(stdout, stderr, 'hg')
59 60
60 61 def test_clone_git_repo_by_admin(self, rc_web_server, tmpdir):
61 62 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
62 63 cmd = Command('/tmp')
63 64 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
64 65 _check_proper_clone(stdout, stderr, 'git')
65 66 cmd.assert_returncode_success()
66 67
67 68 def test_clone_git_repo_by_admin_with_git_suffix(self, rc_web_server, tmpdir):
68 69 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
69 70 cmd = Command('/tmp')
70 71 stdout, stderr = cmd.execute('git clone', clone_url+".git", tmpdir.strpath)
71 72 _check_proper_clone(stdout, stderr, 'git')
72 73 cmd.assert_returncode_success()
73 74
74 75 def test_clone_hg_repo_by_id_by_admin(self, rc_web_server, tmpdir):
75 76 repo_id = Repository.get_by_repo_name(HG_REPO).repo_id
76 77 clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
77 78 stdout, stderr = Command('/tmp').execute(
78 79 'hg clone', clone_url, tmpdir.strpath)
79 80 _check_proper_clone(stdout, stderr, 'hg')
80 81
81 82 def test_clone_git_repo_by_id_by_admin(self, rc_web_server, tmpdir):
82 83 repo_id = Repository.get_by_repo_name(GIT_REPO).repo_id
83 84 clone_url = rc_web_server.repo_clone_url('_%s' % repo_id)
84 85 cmd = Command('/tmp')
85 86 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
86 87 _check_proper_clone(stdout, stderr, 'git')
87 88 cmd.assert_returncode_success()
88 89
89 90 def test_clone_hg_repo_with_group_by_admin(self, rc_web_server, tmpdir):
90 91 clone_url = rc_web_server.repo_clone_url(HG_REPO_WITH_GROUP)
91 92 stdout, stderr = Command('/tmp').execute(
92 93 'hg clone', clone_url, tmpdir.strpath)
93 94 _check_proper_clone(stdout, stderr, 'hg')
94 95
95 96 def test_clone_git_repo_with_group_by_admin(self, rc_web_server, tmpdir):
96 97 clone_url = rc_web_server.repo_clone_url(GIT_REPO_WITH_GROUP)
97 98 cmd = Command('/tmp')
98 99 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
99 100 _check_proper_clone(stdout, stderr, 'git')
100 101 cmd.assert_returncode_success()
101 102
102 103 def test_clone_git_repo_shallow_by_admin(self, rc_web_server, tmpdir):
103 104 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
104 105 cmd = Command('/tmp')
105 106 stdout, stderr = cmd.execute(
106 107 'git clone --depth=1', clone_url, tmpdir.strpath)
107 108
108 109 assert '' == stdout
109 110 assert 'Cloning into' in stderr
110 111 cmd.assert_returncode_success()
111 112
112 113 def test_clone_wrong_credentials_hg(self, rc_web_server, tmpdir):
113 114 clone_url = rc_web_server.repo_clone_url(HG_REPO, passwd='bad!')
114 115 stdout, stderr = Command('/tmp').execute(
115 116 'hg clone', clone_url, tmpdir.strpath)
116 117 assert 'abort: authorization failed' in stderr
117 118
118 119 def test_clone_wrong_credentials_git(self, rc_web_server, tmpdir):
119 120 clone_url = rc_web_server.repo_clone_url(GIT_REPO, passwd='bad!')
120 121 stdout, stderr = Command('/tmp').execute(
121 122 'git clone', clone_url, tmpdir.strpath)
122 123 assert 'fatal: Authentication failed' in stderr
123 124
124 125 def test_clone_git_dir_as_hg(self, rc_web_server, tmpdir):
125 126 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
126 127 stdout, stderr = Command('/tmp').execute(
127 128 'hg clone', clone_url, tmpdir.strpath)
128 129 assert 'HTTP Error 404: Not Found' in stderr
129 130
130 131 def test_clone_hg_repo_as_git(self, rc_web_server, tmpdir):
131 132 clone_url = rc_web_server.repo_clone_url(HG_REPO)
132 133 stdout, stderr = Command('/tmp').execute(
133 134 'git clone', clone_url, tmpdir.strpath)
134 135 assert 'not found' in stderr
135 136
136 137 def test_clone_non_existing_path_hg(self, rc_web_server, tmpdir):
137 138 clone_url = rc_web_server.repo_clone_url('trololo')
138 139 stdout, stderr = Command('/tmp').execute(
139 140 'hg clone', clone_url, tmpdir.strpath)
140 141 assert 'HTTP Error 404: Not Found' in stderr
141 142
142 143 def test_clone_non_existing_path_git(self, rc_web_server, tmpdir):
143 144 clone_url = rc_web_server.repo_clone_url('trololo')
144 145 stdout, stderr = Command('/tmp').execute('git clone', clone_url)
145 146 assert 'not found' in stderr
146 147
147 148 def test_clone_existing_path_hg_not_in_database(
148 149 self, rc_web_server, tmpdir, fs_repo_only):
149 150
150 151 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
151 152 clone_url = rc_web_server.repo_clone_url(db_name)
152 153 stdout, stderr = Command('/tmp').execute(
153 154 'hg clone', clone_url, tmpdir.strpath)
154 155 assert 'HTTP Error 404: Not Found' in stderr
155 156
156 157 def test_clone_existing_path_git_not_in_database(
157 158 self, rc_web_server, tmpdir, fs_repo_only):
158 159 db_name = fs_repo_only('not-in-db-git', repo_type='git')
159 160 clone_url = rc_web_server.repo_clone_url(db_name)
160 161 stdout, stderr = Command('/tmp').execute(
161 162 'git clone', clone_url, tmpdir.strpath)
162 163 assert 'not found' in stderr
163 164
164 165 def test_clone_existing_path_hg_not_in_database_different_scm(
165 166 self, rc_web_server, tmpdir, fs_repo_only):
166 167 db_name = fs_repo_only('not-in-db-git', repo_type='git')
167 168 clone_url = rc_web_server.repo_clone_url(db_name)
168 169 stdout, stderr = Command('/tmp').execute(
169 170 'hg clone', clone_url, tmpdir.strpath)
170 171 assert 'HTTP Error 404: Not Found' in stderr
171 172
172 173 def test_clone_existing_path_git_not_in_database_different_scm(
173 174 self, rc_web_server, tmpdir, fs_repo_only):
174 175 db_name = fs_repo_only('not-in-db-hg', repo_type='hg')
175 176 clone_url = rc_web_server.repo_clone_url(db_name)
176 177 stdout, stderr = Command('/tmp').execute(
177 178 'git clone', clone_url, tmpdir.strpath)
178 179 assert 'not found' in stderr
179 180
181 def test_clone_non_existing_store_path_hg(self, rc_web_server, tmpdir, user_util):
182 repo = user_util.create_repo()
183 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
184
 185         # Damage the repo by removing its folder
186 RepoModel()._delete_filesystem_repo(repo)
187
188 stdout, stderr = Command('/tmp').execute(
189 'hg clone', clone_url, tmpdir.strpath)
190 assert 'HTTP Error 404: Not Found' in stderr
191
192 def test_clone_non_existing_store_path_git(self, rc_web_server, tmpdir, user_util):
193 repo = user_util.create_repo(repo_type='git')
194 clone_url = rc_web_server.repo_clone_url(repo.repo_name)
195
 196         # Damage the repo by removing its folder
197 RepoModel()._delete_filesystem_repo(repo)
198
199 stdout, stderr = Command('/tmp').execute(
200 'git clone', clone_url, tmpdir.strpath)
201 assert 'not found' in stderr
202
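The two tests above cover the case this commit addresses: a repository record that still exists in the database while its directory has been removed from the store. A minimal sketch of the kind of guard simplevcs needs for this, built only from helpers it already imports (is_valid_repo, HTTPNotFound); the exact hook point inside the middleware is an assumption here, not a quote of the patch:

from pyramid.httpexceptions import HTTPNotFound

from rhodecode.lib.utils import is_valid_repo


def ensure_repo_on_filesystem(repo_name, base_path):
    # The DB row alone is not enough; if the backing directory is gone
    # from the store, answer 404 instead of letting the VCS backend
    # raise and surface as a 500.
    if not is_valid_repo(repo_name, base_path):
        raise HTTPNotFound()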
180 203 def test_push_new_file_hg(self, rc_web_server, tmpdir):
181 204 clone_url = rc_web_server.repo_clone_url(HG_REPO)
182 205 stdout, stderr = Command('/tmp').execute(
183 206 'hg clone', clone_url, tmpdir.strpath)
184 207
185 208 stdout, stderr = _add_files_and_push(
186 209 'hg', tmpdir.strpath, clone_url=clone_url)
187 210
188 211 assert 'pushing to' in stdout
189 212 assert 'size summary' in stdout
190 213
191 214 def test_push_new_file_git(self, rc_web_server, tmpdir):
192 215 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
193 216 stdout, stderr = Command('/tmp').execute(
194 217 'git clone', clone_url, tmpdir.strpath)
195 218
196 219 # commit some stuff into this repo
197 220 stdout, stderr = _add_files_and_push(
198 221 'git', tmpdir.strpath, clone_url=clone_url)
199 222
200 223 _check_proper_git_push(stdout, stderr)
201 224
202 225 def test_push_invalidates_cache_hg(self, rc_web_server, tmpdir):
203 226 key = CacheKey.query().filter(CacheKey.cache_key == HG_REPO).scalar()
204 227 if not key:
205 228 key = CacheKey(HG_REPO, HG_REPO)
206 229
207 230 key.cache_active = True
208 231 Session().add(key)
209 232 Session().commit()
210 233
211 234 clone_url = rc_web_server.repo_clone_url(HG_REPO)
212 235 stdout, stderr = Command('/tmp').execute(
213 236 'hg clone', clone_url, tmpdir.strpath)
214 237
215 238 stdout, stderr = _add_files_and_push(
216 239 'hg', tmpdir.strpath, clone_url=clone_url, files_no=1)
217 240
218 241 key = CacheKey.query().filter(CacheKey.cache_key == HG_REPO).one()
219 242 assert key.cache_active is False
220 243
221 244 def test_push_invalidates_cache_git(self, rc_web_server, tmpdir):
222 245 key = CacheKey.query().filter(CacheKey.cache_key == GIT_REPO).scalar()
223 246 if not key:
224 247 key = CacheKey(GIT_REPO, GIT_REPO)
225 248
226 249 key.cache_active = True
227 250 Session().add(key)
228 251 Session().commit()
229 252
230 253 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
231 254 stdout, stderr = Command('/tmp').execute(
232 255 'git clone', clone_url, tmpdir.strpath)
233 256
234 257 # commit some stuff into this repo
235 258 stdout, stderr = _add_files_and_push(
236 259 'git', tmpdir.strpath, clone_url=clone_url, files_no=1)
237 260 _check_proper_git_push(stdout, stderr)
238 261
239 262 key = CacheKey.query().filter(CacheKey.cache_key == GIT_REPO).one()
240 263
241 264 assert key.cache_active is False
242 265
243 266 def test_push_wrong_credentials_hg(self, rc_web_server, tmpdir):
244 267 clone_url = rc_web_server.repo_clone_url(HG_REPO)
245 268 stdout, stderr = Command('/tmp').execute(
246 269 'hg clone', clone_url, tmpdir.strpath)
247 270
248 271 push_url = rc_web_server.repo_clone_url(
249 272 HG_REPO, user='bad', passwd='name')
250 273 stdout, stderr = _add_files_and_push(
251 274 'hg', tmpdir.strpath, clone_url=push_url)
252 275
253 276 assert 'abort: authorization failed' in stderr
254 277
255 278 def test_push_wrong_credentials_git(self, rc_web_server, tmpdir):
256 279 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
257 280 stdout, stderr = Command('/tmp').execute(
258 281 'git clone', clone_url, tmpdir.strpath)
259 282
260 283 push_url = rc_web_server.repo_clone_url(
261 284 GIT_REPO, user='bad', passwd='name')
262 285 stdout, stderr = _add_files_and_push(
263 286 'git', tmpdir.strpath, clone_url=push_url)
264 287
265 288 assert 'fatal: Authentication failed' in stderr
266 289
267 290 def test_push_back_to_wrong_url_hg(self, rc_web_server, tmpdir):
268 291 clone_url = rc_web_server.repo_clone_url(HG_REPO)
269 292 stdout, stderr = Command('/tmp').execute(
270 293 'hg clone', clone_url, tmpdir.strpath)
271 294
272 295 stdout, stderr = _add_files_and_push(
273 296 'hg', tmpdir.strpath,
274 297 clone_url=rc_web_server.repo_clone_url('not-existing'))
275 298
276 299 assert 'HTTP Error 404: Not Found' in stderr
277 300
278 301 def test_push_back_to_wrong_url_git(self, rc_web_server, tmpdir):
279 302 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
280 303 stdout, stderr = Command('/tmp').execute(
281 304 'git clone', clone_url, tmpdir.strpath)
282 305
283 306 stdout, stderr = _add_files_and_push(
284 307 'git', tmpdir.strpath,
285 308 clone_url=rc_web_server.repo_clone_url('not-existing'))
286 309
287 310 assert 'not found' in stderr
288 311
289 312 def test_ip_restriction_hg(self, rc_web_server, tmpdir):
290 313 user_model = UserModel()
291 314 try:
292 315 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
293 316 Session().commit()
294 317 time.sleep(2)
295 318 clone_url = rc_web_server.repo_clone_url(HG_REPO)
296 319 stdout, stderr = Command('/tmp').execute(
297 320 'hg clone', clone_url, tmpdir.strpath)
298 321 assert 'abort: HTTP Error 403: Forbidden' in stderr
299 322 finally:
300 323 # release IP restrictions
301 324 for ip in UserIpMap.getAll():
302 325 UserIpMap.delete(ip.ip_id)
303 326 Session().commit()
304 327
305 328 time.sleep(2)
306 329
307 330 stdout, stderr = Command('/tmp').execute(
308 331 'hg clone', clone_url, tmpdir.strpath)
309 332 _check_proper_clone(stdout, stderr, 'hg')
310 333
311 334 def test_ip_restriction_git(self, rc_web_server, tmpdir):
312 335 user_model = UserModel()
313 336 try:
314 337 user_model.add_extra_ip(TEST_USER_ADMIN_LOGIN, '10.10.10.10/32')
315 338 Session().commit()
316 339 time.sleep(2)
317 340 clone_url = rc_web_server.repo_clone_url(GIT_REPO)
318 341 stdout, stderr = Command('/tmp').execute(
319 342 'git clone', clone_url, tmpdir.strpath)
320 343 msg = "The requested URL returned error: 403"
321 344 assert msg in stderr
322 345 finally:
323 346 # release IP restrictions
324 347 for ip in UserIpMap.getAll():
325 348 UserIpMap.delete(ip.ip_id)
326 349 Session().commit()
327 350
328 351 time.sleep(2)
329 352
330 353 cmd = Command('/tmp')
331 354 stdout, stderr = cmd.execute('git clone', clone_url, tmpdir.strpath)
332 355 cmd.assert_returncode_success()
333 356 _check_proper_clone(stdout, stderr, 'git')
334 357
335 358 def test_clone_by_auth_token(
336 359 self, rc_web_server, tmpdir, user_util, enable_auth_plugins):
337 360 enable_auth_plugins(['egg:rhodecode-enterprise-ce#token',
338 361 'egg:rhodecode-enterprise-ce#rhodecode'])
339 362
340 363 user = user_util.create_user()
341 364 token = user.auth_tokens[1]
342 365
343 366 clone_url = rc_web_server.repo_clone_url(
344 367 HG_REPO, user=user.username, passwd=token)
345 368
346 369 stdout, stderr = Command('/tmp').execute(
347 370 'hg clone', clone_url, tmpdir.strpath)
348 371 _check_proper_clone(stdout, stderr, 'hg')
349 372
350 373 def test_clone_by_auth_token_expired(
351 374 self, rc_web_server, tmpdir, user_util, enable_auth_plugins):
352 375 enable_auth_plugins(['egg:rhodecode-enterprise-ce#token',
353 376 'egg:rhodecode-enterprise-ce#rhodecode'])
354 377
355 378 user = user_util.create_user()
356 379 auth_token = AuthTokenModel().create(
357 380 user.user_id, 'test-token', -10, AuthTokenModel.cls.ROLE_VCS)
358 381 token = auth_token.api_key
359 382
360 383 clone_url = rc_web_server.repo_clone_url(
361 384 HG_REPO, user=user.username, passwd=token)
362 385
363 386 stdout, stderr = Command('/tmp').execute(
364 387 'hg clone', clone_url, tmpdir.strpath)
365 388 assert 'abort: authorization failed' in stderr
366 389
367 390 def test_clone_by_auth_token_bad_role(
368 391 self, rc_web_server, tmpdir, user_util, enable_auth_plugins):
369 392 enable_auth_plugins(['egg:rhodecode-enterprise-ce#token',
370 393 'egg:rhodecode-enterprise-ce#rhodecode'])
371 394
372 395 user = user_util.create_user()
373 396 auth_token = AuthTokenModel().create(
374 397 user.user_id, 'test-token', -1, AuthTokenModel.cls.ROLE_API)
375 398 token = auth_token.api_key
376 399
377 400 clone_url = rc_web_server.repo_clone_url(
378 401 HG_REPO, user=user.username, passwd=token)
379 402
380 403 stdout, stderr = Command('/tmp').execute(
381 404 'hg clone', clone_url, tmpdir.strpath)
382 405 assert 'abort: authorization failed' in stderr
383 406
384 407 def test_clone_by_auth_token_user_disabled(
385 408 self, rc_web_server, tmpdir, user_util, enable_auth_plugins):
386 409 enable_auth_plugins(['egg:rhodecode-enterprise-ce#token',
387 410 'egg:rhodecode-enterprise-ce#rhodecode'])
388 411 user = user_util.create_user()
389 412 user.active = False
390 413 Session().add(user)
391 414 Session().commit()
392 415 token = user.auth_tokens[1]
393 416
394 417 clone_url = rc_web_server.repo_clone_url(
395 418 HG_REPO, user=user.username, passwd=token)
396 419
397 420 stdout, stderr = Command('/tmp').execute(
398 421 'hg clone', clone_url, tmpdir.strpath)
399 422 assert 'abort: authorization failed' in stderr
400 423
401 424 def test_clone_by_auth_token_with_scope(
402 425 self, rc_web_server, tmpdir, user_util, enable_auth_plugins):
403 426 enable_auth_plugins(['egg:rhodecode-enterprise-ce#token',
404 427 'egg:rhodecode-enterprise-ce#rhodecode'])
405 428 user = user_util.create_user()
406 429 auth_token = AuthTokenModel().create(
407 430 user.user_id, 'test-token', -1, AuthTokenModel.cls.ROLE_VCS)
408 431 token = auth_token.api_key
409 432
410 433 # manually set scope
411 434 auth_token.repo = Repository.get_by_repo_name(HG_REPO)
412 435 Session().add(auth_token)
413 436 Session().commit()
414 437
415 438 clone_url = rc_web_server.repo_clone_url(
416 439 HG_REPO, user=user.username, passwd=token)
417 440
418 441 stdout, stderr = Command('/tmp').execute(
419 442 'hg clone', clone_url, tmpdir.strpath)
420 443 _check_proper_clone(stdout, stderr, 'hg')
421 444
422 445 def test_clone_by_auth_token_with_wrong_scope(
423 446 self, rc_web_server, tmpdir, user_util, enable_auth_plugins):
424 447 enable_auth_plugins(['egg:rhodecode-enterprise-ce#token',
425 448 'egg:rhodecode-enterprise-ce#rhodecode'])
426 449 user = user_util.create_user()
427 450 auth_token = AuthTokenModel().create(
428 451 user.user_id, 'test-token', -1, AuthTokenModel.cls.ROLE_VCS)
429 452 token = auth_token.api_key
430 453
431 454 # manually set scope
432 455 auth_token.repo = Repository.get_by_repo_name(GIT_REPO)
433 456 Session().add(auth_token)
434 457 Session().commit()
435 458
436 459 clone_url = rc_web_server.repo_clone_url(
437 460 HG_REPO, user=user.username, passwd=token)
438 461
439 462 stdout, stderr = Command('/tmp').execute(
440 463 'hg clone', clone_url, tmpdir.strpath)
441 464 assert 'abort: authorization failed' in stderr
442 465
443 466
444 467 def test_git_sets_default_branch_if_not_master(
445 468 backend_git, tmpdir, disable_locking, rc_web_server):
446 469 empty_repo = backend_git.create_repo()
447 470 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
448 471
449 472 cmd = Command(tmpdir.strpath)
450 473 cmd.execute('git clone', clone_url)
451 474
452 475 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
453 476 repo.in_memory_commit.add(FileNode('file', content=''))
454 477 repo.in_memory_commit.commit(
455 478 message='Commit on branch test',
456 479 author='Automatic test',
457 480 branch='test')
458 481
459 482 repo_cmd = Command(repo.path)
460 483 stdout, stderr = repo_cmd.execute('git push --verbose origin test')
461 484 _check_proper_git_push(
462 485 stdout, stderr, branch='test', should_set_default_branch=True)
463 486
464 487 stdout, stderr = cmd.execute(
465 488 'git clone', clone_url, empty_repo.repo_name + '-clone')
466 489 _check_proper_clone(stdout, stderr, 'git')
467 490
468 491 # Doing an explicit commit in order to get latest user logs on MySQL
469 492 Session().commit()
470 493
471 494
472 495 def test_git_fetches_from_remote_repository_with_annotated_tags(
473 496 backend_git, disable_locking, rc_web_server):
474 497 # Note: This is a test specific to the git backend. It checks the
475 498 # integration of fetching from a remote repository which contains
476 499 # annotated tags.
477 500
478 501 # Dulwich shows this specific behavior only when
479 502 # operating against a remote repository.
480 503 source_repo = backend_git['annotated-tag']
481 504 target_vcs_repo = backend_git.create_repo().scm_instance()
482 505 target_vcs_repo.fetch(rc_web_server.repo_clone_url(source_repo.repo_name))
483 506
484 507
485 508 def test_git_push_shows_pull_request_refs(backend_git, rc_web_server, tmpdir):
486 509 """
487 510 test if remote info about refs is visible
488 511 """
489 512 empty_repo = backend_git.create_repo()
490 513
491 514 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
492 515
493 516 cmd = Command(tmpdir.strpath)
494 517 cmd.execute('git clone', clone_url)
495 518
496 519 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
497 520 repo.in_memory_commit.add(FileNode('readme.md', content='## Hello'))
498 521 repo.in_memory_commit.commit(
499 522 message='Commit on branch Master',
500 523 author='Automatic test',
501 524 branch='master')
502 525
503 526 repo_cmd = Command(repo.path)
504 527 stdout, stderr = repo_cmd.execute('git push --verbose origin master')
505 528 _check_proper_git_push(stdout, stderr, branch='master')
506 529
507 530 ref = '{}/{}/pull-request/new?branch=master'.format(
508 531 rc_web_server.host_url(), empty_repo.repo_name)
509 532 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stderr
510 533 assert 'remote: RhodeCode: push completed' in stderr
511 534
512 535 # push on the same branch
513 536 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
514 537 repo.in_memory_commit.add(FileNode('setup.py', content='print\n'))
515 538 repo.in_memory_commit.commit(
516 539 message='Commit2 on branch Master',
517 540 author='Automatic test2',
518 541 branch='master')
519 542
520 543 repo_cmd = Command(repo.path)
521 544 stdout, stderr = repo_cmd.execute('git push --verbose origin master')
522 545 _check_proper_git_push(stdout, stderr, branch='master')
523 546
524 547 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stderr
525 548 assert 'remote: RhodeCode: push completed' in stderr
526 549
527 550 # new Branch
528 551 repo = GitRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
529 552 repo.in_memory_commit.add(FileNode('feature1.py', content='## Hello world'))
530 553 repo.in_memory_commit.commit(
531 554 message='Commit on branch feature',
532 555 author='Automatic test',
533 556 branch='feature')
534 557
535 558 repo_cmd = Command(repo.path)
536 559 stdout, stderr = repo_cmd.execute('git push --verbose origin feature')
537 560 _check_proper_git_push(stdout, stderr, branch='feature')
538 561
539 562 ref = '{}/{}/pull-request/new?branch=feature'.format(
540 563 rc_web_server.host_url(), empty_repo.repo_name)
541 564 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stderr
542 565 assert 'remote: RhodeCode: push completed' in stderr
543 566
544 567
545 568 def test_hg_push_shows_pull_request_refs(backend_hg, rc_web_server, tmpdir):
546 569 empty_repo = backend_hg.create_repo()
547 570
548 571 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
549 572
550 573 cmd = Command(tmpdir.strpath)
551 574 cmd.execute('hg clone', clone_url)
552 575
553 576 repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
554 577 repo.in_memory_commit.add(FileNode(u'readme.md', content=u'## Hello'))
555 578 repo.in_memory_commit.commit(
556 579 message=u'Commit on branch default',
557 580 author=u'Automatic test',
558 581 branch='default')
559 582
560 583 repo_cmd = Command(repo.path)
561 584 repo_cmd.execute('hg checkout default')
562 585
563 586 stdout, stderr = repo_cmd.execute('hg push --verbose', clone_url)
564 587 _check_proper_hg_push(stdout, stderr, branch='default')
565 588
566 589 ref = '{}/{}/pull-request/new?branch=default'.format(
567 590 rc_web_server.host_url(), empty_repo.repo_name)
568 591 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
569 592 assert 'remote: RhodeCode: push completed' in stdout
570 593
571 594 # push on the same branch
572 595 repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
573 596 repo.in_memory_commit.add(FileNode(u'setup.py', content=u'print\n'))
574 597 repo.in_memory_commit.commit(
575 598 message=u'Commit2 on branch default',
576 599 author=u'Automatic test2',
577 600 branch=u'default')
578 601
579 602 repo_cmd = Command(repo.path)
580 603 repo_cmd.execute('hg checkout default')
581 604
582 605 stdout, stderr = repo_cmd.execute('hg push --verbose', clone_url)
583 606 _check_proper_hg_push(stdout, stderr, branch='default')
584 607
585 608 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
586 609 assert 'remote: RhodeCode: push completed' in stdout
587 610
588 611 # new Branch
589 612 repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
590 613 repo.in_memory_commit.add(FileNode(u'feature1.py', content=u'## Hello world'))
591 614 repo.in_memory_commit.commit(
592 615 message=u'Commit on branch feature',
593 616 author=u'Automatic test',
594 617 branch=u'feature')
595 618
596 619 repo_cmd = Command(repo.path)
597 620 repo_cmd.execute('hg checkout feature')
598 621
599 622 stdout, stderr = repo_cmd.execute('hg push --new-branch --verbose', clone_url)
600 623 _check_proper_hg_push(stdout, stderr, branch='feature')
601 624
602 625 ref = '{}/{}/pull-request/new?branch=feature'.format(
603 626 rc_web_server.host_url(), empty_repo.repo_name)
604 627 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
605 628 assert 'remote: RhodeCode: push completed' in stdout
606 629
607 630
608 631 def test_hg_push_shows_pull_request_refs_book(backend_hg, rc_web_server, tmpdir):
609 632 empty_repo = backend_hg.create_repo()
610 633
611 634 clone_url = rc_web_server.repo_clone_url(empty_repo.repo_name)
612 635
613 636 cmd = Command(tmpdir.strpath)
614 637 cmd.execute('hg clone', clone_url)
615 638
616 639 repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
617 640 repo.in_memory_commit.add(FileNode(u'readme.md', content=u'## Hello'))
618 641 repo.in_memory_commit.commit(
619 642 message=u'Commit on branch default',
620 643 author=u'Automatic test',
621 644 branch='default')
622 645
623 646 repo_cmd = Command(repo.path)
624 647 repo_cmd.execute('hg checkout default')
625 648
626 649 stdout, stderr = repo_cmd.execute('hg push --verbose', clone_url)
627 650 _check_proper_hg_push(stdout, stderr, branch='default')
628 651
629 652 ref = '{}/{}/pull-request/new?branch=default'.format(
630 653 rc_web_server.host_url(), empty_repo.repo_name)
631 654 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
632 655 assert 'remote: RhodeCode: push completed' in stdout
633 656
634 657 # add bookmark
635 658 repo = MercurialRepository(os.path.join(tmpdir.strpath, empty_repo.repo_name))
636 659 repo.in_memory_commit.add(FileNode(u'setup.py', content=u'print\n'))
637 660 repo.in_memory_commit.commit(
638 661 message=u'Commit2 on branch default',
639 662 author=u'Automatic test2',
640 663 branch=u'default')
641 664
642 665 repo_cmd = Command(repo.path)
643 666 repo_cmd.execute('hg checkout default')
644 667 repo_cmd.execute('hg bookmark feature2')
645 668 stdout, stderr = repo_cmd.execute('hg push -B feature2 --verbose', clone_url)
646 669 _check_proper_hg_push(stdout, stderr, branch='default')
647 670
648 671 ref = '{}/{}/pull-request/new?branch=default'.format(
649 672 rc_web_server.host_url(), empty_repo.repo_name)
650 673 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
651 674 ref = '{}/{}/pull-request/new?bookmark=feature2'.format(
652 675 rc_web_server.host_url(), empty_repo.repo_name)
653 676 assert 'remote: RhodeCode: open pull request link: {}'.format(ref) in stdout
654 677 assert 'remote: RhodeCode: push completed' in stdout
655 678 assert 'exporting bookmark feature2' in stdout
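These operation tests depend on the project's fixtures (rc_web_server, user_util, tmpdir), so they must run under the project's own pytest setup; something like the following selects only the new damaged-store tests (the module filename is an assumption, adjust it to the actual file under rhodecode/tests/other/vcs_operations/):

import pytest

if __name__ == '__main__':
    # -k narrows the run to the two tests added in this commit.
    pytest.main([
        '-k', 'non_existing_store_path',
        'rhodecode/tests/other/vcs_operations/test_vcs_operations.py',
    ])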