code: fixed deprecated octal calls for py3 compat.
marcink
r3268:6e0a80a7 default
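For context on the change below: Python 3 removed the bare 0NNN octal literal (it is a SyntaxError there), while the 0oNNN spelling parses on both Python 2.6+ and Python 3, so the fix is a mechanical respelling of each mode constant. A minimal illustrative sketch, not part of the commit (the temp path is hypothetical):

import os
import tempfile

history_dir = os.path.join(tempfile.mkdtemp(), 'history')

# old, Python-2-only spelling (SyntaxError on Python 3):
#   os.makedirs(history_dir, 0750)
# portable spelling used throughout this commit:
os.makedirs(history_dir, 0o750)  # rwxr-x---; still subject to the process umask
print(0o750 == 488)  # True: octal 750 is decimal 488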
@@ -1,96 +1,96 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22
23 23 from pyramid.events import ApplicationCreated
24 24 from pyramid.settings import asbool
25 25
26 26 from rhodecode.apps._base import ADMIN_PREFIX
27 27 from rhodecode.lib.ext_json import json
28 28
29 29
30 30 def url_gen(request):
31 31 registry = request.registry
32 32 longpoll_url = registry.settings.get('channelstream.longpoll_url', '')
33 33 ws_url = registry.settings.get('channelstream.ws_url', '')
34 34 proxy_url = request.route_url('channelstream_proxy')
35 35 urls = {
36 36 'connect': request.route_path('channelstream_connect'),
37 37 'subscribe': request.route_path('channelstream_subscribe'),
38 38 'longpoll': longpoll_url or proxy_url,
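# str.replace maps 'http://' -> 'ws://' and 'https://' -> 'wss://'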
39 39 'ws': ws_url or proxy_url.replace('http', 'ws')
40 40 }
41 41 return json.dumps(urls)
42 42
43 43
44 44 PLUGIN_DEFINITION = {
45 45 'name': 'channelstream',
46 46 'config': {
47 47 'javascript': [],
48 48 'css': [],
49 49 'template_hooks': {
50 50 'plugin_init_template': 'rhodecode:templates/channelstream/plugin_init.mako'
51 51 },
52 52 'url_gen': url_gen,
53 53 'static': None,
54 54 'enabled': False,
55 55 'server': '',
56 56 'secret': ''
57 57 }
58 58 }
59 59
60 60
61 61 def maybe_create_history_store(event):
62 62 # create plugin history location
63 63 settings = event.app.registry.settings
64 64 history_dir = settings.get('channelstream.history.location', '')
65 65 if history_dir and not os.path.exists(history_dir):
66 os.makedirs(history_dir, 0750)
66 os.makedirs(history_dir, 0o750)
67 67
68 68
69 69 def includeme(config):
70 70 settings = config.registry.settings
71 71 PLUGIN_DEFINITION['config']['enabled'] = asbool(
72 72 settings.get('channelstream.enabled'))
73 73 PLUGIN_DEFINITION['config']['server'] = settings.get(
74 74 'channelstream.server', '')
75 75 PLUGIN_DEFINITION['config']['secret'] = settings.get(
76 76 'channelstream.secret', '')
77 77 PLUGIN_DEFINITION['config']['history.location'] = settings.get(
78 78 'channelstream.history.location', '')
79 79 config.register_rhodecode_plugin(
80 80 PLUGIN_DEFINITION['name'],
81 81 PLUGIN_DEFINITION['config']
82 82 )
83 83 config.add_subscriber(maybe_create_history_store, ApplicationCreated)
84 84
85 85 config.add_route(
86 86 name='channelstream_connect',
87 87 pattern=ADMIN_PREFIX + '/channelstream/connect')
88 88 config.add_route(
89 89 name='channelstream_subscribe',
90 90 pattern=ADMIN_PREFIX + '/channelstream/subscribe')
91 91 config.add_route(
92 92 name='channelstream_proxy',
93 93 pattern=settings.get('channelstream.proxy_path') or '/_channelstream')
94 94
95 95 # Scan module for configuration decorators.
96 96 config.scan('.views', ignore='.tests')
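The maybe_create_history_store subscriber above uses a check-then-create pattern, which can race if several workers start at once. A hedged, illustrative variant (ensure_dir is a hypothetical helper, not part of this codebase) that tolerates the race:

import errno
import os

def ensure_dir(path, mode=0o750):
    # tolerating EEXIST avoids the exists()/makedirs() race between
    # concurrently starting workers
    try:
        os.makedirs(path, mode)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise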
@@ -1,614 +1,614 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import sys
23 23 import logging
24 24 import collections
25 25 import tempfile
26 26 import time
27 27
28 28 from paste.gzipper import make_gzip_middleware
29 29 import pyramid.events
30 30 from pyramid.wsgi import wsgiapp
31 31 from pyramid.authorization import ACLAuthorizationPolicy
32 32 from pyramid.config import Configurator
33 33 from pyramid.settings import asbool, aslist
34 34 from pyramid.httpexceptions import (
35 35 HTTPException, HTTPError, HTTPInternalServerError, HTTPFound, HTTPNotFound)
36 36 from pyramid.renderers import render_to_response
37 37
38 38 from rhodecode.model import meta
39 39 from rhodecode.config import patches
40 40 from rhodecode.config import utils as config_utils
41 41 from rhodecode.config.environment import load_pyramid_environment
42 42
43 43 import rhodecode.events
44 44 from rhodecode.lib.middleware.vcs import VCSMiddleware
45 45 from rhodecode.lib.request import Request
46 46 from rhodecode.lib.vcs import VCSCommunicationError
47 47 from rhodecode.lib.exceptions import VCSServerUnavailable
48 48 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
49 49 from rhodecode.lib.middleware.https_fixup import HttpsFixup
50 50 from rhodecode.lib.celerylib.loader import configure_celery
51 51 from rhodecode.lib.plugins.utils import register_rhodecode_plugin
52 52 from rhodecode.lib.utils2 import aslist as rhodecode_aslist, AttributeDict
53 53 from rhodecode.lib.exc_tracking import store_exception
54 54 from rhodecode.subscribers import (
55 55 scan_repositories_if_enabled, write_js_routes_if_enabled,
56 56 write_metadata_if_needed, inject_app_settings)
57 57
58 58
59 59 log = logging.getLogger(__name__)
60 60
61 61
62 62 def is_http_error(response):
63 63 # errors which should have a traceback
64 64 return response.status_code > 499
65 65
66 66
67 67 def should_load_all():
68 68 """
69 69 Returns True if all application components should be loaded. In some cases
70 70 it's desirable to skip loading the apps for faster shell script execution.
71 71 """
72 72 return True
73 73
74 74
75 75 def make_pyramid_app(global_config, **settings):
76 76 """
77 77 Constructs the WSGI application based on Pyramid.
78 78
79 79 Specials:
80 80
81 81 * The application can also be integrated like a plugin via the call to
82 82 `includeme`, together with the other utility functions that are called.
83 83 Changing this should be done with great care so as not to break cases
84 84 where these fragments are assembled from another place.
85 85
86 86 """
87 87
88 88 # Allows using format-style "{ENV_NAME}" placeholders in the configuration. Each
89 89 # placeholder is replaced by the value of the environment variable "NAME".
90 90 start_time = time.time()
91 91
92 92 environ = {'ENV_{}'.format(key): value for key, value in os.environ.items()}
93 93
94 94 global_config = _substitute_values(global_config, environ)
95 95 settings = _substitute_values(settings, environ)
96 96
97 97 sanitize_settings_and_apply_defaults(settings)
98 98
99 99 config = Configurator(settings=settings)
100 100
101 101 # Apply compatibility patches
102 102 patches.inspect_getargspec()
103 103
104 104 load_pyramid_environment(global_config, settings)
105 105
106 106 # Static file view comes first
107 107 includeme_first(config)
108 108
109 109 includeme(config)
110 110
111 111 pyramid_app = config.make_wsgi_app()
112 112 pyramid_app = wrap_app_in_wsgi_middlewares(pyramid_app, config)
113 113 pyramid_app.config = config
114 114
115 115 config.configure_celery(global_config['__file__'])
116 116 # creating the app uses a connection - return it after we are done
117 117 meta.Session.remove()
118 118 total_time = time.time() - start_time
119 119 log.info('Pyramid app `%s` created and configured in %.2fs',
120 120 pyramid_app.func_name, total_time)
121 121 return pyramid_app
122 122
123 123
124 124 def not_found_view(request):
125 125 """
126 126 This creates the view which should be registered as the not-found view
127 127 in pyramid.
128 128 """
129 129
130 130 if not getattr(request, 'vcs_call', None):
131 131 # handle like regular case with our error_handler
132 132 return error_handler(HTTPNotFound(), request)
133 133
134 134 # handle not found view as a vcs call
135 135 settings = request.registry.settings
136 136 ae_client = getattr(request, 'ae_client', None)
137 137 vcs_app = VCSMiddleware(
138 138 HTTPNotFound(), request.registry, settings,
139 139 appenlight_client=ae_client)
140 140
141 141 return wsgiapp(vcs_app)(None, request)
142 142
143 143
144 144 def error_handler(exception, request):
145 145 import rhodecode
146 146 from rhodecode.lib import helpers
147 147
148 148 rhodecode_title = rhodecode.CONFIG.get('rhodecode_title') or 'RhodeCode'
149 149
150 150 base_response = HTTPInternalServerError()
151 151 # prefer original exception for the response since it may have headers set
152 152 if isinstance(exception, HTTPException):
153 153 base_response = exception
154 154 elif isinstance(exception, VCSCommunicationError):
155 155 base_response = VCSServerUnavailable()
156 156
157 157 if is_http_error(base_response):
158 158 log.exception(
159 159 'error occurred handling this request for path: %s', request.path)
160 160
161 161 error_explanation = base_response.explanation or str(base_response)
162 162 if base_response.status_code == 404:
163 163 error_explanation += " Or you don't have permission to access it."
164 164 c = AttributeDict()
165 165 c.error_message = base_response.status
166 166 c.error_explanation = error_explanation
167 167 c.visual = AttributeDict()
168 168
169 169 c.visual.rhodecode_support_url = (
170 170 request.registry.settings.get('rhodecode_support_url') or
171 171 request.route_url('rhodecode_support')
172 172 )
173 173 c.redirect_time = 0
174 174 c.rhodecode_name = rhodecode_title
175 175 if not c.rhodecode_name:
176 176 c.rhodecode_name = 'Rhodecode'
177 177
178 178 c.causes = []
179 179 if is_http_error(base_response):
180 180 c.causes.append('Server is overloaded.')
181 181 c.causes.append('Server database connection is lost.')
182 182 c.causes.append('Server encountered an unexpected error.')
183 183
184 184 if hasattr(base_response, 'causes'):
185 185 c.causes = base_response.causes
186 186
187 187 c.messages = helpers.flash.pop_messages(request=request)
188 188
189 189 exc_info = sys.exc_info()
190 190 c.exception_id = id(exc_info)
191 191 c.show_exception_id = isinstance(base_response, VCSServerUnavailable) \
192 192 or base_response.status_code > 499
193 193 c.exception_id_url = request.route_url(
194 194 'admin_settings_exception_tracker_show', exception_id=c.exception_id)
195 195
196 196 if c.show_exception_id:
197 197 store_exception(c.exception_id, exc_info)
198 198
199 199 response = render_to_response(
200 200 '/errors/error_document.mako', {'c': c, 'h': helpers}, request=request,
201 201 response=base_response)
202 202
203 203 return response
204 204
205 205
206 206 def includeme_first(config):
207 207 # redirect automatic browser favicon.ico requests to correct place
208 208 def favicon_redirect(context, request):
209 209 return HTTPFound(
210 210 request.static_path('rhodecode:public/images/favicon.ico'))
211 211
212 212 config.add_view(favicon_redirect, route_name='favicon')
213 213 config.add_route('favicon', '/favicon.ico')
214 214
215 215 def robots_redirect(context, request):
216 216 return HTTPFound(
217 217 request.static_path('rhodecode:public/robots.txt'))
218 218
219 219 config.add_view(robots_redirect, route_name='robots')
220 220 config.add_route('robots', '/robots.txt')
221 221
222 222 config.add_static_view(
223 223 '_static/deform', 'deform:static')
224 224 config.add_static_view(
225 225 '_static/rhodecode', path='rhodecode:public', cache_max_age=3600 * 24)
226 226
227 227
228 228 def includeme(config):
229 229 log.debug('Initializing main includeme from %s', os.path.basename(__file__))
230 230 settings = config.registry.settings
231 231 config.set_request_factory(Request)
232 232
233 233 # plugin information
234 234 config.registry.rhodecode_plugins = collections.OrderedDict()
235 235
236 236 config.add_directive(
237 237 'register_rhodecode_plugin', register_rhodecode_plugin)
238 238
239 239 config.add_directive('configure_celery', configure_celery)
240 240
241 241 if asbool(settings.get('appenlight', 'false')):
242 242 config.include('appenlight_client.ext.pyramid_tween')
243 243
244 244 load_all = should_load_all()
245 245
246 246 # Includes which are required. The application would fail without them.
247 247 config.include('pyramid_mako')
248 248 config.include('pyramid_beaker')
249 249 config.include('rhodecode.lib.rc_cache')
250 250
251 251 config.include('rhodecode.apps._base.navigation')
252 252 config.include('rhodecode.apps._base.subscribers')
253 253 config.include('rhodecode.tweens')
254 254
255 255 config.include('rhodecode.integrations')
256 256 config.include('rhodecode.authentication')
257 257
258 258 if load_all:
259 259 from rhodecode.authentication import discover_legacy_plugins
260 260 # load CE authentication plugins
261 261 config.include('rhodecode.authentication.plugins.auth_crowd')
262 262 config.include('rhodecode.authentication.plugins.auth_headers')
263 263 config.include('rhodecode.authentication.plugins.auth_jasig_cas')
264 264 config.include('rhodecode.authentication.plugins.auth_ldap')
265 265 config.include('rhodecode.authentication.plugins.auth_pam')
266 266 config.include('rhodecode.authentication.plugins.auth_rhodecode')
267 267 config.include('rhodecode.authentication.plugins.auth_token')
268 268
269 269 # Auto discover authentication plugins and include their configuration.
270 270 discover_legacy_plugins(config)
271 271
272 272 # apps
273 273 config.include('rhodecode.apps._base')
274 274
275 275 if load_all:
276 276 config.include('rhodecode.apps.ops')
277 277 config.include('rhodecode.apps.admin')
278 278 config.include('rhodecode.apps.channelstream')
279 279 config.include('rhodecode.apps.login')
280 280 config.include('rhodecode.apps.home')
281 281 config.include('rhodecode.apps.journal')
282 282 config.include('rhodecode.apps.repository')
283 283 config.include('rhodecode.apps.repo_group')
284 284 config.include('rhodecode.apps.user_group')
285 285 config.include('rhodecode.apps.search')
286 286 config.include('rhodecode.apps.user_profile')
287 287 config.include('rhodecode.apps.user_group_profile')
288 288 config.include('rhodecode.apps.my_account')
289 289 config.include('rhodecode.apps.svn_support')
290 290 config.include('rhodecode.apps.ssh_support')
291 291 config.include('rhodecode.apps.gist')
292 292 config.include('rhodecode.apps.debug_style')
293 293 config.include('rhodecode.api')
294 294
295 295 config.add_route('rhodecode_support', 'https://rhodecode.com/help/', static=True)
296 296 config.add_translation_dirs('rhodecode:i18n/')
297 297 settings['default_locale_name'] = settings.get('lang', 'en')
298 298
299 299 # Add subscribers.
300 300 config.add_subscriber(inject_app_settings,
301 301 pyramid.events.ApplicationCreated)
302 302 config.add_subscriber(scan_repositories_if_enabled,
303 303 pyramid.events.ApplicationCreated)
304 304 config.add_subscriber(write_metadata_if_needed,
305 305 pyramid.events.ApplicationCreated)
306 306 config.add_subscriber(write_js_routes_if_enabled,
307 307 pyramid.events.ApplicationCreated)
308 308
309 309 # request custom methods
310 310 config.add_request_method(
311 311 'rhodecode.lib.partial_renderer.get_partial_renderer',
312 312 'get_partial_renderer')
313 313
314 314 # Set the authorization policy.
315 315 authz_policy = ACLAuthorizationPolicy()
316 316 config.set_authorization_policy(authz_policy)
317 317
318 318 # Set the default renderer for HTML templates to mako.
319 319 config.add_mako_renderer('.html')
320 320
321 321 config.add_renderer(
322 322 name='json_ext',
323 323 factory='rhodecode.lib.ext_json_renderer.pyramid_ext_json')
324 324
325 325 # include RhodeCode plugins
326 326 includes = aslist(settings.get('rhodecode.includes', []))
327 327 for inc in includes:
328 328 config.include(inc)
329 329
330 330 # custom not-found view: if our pyramid app doesn't know how to handle
331 331 # the request, pass it to the potential VCS handling app
332 332 config.add_notfound_view(not_found_view)
333 333 if not settings.get('debugtoolbar.enabled', False):
334 334 # with the debugtoolbar disabled, handle all exceptions via the error_handler
335 335 config.add_view(error_handler, context=Exception)
336 336
337 337 # all errors including 403/404/50X
338 338 config.add_view(error_handler, context=HTTPError)
339 339
340 340
341 341 def wrap_app_in_wsgi_middlewares(pyramid_app, config):
342 342 """
343 343 Apply outer WSGI middlewares around the application.
344 344 """
345 345 registry = config.registry
346 346 settings = registry.settings
347 347
348 348 # enable https redirects based on HTTP_X_URL_SCHEME set by proxy
349 349 pyramid_app = HttpsFixup(pyramid_app, settings)
350 350
351 351 pyramid_app, _ae_client = wrap_in_appenlight_if_enabled(
352 352 pyramid_app, settings)
353 353 registry.ae_client = _ae_client
354 354
355 355 if settings['gzip_responses']:
356 356 pyramid_app = make_gzip_middleware(
357 357 pyramid_app, settings, compress_level=1)
358 358
359 359 # this should be the outermost middleware in the wsgi stack since
360 360 # middleware like Routes makes database calls
361 361 def pyramid_app_with_cleanup(environ, start_response):
362 362 try:
363 363 return pyramid_app(environ, start_response)
364 364 finally:
365 365 # Dispose current database session and rollback uncommitted
366 366 # transactions.
367 367 meta.Session.remove()
368 368
369 369 # In a single-threaded server on a non-sqlite db we should have
370 370 # '0 Current Checked out connections' at the end of a request;
371 371 # if not, then something somewhere is leaving a connection open
372 372 pool = meta.Base.metadata.bind.engine.pool
373 373 log.debug('sa pool status: %s', pool.status())
374 374 log.debug('Request processing finalized')
375 375
376 376 return pyramid_app_with_cleanup
377 377
378 378
379 379 def sanitize_settings_and_apply_defaults(settings):
380 380 """
381 381 Applies settings defaults and does all type conversion.
382 382
383 383 The goal is to move all settings parsing and preparation into this place, so
384 384 that we have only one place which deals with this part. The remaining parts
385 385 of the application can then rely fully on well-prepared settings.
386 386
387 387 This piece would later be split up per topic to avoid a big fat monster
388 388 function.
389 389 """
390 390
391 391 settings.setdefault('rhodecode.edition', 'Community Edition')
392 392
393 393 if 'mako.default_filters' not in settings:
394 394 # set custom default filters if we don't have them defined
395 395 settings['mako.imports'] = 'from rhodecode.lib.base import h_filter'
396 396 settings['mako.default_filters'] = 'h_filter'
397 397
398 398 if 'mako.directories' not in settings:
399 399 mako_directories = settings.setdefault('mako.directories', [
400 400 # Base templates of the original application
401 401 'rhodecode:templates',
402 402 ])
403 403 log.debug(
404 404 "Using the following Mako template directories: %s",
405 405 mako_directories)
406 406
407 407 # Default includes, possible to change as a user
408 408 pyramid_includes = settings.setdefault('pyramid.includes', [
409 409 'rhodecode.lib.middleware.request_wrapper',
410 410 ])
411 411 log.debug(
412 412 "Using the following pyramid.includes: %s",
413 413 pyramid_includes)
414 414
415 415 # TODO: johbo: Re-think this, usually the call to config.include
416 416 # should allow to pass in a prefix.
417 417 settings.setdefault('rhodecode.api.url', '/_admin/api')
418 418
419 419 # Sanitize generic settings.
420 420 _list_setting(settings, 'default_encoding', 'UTF-8')
421 421 _bool_setting(settings, 'is_test', 'false')
422 422 _bool_setting(settings, 'gzip_responses', 'false')
423 423
424 424 # Call split out functions that sanitize settings for each topic.
425 425 _sanitize_appenlight_settings(settings)
426 426 _sanitize_vcs_settings(settings)
427 427 _sanitize_cache_settings(settings)
428 428
429 429 # configure instance id
430 430 config_utils.set_instance_id(settings)
431 431
432 432 return settings
433 433
434 434
435 435 def _sanitize_appenlight_settings(settings):
436 436 _bool_setting(settings, 'appenlight', 'false')
437 437
438 438
439 439 def _sanitize_vcs_settings(settings):
440 440 """
441 441 Applies settings defaults and does type conversion for all VCS related
442 442 settings.
443 443 """
444 444 _string_setting(settings, 'vcs.svn.compatible_version', '')
445 445 _string_setting(settings, 'git_rev_filter', '--all')
446 446 _string_setting(settings, 'vcs.hooks.protocol', 'http')
447 447 _string_setting(settings, 'vcs.hooks.host', '127.0.0.1')
448 448 _string_setting(settings, 'vcs.scm_app_implementation', 'http')
449 449 _string_setting(settings, 'vcs.server', '')
450 450 _string_setting(settings, 'vcs.server.log_level', 'debug')
451 451 _string_setting(settings, 'vcs.server.protocol', 'http')
452 452 _bool_setting(settings, 'startup.import_repos', 'false')
453 453 _bool_setting(settings, 'vcs.hooks.direct_calls', 'false')
454 454 _bool_setting(settings, 'vcs.server.enable', 'true')
455 455 _bool_setting(settings, 'vcs.start_server', 'false')
456 456 _list_setting(settings, 'vcs.backends', 'hg, git, svn')
457 457 _int_setting(settings, 'vcs.connection_timeout', 3600)
458 458
459 459 # Support legacy values of vcs.scm_app_implementation. Legacy
460 460 # configurations may use 'rhodecode.lib.middleware.utils.scm_app_http' or
461 461 # 'vcsserver.scm_app' (disabled since 4.13); both are now mapped to 'http'.
462 462 scm_app_impl = settings['vcs.scm_app_implementation']
463 463 if scm_app_impl in ['rhodecode.lib.middleware.utils.scm_app_http', 'vcsserver.scm_app']:
464 464 settings['vcs.scm_app_implementation'] = 'http'
465 465
466 466
467 467 def _sanitize_cache_settings(settings):
468 468 temp_store = tempfile.gettempdir()
469 469 default_cache_dir = os.path.join(temp_store, 'rc_cache')
470 470
471 471 # save the default cache dir and use it for all backends later.
472 472 default_cache_dir = _string_setting(
473 473 settings,
474 474 'cache_dir',
475 475 default_cache_dir, lower=False, default_when_empty=True)
476 476
477 477 # ensure we have our dir created
478 478 if not os.path.isdir(default_cache_dir):
479 os.makedirs(default_cache_dir, mode=0755)
479 os.makedirs(default_cache_dir, mode=0o755)
480 480
481 481 # exception store cache
482 482 _string_setting(
483 483 settings,
484 484 'exception_tracker.store_path',
485 485 temp_store, lower=False, default_when_empty=True)
486 486
487 487 # cache_perms
488 488 _string_setting(
489 489 settings,
490 490 'rc_cache.cache_perms.backend',
491 491 'dogpile.cache.rc.file_namespace', lower=False)
492 492 _int_setting(
493 493 settings,
494 494 'rc_cache.cache_perms.expiration_time',
495 495 60)
496 496 _string_setting(
497 497 settings,
498 498 'rc_cache.cache_perms.arguments.filename',
499 499 os.path.join(default_cache_dir, 'rc_cache_1'), lower=False)
500 500
501 501 # cache_repo
502 502 _string_setting(
503 503 settings,
504 504 'rc_cache.cache_repo.backend',
505 505 'dogpile.cache.rc.file_namespace', lower=False)
506 506 _int_setting(
507 507 settings,
508 508 'rc_cache.cache_repo.expiration_time',
509 509 60)
510 510 _string_setting(
511 511 settings,
512 512 'rc_cache.cache_repo.arguments.filename',
513 513 os.path.join(default_cache_dir, 'rc_cache_2'), lower=False)
514 514
515 515 # cache_license
516 516 _string_setting(
517 517 settings,
518 518 'rc_cache.cache_license.backend',
519 519 'dogpile.cache.rc.file_namespace', lower=False)
520 520 _int_setting(
521 521 settings,
522 522 'rc_cache.cache_license.expiration_time',
523 523 5*60)
524 524 _string_setting(
525 525 settings,
526 526 'rc_cache.cache_license.arguments.filename',
527 527 os.path.join(default_cache_dir, 'rc_cache_3'), lower=False)
528 528
529 529 # cache_repo_longterm memory, 96H
530 530 _string_setting(
531 531 settings,
532 532 'rc_cache.cache_repo_longterm.backend',
533 533 'dogpile.cache.rc.memory_lru', lower=False)
534 534 _int_setting(
535 535 settings,
536 536 'rc_cache.cache_repo_longterm.expiration_time',
537 537 345600)
538 538 _int_setting(
539 539 settings,
540 540 'rc_cache.cache_repo_longterm.max_size',
541 541 10000)
542 542
543 543 # sql_cache_short
544 544 _string_setting(
545 545 settings,
546 546 'rc_cache.sql_cache_short.backend',
547 547 'dogpile.cache.rc.memory_lru', lower=False)
548 548 _int_setting(
549 549 settings,
550 550 'rc_cache.sql_cache_short.expiration_time',
551 551 30)
552 552 _int_setting(
553 553 settings,
554 554 'rc_cache.sql_cache_short.max_size',
555 555 10000)
556 556
557 557
558 558 def _int_setting(settings, name, default):
559 559 settings[name] = int(settings.get(name, default))
560 560 return settings[name]
561 561
562 562
563 563 def _bool_setting(settings, name, default):
564 564 input_val = settings.get(name, default)
565 565 if isinstance(input_val, unicode):
566 566 input_val = input_val.encode('utf8')
567 567 settings[name] = asbool(input_val)
568 568 return settings[name]
569 569
570 570
571 571 def _list_setting(settings, name, default):
572 572 raw_value = settings.get(name, default)
573 573
574 574 old_separator = ','
575 575 if old_separator in raw_value:
576 576 # If we get a comma separated list, pass it to our own function.
577 577 settings[name] = rhodecode_aslist(raw_value, sep=old_separator)
578 578 else:
579 579 # Otherwise we assume it uses pyramid's space/newline separation.
580 580 settings[name] = aslist(raw_value)
581 581 return settings[name]
582 582
583 583
584 584 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
585 585 value = settings.get(name, default)
586 586
587 587 if default_when_empty and not value:
588 588 # use default value when value is empty
589 589 value = default
590 590
591 591 if lower:
592 592 value = value.lower()
593 593 settings[name] = value
594 594 return settings[name]
595 595
596 596
597 597 def _substitute_values(mapping, substitutions):
598 598
599 599 try:
600 600 result = {
601 601 # Note: Cannot use regular replacements, since they would clash
602 602 # with the implementation of ConfigParser. Using "format" instead.
603 603 key: value.format(**substitutions)
604 604 for key, value in mapping.items()
605 605 }
606 606 except KeyError as e:
607 607 raise ValueError(
608 608 'Failed to substitute env variable: {}. '
609 609 'Make sure you have specified this env variable without ENV_ prefix'.format(e))
610 610 except ValueError as e:
611 611 log.warning('Failed to substitute ENV variable: %s', e)
612 612 result = mapping
613 613
614 614 return result
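To make the '{ENV_NAME}' substitution performed by _substitute_values concrete, a small self-contained sketch of the same mechanism (the CACHE_DIR variable and values are illustrative):

import os

os.environ['CACHE_DIR'] = '/tmp/rc_cache'  # illustrative value
environ = {'ENV_{}'.format(key): value for key, value in os.environ.items()}

settings = {'cache_dir': '{ENV_CACHE_DIR}/files'}  # format-style placeholder
resolved = {key: value.format(**environ) for key, value in settings.items()}
print(resolved['cache_dir'])  # /tmp/rc_cache/files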
@@ -1,1755 +1,1755 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24
25 25 import collections
26 26 import datetime
27 27 import fnmatch
28 28 import itertools
29 29 import logging
30 30 import os
31 31 import re
32 32 import time
33 33 import warnings
34 34 import shutil
35 35
36 36 from zope.cachedescriptors.property import Lazy as LazyProperty
37 37
38 38 from rhodecode.lib.utils2 import safe_str, safe_unicode
39 39 from rhodecode.lib.vcs import connection
40 40 from rhodecode.lib.vcs.utils import author_name, author_email
41 41 from rhodecode.lib.vcs.conf import settings
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
44 44 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
45 45 NodeDoesNotExistError, NodeNotChangedError, VCSError,
46 46 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
47 47 RepositoryError)
48 48
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
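# POSIX stat modes: the high bits 0o100000 (stat.S_IFREG) mark a regular
# file; the low bits are the usual 644 / 755 permission bits.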
53 FILEMODE_DEFAULT = 0100644
54 FILEMODE_EXECUTABLE = 0100755
53 FILEMODE_DEFAULT = 0o100644
54 FILEMODE_EXECUTABLE = 0o100755
55 55
56 56 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
57 57 MergeResponse = collections.namedtuple(
58 58 'MergeResponse',
59 59 ('possible', 'executed', 'merge_ref', 'failure_reason'))
60 60
61 61
62 62 class MergeFailureReason(object):
63 63 """
64 64 Enumeration with all the reasons why the server side merge could fail.
65 65
66 66 DO NOT change the numeric values of the reasons, as they may be stored in the
67 67 database.
68 68
69 69 Changing the name of a reason is acceptable and encouraged to deprecate old
70 70 reasons.
71 71 """
72 72
73 73 # Everything went well.
74 74 NONE = 0
75 75
76 76 # An unexpected exception was raised. Check the logs for more details.
77 77 UNKNOWN = 1
78 78
79 79 # The merge was not successful, there are conflicts.
80 80 MERGE_FAILED = 2
81 81
82 82 # The merge succeeded but we could not push it to the target repository.
83 83 PUSH_FAILED = 3
84 84
85 85 # The specified target is not a head in the target repository.
86 86 TARGET_IS_NOT_HEAD = 4
87 87
88 88 # The source repository contains more branches than the target. Pushing
89 89 # the merge will create additional branches in the target.
90 90 HG_SOURCE_HAS_MORE_BRANCHES = 5
91 91
92 92 # The target reference has multiple heads, which makes it impossible to
93 93 # correctly identify the target location. This can only happen for
94 94 # mercurial branches.
95 95 HG_TARGET_HAS_MULTIPLE_HEADS = 6
96 96
97 97 # The target repository is locked
98 98 TARGET_IS_LOCKED = 7
99 99
100 100 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
101 101 # An involved commit could not be found.
102 102 _DEPRECATED_MISSING_COMMIT = 8
103 103
104 104 # The target repo reference is missing.
105 105 MISSING_TARGET_REF = 9
106 106
107 107 # The source repo reference is missing.
108 108 MISSING_SOURCE_REF = 10
109 109
110 110 # The merge was not successful, there are conflicts related to sub
111 111 # repositories.
112 112 SUBREPO_MERGE_FAILED = 11
113 113
114 114
115 115 class UpdateFailureReason(object):
116 116 """
117 117 Enumeration with all the reasons why the pull request update could fail.
118 118
119 119 DO NOT change the numeric values of the reasons, as they may be stored in the
120 120 database.
121 121
122 122 Changing the name of a reason is acceptable and encouraged to deprecate old
123 123 reasons.
124 124 """
125 125
126 126 # Everything went well.
127 127 NONE = 0
128 128
129 129 # An unexpected exception was raised. Check the logs for more details.
130 130 UNKNOWN = 1
131 131
132 132 # The pull request is up to date.
133 133 NO_CHANGE = 2
134 134
135 135 # The pull request has a reference type that is not supported for update.
136 136 WRONG_REF_TYPE = 3
137 137
138 138 # Update failed because the target reference is missing.
139 139 MISSING_TARGET_REF = 4
140 140
141 141 # Update failed because the source reference is missing.
142 142 MISSING_SOURCE_REF = 5
143 143
144 144
145 145 class BaseRepository(object):
146 146 """
147 147 Base Repository for final backends
148 148
149 149 .. attribute:: DEFAULT_BRANCH_NAME
150 150
151 151 name of the default branch (e.g. "trunk" for svn, "master" for git, etc.)
152 152
153 153 .. attribute:: commit_ids
154 154
155 155 list of all available commit ids, in ascending order
156 156
157 157 .. attribute:: path
158 158
159 159 absolute path to the repository
160 160
161 161 .. attribute:: bookmarks
162 162
163 163 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
164 164 there are no bookmarks or the backend implementation does not support
165 165 bookmarks.
166 166
167 167 .. attribute:: tags
168 168
169 169 Mapping from name to :term:`Commit ID` of the tag.
170 170
171 171 """
172 172
173 173 DEFAULT_BRANCH_NAME = None
174 174 DEFAULT_CONTACT = u"Unknown"
175 175 DEFAULT_DESCRIPTION = u"unknown"
176 176 EMPTY_COMMIT_ID = '0' * 40
177 177
178 178 path = None
179 179
180 180 def __init__(self, repo_path, config=None, create=False, **kwargs):
181 181 """
182 182 Initializes repository. Raises RepositoryError if the repository could
183 183 not be found at the given ``repo_path``, or if the directory at ``repo_path``
184 184 already exists and ``create`` is set to True.
185 185
186 186 :param repo_path: local path of the repository
187 187 :param config: repository configuration
188 188 :param create=False: if set to True, would try to create repository.
189 189 :param src_url=None: if set, should be proper url from which repository
190 190 would be cloned; requires ``create`` parameter to be set to True -
191 191 raises RepositoryError if src_url is set and create evaluates to
192 192 False
193 193 """
194 194 raise NotImplementedError
195 195
196 196 def __repr__(self):
197 197 return '<%s at %s>' % (self.__class__.__name__, self.path)
198 198
199 199 def __len__(self):
200 200 return self.count()
201 201
202 202 def __eq__(self, other):
203 203 same_instance = isinstance(other, self.__class__)
204 204 return same_instance and other.path == self.path
205 205
206 206 def __ne__(self, other):
207 207 return not self.__eq__(other)
208 208
209 209 def get_create_shadow_cache_pr_path(self, db_repo):
210 210 path = db_repo.cached_diffs_dir
211 211 if not os.path.exists(path):
212 os.makedirs(path, 0755)
212 os.makedirs(path, 0o755)
213 213 return path
214 214
215 215 @classmethod
216 216 def get_default_config(cls, default=None):
217 217 config = Config()
218 218 if default and isinstance(default, list):
219 219 for section, key, val in default:
220 220 config.set(section, key, val)
221 221 return config
222 222
223 223 @LazyProperty
224 224 def _remote(self):
225 225 raise NotImplementedError
226 226
227 227 @LazyProperty
228 228 def EMPTY_COMMIT(self):
229 229 return EmptyCommit(self.EMPTY_COMMIT_ID)
230 230
231 231 @LazyProperty
232 232 def alias(self):
233 233 for k, v in settings.BACKENDS.items():
234 234 if v.split('.')[-1] == str(self.__class__.__name__):
235 235 return k
236 236
237 237 @LazyProperty
238 238 def name(self):
239 239 return safe_unicode(os.path.basename(self.path))
240 240
241 241 @LazyProperty
242 242 def description(self):
243 243 raise NotImplementedError
244 244
245 245 def refs(self):
246 246 """
247 247 returns a `dict` with branches, bookmarks, tags, and closed_branches
248 248 for this repository
249 249 """
250 250 return dict(
251 251 branches=self.branches,
252 252 branches_closed=self.branches_closed,
253 253 tags=self.tags,
254 254 bookmarks=self.bookmarks
255 255 )
256 256
257 257 @LazyProperty
258 258 def branches(self):
259 259 """
260 260 A `dict` which maps branch names to commit ids.
261 261 """
262 262 raise NotImplementedError
263 263
264 264 @LazyProperty
265 265 def branches_closed(self):
266 266 """
267 267 A `dict` which maps closed branch names to commit ids.
268 268 """
269 269 raise NotImplementedError
270 270
271 271 @LazyProperty
272 272 def bookmarks(self):
273 273 """
274 274 A `dict` which maps bookmark names to commit ids.
275 275 """
276 276 raise NotImplementedError
277 277
278 278 @LazyProperty
279 279 def tags(self):
280 280 """
281 281 A `dict` which maps tag names to commit ids.
282 282 """
283 283 raise NotImplementedError
284 284
285 285 @LazyProperty
286 286 def size(self):
287 287 """
288 288 Returns combined size in bytes for all repository files
289 289 """
290 290 tip = self.get_commit()
291 291 return tip.size
292 292
293 293 def size_at_commit(self, commit_id):
294 294 commit = self.get_commit(commit_id)
295 295 return commit.size
296 296
297 297 def is_empty(self):
298 298 return not bool(self.commit_ids)
299 299
300 300 @staticmethod
301 301 def check_url(url, config):
302 302 """
303 303 Checks the given url and tries to verify that it's a valid
304 304 link.
305 305 """
306 306 raise NotImplementedError
307 307
308 308 @staticmethod
309 309 def is_valid_repository(path):
310 310 """
311 311 Check if given `path` contains a valid repository of this backend
312 312 """
313 313 raise NotImplementedError
314 314
315 315 # ==========================================================================
316 316 # COMMITS
317 317 # ==========================================================================
318 318
319 319 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
320 320 """
321 321 Returns an instance of the `BaseCommit` class. If `commit_id` and `commit_idx`
322 322 are both None, the most recent commit is returned.
323 323
324 324 :param pre_load: Optional. List of commit attributes to load.
325 325
326 326 :raises ``EmptyRepositoryError``: if there are no commits
327 327 """
328 328 raise NotImplementedError
329 329
330 330 def __iter__(self):
331 331 for commit_id in self.commit_ids:
332 332 yield self.get_commit(commit_id=commit_id)
333 333
334 334 def get_commits(
335 335 self, start_id=None, end_id=None, start_date=None, end_date=None,
336 336 branch_name=None, show_hidden=False, pre_load=None):
337 337 """
338 338 Returns iterator of `BaseCommit` objects from start to end
339 339 not inclusive. This should behave just like a list, ie. end is not
340 340 inclusive.
341 341
342 342 :param start_id: None or str, must be a valid commit id
343 343 :param end_id: None or str, must be a valid commit id
344 344 :param start_date:
345 345 :param end_date:
346 346 :param branch_name:
347 347 :param show_hidden:
348 348 :param pre_load:
349 349 """
350 350 raise NotImplementedError
351 351
352 352 def __getitem__(self, key):
353 353 """
354 354 Allows index based access to the commit objects of this repository.
355 355 """
356 356 pre_load = ["author", "branch", "date", "message", "parents"]
357 357 if isinstance(key, slice):
358 358 return self._get_range(key, pre_load)
359 359 return self.get_commit(commit_idx=key, pre_load=pre_load)
360 360
361 361 def _get_range(self, slice_obj, pre_load):
362 362 for commit_id in self.commit_ids.__getitem__(slice_obj):
363 363 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
364 364
365 365 def count(self):
366 366 return len(self.commit_ids)
367 367
368 368 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
369 369 """
370 370 Creates and returns a tag for the given ``commit_id``.
371 371
372 372 :param name: name for new tag
373 373 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
374 374 :param commit_id: commit id for which new tag would be created
375 375 :param message: message of the tag's commit
376 376 :param date: date of tag's commit
377 377
378 378 :raises TagAlreadyExistError: if tag with same name already exists
379 379 """
380 380 raise NotImplementedError
381 381
382 382 def remove_tag(self, name, user, message=None, date=None):
383 383 """
384 384 Removes tag with the given ``name``.
385 385
386 386 :param name: name of the tag to be removed
387 387 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
388 388 :param message: message of the tag's removal commit
389 389 :param date: date of tag's removal commit
390 390
391 391 :raises TagDoesNotExistError: if tag with given name does not exists
392 392 """
393 393 raise NotImplementedError
394 394
395 395 def get_diff(
396 396 self, commit1, commit2, path=None, ignore_whitespace=False,
397 397 context=3, path1=None):
398 398 """
399 399 Returns (git like) *diff*, as plain text. Shows changes introduced by
400 400 `commit2` since `commit1`.
401 401
402 402 :param commit1: Entry point from which diff is shown. Can be
403 403 ``self.EMPTY_COMMIT`` - in this case, patch showing all
404 404 the changes since empty state of the repository until `commit2`
405 405 :param commit2: Until which commit changes should be shown.
406 406 :param path: Can be set to a path of a file to create a diff of that
407 407 file. If `path1` is also set, this value is only associated to
408 408 `commit2`.
409 409 :param ignore_whitespace: If set to ``True``, would not show whitespace
410 410 changes. Defaults to ``False``.
411 411 :param context: How many lines before/after changed lines should be
412 412 shown. Defaults to ``3``.
413 413 :param path1: Can be set to a path to associate with `commit1`. This
414 414 parameter works only for backends which support diff generation for
415 415 different paths. Other backends will raise a `ValueError` if `path1`
416 416 is set and has a different value than `path`.
417 417 :param file_path: filter this diff by given path pattern
418 418 """
419 419 raise NotImplementedError
420 420
421 421 def strip(self, commit_id, branch=None):
422 422 """
423 423 Strip given commit_id from the repository
424 424 """
425 425 raise NotImplementedError
426 426
427 427 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
428 428 """
429 429 Return the latest common ancestor commit, if one exists, for this repo's
430 430 `commit_id1` vs `commit_id2` from `repo2`.
431 431
432 432 :param commit_id1: Commit id from this repository to use as a
433 433 target for the comparison.
434 434 :param commit_id2: Source commit id to use for comparison.
435 435 :param repo2: Source repository to use for comparison.
436 436 """
437 437 raise NotImplementedError
438 438
439 439 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
440 440 """
441 441 Compare this repository's revision `commit_id1` with `commit_id2`.
442 442
443 443 Returns a tuple(commits, ancestor) that would be merged from
444 444 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
445 445 will be returned as ancestor.
446 446
447 447 :param commit_id1: Commit id from this repository to use as a
448 448 target for the comparison.
449 449 :param commit_id2: Source commit id to use for comparison.
450 450 :param repo2: Source repository to use for comparison.
451 451 :param merge: If set to ``True`` will do a merge compare which also
452 452 returns the common ancestor.
453 453 :param pre_load: Optional. List of commit attributes to load.
454 454 """
455 455 raise NotImplementedError
456 456
457 457 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
458 458 user_name='', user_email='', message='', dry_run=False,
459 459 use_rebase=False, close_branch=False):
460 460 """
461 461 Merge the revisions specified in `source_ref` from `source_repo`
462 462 onto the `target_ref` of this repository.
463 463
464 464 `source_ref` and `target_ref` are named tuples with the following
465 465 fields `type`, `name` and `commit_id`.
466 466
467 467 Returns a MergeResponse named tuple with the following fields:
468 468 'possible', 'executed', 'merge_ref', 'failure_reason'.
470 470
471 471 :param repo_id: `repo_id` target repo id.
472 472 :param workspace_id: `workspace_id` unique identifier.
473 473 :param target_ref: `target_ref` points to the commit on top of which
474 474 the `source_ref` should be merged.
475 475 :param source_repo: The repository that contains the commits to be
476 476 merged.
477 477 :param source_ref: `source_ref` points to the topmost commit from
478 478 the `source_repo` which should be merged.
479 479 :param user_name: Merge commit `user_name`.
480 480 :param user_email: Merge commit `user_email`.
481 481 :param message: Merge commit `message`.
482 482 :param dry_run: If `True` the merge will not take place.
483 483 :param use_rebase: If `True` commits from the source will be rebased
484 484 on top of the target instead of being merged.
485 485 :param close_branch: If `True` the branch will be closed before merging it
486 486 """
487 487 if dry_run:
488 488 message = message or settings.MERGE_DRY_RUN_MESSAGE
489 489 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
490 490 user_name = user_name or settings.MERGE_DRY_RUN_USER
491 491 else:
492 492 if not user_name:
493 493 raise ValueError('user_name cannot be empty')
494 494 if not user_email:
495 495 raise ValueError('user_email cannot be empty')
496 496 if not message:
497 497 raise ValueError('message cannot be empty')
498 498
499 499 try:
500 500 return self._merge_repo(
501 501 repo_id, workspace_id, target_ref, source_repo,
502 502 source_ref, message, user_name, user_email, dry_run=dry_run,
503 503 use_rebase=use_rebase, close_branch=close_branch)
504 504 except RepositoryError:
505 505 log.exception(
506 506 'Unexpected failure when running merge, dry-run=%s',
507 507 dry_run)
508 508 return MergeResponse(
509 509 False, False, None, MergeFailureReason.UNKNOWN)
510 510
511 511 def _merge_repo(self, repo_id, workspace_id, target_ref,
512 512 source_repo, source_ref, merge_message,
513 513 merger_name, merger_email, dry_run=False,
514 514 use_rebase=False, close_branch=False):
515 515 """Internal implementation of merge."""
516 516 raise NotImplementedError
517 517
518 518 def _maybe_prepare_merge_workspace(
519 519 self, repo_id, workspace_id, target_ref, source_ref):
520 520 """
521 521 Create the merge workspace.
522 522
523 523 :param workspace_id: `workspace_id` unique identifier.
524 524 """
525 525 raise NotImplementedError
526 526
527 527 def _get_legacy_shadow_repository_path(self, workspace_id):
528 528 """
529 529 Legacy version of the shadow repository path. We still need it for
530 530 backward compat.
531 531 """
532 532 return os.path.join(
533 533 os.path.dirname(self.path),
534 534 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
535 535
536 536 def _get_shadow_repository_path(self, repo_id, workspace_id):
537 537 # The name of the shadow repository must start with '.', so it is
538 538 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
539 539 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
540 540 if os.path.exists(legacy_repository_path):
541 541 return legacy_repository_path
542 542 else:
543 543 return os.path.join(
544 544 os.path.dirname(self.path),
545 545 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
546 546
547 547 def cleanup_merge_workspace(self, repo_id, workspace_id):
548 548 """
549 549 Remove merge workspace.
550 550
551 551 This function MUST not fail in case there is no workspace associated with
552 552 the given `workspace_id`.
553 553
554 554 :param workspace_id: `workspace_id` unique identifier.
555 555 """
556 556 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
557 557 shadow_repository_path_del = '{}.{}.delete'.format(
558 558 shadow_repository_path, time.time())
559 559
560 560 # move the shadow repo, so it never conflicts with the one used.
561 561 # we use this method because shutil.rmtree had some edge case problems
562 562 # removing symlinked repositories
563 563 if not os.path.isdir(shadow_repository_path):
564 564 return
565 565
566 566 shutil.move(shadow_repository_path, shadow_repository_path_del)
567 567 try:
568 568 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
569 569 except Exception:
570 570 log.exception('Failed to gracefully remove shadow repo under %s',
571 571 shadow_repository_path_del)
572 572 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
573 573
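cleanup_merge_workspace above moves the doomed directory aside before deleting it, so the workspace path is freed immediately and a slow or partially failing rmtree cannot leave a half-deleted workspace in place. A minimal illustrative sketch of the same pattern (the function name is hypothetical):

import os
import shutil
import time

def remove_dir_safely(path):
    if not os.path.isdir(path):
        return
    # rename first so `path` is free at once; delete the renamed copy after
    doomed = '{}.{}.delete'.format(path, time.time())
    shutil.move(path, doomed)
    shutil.rmtree(doomed, ignore_errors=True)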
574 574 # ========== #
575 575 # COMMIT API #
576 576 # ========== #
577 577
578 578 @LazyProperty
579 579 def in_memory_commit(self):
580 580 """
581 581 Returns :class:`InMemoryCommit` object for this repository.
582 582 """
583 583 raise NotImplementedError
584 584
585 585 # ======================== #
586 586 # UTILITIES FOR SUBCLASSES #
587 587 # ======================== #
588 588
589 589 def _validate_diff_commits(self, commit1, commit2):
590 590 """
591 591 Validates that the given commits are related to this repository.
592 592
593 593 Intended as a utility for sub classes to have a consistent validation
594 594 of input parameters in methods like :meth:`get_diff`.
595 595 """
596 596 self._validate_commit(commit1)
597 597 self._validate_commit(commit2)
598 598 if (isinstance(commit1, EmptyCommit) and
599 599 isinstance(commit2, EmptyCommit)):
600 600 raise ValueError("Cannot compare two empty commits")
601 601
602 602 def _validate_commit(self, commit):
603 603 if not isinstance(commit, BaseCommit):
604 604 raise TypeError(
605 605 "%s is not of type BaseCommit" % repr(commit))
606 606 if commit.repository != self and not isinstance(commit, EmptyCommit):
607 607 raise ValueError(
608 608 "Commit %s must be a valid commit from this repository %s, "
609 609 "related to this repository instead %s." %
610 610 (commit, self, commit.repository))
611 611
612 612 def _validate_commit_id(self, commit_id):
613 613 if not isinstance(commit_id, basestring):
614 614 raise TypeError("commit_id must be a string value")
615 615
616 616 def _validate_commit_idx(self, commit_idx):
617 617 if not isinstance(commit_idx, (int, long)):
618 618 raise TypeError("commit_idx must be a numeric value")
619 619
620 620 def _validate_branch_name(self, branch_name):
621 621 if branch_name and branch_name not in self.branches_all:
622 622 msg = ("Branch %s not found in %s" % (branch_name, self))
623 623 raise BranchDoesNotExistError(msg)
624 624
625 625 #
626 626 # Supporting deprecated API parts
627 627 # TODO: johbo: consider to move this into a mixin
628 628 #
629 629
630 630 @property
631 631 def EMPTY_CHANGESET(self):
632 632 warnings.warn(
633 633 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
634 634 return self.EMPTY_COMMIT_ID
635 635
636 636 @property
637 637 def revisions(self):
638 638 warnings.warn("Use commits attribute instead", DeprecationWarning)
639 639 return self.commit_ids
640 640
641 641 @revisions.setter
642 642 def revisions(self, value):
643 643 warnings.warn("Use commits attribute instead", DeprecationWarning)
644 644 self.commit_ids = value
645 645
646 646 def get_changeset(self, revision=None, pre_load=None):
647 647 warnings.warn("Use get_commit instead", DeprecationWarning)
648 648 commit_id = None
649 649 commit_idx = None
650 650 if isinstance(revision, basestring):
651 651 commit_id = revision
652 652 else:
653 653 commit_idx = revision
654 654 return self.get_commit(
655 655 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
656 656
657 657 def get_changesets(
658 658 self, start=None, end=None, start_date=None, end_date=None,
659 659 branch_name=None, pre_load=None):
660 660 warnings.warn("Use get_commits instead", DeprecationWarning)
661 661 start_id = self._revision_to_commit(start)
662 662 end_id = self._revision_to_commit(end)
663 663 return self.get_commits(
664 664 start_id=start_id, end_id=end_id, start_date=start_date,
665 665 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
666 666
667 667 def _revision_to_commit(self, revision):
668 668 """
669 669 Translates a revision to a commit_id
670 670
671 671 Helps to support the old changeset based API which allows to use
672 672 commit ids and commit indices interchangeable.
673 673 """
674 674 if revision is None:
675 675 return revision
676 676
677 677 if isinstance(revision, basestring):
678 678 commit_id = revision
679 679 else:
680 680 commit_id = self.commit_ids[revision]
681 681 return commit_id
682 682
683 683 @property
684 684 def in_memory_changeset(self):
685 685 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
686 686 return self.in_memory_commit
687 687
688 688 def get_path_permissions(self, username):
689 689 """
690 690 Returns a path permission checker or None if not supported
691 691
692 692 :param username: session user name
693 693 :return: an instance of BasePathPermissionChecker or None
694 694 """
695 695 return None
696 696
697 697 def install_hooks(self, force=False):
698 698 return self._remote.install_hooks(force)
699 699
700 700
701 701 class BaseCommit(object):
702 702 """
703 703 Each backend should implement its own commit representation.
704 704
705 705 **Attributes**
706 706
707 707 ``repository``
708 708 repository object within which commit exists
709 709
710 710 ``id``
711 711 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
712 712 just ``tip``.
713 713
714 714 ``raw_id``
715 715 raw commit representation (i.e. full 40 length sha for git
716 716 backend)
717 717
718 718 ``short_id``
719 719 shortened (if applicable) version of ``raw_id``; a simple
720 720 shortcut for ``raw_id[:12]`` for git/mercurial backends, or the same
721 721 as ``raw_id`` for subversion
722 722
723 723 ``idx``
724 724 commit index
725 725
726 726 ``files``
727 727 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
728 728
729 729 ``dirs``
730 730 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
731 731
732 732 ``nodes``
733 733 combined list of ``Node`` objects
734 734
735 735 ``author``
736 736 author of the commit, as unicode
737 737
738 738 ``message``
739 739 message of the commit, as unicode
740 740
741 741 ``parents``
742 742 list of parent commits
743 743
744 744 """
745 745
746 746 branch = None
747 747 """
748 748 Depending on the backend this should be set to the branch name of the
749 749 commit. Backends not supporting branches on commits should leave this
750 750 value as ``None``.
751 751 """
752 752
753 753 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
754 754 """
755 755 This template is used to generate a default prefix for repository archives
756 756 if no prefix has been specified.
757 757 """
758 758
759 759 def __str__(self):
760 760 return '<%s at %s:%s>' % (
761 761 self.__class__.__name__, self.idx, self.short_id)
762 762
763 763 def __repr__(self):
764 764 return self.__str__()
765 765
766 766 def __unicode__(self):
767 767 return u'%s:%s' % (self.idx, self.short_id)
768 768
769 769 def __eq__(self, other):
770 770 same_instance = isinstance(other, self.__class__)
771 771 return same_instance and self.raw_id == other.raw_id
772 772
773 773 def __json__(self):
774 774 parents = []
775 775 try:
776 776 for parent in self.parents:
777 777 parents.append({'raw_id': parent.raw_id})
778 778 except NotImplementedError:
779 779 # empty commit doesn't have parents implemented
780 780 pass
781 781
782 782 return {
783 783 'short_id': self.short_id,
784 784 'raw_id': self.raw_id,
785 785 'revision': self.idx,
786 786 'message': self.message,
787 787 'date': self.date,
788 788 'author': self.author,
789 789 'parents': parents,
790 790 'branch': self.branch
791 791 }
792 792
793 793 def __getstate__(self):
794 794 d = self.__dict__.copy()
795 795 d.pop('_remote', None)
796 796 d.pop('repository', None)
797 797 return d
798 798
799 799 def _get_refs(self):
800 800 return {
801 801 'branches': [self.branch] if self.branch else [],
802 802 'bookmarks': getattr(self, 'bookmarks', []),
803 803 'tags': self.tags
804 804 }
805 805
806 806 @LazyProperty
807 807 def last(self):
808 808 """
809 809 ``True`` if this is last commit in repository, ``False``
810 810 otherwise; trying to access this attribute while there is no
811 811 commits would raise `EmptyRepositoryError`
812 812 """
813 813 if self.repository is None:
814 814 raise CommitError("Cannot check if it's the most recent commit")
815 815 return self.raw_id == self.repository.commit_ids[-1]
816 816
817 817 @LazyProperty
818 818 def parents(self):
819 819 """
820 820 Returns list of parent commits.
821 821 """
822 822 raise NotImplementedError
823 823
824 824 @LazyProperty
825 825 def first_parent(self):
826 826 """
827 827 Returns the first parent commit, or an ``EmptyCommit`` if there are no parents.
828 828 """
829 829 return self.parents[0] if self.parents else EmptyCommit()
830 830
831 831 @property
832 832 def merge(self):
833 833 """
834 834 Returns ``True`` if the commit is a merge commit (i.e. has more than one parent).
835 835 """
836 836 return len(self.parents) > 1
837 837
838 838 @LazyProperty
839 839 def children(self):
840 840 """
841 841 Returns list of child commits.
842 842 """
843 843 raise NotImplementedError
844 844
845 845 @LazyProperty
846 846 def id(self):
847 847 """
848 848 Returns string identifying this commit.
849 849 """
850 850 raise NotImplementedError
851 851
852 852 @LazyProperty
853 853 def raw_id(self):
854 854 """
855 855 Returns raw string identifying this commit.
856 856 """
857 857 raise NotImplementedError
858 858
859 859 @LazyProperty
860 860 def short_id(self):
861 861 """
862 862 Returns shortened version of ``raw_id`` attribute, as string,
863 863 identifying this commit, useful for presentation to users.
864 864 """
865 865 raise NotImplementedError
866 866
867 867 @LazyProperty
868 868 def idx(self):
869 869 """
870 870 Returns integer identifying this commit.
871 871 """
872 872 raise NotImplementedError
873 873
874 874 @LazyProperty
875 875 def committer(self):
876 876 """
877 877 Returns committer for this commit
878 878 """
879 879 raise NotImplementedError
880 880
881 881 @LazyProperty
882 882 def committer_name(self):
883 883 """
884 884 Returns committer name for this commit
885 885 """
886 886
887 887 return author_name(self.committer)
888 888
889 889 @LazyProperty
890 890 def committer_email(self):
891 891 """
892 892 Returns committer email address for this commit
893 893 """
894 894
895 895 return author_email(self.committer)
896 896
897 897 @LazyProperty
898 898 def author(self):
899 899 """
900 900 Returns author for this commit
901 901 """
902 902
903 903 raise NotImplementedError
904 904
905 905 @LazyProperty
906 906 def author_name(self):
907 907 """
908 908 Returns author name for this commit
909 909 """
910 910
911 911 return author_name(self.author)
912 912
913 913 @LazyProperty
914 914 def author_email(self):
915 915 """
916 916 Returns author email address for this commit
917 917 """
918 918
919 919 return author_email(self.author)
920 920
921 921 def get_file_mode(self, path):
922 922 """
923 923 Returns stat mode of the file at `path`.
924 924 """
925 925 raise NotImplementedError
926 926
927 927 def is_link(self, path):
928 928 """
929 929 Returns ``True`` if given `path` is a symlink
930 930 """
931 931 raise NotImplementedError
932 932
933 933 def get_file_content(self, path):
934 934 """
935 935 Returns content of the file at the given `path`.
936 936 """
937 937 raise NotImplementedError
938 938
939 939 def get_file_size(self, path):
940 940 """
941 941 Returns size of the file at the given `path`.
942 942 """
943 943 raise NotImplementedError
944 944
945 945 def get_file_commit(self, path, pre_load=None):
946 946 """
947 947 Returns last commit of the file at the given `path`.
948 948
949 949 :param pre_load: Optional. List of commit attributes to load.
950 950 """
951 951 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
952 952 if not commits:
953 953 raise RepositoryError(
954 954 'Failed to fetch history for path {}. '
955 955 'Please check if such path exists in your repository'.format(
956 956 path))
957 957 return commits[0]
958 958
959 959 def get_file_history(self, path, limit=None, pre_load=None):
960 960 """
961 961 Returns history of file as reversed list of :class:`BaseCommit`
962 962 objects for which file at given `path` has been modified.
963 963
964 964 :param limit: Optional. Allows to limit the size of the returned
965 965 history. This is intended as a hint to the underlying backend, so
966 966 that it can apply optimizations depending on the limit.
967 967 :param pre_load: Optional. List of commit attributes to load.
968 968 """
969 969 raise NotImplementedError
970 970
971 971 def get_file_annotate(self, path, pre_load=None):
972 972 """
973 973 Returns a generator of four-element tuples containing the
974 974 line number, sha, a lazy commit loader, and the line itself
975 975
976 976 :param pre_load: Optional. List of commit attributes to load.
977 977 """
978 978 raise NotImplementedError
979 979
980 980 def get_nodes(self, path):
981 981 """
982 982 Returns combined ``DirNode`` and ``FileNode`` objects list representing
983 983 state of commit at the given ``path``.
984 984
985 985 :raises ``CommitError``: if node at the given ``path`` is not an
986 986 instance of ``DirNode``
987 987 """
988 988 raise NotImplementedError
989 989
990 990 def get_node(self, path):
991 991 """
992 992 Returns ``Node`` object from the given ``path``.
993 993
994 994 :raises ``NodeDoesNotExistError``: if there is no node at the given
995 995 ``path``
996 996 """
997 997 raise NotImplementedError
998 998
999 999 def get_largefile_node(self, path):
1000 1000 """
1001 1001 Returns the path to a largefile from Mercurial/Git-lfs storage,
1002 1002 or ``None`` if it's not a largefile node.
1003 1003 """
1004 1004 return None
1005 1005
1006 1006 def archive_repo(self, file_path, kind='tgz', subrepos=None,
1007 1007 prefix=None, write_metadata=False, mtime=None):
1008 1008 """
1009 1009 Creates an archive containing the contents of the repository.
1010 1010
1011 1011 :param file_path: path of the file into which the archive is written.
1012 1012 :param kind: one of the following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1013 1013 :param prefix: name of root directory in archive.
1014 1014 Default is repository name and commit's short_id joined with dash:
1015 1015 ``"{repo_name}-{short_id}"``.
1016 1016 :param write_metadata: write a metadata file into archive.
1017 1017 :param mtime: custom modification time for archive creation; defaults
1018 1018 to the commit date if not given.
1019 1019
1020 1020 :raise VCSError: if the prefix is invalid.
1021 1021 """
1022 1022 allowed_kinds = settings.ARCHIVE_SPECS.keys()
1023 1023 if kind not in allowed_kinds:
1024 1024 raise ImproperArchiveTypeError(
1025 1025 'Archive kind (%s) not supported, use one of %s' %
1026 1026 (kind, allowed_kinds))
1027 1027
1028 1028 prefix = self._validate_archive_prefix(prefix)
1029 1029
1030 1030 mtime = mtime or time.mktime(self.date.timetuple())
1031 1031
1032 1032 file_info = []
1033 1033 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1034 1034 for _r, _d, files in cur_rev.walk('/'):
1035 1035 for f in files:
1036 1036 f_path = os.path.join(prefix, f.path)
1037 1037 file_info.append(
1038 1038 (f_path, f.mode, f.is_link(), f.raw_bytes))
1039 1039
1040 1040 if write_metadata:
1041 1041 metadata = [
1042 1042 ('repo_name', self.repository.name),
1043 1043 ('rev', self.raw_id),
1044 1044 ('create_time', mtime),
1045 1045 ('branch', self.branch),
1046 1046 ('tags', ','.join(self.tags)),
1047 1047 ]
1048 1048 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1049 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
1049 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1050 1050
1051 1051 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
1052 1052
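# Usage sketch, assuming `commit` is a concrete backend commit instance
# (path and options are hypothetical): the archive lands at the given path,
# and the prefix defaults to "{repo_name}-{short_id}".
commit.archive_repo('/tmp/myrepo.tar.gz', kind='tgz', write_metadata=True)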
1053 1053 def _validate_archive_prefix(self, prefix):
1054 1054 if prefix is None:
1055 1055 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1056 1056 repo_name=safe_str(self.repository.name),
1057 1057 short_id=self.short_id)
1058 1058 elif not isinstance(prefix, str):
1059 1059 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1060 1060 elif prefix.startswith('/'):
1061 1061 raise VCSError("Prefix cannot start with leading slash")
1062 1062 elif prefix.strip() == '':
1063 1063 raise VCSError("Prefix cannot be empty")
1064 1064 return prefix
1065 1065
1066 1066 @LazyProperty
1067 1067 def root(self):
1068 1068 """
1069 1069 Returns ``RootNode`` object for this commit.
1070 1070 """
1071 1071 return self.get_node('')
1072 1072
1073 1073 def next(self, branch=None):
1074 1074 """
1075 1075 Returns the next commit after the current one; if ``branch`` is
1076 1076 given, it will return the next commit belonging to that branch
1077 1077
1078 1078 :param branch: show commits within the given named branch
1079 1079 """
1080 1080 indexes = xrange(self.idx + 1, self.repository.count())
1081 1081 return self._find_next(indexes, branch)
1082 1082
1083 1083 def prev(self, branch=None):
1084 1084 """
1085 1085 Returns the previous commit before the current one; if ``branch`` is
1086 1086 given, it will return the previous commit belonging to that branch
1087 1087
1088 1088 :param branch: show commit within the given named branch
1089 1089 """
1090 1090 indexes = xrange(self.idx - 1, -1, -1)
1091 1091 return self._find_next(indexes, branch)
1092 1092
1093 1093 def _find_next(self, indexes, branch=None):
1094 1094 if branch and self.branch != branch:
1095 1095 raise VCSError('Branch option used on commit not belonging '
1096 1096 'to that branch')
1097 1097
1098 1098 for next_idx in indexes:
1099 1099 commit = self.repository.get_commit(commit_idx=next_idx)
1100 1100 if branch and branch != commit.branch:
1101 1101 continue
1102 1102 return commit
1103 1103 raise CommitDoesNotExistError
1104 1104
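# Usage sketch, assuming `commit` is a concrete backend commit instance and
# 'stable' is a hypothetical branch the commit belongs to (otherwise a
# VCSError is raised); iteration ends with CommitDoesNotExistError.
newer = commit.next(branch='stable')
older = commit.prev()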
1105 1105 def diff(self, ignore_whitespace=True, context=3):
1106 1106 """
1107 1107 Returns a `Diff` object representing the change made by this commit.
1108 1108 """
1109 1109 parent = self.first_parent
1110 1110 diff = self.repository.get_diff(
1111 1111 parent, self,
1112 1112 ignore_whitespace=ignore_whitespace,
1113 1113 context=context)
1114 1114 return diff
1115 1115
1116 1116 @LazyProperty
1117 1117 def added(self):
1118 1118 """
1119 1119 Returns list of added ``FileNode`` objects.
1120 1120 """
1121 1121 raise NotImplementedError
1122 1122
1123 1123 @LazyProperty
1124 1124 def changed(self):
1125 1125 """
1126 1126 Returns list of modified ``FileNode`` objects.
1127 1127 """
1128 1128 raise NotImplementedError
1129 1129
1130 1130 @LazyProperty
1131 1131 def removed(self):
1132 1132 """
1133 1133 Returns list of removed ``FileNode`` objects.
1134 1134 """
1135 1135 raise NotImplementedError
1136 1136
1137 1137 @LazyProperty
1138 1138 def size(self):
1139 1139 """
1140 1140 Returns total number of bytes from contents of all filenodes.
1141 1141 """
1142 1142 return sum((node.size for node in self.get_filenodes_generator()))
1143 1143
1144 1144 def walk(self, topurl=''):
1145 1145 """
1146 1146 Similar to the os.walk method. Instead of the filesystem, it walks
1147 1147 through the commit starting at the given ``topurl``. Returns a
1148 1148 generator of tuples (topnode, dirnodes, filenodes).
1149 1149 """
1150 1150 topnode = self.get_node(topurl)
1151 1151 if not topnode.is_dir():
1152 1152 return
1153 1153 yield (topnode, topnode.dirs, topnode.files)
1154 1154 for dirnode in topnode.dirs:
1155 1155 for tup in self.walk(dirnode.path):
1156 1156 yield tup
1157 1157
1158 1158 def get_filenodes_generator(self):
1159 1159 """
1160 1160 Returns generator that yields *all* file nodes.
1161 1161 """
1162 1162 for topnode, dirs, files in self.walk():
1163 1163 for node in files:
1164 1164 yield node
1165 1165
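# Usage sketch, assuming `commit` is a concrete BaseCommit instance: walking
# the commit tree much like os.walk walks the filesystem.
for topnode, dirnodes, filenodes in commit.walk('/'):
    for filenode in filenodes:
        print(filenode.path)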
1166 1166 #
1167 1167 # Utilities for sub classes to support consistent behavior
1168 1168 #
1169 1169
1170 1170 def no_node_at_path(self, path):
1171 1171 return NodeDoesNotExistError(
1172 1172 u"There is no file nor directory at the given path: "
1173 1173 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1174 1174
1175 1175 def _fix_path(self, path):
1176 1176 """
1177 1177 Paths are stored without a trailing slash, so we need to get rid
1178 1178 of it if needed.
1179 1179 """
1180 1180 return path.rstrip('/')
1181 1181
1182 1182 #
1183 1183 # Deprecated API based on changesets
1184 1184 #
1185 1185
1186 1186 @property
1187 1187 def revision(self):
1188 1188 warnings.warn("Use idx instead", DeprecationWarning)
1189 1189 return self.idx
1190 1190
1191 1191 @revision.setter
1192 1192 def revision(self, value):
1193 1193 warnings.warn("Use idx instead", DeprecationWarning)
1194 1194 self.idx = value
1195 1195
1196 1196 def get_file_changeset(self, path):
1197 1197 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1198 1198 return self.get_file_commit(path)
1199 1199
1200 1200
1201 1201 class BaseChangesetClass(type):
1202 1202
1203 1203 def __instancecheck__(self, instance):
1204 1204 return isinstance(instance, BaseCommit)
1205 1205
1206 1206
1207 1207 class BaseChangeset(BaseCommit):
1208 1208
1209 1209 __metaclass__ = BaseChangesetClass
1210 1210
1211 1211 def __new__(cls, *args, **kwargs):
1212 1212 warnings.warn(
1213 1213 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1214 1214 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1215 1215
1216 1216
1217 1217 class BaseInMemoryCommit(object):
1218 1218 """
1219 1219 Represents differences between repository's state (most recent head) and
1220 1220 changes made *in place*.
1221 1221
1222 1222 **Attributes**
1223 1223
1224 1224 ``repository``
1225 1225 repository object for this in-memory-commit
1226 1226
1227 1227 ``added``
1228 1228 list of ``FileNode`` objects marked as *added*
1229 1229
1230 1230 ``changed``
1231 1231 list of ``FileNode`` objects marked as *changed*
1232 1232
1233 1233 ``removed``
1234 1234 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1235 1235 *removed*
1236 1236
1237 1237 ``parents``
1238 1238 list of :class:`BaseCommit` instances representing parents of
1239 1239 in-memory commit. Should always be a 2-element sequence.
1240 1240
1241 1241 """
1242 1242
1243 1243 def __init__(self, repository):
1244 1244 self.repository = repository
1245 1245 self.added = []
1246 1246 self.changed = []
1247 1247 self.removed = []
1248 1248 self.parents = []
1249 1249
1250 1250 def add(self, *filenodes):
1251 1251 """
1252 1252 Marks given ``FileNode`` objects as *to be committed*.
1253 1253
1254 1254 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1255 1255 latest commit
1256 1256 :raises ``NodeAlreadyAddedError``: if node with same path is already
1257 1257 marked as *added*
1258 1258 """
1259 1259 # Check if not already marked as *added* first
1260 1260 for node in filenodes:
1261 1261 if node.path in (n.path for n in self.added):
1262 1262 raise NodeAlreadyAddedError(
1263 1263 "Such FileNode %s is already marked for addition"
1264 1264 % node.path)
1265 1265 for node in filenodes:
1266 1266 self.added.append(node)
1267 1267
1268 1268 def change(self, *filenodes):
1269 1269 """
1270 1270 Marks given ``FileNode`` objects to be *changed* in next commit.
1271 1271
1272 1272 :raises ``EmptyRepositoryError``: if there are no commits yet
1273 1273 :raises ``NodeAlreadyExistsError``: if node with same path is already
1274 1274 marked to be *changed*
1275 1275 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1276 1276 marked to be *removed*
1277 1277 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1278 1278 commit
1279 1279 :raises ``NodeNotChangedError``: if node hasn't really been changed
1280 1280 """
1281 1281 for node in filenodes:
1282 1282 if node.path in (n.path for n in self.removed):
1283 1283 raise NodeAlreadyRemovedError(
1284 1284 "Node at %s is already marked as removed" % node.path)
1285 1285 try:
1286 1286 self.repository.get_commit()
1287 1287 except EmptyRepositoryError:
1288 1288 raise EmptyRepositoryError(
1289 1289 "Nothing to change - try to *add* new nodes rather than "
1290 1290 "changing them")
1291 1291 for node in filenodes:
1292 1292 if node.path in (n.path for n in self.changed):
1293 1293 raise NodeAlreadyChangedError(
1294 1294 "Node at '%s' is already marked as changed" % node.path)
1295 1295 self.changed.append(node)
1296 1296
1297 1297 def remove(self, *filenodes):
1298 1298 """
1299 1299 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1300 1300 *removed* in next commit.
1301 1301
1302 1302 :raises ``NodeAlreadyRemovedError``: if node has already been marked
1303 1303 to be *removed*
1304 1304 :raises ``NodeAlreadyChangedError``: if node has already been marked
1305 1305 to be *changed*
1306 1306 """
1307 1307 for node in filenodes:
1308 1308 if node.path in (n.path for n in self.removed):
1309 1309 raise NodeAlreadyRemovedError(
1310 1310 "Node is already marked to for removal at %s" % node.path)
1311 1311 if node.path in (n.path for n in self.changed):
1312 1312 raise NodeAlreadyChangedError(
1313 1313 "Node is already marked to be changed at %s" % node.path)
1314 1314 # We only mark node as *removed* - real removal is done by
1315 1315 # commit method
1316 1316 self.removed.append(node)
1317 1317
1318 1318 def reset(self):
1319 1319 """
1320 1320 Resets this instance to initial state (cleans ``added``, ``changed``
1321 1321 and ``removed`` lists).
1322 1322 """
1323 1323 self.added = []
1324 1324 self.changed = []
1325 1325 self.removed = []
1326 1326 self.parents = []
1327 1327
1328 1328 def get_ipaths(self):
1329 1329 """
1330 1330 Returns generator of paths from nodes marked as added, changed or
1331 1331 removed.
1332 1332 """
1333 1333 for node in itertools.chain(self.added, self.changed, self.removed):
1334 1334 yield node.path
1335 1335
1336 1336 def get_paths(self):
1337 1337 """
1338 1338 Returns list of paths from nodes marked as added, changed or removed.
1339 1339 """
1340 1340 return list(self.get_ipaths())
1341 1341
1342 1342 def check_integrity(self, parents=None):
1343 1343 """
1344 1344 Checks in-memory commit's integrity. Also, sets parents if not
1345 1345 already set.
1346 1346
1347 1347 :raises CommitError: if any error occurs (e.g.
1348 1348 ``NodeDoesNotExistError``).
1349 1349 """
1350 1350 if not self.parents:
1351 1351 parents = parents or []
1352 1352 if len(parents) == 0:
1353 1353 try:
1354 1354 parents = [self.repository.get_commit(), None]
1355 1355 except EmptyRepositoryError:
1356 1356 parents = [None, None]
1357 1357 elif len(parents) == 1:
1358 1358 parents += [None]
1359 1359 self.parents = parents
1360 1360
1361 1361 # Local parents, only if not None
1362 1362 parents = [p for p in self.parents if p]
1363 1363
1364 1364 # Check nodes marked as added
1365 1365 for p in parents:
1366 1366 for node in self.added:
1367 1367 try:
1368 1368 p.get_node(node.path)
1369 1369 except NodeDoesNotExistError:
1370 1370 pass
1371 1371 else:
1372 1372 raise NodeAlreadyExistsError(
1373 1373 "Node `%s` already exists at %s" % (node.path, p))
1374 1374
1375 1375 # Check nodes marked as changed
1376 1376 missing = set(self.changed)
1377 1377 not_changed = set(self.changed)
1378 1378 if self.changed and not parents:
1379 1379 raise NodeDoesNotExistError(str(self.changed[0].path))
1380 1380 for p in parents:
1381 1381 for node in self.changed:
1382 1382 try:
1383 1383 old = p.get_node(node.path)
1384 1384 missing.remove(node)
1385 1385 # if content actually changed, remove node from not_changed
1386 1386 if old.content != node.content:
1387 1387 not_changed.remove(node)
1388 1388 except NodeDoesNotExistError:
1389 1389 pass
1390 1390 if self.changed and missing:
1391 1391 raise NodeDoesNotExistError(
1392 1392 "Node `%s` marked as modified but missing in parents: %s"
1393 1393 % (node.path, parents))
1394 1394
1395 1395 if self.changed and not_changed:
1396 1396 raise NodeNotChangedError(
1397 1397 "Node `%s` wasn't actually changed (parents: %s)"
1398 1398 % (not_changed.pop().path, parents))
1399 1399
1400 1400 # Check nodes marked as removed
1401 1401 if self.removed and not parents:
1402 1402 raise NodeDoesNotExistError(
1403 1403 "Cannot remove node at %s as there "
1404 1404 "were no parents specified" % self.removed[0].path)
1405 1405 really_removed = set()
1406 1406 for p in parents:
1407 1407 for node in self.removed:
1408 1408 try:
1409 1409 p.get_node(node.path)
1410 1410 really_removed.add(node)
1411 1411 except CommitError:
1412 1412 pass
1413 1413 not_removed = set(self.removed) - really_removed
1414 1414 if not_removed:
1415 1415 # TODO: johbo: This code branch does not seem to be covered
1416 1416 raise NodeDoesNotExistError(
1417 1417 "Cannot remove node at %s from "
1418 1418 "following parents: %s" % (not_removed, parents))
1419 1419
1420 1420 def commit(
1421 1421 self, message, author, parents=None, branch=None, date=None,
1422 1422 **kwargs):
1423 1423 """
1424 1424 Performs in-memory commit (doesn't check workdir in any way) and
1425 1425 returns newly created :class:`BaseCommit`. Updates repository's
1426 1426 attribute `commits`.
1427 1427
1428 1428 .. note::
1429 1429
1430 1430 When overriding this method, each backend should call
1431 1431 ``self.check_integrity(parents)`` first.
1432 1432
1433 1433 :param message: message of the commit
1434 1434 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1435 1435 :param parents: single parent or sequence of parents from which commit
1436 1436 would be derived
1437 1437 :param date: ``datetime.datetime`` instance. Defaults to
1438 1438 ``datetime.datetime.now()``.
1439 1439 :param branch: branch name, as string. If none given, the backend's
1440 1440 default branch is used.
1441 1441
1442 1442 :raises ``CommitError``: if any error occurs while committing
1443 1443 """
1444 1444 raise NotImplementedError
1445 1445
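# Usage sketch of the typical in-memory commit workflow, assuming `repo` is a
# concrete backend repository exposing `in_memory_commit`; the FileNode import
# path is an assumption about the wider vcs package.
from rhodecode.lib.vcs.nodes import FileNode

imc = repo.in_memory_commit
imc.add(FileNode('docs/readme.rst', content='hello'))
new_commit = imc.commit(
    message=u'Add readme', author=u'Joe Doe <joe.doe@example.com>')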
1446 1446
1447 1447 class BaseInMemoryChangesetClass(type):
1448 1448
1449 1449 def __instancecheck__(self, instance):
1450 1450 return isinstance(instance, BaseInMemoryCommit)
1451 1451
1452 1452
1453 1453 class BaseInMemoryChangeset(BaseInMemoryCommit):
1454 1454
1455 1455 __metaclass__ = BaseInMemoryChangesetClass
1456 1456
1457 1457 def __new__(cls, *args, **kwargs):
1458 1458 warnings.warn(
1459 1459 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1460 1460 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1461 1461
1462 1462
1463 1463 class EmptyCommit(BaseCommit):
1464 1464 """
1465 1465 A dummy empty commit. It's possible to pass a hash when creating
1466 1466 an EmptyCommit.
1467 1467 """
1468 1468
1469 1469 def __init__(
1470 1470 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1471 1471 message='', author='', date=None):
1472 1472 self._empty_commit_id = commit_id
1473 1473 # TODO: johbo: Solve idx parameter, default value does not make
1474 1474 # too much sense
1475 1475 self.idx = idx
1476 1476 self.message = message
1477 1477 self.author = author
1478 1478 self.date = date or datetime.datetime.fromtimestamp(0)
1479 1479 self.repository = repo
1480 1480 self.alias = alias
1481 1481
1482 1482 @LazyProperty
1483 1483 def raw_id(self):
1484 1484 """
1485 1485 Returns raw string identifying this commit, useful for web
1486 1486 representation.
1487 1487 """
1488 1488
1489 1489 return self._empty_commit_id
1490 1490
1491 1491 @LazyProperty
1492 1492 def branch(self):
1493 1493 if self.alias:
1494 1494 from rhodecode.lib.vcs.backends import get_backend
1495 1495 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1496 1496
1497 1497 @LazyProperty
1498 1498 def short_id(self):
1499 1499 return self.raw_id[:12]
1500 1500
1501 1501 @LazyProperty
1502 1502 def id(self):
1503 1503 return self.raw_id
1504 1504
1505 1505 def get_file_commit(self, path):
1506 1506 return self
1507 1507
1508 1508 def get_file_content(self, path):
1509 1509 return u''
1510 1510
1511 1511 def get_file_size(self, path):
1512 1512 return 0
1513 1513
1514 1514
1515 1515 class EmptyChangesetClass(type):
1516 1516
1517 1517 def __instancecheck__(self, instance):
1518 1518 return isinstance(instance, EmptyCommit)
1519 1519
1520 1520
1521 1521 class EmptyChangeset(EmptyCommit):
1522 1522
1523 1523 __metaclass__ = EmptyChangesetClass
1524 1524
1525 1525 def __new__(cls, *args, **kwargs):
1526 1526 warnings.warn(
1527 1527 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1528 1528 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1529 1529
1530 1530 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1531 1531 alias=None, revision=-1, message='', author='', date=None):
1532 1532 if requested_revision is not None:
1533 1533 warnings.warn(
1534 1534 "Parameter requested_revision not supported anymore",
1535 1535 DeprecationWarning)
1536 1536 super(EmptyChangeset, self).__init__(
1537 1537 commit_id=cs, repo=repo, alias=alias, idx=revision,
1538 1538 message=message, author=author, date=date)
1539 1539
1540 1540 @property
1541 1541 def revision(self):
1542 1542 warnings.warn("Use idx instead", DeprecationWarning)
1543 1543 return self.idx
1544 1544
1545 1545 @revision.setter
1546 1546 def revision(self, value):
1547 1547 warnings.warn("Use idx instead", DeprecationWarning)
1548 1548 self.idx = value
1549 1549
1550 1550
1551 1551 class EmptyRepository(BaseRepository):
1552 1552 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1553 1553 pass
1554 1554
1555 1555 def get_diff(self, *args, **kwargs):
1556 1556 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1557 1557 return GitDiff('')
1558 1558
1559 1559
1560 1560 class CollectionGenerator(object):
1561 1561
1562 1562 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1563 1563 self.repo = repo
1564 1564 self.commit_ids = commit_ids
1565 1565 # TODO: (oliver) this isn't currently hooked up
1566 1566 self.collection_size = None
1567 1567 self.pre_load = pre_load
1568 1568
1569 1569 def __len__(self):
1570 1570 if self.collection_size is not None:
1571 1571 return self.collection_size
1572 1572 return self.commit_ids.__len__()
1573 1573
1574 1574 def __iter__(self):
1575 1575 for commit_id in self.commit_ids:
1576 1576 # TODO: johbo: Mercurial passes in commit indices or commit ids
1577 1577 yield self._commit_factory(commit_id)
1578 1578
1579 1579 def _commit_factory(self, commit_id):
1580 1580 """
1581 1581 Allows backends to override the way commits are generated.
1582 1582 """
1583 1583 return self.repo.get_commit(commit_id=commit_id,
1584 1584 pre_load=self.pre_load)
1585 1585
1586 1586 def __getslice__(self, i, j):
1587 1587 """
1588 1588 Returns an iterator over the sliced commit collection
1589 1589 """
1590 1590 commit_ids = self.commit_ids[i:j]
1591 1591 return self.__class__(
1592 1592 self.repo, commit_ids, pre_load=self.pre_load)
1593 1593
1594 1594 def __repr__(self):
1595 1595 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1596 1596
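# Usage sketch, assuming `repo` is a concrete backend repository:
# CollectionGenerator supports len() and lazy iteration/slicing, so commits
# are only materialized as they are consumed.
commits = repo.get_commits()
print(len(commits))
for commit in commits[:10]:  # sliced lazily via __getslice__ (py2 semantics)
    print(commit.raw_id)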
1597 1597
1598 1598 class Config(object):
1599 1599 """
1600 1600 Represents the configuration for a repository.
1601 1601
1602 1602 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1603 1603 standard library. It implements only the needed subset.
1604 1604 """
1605 1605
1606 1606 def __init__(self):
1607 1607 self._values = {}
1608 1608
1609 1609 def copy(self):
1610 1610 clone = Config()
1611 1611 for section, values in self._values.items():
1612 1612 clone._values[section] = values.copy()
1613 1613 return clone
1614 1614
1615 1615 def __repr__(self):
1616 1616 return '<Config(%s sections) at %s>' % (
1617 1617 len(self._values), hex(id(self)))
1618 1618
1619 1619 def items(self, section):
1620 1620 return self._values.get(section, {}).iteritems()
1621 1621
1622 1622 def get(self, section, option):
1623 1623 return self._values.get(section, {}).get(option)
1624 1624
1625 1625 def set(self, section, option, value):
1626 1626 section_values = self._values.setdefault(section, {})
1627 1627 section_values[option] = value
1628 1628
1629 1629 def clear_section(self, section):
1630 1630 self._values[section] = {}
1631 1631
1632 1632 def serialize(self):
1633 1633 """
1634 1634 Creates a list of three-element tuples (section, key, value)
1635 1635 representing this config object.
1636 1636 """
1637 1637 items = []
1638 1638 for section in self._values:
1639 1639 for option, value in self._values[section].items():
1640 1640 items.append(
1641 1641 (safe_str(section), safe_str(option), safe_str(value)))
1642 1642 return items
1643 1643
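# Usage sketch (section/option names are hypothetical): basic Config usage.
config = Config()
config.set('ui', 'username', 'Joe Doe <joe.doe@example.com>')
assert config.get('ui', 'username') == 'Joe Doe <joe.doe@example.com>'
items = config.serialize()  # [(section, option, value), ...]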
1644 1644
1645 1645 class Diff(object):
1646 1646 """
1647 1647 Represents a diff result from a repository backend.
1648 1648
1649 1649 Subclasses have to provide a backend specific value for
1650 1650 :attr:`_header_re` and :attr:`_meta_re`.
1651 1651 """
1652 1652 _meta_re = None
1653 1653 _header_re = None
1654 1654
1655 1655 def __init__(self, raw_diff):
1656 1656 self.raw = raw_diff
1657 1657
1658 1658 def chunks(self):
1659 1659 """
1660 1660 split the diff in chunks of separate --git a/file b/file chunks
1661 1661 to make diffs consistent we must prepend with \n, and make sure
1662 1662 we can detect last chunk as this was also has special rule
1663 1663 """
1664 1664
1665 1665 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1666 1666 header = diff_parts[0]
1667 1667
1668 1668 if self._meta_re:
1669 1669 match = self._meta_re.match(header)
1670 1670
1671 1671 chunks = diff_parts[1:]
1672 1672 total_chunks = len(chunks)
1673 1673
1674 1674 return (
1675 1675 DiffChunk(chunk, self, cur_chunk == total_chunks)
1676 1676 for cur_chunk, chunk in enumerate(chunks, start=1))
1677 1677
1678 1678
1679 1679 class DiffChunk(object):
1680 1680
1681 1681 def __init__(self, chunk, diff, last_chunk):
1682 1682 self._diff = diff
1683 1683
1684 1684 # since we split by \ndiff --git that part is lost from original diff
1685 1685 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1686 1686 if not last_chunk:
1687 1687 chunk += '\n'
1688 1688
1689 1689 match = self._diff._header_re.match(chunk)
1690 1690 self.header = match.groupdict()
1691 1691 self.diff = chunk[match.end():]
1692 1692 self.raw = chunk
1693 1693
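# Usage sketch, assuming a concrete backend diff such as GitDiff (which
# provides _header_re): splitting a raw diff into per-file chunks, each
# exposing the parsed header fields and the chunk body.
diff = repo.get_diff(commit1, commit2)
for chunk in diff.chunks():
    print(chunk.header)  # dict of header fields captured by _header_re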
1694 1694
1695 1695 class BasePathPermissionChecker(object):
1696 1696
1697 1697 @staticmethod
1698 1698 def create_from_patterns(includes, excludes):
1699 1699 if includes and '*' in includes and not excludes:
1700 1700 return AllPathPermissionChecker()
1701 1701 elif excludes and '*' in excludes:
1702 1702 return NonePathPermissionChecker()
1703 1703 else:
1704 1704 return PatternPathPermissionChecker(includes, excludes)
1705 1705
1706 1706 @property
1707 1707 def has_full_access(self):
1708 1708 raise NotImplementedError()
1709 1709
1710 1710 def has_access(self, path):
1711 1711 raise NotImplementedError()
1712 1712
1713 1713
1714 1714 class AllPathPermissionChecker(BasePathPermissionChecker):
1715 1715
1716 1716 @property
1717 1717 def has_full_access(self):
1718 1718 return True
1719 1719
1720 1720 def has_access(self, path):
1721 1721 return True
1722 1722
1723 1723
1724 1724 class NonePathPermissionChecker(BasePathPermissionChecker):
1725 1725
1726 1726 @property
1727 1727 def has_full_access(self):
1728 1728 return False
1729 1729
1730 1730 def has_access(self, path):
1731 1731 return False
1732 1732
1733 1733
1734 1734 class PatternPathPermissionChecker(BasePathPermissionChecker):
1735 1735
1736 1736 def __init__(self, includes, excludes):
1737 1737 self.includes = includes
1738 1738 self.excludes = excludes
1739 1739 self.includes_re = [] if not includes else [
1740 1740 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1741 1741 self.excludes_re = [] if not excludes else [
1742 1742 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1743 1743
1744 1744 @property
1745 1745 def has_full_access(self):
1746 1746 return '*' in self.includes and not self.excludes
1747 1747
1748 1748 def has_access(self, path):
1749 1749 for regex in self.excludes_re:
1750 1750 if regex.match(path):
1751 1751 return False
1752 1752 for regex in self.includes_re:
1753 1753 if regex.match(path):
1754 1754 return True
1755 1755 return False
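# Usage sketch (patterns and paths are hypothetical): building a checker from
# fnmatch-style patterns; a bare '*' include with no excludes short-circuits
# to full access, and excludes are checked before includes.
checker = BasePathPermissionChecker.create_from_patterns(
    includes=['docs/*'], excludes=['docs/secret/*'])
checker.has_access('docs/index.rst')   # True
checker.has_access('docs/secret/key')  # False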
@@ -1,999 +1,999 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.vcs import connection, path as vcspath
36 36 from rhodecode.lib.vcs.backends.base import (
37 37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 38 MergeFailureReason, Reference)
39 39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 CommitDoesNotExistError, EmptyRepositoryError,
44 44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
45 45
46 46
47 47 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
48 48
49 49 log = logging.getLogger(__name__)
50 50
51 51
52 52 class GitRepository(BaseRepository):
53 53 """
54 54 Git repository backend.
55 55 """
56 56 DEFAULT_BRANCH_NAME = 'master'
57 57
58 58 contact = BaseRepository.DEFAULT_CONTACT
59 59
60 60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 61 do_workspace_checkout=False, with_wire=None, bare=False):
62 62
63 63 self.path = safe_str(os.path.abspath(repo_path))
64 64 self.config = config if config else self.get_default_config()
65 65 self.with_wire = with_wire
66 66
67 67 self._init_repo(create, src_url, do_workspace_checkout, bare)
68 68
69 69 # caches
70 70 self._commit_ids = {}
71 71
72 72 @LazyProperty
73 73 def _remote(self):
74 74 return connection.Git(self.path, self.config, with_wire=self.with_wire)
75 75
76 76 @LazyProperty
77 77 def bare(self):
78 78 return self._remote.bare()
79 79
80 80 @LazyProperty
81 81 def head(self):
82 82 return self._remote.head()
83 83
84 84 @LazyProperty
85 85 def commit_ids(self):
86 86 """
87 87 Returns list of commit ids, in ascending order. Being a lazy
88 88 attribute allows external tools to inject commit ids from a cache.
89 89 """
90 90 commit_ids = self._get_all_commit_ids()
91 91 self._rebuild_cache(commit_ids)
92 92 return commit_ids
93 93
94 94 def _rebuild_cache(self, commit_ids):
95 95 self._commit_ids = dict((commit_id, index)
96 96 for index, commit_id in enumerate(commit_ids))
97 97
98 98 def run_git_command(self, cmd, **opts):
99 99 """
100 100 Runs given ``cmd`` as git command and returns tuple
101 101 (stdout, stderr).
102 102
103 103 :param cmd: git command to be executed
104 104 :param opts: env options to pass into Subprocess command
105 105 """
106 106 if not isinstance(cmd, list):
107 107 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
108 108
109 109 skip_stderr_log = opts.pop('skip_stderr_log', False)
110 110 out, err = self._remote.run_git_command(cmd, **opts)
111 111 if err and not skip_stderr_log:
112 112 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
113 113 return out, err
114 114
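# Usage sketch, assuming `repo` is a GitRepository instance: running a raw
# git command through the remote layer.
stdout, stderr = repo.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
current_branch = stdout.strip()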
115 115 @staticmethod
116 116 def check_url(url, config):
117 117 """
118 118 Function will check the given url and try to verify that it's a valid
119 119 link. Sometimes it may happen that git issues a basic
120 120 auth request, which can cause the whole API to hang when used from
121 121 python or other external calls.
122 122
123 123 On failure it raises urllib2.HTTPError; the exception is also thrown
124 124 when the return code is not 200
125 125 """
126 126 # check first if it's not an url
127 127 if os.path.isdir(url) or url.startswith('file:'):
128 128 return True
129 129
130 130 if '+' in url.split('://', 1)[0]:
131 131 url = url.split('+', 1)[1]
132 132
133 133 # Request the _remote to verify the url
134 134 return connection.Git.check_url(url, config.serialize())
135 135
136 136 @staticmethod
137 137 def is_valid_repository(path):
138 138 if os.path.isdir(os.path.join(path, '.git')):
139 139 return True
140 140 # check case of bare repository
141 141 try:
142 142 GitRepository(path)
143 143 return True
144 144 except VCSError:
145 145 pass
146 146 return False
147 147
148 148 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
149 149 bare=False):
150 150 if create and os.path.exists(self.path):
151 151 raise RepositoryError(
152 152 "Cannot create repository at %s, location already exist"
153 153 % self.path)
154 154
155 155 if bare and do_workspace_checkout:
156 156 raise RepositoryError("Cannot update a bare repository")
157 157 try:
158 158
159 159 if src_url:
160 160 # check URL before any actions
161 161 GitRepository.check_url(src_url, self.config)
162 162
163 163 if create:
164 os.makedirs(self.path, mode=0755)
164 os.makedirs(self.path, mode=0o755)
165 165
166 166 if bare:
167 167 self._remote.init_bare()
168 168 else:
169 169 self._remote.init()
170 170
171 171 if src_url and bare:
172 172 # a bare repository only allows a fetch; a checkout is not allowed
173 173 self.fetch(src_url, commit_ids=None)
174 174 elif src_url:
175 175 self.pull(src_url, commit_ids=None,
176 176 update_after=do_workspace_checkout)
177 177
178 178 else:
179 179 if not self._remote.assert_correct_path():
180 180 raise RepositoryError(
181 181 'Path "%s" does not contain a Git repository' %
182 182 (self.path,))
183 183
184 184 # TODO: johbo: check if we have to translate the OSError here
185 185 except OSError as err:
186 186 raise RepositoryError(err)
187 187
188 188 def _get_all_commit_ids(self, filters=None):
189 189 # we must check if this repo is not empty, since the later command
190 190 # fails if it is. And it's cheaper to ask than to handle the
191 191 # subprocess errors
192 192
193 193 head = self._remote.head(show_exc=False)
194 194 if not head:
195 195 return []
196 196
197 197 rev_filter = ['--branches', '--tags']
198 198 extra_filter = []
199 199
200 200 if filters:
201 201 if filters.get('since'):
202 202 extra_filter.append('--since=%s' % (filters['since']))
203 203 if filters.get('until'):
204 204 extra_filter.append('--until=%s' % (filters['until']))
205 205 if filters.get('branch_name'):
206 206 rev_filter = ['--tags']
207 207 extra_filter.append(filters['branch_name'])
208 208 rev_filter.extend(extra_filter)
209 209
210 210 # if filters.get('start') or filters.get('end'):
211 211 # # skip is offset, max-count is limit
212 212 # if filters.get('start'):
213 213 # extra_filter += ' --skip=%s' % filters['start']
214 214 # if filters.get('end'):
215 215 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
216 216
217 217 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
218 218 try:
219 219 output, __ = self.run_git_command(cmd)
220 220 except RepositoryError:
221 221 # Can be raised for empty repositories
222 222 return []
223 223 return output.splitlines()
224 224
225 225 def _get_commit_id(self, commit_id_or_idx):
226 226 def is_null(value):
227 227 return len(value) == commit_id_or_idx.count('0')
228 228
229 229 if self.is_empty():
230 230 raise EmptyRepositoryError("There are no commits yet")
231 231
232 232 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
233 233 return self.commit_ids[-1]
234 234
235 235 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
236 236 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
237 237 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
238 238 try:
239 239 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
240 240 except Exception:
241 241 msg = "Commit %s does not exist for %s" % (
242 242 commit_id_or_idx, self)
243 243 raise CommitDoesNotExistError(msg)
244 244
245 245 elif is_bstr:
246 246 # check full path ref, eg. refs/heads/master
247 247 ref_id = self._refs.get(commit_id_or_idx)
248 248 if ref_id:
249 249 return ref_id
250 250
251 251 # check branch name
252 252 branch_ids = self.branches.values()
253 253 ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
254 254 if ref_id:
255 255 return ref_id
256 256
257 257 # check tag name
258 258 ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
259 259 if ref_id:
260 260 return ref_id
261 261
262 262 if (not SHA_PATTERN.match(commit_id_or_idx) or
263 263 commit_id_or_idx not in self.commit_ids):
264 264 msg = "Commit %s does not exist for %s" % (
265 265 commit_id_or_idx, self)
266 266 raise CommitDoesNotExistError(msg)
267 267
268 268 # Ensure we return full id
269 269 if not SHA_PATTERN.match(str(commit_id_or_idx)):
270 270 raise CommitDoesNotExistError(
271 271 "Given commit id %s not recognized" % commit_id_or_idx)
272 272 return commit_id_or_idx
273 273
274 274 def get_hook_location(self):
275 275 """
276 276 returns absolute path to location where hooks are stored
277 277 """
278 278 loc = os.path.join(self.path, 'hooks')
279 279 if not self.bare:
280 280 loc = os.path.join(self.path, '.git', 'hooks')
281 281 return loc
282 282
283 283 @LazyProperty
284 284 def last_change(self):
285 285 """
286 286 Returns last change made on this repository as
287 287 `datetime.datetime` object.
288 288 """
289 289 try:
290 290 return self.get_commit().date
291 291 except RepositoryError:
292 292 tzoffset = makedate()[1]
293 293 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
294 294
295 295 def _get_fs_mtime(self):
296 296 idx_loc = '' if self.bare else '.git'
297 297 # fallback to filesystem
298 298 in_path = os.path.join(self.path, idx_loc, "index")
299 299 he_path = os.path.join(self.path, idx_loc, "HEAD")
300 300 if os.path.exists(in_path):
301 301 return os.stat(in_path).st_mtime
302 302 else:
303 303 return os.stat(he_path).st_mtime
304 304
305 305 @LazyProperty
306 306 def description(self):
307 307 description = self._remote.get_description()
308 308 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
309 309
310 310 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
311 311 if self.is_empty():
312 312 return OrderedDict()
313 313
314 314 result = []
315 315 for ref, sha in self._refs.iteritems():
316 316 if ref.startswith(prefix):
317 317 ref_name = ref
318 318 if strip_prefix:
319 319 ref_name = ref[len(prefix):]
320 320 result.append((safe_unicode(ref_name), sha))
321 321
322 322 def get_name(entry):
323 323 return entry[0]
324 324
325 325 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
326 326
327 327 def _get_branches(self):
328 328 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
329 329
330 330 @LazyProperty
331 331 def branches(self):
332 332 return self._get_branches()
333 333
334 334 @LazyProperty
335 335 def branches_closed(self):
336 336 return {}
337 337
338 338 @LazyProperty
339 339 def bookmarks(self):
340 340 return {}
341 341
342 342 @LazyProperty
343 343 def branches_all(self):
344 344 all_branches = {}
345 345 all_branches.update(self.branches)
346 346 all_branches.update(self.branches_closed)
347 347 return all_branches
348 348
349 349 @LazyProperty
350 350 def tags(self):
351 351 return self._get_tags()
352 352
353 353 def _get_tags(self):
354 354 return self._get_refs_entries(
355 355 prefix='refs/tags/', strip_prefix=True, reverse=True)
356 356
357 357 def tag(self, name, user, commit_id=None, message=None, date=None,
358 358 **kwargs):
359 359 # TODO: fix this method to apply annotated tags correctly with message
360 360 """
361 361 Creates and returns a tag for the given ``commit_id``.
362 362
363 363 :param name: name for new tag
364 364 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
365 365 :param commit_id: commit id for which new tag would be created
366 366 :param message: message of the tag's commit
367 367 :param date: date of tag's commit
368 368
369 369 :raises TagAlreadyExistError: if tag with same name already exists
370 370 """
371 371 if name in self.tags:
372 372 raise TagAlreadyExistError("Tag %s already exists" % name)
373 373 commit = self.get_commit(commit_id=commit_id)
374 374 message = message or "Added tag %s for commit %s" % (
375 375 name, commit.raw_id)
376 376 self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])
377 377
378 378 self._refs = self._get_refs()
379 379 self.tags = self._get_tags()
380 380 return commit
381 381
382 382 def remove_tag(self, name, user, message=None, date=None):
383 383 """
384 384 Removes tag with the given ``name``.
385 385
386 386 :param name: name of the tag to be removed
387 387 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
388 388 :param message: message of the tag's removal commit
389 389 :param date: date of tag's removal commit
390 390
391 391 :raises TagDoesNotExistError: if tag with given name does not exist
392 392 """
393 393 if name not in self.tags:
394 394 raise TagDoesNotExistError("Tag %s does not exist" % name)
395 395 tagpath = vcspath.join(
396 396 self._remote.get_refs_path(), 'refs', 'tags', name)
397 397 try:
398 398 os.remove(tagpath)
399 399 self._refs = self._get_refs()
400 400 self.tags = self._get_tags()
401 401 except OSError as e:
402 402 raise RepositoryError(e.strerror)
403 403
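# Usage sketch (tag name and user are hypothetical): creating and removing a
# tag through this API.
repo.tag('v1.0.0', user='Joe Doe <joe.doe@example.com>',
         commit_id=repo.commit_ids[-1])
repo.remove_tag('v1.0.0', user='Joe Doe <joe.doe@example.com>')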
404 404 def _get_refs(self):
405 405 return self._remote.get_refs()
406 406
407 407 @LazyProperty
408 408 def _refs(self):
409 409 return self._get_refs()
410 410
411 411 @property
412 412 def _ref_tree(self):
413 413 node = tree = {}
414 414 for ref, sha in self._refs.iteritems():
415 415 path = ref.split('/')
416 416 for bit in path[:-1]:
417 417 node = node.setdefault(bit, {})
418 418 node[path[-1]] = sha
419 419 node = tree
420 420 return tree
421 421
422 422 def get_remote_ref(self, ref_name):
423 423 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
424 424 try:
425 425 return self._refs[ref_key]
426 426 except Exception:
427 427 return
428 428
429 429 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
430 430 """
431 431 Returns `GitCommit` object representing commit from git repository
432 432 at the given `commit_id` or head (most recent commit) if None given.
433 433 """
434 434 if commit_id is not None:
435 435 self._validate_commit_id(commit_id)
436 436 elif commit_idx is not None:
437 437 self._validate_commit_idx(commit_idx)
438 438 commit_id = commit_idx
439 439 commit_id = self._get_commit_id(commit_id)
440 440 try:
441 441 # Need to call remote to translate id for tagging scenario
442 442 commit_id = self._remote.get_object(commit_id)["commit_id"]
443 443 idx = self._commit_ids[commit_id]
444 444 except KeyError:
445 445 raise RepositoryError("Cannot get object with id %s" % commit_id)
446 446
447 447 return GitCommit(self, commit_id, idx, pre_load=pre_load)
448 448
449 449 def get_commits(
450 450 self, start_id=None, end_id=None, start_date=None, end_date=None,
451 451 branch_name=None, show_hidden=False, pre_load=None):
452 452 """
453 453 Returns generator of `GitCommit` objects from start to end (both
454 454 are inclusive), in ascending date order.
455 455
456 456 :param start_id: None, str(commit_id)
457 457 :param end_id: None, str(commit_id)
458 458 :param start_date: if specified, commits with commit date less than
459 459 ``start_date`` would be filtered out from returned set
460 460 :param end_date: if specified, commits with commit date greater than
461 461 ``end_date`` would be filtered out from returned set
462 462 :param branch_name: if specified, commits not reachable from given
463 463 branch would be filtered out from returned set
464 464 :param show_hidden: Show hidden commits such as obsolete or hidden from
465 465 Mercurial evolve
466 466 :raise BranchDoesNotExistError: If given `branch_name` does not
467 467 exist.
468 468 :raise CommitDoesNotExistError: If commits for given `start` or
469 469 `end` could not be found.
470 470
471 471 """
472 472 if self.is_empty():
473 473 raise EmptyRepositoryError("There are no commits yet")
474 474 self._validate_branch_name(branch_name)
475 475
476 476 if start_id is not None:
477 477 self._validate_commit_id(start_id)
478 478 if end_id is not None:
479 479 self._validate_commit_id(end_id)
480 480
481 481 start_raw_id = self._get_commit_id(start_id)
482 482 start_pos = self._commit_ids[start_raw_id] if start_id else None
483 483 end_raw_id = self._get_commit_id(end_id)
484 484 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
485 485
486 486 if None not in [start_id, end_id] and start_pos > end_pos:
487 487 raise RepositoryError(
488 488 "Start commit '%s' cannot be after end commit '%s'" %
489 489 (start_id, end_id))
490 490
491 491 if end_pos is not None:
492 492 end_pos += 1
493 493
494 494 filter_ = []
495 495 if branch_name:
496 496 filter_.append({'branch_name': branch_name})
497 497 if start_date and not end_date:
498 498 filter_.append({'since': start_date})
499 499 if end_date and not start_date:
500 500 filter_.append({'until': end_date})
501 501 if start_date and end_date:
502 502 filter_.append({'since': start_date})
503 503 filter_.append({'until': end_date})
504 504
505 505 # if start_pos or end_pos:
506 506 # filter_.append({'start': start_pos})
507 507 # filter_.append({'end': end_pos})
508 508
509 509 if filter_:
510 510 revfilters = {
511 511 'branch_name': branch_name,
512 512 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
513 513 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
514 514 'start': start_pos,
515 515 'end': end_pos,
516 516 }
517 517 commit_ids = self._get_all_commit_ids(filters=revfilters)
518 518
519 519 # pure python stuff, it's slow due to walker walking whole repo
520 520 # def get_revs(walker):
521 521 # for walker_entry in walker:
522 522 # yield walker_entry.commit.id
523 523 # revfilters = {}
524 524 # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
525 525 else:
526 526 commit_ids = self.commit_ids
527 527
528 528 if start_pos or end_pos:
529 529 commit_ids = commit_ids[start_pos: end_pos]
530 530
531 531 return CollectionGenerator(self, commit_ids, pre_load=pre_load)
532 532
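# Usage sketch (branch and dates are hypothetical): fetching a date-bounded
# slice of history on a branch; the result is a lazy CollectionGenerator.
import datetime
commits = repo.get_commits(
    branch_name='master',
    start_date=datetime.datetime(2018, 1, 1),
    end_date=datetime.datetime(2018, 6, 30))
for commit in commits:
    print('%s %s' % (commit.short_id, commit.message))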
533 533 def get_diff(
534 534 self, commit1, commit2, path='', ignore_whitespace=False,
535 535 context=3, path1=None):
536 536 """
537 537 Returns (git like) *diff*, as plain text. Shows changes introduced by
538 538 ``commit2`` since ``commit1``.
539 539
540 540 :param commit1: Entry point from which diff is shown. Can be
541 541 ``self.EMPTY_COMMIT`` - in this case, patch showing all
542 542 the changes since empty state of the repository until ``commit2``
543 543 :param commit2: Until which commits changes should be shown.
544 544 :param ignore_whitespace: If set to ``True``, would not show whitespace
545 545 changes. Defaults to ``False``.
546 546 :param context: How many lines before/after changed lines should be
547 547 shown. Defaults to ``3``.
548 548 """
549 549 self._validate_diff_commits(commit1, commit2)
550 550 if path1 is not None and path1 != path:
551 551 raise ValueError("Diff of two different paths not supported.")
552 552
553 553 flags = [
554 554 '-U%s' % context, '--full-index', '--binary', '-p',
555 555 '-M', '--abbrev=40']
556 556 if ignore_whitespace:
557 557 flags.append('-w')
558 558
559 559 if commit1 == self.EMPTY_COMMIT:
560 560 cmd = ['show'] + flags + [commit2.raw_id]
561 561 else:
562 562 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
563 563
564 564 if path:
565 565 cmd.extend(['--', path])
566 566
567 567 stdout, __ = self.run_git_command(cmd)
568 568 # If we used 'show' command, strip first few lines (until actual diff
569 569 # starts)
570 570 if commit1 == self.EMPTY_COMMIT:
571 571 lines = stdout.splitlines()
572 572 x = 0
573 573 for line in lines:
574 574 if line.startswith('diff'):
575 575 break
576 576 x += 1
577 577 # Append a new line just like the 'diff' command does
578 578 stdout = '\n'.join(lines[x:]) + '\n'
579 579 return GitDiff(stdout)
580 580
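# Usage sketch (commit indices and path are hypothetical): diffing two
# commits, restricted to a single path.
commit1 = repo.get_commit(commit_idx=0)
commit2 = repo.get_commit()  # head
diff = repo.get_diff(commit1, commit2, path='setup.py', context=5)
print(diff.raw)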
581 581 def strip(self, commit_id, branch_name):
582 582 commit = self.get_commit(commit_id=commit_id)
583 583 if commit.merge:
584 584 raise Exception('Cannot reset to merge commit')
585 585
586 586 # parent is going to be the new head now
587 587 commit = commit.parents[0]
588 588 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
589 589
590 590 self.commit_ids = self._get_all_commit_ids()
591 591 self._rebuild_cache(self.commit_ids)
592 592
593 593 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
594 594 if commit_id1 == commit_id2:
595 595 return commit_id1
596 596
597 597 if self != repo2:
598 598 commits = self._remote.get_missing_revs(
599 599 commit_id1, commit_id2, repo2.path)
600 600 if commits:
601 601 commit = repo2.get_commit(commits[-1])
602 602 if commit.parents:
603 603 ancestor_id = commit.parents[0].raw_id
604 604 else:
605 605 ancestor_id = None
606 606 else:
607 607 # no commits from other repo, ancestor_id is the commit_id2
608 608 ancestor_id = commit_id2
609 609 else:
610 610 output, __ = self.run_git_command(
611 611 ['merge-base', commit_id1, commit_id2])
612 612 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
613 613
614 614 return ancestor_id
615 615
616 616 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
617 617 repo1 = self
618 618 ancestor_id = None
619 619
620 620 if commit_id1 == commit_id2:
621 621 commits = []
622 622 elif repo1 != repo2:
623 623 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
624 624 repo2.path)
625 625 commits = [
626 626 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
627 627 for commit_id in reversed(missing_ids)]
628 628 else:
629 629 output, __ = repo1.run_git_command(
630 630 ['log', '--reverse', '--pretty=format: %H', '-s',
631 631 '%s..%s' % (commit_id1, commit_id2)])
632 632 commits = [
633 633 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
634 634 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
635 635
636 636 return commits
637 637
638 638 @LazyProperty
639 639 def in_memory_commit(self):
640 640 """
641 641 Returns ``GitInMemoryCommit`` object for this repository.
642 642 """
643 643 return GitInMemoryCommit(self)
644 644
645 645 def pull(self, url, commit_ids=None, update_after=False):
646 646 """
647 647 Pull changes from an external location. Pull is different in git
648 648 than fetch, since it also does a checkout
649 649
650 650 :param commit_ids: Optional. Can be set to a list of commit ids
651 651 which shall be pulled from the other repository.
652 652 """
653 653 refs = None
654 654 if commit_ids is not None:
655 655 remote_refs = self._remote.get_remote_refs(url)
656 656 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
657 657 self._remote.pull(url, refs=refs, update_after=update_after)
658 658 self._remote.invalidate_vcs_cache()
659 659
660 660 def fetch(self, url, commit_ids=None):
661 661 """
662 662 Fetch all git objects from external location.
663 663 """
664 664 self._remote.sync_fetch(url, refs=commit_ids)
665 665 self._remote.invalidate_vcs_cache()
666 666
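# Usage sketch (URL is hypothetical): syncing from an external location;
# fetch for bare mirrors, pull (which also checks out) for repositories with
# a working copy.
repo.fetch('https://example.com/repo.git')
repo.pull('https://example.com/repo.git', update_after=True)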
667 667 def push(self, url):
668 668 refs = None
669 669 self._remote.sync_push(url, refs=refs)
670 670
671 671 def set_refs(self, ref_name, commit_id):
672 672 self._remote.set_refs(ref_name, commit_id)
673 673
674 674 def remove_ref(self, ref_name):
675 675 self._remote.remove_ref(ref_name)
676 676
677 677 def _update_server_info(self):
678 678 """
679 679 runs git's update-server-info command on this repo instance
680 680 """
681 681 self._remote.update_server_info()
682 682
683 683 def _current_branch(self):
684 684 """
685 685 Return the name of the current branch.
686 686
687 687 It only works for non-bare repositories (i.e. repositories with a
688 688 working copy)
689 689 """
690 690 if self.bare:
691 691 raise RepositoryError('Bare git repos do not have active branches')
692 692
693 693 if self.is_empty():
694 694 return None
695 695
696 696 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
697 697 return stdout.strip()
698 698
699 699 def _checkout(self, branch_name, create=False, force=False):
700 700 """
701 701 Checkout a branch in the working directory.
702 702
703 703 It tries to create the branch if create is True, failing if the branch
704 704 already exists.
705 705
706 706 It only works for non-bare repositories (i.e. repositories with a
707 707 working copy)
708 708 """
709 709 if self.bare:
710 710 raise RepositoryError('Cannot checkout branches in a bare git repo')
711 711
712 712 cmd = ['checkout']
713 713 if force:
714 714 cmd.append('-f')
715 715 if create:
716 716 cmd.append('-b')
717 717 cmd.append(branch_name)
718 718 self.run_git_command(cmd, fail_on_stderr=False)
719 719
720 720 def _identify(self):
721 721 """
722 722 Return the current state of the working directory.
723 723 """
724 724 if self.bare:
725 725 raise RepositoryError('Bare git repos do not have active branches')
726 726
727 727 if self.is_empty():
728 728 return None
729 729
730 730 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
731 731 return stdout.strip()
732 732
733 733 def _local_clone(self, clone_path, branch_name, source_branch=None):
734 734 """
735 735 Create a local clone of the current repo.
736 736 """
737 737 # N.B.(skreft): the --branch option is required as otherwise the shallow
738 738 # clone will only fetch the active branch.
739 739 cmd = ['clone', '--branch', branch_name,
740 740 self.path, os.path.abspath(clone_path)]
741 741
742 742 self.run_git_command(cmd, fail_on_stderr=False)
743 743
744 744 # if we get the different source branch, make sure we also fetch it for
745 745 # merge conditions
746 746 if source_branch and source_branch != branch_name:
747 747 # check if the ref exists.
748 748 shadow_repo = GitRepository(os.path.abspath(clone_path))
749 749 if shadow_repo.get_remote_ref(source_branch):
750 750 cmd = ['fetch', self.path, source_branch]
751 751 self.run_git_command(cmd, fail_on_stderr=False)
752 752
753 753 def _local_fetch(self, repository_path, branch_name, use_origin=False):
754 754 """
755 755 Fetch a branch from a local repository.
756 756 """
757 757 repository_path = os.path.abspath(repository_path)
758 758 if repository_path == self.path:
759 759 raise ValueError('Cannot fetch from the same repository')
760 760
761 761 if use_origin:
762 762 branch_name = '+{branch}:refs/heads/{branch}'.format(
763 763 branch=branch_name)
764 764
765 765 cmd = ['fetch', '--no-tags', '--update-head-ok',
766 766 repository_path, branch_name]
767 767 self.run_git_command(cmd, fail_on_stderr=False)
768 768
769 769 def _local_reset(self, branch_name):
770 770 branch_name = '{}'.format(branch_name)
771 771 cmd = ['reset', '--hard', branch_name, '--']
772 772 self.run_git_command(cmd, fail_on_stderr=False)
773 773
774 774 def _last_fetch_heads(self):
775 775 """
776 776 Return the last fetched heads that need merging.
777 777
778 778 The algorithm is defined at
779 779 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
780 780 """
781 781 if not self.bare:
782 782 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
783 783 else:
784 784 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
785 785
786 786 heads = []
787 787 with open(fetch_heads_path) as f:
788 788 for line in f:
789 789 if ' not-for-merge ' in line:
790 790 continue
791 791 line = re.sub('\t.*', '', line, flags=re.DOTALL)
792 792 heads.append(line)
793 793
794 794 return heads
795 795
796 796 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
797 797 return GitRepository(shadow_repository_path)
798 798
799 799 def _local_pull(self, repository_path, branch_name, ff_only=True):
800 800 """
801 801 Pull a branch from a local repository.
802 802 """
803 803 if self.bare:
804 804 raise RepositoryError('Cannot pull into a bare git repository')
805 805 # N.B.(skreft): The --ff-only option is to make sure this is a
806 806 # fast-forward (i.e., we are only pulling new changes and there are no
807 807 # conflicts with our current branch)
808 808 # Additionally, that option needs to go before --no-tags, otherwise git
809 809 # pull complains about it being an unknown flag.
810 810 cmd = ['pull']
811 811 if ff_only:
812 812 cmd.append('--ff-only')
813 813 cmd.extend(['--no-tags', repository_path, branch_name])
814 814 self.run_git_command(cmd, fail_on_stderr=False)
815 815
816 816 def _local_merge(self, merge_message, user_name, user_email, heads):
817 817 """
818 818 Merge the given head into the checked out branch.
819 819
820 820 It will force a merge commit.
821 821
822 822 Currently it raises an error if the repo is empty, as it is not possible
823 823 to create a merge commit in an empty repo.
824 824
825 825 :param merge_message: The message to use for the merge commit.
826 826 :param heads: the heads to merge.
827 827 """
828 828 if self.bare:
829 829 raise RepositoryError('Cannot merge into a bare git repository')
830 830
831 831 if not heads:
832 832 return
833 833
834 834 if self.is_empty():
835 835 # TODO(skreft): do something more robust in this case.
836 836 raise RepositoryError(
837 837 'Do not know how to merge into empty repositories yet')
838 838
839 839 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
840 840 # commit message. We also specify the user who is doing the merge.
841 841 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
842 842 '-c', 'user.email=%s' % safe_str(user_email),
843 843 'merge', '--no-ff', '-m', safe_str(merge_message)]
844 844 cmd.extend(heads)
845 845 try:
846 846 output = self.run_git_command(cmd, fail_on_stderr=False)
847 847 except RepositoryError:
848 848 # Cleanup any merge leftovers
849 849 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
850 850 raise
851 851
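The `-c key=value` pairs above set git configuration for this single invocation only, so the merger's identity never touches the repository config. A hedged sketch of the argv that gets assembled, with placeholder identity, message and sha:

cmd = ['git',
       '-c', 'user.name=Merge Bot',        # identity for this call only
       '-c', 'user.email=bot@example.com',
       'merge',
       '--no-ff',                          # always record a merge commit
       '-m', 'merge pr',
       'f' * 40]                           # head to merge (placeholder sha)
print(' '.join(cmd))
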
852 852 def _local_push(
853 853 self, source_branch, repository_path, target_branch,
854 854 enable_hooks=False, rc_scm_data=None):
855 855 """
856 856 Push the source_branch to the given repository and target_branch.
857 857
858 858 Currently, if the target_branch is not master and the target repo is
859 859 empty, the push will work, but then GitRepository won't be able to find
860 860 the pushed branch or the commits, as the HEAD will be corrupted (i.e.,
861 861 pointing to master, which does not exist).
862 862
863 863 It does not run the hooks in the target repo.
864 864 """
865 865 # TODO(skreft): deal with the case in which the target repo is empty,
866 866 # and the target_branch is not master.
867 867 target_repo = GitRepository(repository_path)
868 868 if (not target_repo.bare and
869 869 target_repo._current_branch() == target_branch):
870 870 # Git prevents pushing to the checked out branch, so simulate it by
871 871 # pulling into the target repository.
872 872 target_repo._local_pull(self.path, source_branch)
873 873 else:
874 874 cmd = ['push', os.path.abspath(repository_path),
875 875 '%s:%s' % (source_branch, target_branch)]
876 876 gitenv = {}
877 877 if rc_scm_data:
878 878 gitenv.update({'RC_SCM_DATA': rc_scm_data})
879 879
880 880 if not enable_hooks:
881 881 gitenv['RC_SKIP_HOOKS'] = '1'
882 882 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
883 883
884 884 def _get_new_pr_branch(self, source_branch, target_branch):
885 885 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
886 886 pr_branches = []
887 887 for branch in self.branches:
888 888 if branch.startswith(prefix):
889 889 pr_branches.append(int(branch[len(prefix):]))
890 890
891 891 if not pr_branches:
892 892 branch_id = 0
893 893 else:
894 894 branch_id = max(pr_branches) + 1
895 895
896 896 return '%s%d' % (prefix, branch_id)
897 897
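A small worked example of the pull-request branch naming scheme implemented above, using hypothetical branch names:

def next_pr_branch(existing_branches, source_branch, target_branch):
    # mirrors _get_new_pr_branch(): 'pr_<source>-<target>_<n>', where n is
    # one greater than the highest suffix already taken (0 when none exist)
    prefix = 'pr_%s-%s_' % (source_branch, target_branch)
    taken = [int(b[len(prefix):]) for b in existing_branches
             if b.startswith(prefix)]
    return '%s%d' % (prefix, max(taken) + 1 if taken else 0)

print(next_pr_branch(['pr_feat-master_0', 'pr_feat-master_1'],
                     'feat', 'master'))  # pr_feat-master_2
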
898 898 def _maybe_prepare_merge_workspace(
899 899 self, repo_id, workspace_id, target_ref, source_ref):
900 900 shadow_repository_path = self._get_shadow_repository_path(
901 901 repo_id, workspace_id)
902 902 if not os.path.exists(shadow_repository_path):
903 903 self._local_clone(
904 904 shadow_repository_path, target_ref.name, source_ref.name)
905 905 log.debug(
906 906 'Prepared shadow repository in %s', shadow_repository_path)
907 907
908 908 return shadow_repository_path
909 909
910 910 def _merge_repo(self, repo_id, workspace_id, target_ref,
911 911 source_repo, source_ref, merge_message,
912 912 merger_name, merger_email, dry_run=False,
913 913 use_rebase=False, close_branch=False):
914 914 if target_ref.commit_id != self.branches[target_ref.name]:
915 915 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
916 916 target_ref.commit_id, self.branches[target_ref.name])
917 917 return MergeResponse(
918 918 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
919 919
920 920 shadow_repository_path = self._maybe_prepare_merge_workspace(
921 921 repo_id, workspace_id, target_ref, source_ref)
922 922 shadow_repo = self._get_shadow_instance(shadow_repository_path)
923 923
924 924 # checkout source, if it's different. Otherwise we could not
925 925 # fetch proper commits for merge testing
926 926 if source_ref.name != target_ref.name:
927 927 if shadow_repo.get_remote_ref(source_ref.name):
928 928 shadow_repo._checkout(source_ref.name, force=True)
929 929
930 930 # checkout target, and fetch changes
931 931 shadow_repo._checkout(target_ref.name, force=True)
932 932
933 933 # fetch/reset the target, in case it has changed;
934 934 # this handles even forced changes
935 935 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
936 936 shadow_repo._local_reset(target_ref.name)
937 937
938 938 # Need to reload repo to invalidate the cache, or otherwise we cannot
939 939 # retrieve the last target commit.
940 940 shadow_repo = self._get_shadow_instance(shadow_repository_path)
941 941 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
942 942 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
943 943 target_ref, target_ref.commit_id,
944 944 shadow_repo.branches[target_ref.name])
945 945 return MergeResponse(
946 946 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
947 947
948 948 # calculate new branch
949 949 pr_branch = shadow_repo._get_new_pr_branch(
950 950 source_ref.name, target_ref.name)
951 951 log.debug('using pull-request merge branch: `%s`', pr_branch)
952 952 # checkout to temp branch, and fetch changes
953 953 shadow_repo._checkout(pr_branch, create=True)
954 954 try:
955 955 shadow_repo._local_fetch(source_repo.path, source_ref.name)
956 956 except RepositoryError:
957 957 log.exception('Failure when doing local fetch on git shadow repo')
958 958 return MergeResponse(
959 959 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
960 960
961 961 merge_ref = None
962 962 merge_failure_reason = MergeFailureReason.NONE
963 963 try:
964 964 shadow_repo._local_merge(merge_message, merger_name, merger_email,
965 965 [source_ref.commit_id])
966 966 merge_possible = True
967 967
968 968 # Need to reload repo to invalidate the cache, or otherwise we
969 969 # cannot retrieve the merge commit.
970 970 shadow_repo = GitRepository(shadow_repository_path)
971 971 merge_commit_id = shadow_repo.branches[pr_branch]
972 972
973 973 # Set a reference pointing to the merge commit. This reference may
974 974 # be used to easily identify the last successful merge commit in
975 975 # the shadow repository.
976 976 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
977 977 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
978 978 except RepositoryError:
979 979 log.exception('Failure when doing local merge on git shadow repo')
980 980 merge_possible = False
981 981 merge_failure_reason = MergeFailureReason.MERGE_FAILED
982 982
983 983 if merge_possible and not dry_run:
984 984 try:
985 985 shadow_repo._local_push(
986 986 pr_branch, self.path, target_ref.name, enable_hooks=True,
987 987 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
988 988 merge_succeeded = True
989 989 except RepositoryError:
990 990 log.exception(
991 991 'Failure when doing local push on git shadow repo')
992 992 merge_succeeded = False
993 993 merge_failure_reason = MergeFailureReason.PUSH_FAILED
994 994 else:
995 995 merge_succeeded = False
996 996
997 997 return MergeResponse(
998 998 merge_possible, merge_succeeded, merge_ref,
999 999 merge_failure_reason)
@@ -1,924 +1,924 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.vcs import connection, exceptions
36 36 from rhodecode.lib.vcs.backends.base import (
37 37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 38 MergeFailureReason, Reference, BasePathPermissionChecker)
39 39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 45 from rhodecode.lib.vcs.compat import configparser
46 46
47 47 hexlify = binascii.hexlify
48 48 nullid = "\0" * 20
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 class MercurialRepository(BaseRepository):
54 54 """
55 55 Mercurial repository backend
56 56 """
57 57 DEFAULT_BRANCH_NAME = 'default'
58 58
59 59 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 60 do_workspace_checkout=False, with_wire=None, bare=False):
61 61 """
62 62 Raises RepositoryError if repository could not be found at the given
63 63 ``repo_path``.
64 64
65 65 :param repo_path: local path of the repository
66 66 :param config: config object containing the repo configuration
67 67 :param create=False: if set to True, would try to create repository if
68 68 it does not exist rather than raising exception
69 69 :param src_url=None: would try to clone repository from given location
70 70 :param do_workspace_checkout=False: sets update of working copy after
71 71 making a clone
72 72 :param bare: not used, compatible with other VCS
73 73 """
74 74
75 75 self.path = safe_str(os.path.abspath(repo_path))
76 76 # mercurial since 4.4.X requires certain configuration to be present;
77 77 # because sometimes we init the repos with a config, we need to meet
78 78 # these special requirements
79 79 self.config = config if config else self.get_default_config(
80 80 default=[('extensions', 'largefiles', '1')])
81 81 self.with_wire = with_wire
82 82
83 83 self._init_repo(create, src_url, do_workspace_checkout)
84 84
85 85 # caches
86 86 self._commit_ids = {}
87 87
88 88 @LazyProperty
89 89 def _remote(self):
90 90 return connection.Hg(self.path, self.config, with_wire=self.with_wire)
91 91
92 92 @LazyProperty
93 93 def commit_ids(self):
94 94 """
95 95 Returns list of commit ids, in ascending order. Being a lazy
96 96 attribute, it allows external tools to inject shas from cache.
97 97 """
98 98 commit_ids = self._get_all_commit_ids()
99 99 self._rebuild_cache(commit_ids)
100 100 return commit_ids
101 101
102 102 def _rebuild_cache(self, commit_ids):
103 103 self._commit_ids = dict((commit_id, index)
104 104 for index, commit_id in enumerate(commit_ids))
105 105
106 106 @LazyProperty
107 107 def branches(self):
108 108 return self._get_branches()
109 109
110 110 @LazyProperty
111 111 def branches_closed(self):
112 112 return self._get_branches(active=False, closed=True)
113 113
114 114 @LazyProperty
115 115 def branches_all(self):
116 116 all_branches = {}
117 117 all_branches.update(self.branches)
118 118 all_branches.update(self.branches_closed)
119 119 return all_branches
120 120
121 121 def _get_branches(self, active=True, closed=False):
122 122 """
122 122 Gets branches for this repository.
123 123 Returns only active (not closed) branches by default.
124 124
125 125 :param active: also return active branches
126 126 :param closed: also return closed branches
128 128
129 129 """
130 130 if self.is_empty():
131 131 return {}
132 132
133 133 def get_name(ctx):
134 134 return ctx[0]
135 135
136 136 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
137 137 self._remote.branches(active, closed).items()]
138 138
139 139 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
140 140
141 141 @LazyProperty
142 142 def tags(self):
143 143 """
144 144 Gets tags for this repository
145 145 """
146 146 return self._get_tags()
147 147
148 148 def _get_tags(self):
149 149 if self.is_empty():
150 150 return {}
151 151
152 152 def get_name(ctx):
153 153 return ctx[0]
154 154
155 155 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
156 156 self._remote.tags().items()]
157 157
158 158 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
159 159
160 160 def tag(self, name, user, commit_id=None, message=None, date=None,
161 161 **kwargs):
162 162 """
163 163 Creates and returns a tag for the given ``commit_id``.
164 164
165 165 :param name: name for new tag
166 166 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 167 :param commit_id: commit id for which new tag would be created
168 168 :param message: message of the tag's commit
169 169 :param date: date of tag's commit
170 170
171 171 :raises TagAlreadyExistError: if tag with same name already exists
172 172 """
173 173 if name in self.tags:
174 174 raise TagAlreadyExistError("Tag %s already exists" % name)
175 175 commit = self.get_commit(commit_id=commit_id)
176 176 local = kwargs.setdefault('local', False)
177 177
178 178 if message is None:
179 179 message = "Added tag %s for commit %s" % (name, commit.short_id)
180 180
181 181 date, tz = date_to_timestamp_plus_offset(date)
182 182
183 183 self._remote.tag(
184 184 name, commit.raw_id, message, local, user, date, tz)
185 185 self._remote.invalidate_vcs_cache()
186 186
187 187 # Reinitialize tags
188 188 self.tags = self._get_tags()
189 189 tag_id = self.tags[name]
190 190
191 191 return self.get_commit(commit_id=tag_id)
192 192
193 193 def remove_tag(self, name, user, message=None, date=None):
194 194 """
195 195 Removes tag with the given `name`.
196 196
197 197 :param name: name of the tag to be removed
198 198 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 199 :param message: message of the tag's removal commit
200 200 :param date: date of tag's removal commit
201 201
202 202 :raises TagDoesNotExistError: if tag with given name does not exist
203 203 """
204 204 if name not in self.tags:
205 205 raise TagDoesNotExistError("Tag %s does not exist" % name)
206 206 if message is None:
207 207 message = "Removed tag %s" % name
208 208 local = False
209 209
210 210 date, tz = date_to_timestamp_plus_offset(date)
211 211
212 212 self._remote.tag(name, nullid, message, local, user, date, tz)
213 213 self._remote.invalidate_vcs_cache()
214 214 self.tags = self._get_tags()
215 215
216 216 @LazyProperty
217 217 def bookmarks(self):
218 218 """
219 219 Gets bookmarks for this repository
220 220 """
221 221 return self._get_bookmarks()
222 222
223 223 def _get_bookmarks(self):
224 224 if self.is_empty():
225 225 return {}
226 226
227 227 def get_name(ctx):
228 228 return ctx[0]
229 229
230 230 _bookmarks = [
231 231 (safe_unicode(n), hexlify(h)) for n, h in
232 232 self._remote.bookmarks().items()]
233 233
234 234 return OrderedDict(sorted(_bookmarks, key=get_name))
235 235
236 236 def _get_all_commit_ids(self):
237 237 return self._remote.get_all_commit_ids('visible')
238 238
239 239 def get_diff(
240 240 self, commit1, commit2, path='', ignore_whitespace=False,
241 241 context=3, path1=None):
242 242 """
243 243 Returns (git like) *diff*, as plain text. Shows changes introduced by
244 244 `commit2` since `commit1`.
245 245
246 246 :param commit1: Entry point from which diff is shown. Can be
247 247 ``self.EMPTY_COMMIT`` - in this case, patch showing all
248 248 the changes since empty state of the repository until `commit2`
249 249 :param commit2: Until which commit changes should be shown.
250 250 :param ignore_whitespace: If set to ``True``, would not show whitespace
251 251 changes. Defaults to ``False``.
252 252 :param context: How many lines before/after changed lines should be
253 253 shown. Defaults to ``3``.
254 254 """
255 255 self._validate_diff_commits(commit1, commit2)
256 256 if path1 is not None and path1 != path:
257 257 raise ValueError("Diff of two different paths not supported.")
258 258
259 259 if path:
260 260 file_filter = [self.path, path]
261 261 else:
262 262 file_filter = None
263 263
264 264 diff = self._remote.diff(
265 265 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
266 266 opt_git=True, opt_ignorews=ignore_whitespace,
267 267 context=context)
268 268 return MercurialDiff(diff)
269 269
270 270 def strip(self, commit_id, branch=None):
271 271 self._remote.strip(commit_id, update=False, backup="none")
272 272
273 273 self._remote.invalidate_vcs_cache()
274 274 self.commit_ids = self._get_all_commit_ids()
275 275 self._rebuild_cache(self.commit_ids)
276 276
277 277 def verify(self):
278 278 verify = self._remote.verify()
279 279
280 280 self._remote.invalidate_vcs_cache()
281 281 return verify
282 282
283 283 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
284 284 if commit_id1 == commit_id2:
285 285 return commit_id1
286 286
287 287 ancestors = self._remote.revs_from_revspec(
288 288 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
289 289 other_path=repo2.path)
290 290 return repo2[ancestors[0]].raw_id if ancestors else None
291 291
292 292 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
293 293 if commit_id1 == commit_id2:
294 294 commits = []
295 295 else:
296 296 if merge:
297 297 indexes = self._remote.revs_from_revspec(
298 298 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
299 299 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
300 300 else:
301 301 indexes = self._remote.revs_from_revspec(
302 302 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
303 303 commit_id1, other_path=repo2.path)
304 304
305 305 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
306 306 for idx in indexes]
307 307
308 308 return commits
309 309
310 310 @staticmethod
311 311 def check_url(url, config):
312 312 """
313 313 Function will check the given url and try to verify that it's a valid
314 314 link. Sometimes it may happen that mercurial issues a basic
315 315 auth request, which can cause the whole API to hang when used from
316 316 python or other external calls.
317 317
318 318 On failure it raises urllib2.HTTPError; the exception is also thrown
319 319 when the return code is not 200.
320 320 """
321 321 # check first if it's not a local url
322 322 if os.path.isdir(url) or url.startswith('file:'):
323 323 return True
324 324
325 325 # Request the _remote to verify the url
326 326 return connection.Hg.check_url(url, config.serialize())
327 327
328 328 @staticmethod
329 329 def is_valid_repository(path):
330 330 return os.path.isdir(os.path.join(path, '.hg'))
331 331
332 332 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
333 333 """
334 334 Function will check for mercurial repository in given path. If there
335 335 is no repository in that path it will raise an exception unless
336 336 `create` parameter is set to True - in that case repository would
337 337 be created.
338 338
339 339 If `src_url` is given, it will try to clone the repository from
340 340 that location. Additionally, it will update the working copy
341 341 according to the `do_workspace_checkout` flag.
342 342 """
343 343 if create and os.path.exists(self.path):
344 344 raise RepositoryError(
345 345 "Cannot create repository at %s, location already exist"
346 346 % self.path)
347 347
348 348 if src_url:
349 349 url = str(self._get_url(src_url))
350 350 MercurialRepository.check_url(url, self.config)
351 351
352 352 self._remote.clone(url, self.path, do_workspace_checkout)
353 353
354 354 # Don't try to create if we've already cloned repo
355 355 create = False
356 356
357 357 if create:
358 os.makedirs(self.path, mode=0755)
358 os.makedirs(self.path, mode=0o755)
359 359
360 360 self._remote.localrepository(create)
361 361
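The `0o755` literal in the fixed line above is the py2.6+/py3-compatible spelling of the old `0755` octal; both denote the same permission bits:

import stat

mode = 0o755
assert mode == 493  # decimal value of octal 755
assert mode == (stat.S_IRWXU |                 # rwx for owner
                stat.S_IRGRP | stat.S_IXGRP |  # r-x for group
                stat.S_IROTH | stat.S_IXOTH)   # r-x for others
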
362 362 @LazyProperty
363 363 def in_memory_commit(self):
364 364 return MercurialInMemoryCommit(self)
365 365
366 366 @LazyProperty
367 367 def description(self):
368 368 description = self._remote.get_config_value(
369 369 'web', 'description', untrusted=True)
370 370 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
371 371
372 372 @LazyProperty
373 373 def contact(self):
374 374 contact = (
375 375 self._remote.get_config_value("web", "contact") or
376 376 self._remote.get_config_value("ui", "username"))
377 377 return safe_unicode(contact or self.DEFAULT_CONTACT)
378 378
379 379 @LazyProperty
380 380 def last_change(self):
381 381 """
382 382 Returns last change made on this repository as
383 383 `datetime.datetime` object.
384 384 """
385 385 try:
386 386 return self.get_commit().date
387 387 except RepositoryError:
388 388 tzoffset = makedate()[1]
389 389 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
390 390
391 391 def _get_fs_mtime(self):
392 392 # fallback to filesystem
393 393 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
394 394 st_path = os.path.join(self.path, '.hg', "store")
395 395 if os.path.exists(cl_path):
396 396 return os.stat(cl_path).st_mtime
397 397 else:
398 398 return os.stat(st_path).st_mtime
399 399
400 400 def _get_url(self, url):
401 401 """
402 402 Returns a normalized url. If no schema is given, it falls back
403 403 to the filesystem
404 404 (``file:///``) schema.
405 405 """
406 406 url = url.encode('utf8')
407 407 if url != 'default' and '://' not in url:
408 408 url = "file:" + urllib.pathname2url(url)
409 409 return url
410 410
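A small sketch of the normalization in _get_url(), assuming a POSIX path (the path itself is made up); this uses python2's urllib, matching the import at the top of this module:

import urllib

path = '/srv/repos/my-repo'
url = 'file:' + urllib.pathname2url(path)
print(url)  # file:///srv/repos/my-repo
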
411 411 def get_hook_location(self):
412 412 """
413 413 returns absolute path to location where hooks are stored
414 414 """
415 415 return os.path.join(self.path, '.hg', '.hgrc')
416 416
417 417 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
418 418 """
419 419 Returns ``MercurialCommit`` object representing repository's
420 420 commit at the given `commit_id` or `commit_idx`.
421 421 """
422 422 if self.is_empty():
423 423 raise EmptyRepositoryError("There are no commits yet")
424 424
425 425 if commit_id is not None:
426 426 self._validate_commit_id(commit_id)
427 427 try:
428 428 idx = self._commit_ids[commit_id]
429 429 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
430 430 except KeyError:
431 431 pass
432 432 elif commit_idx is not None:
433 433 self._validate_commit_idx(commit_idx)
434 434 try:
435 435 id_ = self.commit_ids[commit_idx]
436 436 if commit_idx < 0:
437 437 commit_idx += len(self.commit_ids)
438 438 return MercurialCommit(
439 439 self, id_, commit_idx, pre_load=pre_load)
440 440 except IndexError:
441 441 commit_id = commit_idx
442 442 else:
443 443 commit_id = "tip"
444 444
445 445 if isinstance(commit_id, unicode):
446 446 commit_id = safe_str(commit_id)
447 447
448 448 try:
449 449 raw_id, idx = self._remote.lookup(commit_id, both=True)
450 450 except CommitDoesNotExistError:
451 451 msg = "Commit %s does not exist for %s" % (
452 452 commit_id, self)
453 453 raise CommitDoesNotExistError(msg)
454 454
455 455 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
456 456
457 457 def get_commits(
458 458 self, start_id=None, end_id=None, start_date=None, end_date=None,
459 459 branch_name=None, show_hidden=False, pre_load=None):
460 460 """
461 461 Returns generator of ``MercurialCommit`` objects from start to end
462 462 (both are inclusive)
463 463
464 464 :param start_id: None, str(commit_id)
465 465 :param end_id: None, str(commit_id)
466 466 :param start_date: if specified, commits with commit date less than
467 467 ``start_date`` would be filtered out from returned set
468 468 :param end_date: if specified, commits with commit date greater than
469 469 ``end_date`` would be filtered out from returned set
470 470 :param branch_name: if specified, commits not reachable from given
471 471 branch would be filtered out from returned set
472 472 :param show_hidden: Show hidden commits such as obsolete or hidden from
473 473 Mercurial evolve
474 474 :raise BranchDoesNotExistError: If given ``branch_name`` does not
475 475 exist.
476 476 :raise CommitDoesNotExistError: If commit for given ``start`` or
477 477 ``end`` could not be found.
478 478 """
479 479 # actually we should check now if it's not an empty repo
480 480 branch_ancestors = False
481 481 if self.is_empty():
482 482 raise EmptyRepositoryError("There are no commits yet")
483 483 self._validate_branch_name(branch_name)
484 484
485 485 if start_id is not None:
486 486 self._validate_commit_id(start_id)
487 487 c_start = self.get_commit(commit_id=start_id)
488 488 start_pos = self._commit_ids[c_start.raw_id]
489 489 else:
490 490 start_pos = None
491 491
492 492 if end_id is not None:
493 493 self._validate_commit_id(end_id)
494 494 c_end = self.get_commit(commit_id=end_id)
495 495 end_pos = max(0, self._commit_ids[c_end.raw_id])
496 496 else:
497 497 end_pos = None
498 498
499 499 if None not in [start_id, end_id] and start_pos > end_pos:
500 500 raise RepositoryError(
501 501 "Start commit '%s' cannot be after end commit '%s'" %
502 502 (start_id, end_id))
503 503
504 504 if end_pos is not None:
505 505 end_pos += 1
506 506
507 507 commit_filter = []
508 508
509 509 if branch_name and not branch_ancestors:
510 510 commit_filter.append('branch("%s")' % (branch_name,))
511 511 elif branch_name and branch_ancestors:
512 512 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
513 513
514 514 if start_date and not end_date:
515 515 commit_filter.append('date(">%s")' % (start_date,))
516 516 if end_date and not start_date:
517 517 commit_filter.append('date("<%s")' % (end_date,))
518 518 if start_date and end_date:
519 519 commit_filter.append(
520 520 'date(">%s") and date("<%s")' % (start_date, end_date))
521 521
522 522 if not show_hidden:
523 523 commit_filter.append('not obsolete()')
524 524 commit_filter.append('not hidden()')
525 525
526 526 # TODO: johbo: Figure out a simpler way for this solution
527 527 collection_generator = CollectionGenerator
528 528 if commit_filter:
529 529 commit_filter = ' and '.join(map(safe_str, commit_filter))
530 530 revisions = self._remote.rev_range([commit_filter])
531 531 collection_generator = MercurialIndexBasedCollectionGenerator
532 532 else:
533 533 revisions = self.commit_ids
534 534
535 535 if start_pos or end_pos:
536 536 revisions = revisions[start_pos:end_pos]
537 537
538 538 return collection_generator(self, revisions, pre_load=pre_load)
539 539
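For illustration, the revset string assembled by get_commits() under assumed inputs (branch 'default', both dates set, hidden commits excluded) would look like this:

commit_filter = [
    'branch("default")',
    'date(">2018-01-01") and date("<2018-02-01")',
    'not obsolete()',
    'not hidden()',
]
revspec = ' and '.join(commit_filter)
print(revspec)
# branch("default") and date(">2018-01-01") and date("<2018-02-01") and not obsolete() and not hidden()
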
540 540 def pull(self, url, commit_ids=None):
541 541 """
542 542 Pull changes from external location.
543 543
544 544 :param commit_ids: Optional. Can be set to a list of commit ids
545 545 which shall be pulled from the other repository.
546 546 """
547 547 url = self._get_url(url)
548 548 self._remote.pull(url, commit_ids=commit_ids)
549 549 self._remote.invalidate_vcs_cache()
550 550
551 551 def fetch(self, url, commit_ids=None):
552 552 """
553 553 Backward compatibility with GIT fetch==pull
554 554 """
555 555 return self.pull(url, commit_ids=commit_ids)
556 556
557 557 def push(self, url):
558 558 url = self._get_url(url)
559 559 self._remote.sync_push(url)
560 560
561 561 def _local_clone(self, clone_path):
562 562 """
563 563 Create a local clone of the current repo.
564 564 """
565 565 self._remote.clone(self.path, clone_path, update_after_clone=True,
566 566 hooks=False)
567 567
568 568 def _update(self, revision, clean=False):
569 569 """
570 570 Update the working copy to the specified revision.
571 571 """
572 572 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
573 573 self._remote.update(revision, clean=clean)
574 574
575 575 def _identify(self):
576 576 """
577 577 Return the current state of the working directory.
578 578 """
579 579 return self._remote.identify().strip().rstrip('+')
580 580
581 581 def _heads(self, branch=None):
582 582 """
583 583 Return the commit ids of the repository heads.
584 584 """
585 585 return self._remote.heads(branch=branch).strip().split(' ')
586 586
587 587 def _ancestor(self, revision1, revision2):
588 588 """
589 589 Return the common ancestor of the two revisions.
590 590 """
591 591 return self._remote.ancestor(revision1, revision2)
592 592
593 593 def _local_push(
594 594 self, revision, repository_path, push_branches=False,
595 595 enable_hooks=False):
596 596 """
597 597 Push the given revision to the specified repository.
598 598
599 599 :param push_branches: allow to create branches in the target repo.
600 600 """
601 601 self._remote.push(
602 602 [revision], repository_path, hooks=enable_hooks,
603 603 push_branches=push_branches)
604 604
605 605 def _local_merge(self, target_ref, merge_message, user_name, user_email,
606 606 source_ref, use_rebase=False, dry_run=False):
607 607 """
608 608 Merge the given source_revision into the checked out revision.
609 609
610 610 Returns the commit id of the merge and a boolean indicating if the
611 611 commit needs to be pushed.
612 612 """
613 613 self._update(target_ref.commit_id)
614 614
615 615 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
616 616 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
617 617
618 618 if ancestor == source_ref.commit_id:
619 619 # Nothing to do, the changes were already integrated
620 620 return target_ref.commit_id, False
621 621
622 622 elif ancestor == target_ref.commit_id and is_the_same_branch:
623 623 # In this case we should force a commit message
624 624 return source_ref.commit_id, True
625 625
626 626 if use_rebase:
627 627 try:
628 628 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
629 629 target_ref.commit_id)
630 630 self.bookmark(bookmark_name, revision=source_ref.commit_id)
631 631 self._remote.rebase(
632 632 source=source_ref.commit_id, dest=target_ref.commit_id)
633 633 self._remote.invalidate_vcs_cache()
634 634 self._update(bookmark_name)
635 635 return self._identify(), True
636 636 except RepositoryError:
637 637 # The rebase-abort may raise another exception which 'hides'
638 638 # the original one, therefore we log it here.
639 639 log.exception('Error while rebasing shadow repo during merge.')
640 640
641 641 # Cleanup any rebase leftovers
642 642 self._remote.invalidate_vcs_cache()
643 643 self._remote.rebase(abort=True)
644 644 self._remote.invalidate_vcs_cache()
645 645 self._remote.update(clean=True)
646 646 raise
647 647 else:
648 648 try:
649 649 self._remote.merge(source_ref.commit_id)
650 650 self._remote.invalidate_vcs_cache()
651 651 self._remote.commit(
652 652 message=safe_str(merge_message),
653 653 username=safe_str('%s <%s>' % (user_name, user_email)))
654 654 self._remote.invalidate_vcs_cache()
655 655 return self._identify(), True
656 656 except RepositoryError:
657 657 # Cleanup any merge leftovers
658 658 self._remote.update(clean=True)
659 659 raise
660 660
661 661 def _local_close(self, target_ref, user_name, user_email,
662 662 source_ref, close_message=''):
663 663 """
664 664 Close the branch of the given source_revision
665 665
666 666 Returns the commit id of the close and a boolean indicating if the
667 667 commit needs to be pushed.
668 668 """
669 669 self._update(source_ref.commit_id)
670 670 message = close_message or "Closing branch: `{}`".format(source_ref.name)
671 671 try:
672 672 self._remote.commit(
673 673 message=safe_str(message),
674 674 username=safe_str('%s <%s>' % (user_name, user_email)),
675 675 close_branch=True)
676 676 self._remote.invalidate_vcs_cache()
677 677 return self._identify(), True
678 678 except RepositoryError:
679 679 # Cleanup any commit leftovers
680 680 self._remote.update(clean=True)
681 681 raise
682 682
683 683 def _is_the_same_branch(self, target_ref, source_ref):
684 684 return (
685 685 self._get_branch_name(target_ref) ==
686 686 self._get_branch_name(source_ref))
687 687
688 688 def _get_branch_name(self, ref):
689 689 if ref.type == 'branch':
690 690 return ref.name
691 691 return self._remote.ctx_branch(ref.commit_id)
692 692
693 693 def _maybe_prepare_merge_workspace(
694 694 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
695 695 shadow_repository_path = self._get_shadow_repository_path(
696 696 repo_id, workspace_id)
697 697 if not os.path.exists(shadow_repository_path):
698 698 self._local_clone(shadow_repository_path)
699 699 log.debug(
700 700 'Prepared shadow repository in %s', shadow_repository_path)
701 701
702 702 return shadow_repository_path
703 703
704 704 def _merge_repo(self, repo_id, workspace_id, target_ref,
705 705 source_repo, source_ref, merge_message,
706 706 merger_name, merger_email, dry_run=False,
707 707 use_rebase=False, close_branch=False):
708 708
709 709 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
710 710 'rebase' if use_rebase else 'merge', dry_run)
711 711 if target_ref.commit_id not in self._heads():
712 712 return MergeResponse(
713 713 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
714 714
715 715 try:
716 716 if (target_ref.type == 'branch' and
717 717 len(self._heads(target_ref.name)) != 1):
718 718 return MergeResponse(
719 719 False, False, None,
720 720 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
721 721 except CommitDoesNotExistError:
722 722 log.exception('Failure when looking up branch heads on hg target')
723 723 return MergeResponse(
724 724 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
725 725
726 726 shadow_repository_path = self._maybe_prepare_merge_workspace(
727 727 repo_id, workspace_id, target_ref, source_ref)
728 728 shadow_repo = self._get_shadow_instance(shadow_repository_path)
729 729
730 730 log.debug('Pulling in target reference %s', target_ref)
731 731 self._validate_pull_reference(target_ref)
732 732 shadow_repo._local_pull(self.path, target_ref)
733 733 try:
734 734 log.debug('Pulling in source reference %s', source_ref)
735 735 source_repo._validate_pull_reference(source_ref)
736 736 shadow_repo._local_pull(source_repo.path, source_ref)
737 737 except CommitDoesNotExistError:
738 738 log.exception('Failure when doing local pull on hg shadow repo')
739 739 return MergeResponse(
740 740 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
741 741
742 742 merge_ref = None
743 743 merge_commit_id = None
744 744 close_commit_id = None
745 745 merge_failure_reason = MergeFailureReason.NONE
746 746
747 747 # enforce that close branch should be used only in case we source from
748 748 # an actual Branch
749 749 close_branch = close_branch and source_ref.type == 'branch'
750 750
751 751 # don't allow to close branch if source and target are the same
752 752 close_branch = close_branch and source_ref.name != target_ref.name
753 753
754 754 needs_push_on_close = False
755 755 if close_branch and not use_rebase and not dry_run:
756 756 try:
757 757 close_commit_id, needs_push_on_close = shadow_repo._local_close(
758 758 target_ref, merger_name, merger_email, source_ref)
759 759 merge_possible = True
760 760 except RepositoryError:
761 761 log.exception(
762 762 'Failure when doing close branch on hg shadow repo')
763 763 merge_possible = False
764 764 merge_failure_reason = MergeFailureReason.MERGE_FAILED
765 765 else:
766 766 merge_possible = True
767 767
768 768 needs_push = False
769 769 if merge_possible:
770 770 try:
771 771 merge_commit_id, needs_push = shadow_repo._local_merge(
772 772 target_ref, merge_message, merger_name, merger_email,
773 773 source_ref, use_rebase=use_rebase, dry_run=dry_run)
774 774 merge_possible = True
775 775
776 776 # read the state of the close action, as it
777 777 # may have required a push
778 778 needs_push = needs_push or needs_push_on_close
779 779
780 780 # Set a bookmark pointing to the merge commit. This bookmark
781 781 # may be used to easily identify the last successful merge
782 782 # commit in the shadow repository.
783 783 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
784 784 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
785 785 except SubrepoMergeError:
786 786 log.exception(
787 787 'Subrepo merge error during local merge on hg shadow repo.')
788 788 merge_possible = False
789 789 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
790 790 needs_push = False
791 791 except RepositoryError:
792 792 log.exception('Failure when doing local merge on hg shadow repo')
793 793 merge_possible = False
794 794 merge_failure_reason = MergeFailureReason.MERGE_FAILED
795 795 needs_push = False
796 796
797 797 if merge_possible and not dry_run:
798 798 if needs_push:
799 799 # In case the target is a bookmark, update it, so after pushing
800 800 # the bookmark is also updated in the target.
801 801 if target_ref.type == 'book':
802 802 shadow_repo.bookmark(
803 803 target_ref.name, revision=merge_commit_id)
804 804 try:
805 805 shadow_repo_with_hooks = self._get_shadow_instance(
806 806 shadow_repository_path,
807 807 enable_hooks=True)
808 808 # This is the actual merge action, we push from shadow
809 809 # into origin.
810 810 # Note: the push_branches option will push any new branch
811 811 # defined in the source repository to the target. This may
812 812 # be dangerous as branches are permanent in Mercurial.
813 813 # This feature was requested in issue #441.
814 814 shadow_repo_with_hooks._local_push(
815 815 merge_commit_id, self.path, push_branches=True,
816 816 enable_hooks=True)
817 817
818 818 # maybe we also need to push the close_commit_id
819 819 if close_commit_id:
820 820 shadow_repo_with_hooks._local_push(
821 821 close_commit_id, self.path, push_branches=True,
822 822 enable_hooks=True)
823 823 merge_succeeded = True
824 824 except RepositoryError:
825 825 log.exception(
826 826 'Failure when doing local push from the shadow '
827 827 'repository to the target repository.')
828 828 merge_succeeded = False
829 829 merge_failure_reason = MergeFailureReason.PUSH_FAILED
830 830 else:
831 831 merge_succeeded = True
832 832 else:
833 833 merge_succeeded = False
834 834
835 835 return MergeResponse(
836 836 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
837 837
838 838 def _get_shadow_instance(
839 839 self, shadow_repository_path, enable_hooks=False):
840 840 config = self.config.copy()
841 841 if not enable_hooks:
842 842 config.clear_section('hooks')
843 843 return MercurialRepository(shadow_repository_path, config)
844 844
845 845 def _validate_pull_reference(self, reference):
846 846 if not (reference.name in self.bookmarks or
847 847 reference.name in self.branches or
848 848 self.get_commit(reference.commit_id)):
849 849 raise CommitDoesNotExistError(
850 850 'Unknown branch, bookmark or commit id')
851 851
852 852 def _local_pull(self, repository_path, reference):
853 853 """
854 854 Fetch a branch, bookmark or commit from a local repository.
855 855 """
856 856 repository_path = os.path.abspath(repository_path)
857 857 if repository_path == self.path:
858 858 raise ValueError('Cannot pull from the same repository')
859 859
860 860 reference_type_to_option_name = {
861 861 'book': 'bookmark',
862 862 'branch': 'branch',
863 863 }
864 864 option_name = reference_type_to_option_name.get(
865 865 reference.type, 'revision')
866 866
867 867 if option_name == 'revision':
868 868 ref = reference.commit_id
869 869 else:
870 870 ref = reference.name
871 871
872 872 options = {option_name: [ref]}
873 873 self._remote.pull_cmd(repository_path, hooks=False, **options)
874 874 self._remote.invalidate_vcs_cache()
875 875
876 876 def bookmark(self, bookmark, revision=None):
877 877 if isinstance(bookmark, unicode):
878 878 bookmark = safe_str(bookmark)
879 879 self._remote.bookmark(bookmark, revision=revision)
880 880 self._remote.invalidate_vcs_cache()
881 881
882 882 def get_path_permissions(self, username):
883 883 hgacl_file = os.path.join(self.path, '.hg/hgacl')
884 884
885 885 def read_patterns(suffix):
886 886 svalue = None
887 887 try:
888 888 svalue = hgacl.get('narrowhgacl', username + suffix)
889 889 except configparser.NoOptionError:
890 890 try:
891 891 svalue = hgacl.get('narrowhgacl', 'default' + suffix)
892 892 except configparser.NoOptionError:
893 893 pass
894 894 if not svalue:
895 895 return None
896 896 result = ['/']
897 897 for pattern in svalue.split():
898 898 result.append(pattern)
899 899 if '*' not in pattern and '?' not in pattern:
900 900 result.append(pattern + '/*')
901 901 return result
902 902
903 903 if os.path.exists(hgacl_file):
904 904 try:
905 905 hgacl = configparser.RawConfigParser()
906 906 hgacl.read(hgacl_file)
907 907
908 908 includes = read_patterns('.includes')
909 909 excludes = read_patterns('.excludes')
910 910 return BasePathPermissionChecker.create_from_patterns(
911 911 includes, excludes)
912 912 except BaseException as e:
913 913 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
914 914 hgacl_file, self.name, e)
915 915 raise exceptions.RepositoryRequirementError(msg)
916 916 else:
917 917 return None
918 918
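For reference, a minimal `.hg/hgacl` file that get_path_permissions() can read might look like this; the section and option naming follows the code above, while the user name and paths are made up:

[narrowhgacl]
jdoe.includes = docs src/module
jdoe.excludes = src/module/private

For the non-wildcard pattern `docs`, read_patterns() also appends `docs/*`, so the resulting include list is ['/', 'docs', 'docs/*', 'src/module', 'src/module/*'].
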
919 919
920 920 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
921 921
922 922 def _commit_factory(self, commit_id):
923 923 return self.repo.get_commit(
924 924 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,773 +1,773 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 repo group model for RhodeCode
24 24 """
25 25
26 26 import os
27 27 import datetime
28 28 import itertools
29 29 import logging
30 30 import shutil
31 31 import traceback
32 32 import string
33 33
34 34 from zope.cachedescriptors.property import Lazy as LazyProperty
35 35
36 36 from rhodecode import events
37 37 from rhodecode.model import BaseModel
38 38 from rhodecode.model.db import (_hash_key,
39 39 RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm,
40 40 UserGroup, Repository)
41 41 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
42 42 from rhodecode.lib.caching_query import FromCache
43 43 from rhodecode.lib.utils2 import action_logger_generic, datetime_to_time
44 44
45 45 log = logging.getLogger(__name__)
46 46
47 47
48 48 class RepoGroupModel(BaseModel):
49 49
50 50 cls = RepoGroup
51 51 PERSONAL_GROUP_DESC = 'personal repo group of user `%(username)s`'
52 52 PERSONAL_GROUP_PATTERN = '${username}' # default
53 53
54 54 def _get_user_group(self, users_group):
55 55 return self._get_instance(UserGroup, users_group,
56 56 callback=UserGroup.get_by_group_name)
57 57
58 58 def _get_repo_group(self, repo_group):
59 59 return self._get_instance(RepoGroup, repo_group,
60 60 callback=RepoGroup.get_by_group_name)
61 61
62 62 @LazyProperty
63 63 def repos_path(self):
64 64 """
65 65 Gets the repositories root path from database
66 66 """
67 67
68 68 settings_model = VcsSettingsModel(sa=self.sa)
69 69 return settings_model.get_repos_location()
70 70
71 71 def get_by_group_name(self, repo_group_name, cache=None):
72 72 repo = self.sa.query(RepoGroup) \
73 73 .filter(RepoGroup.group_name == repo_group_name)
74 74
75 75 if cache:
76 76 name_key = _hash_key(repo_group_name)
77 77 repo = repo.options(
78 78 FromCache("sql_cache_short", "get_repo_group_%s" % name_key))
79 79 return repo.scalar()
80 80
81 81 def get_default_create_personal_repo_group(self):
82 82 value = SettingsModel().get_setting_by_name(
83 83 'create_personal_repo_group')
84 84 return value.app_settings_value if value else False
85 85
86 86 def get_personal_group_name_pattern(self):
87 87 value = SettingsModel().get_setting_by_name(
88 88 'personal_repo_group_pattern')
89 89 val = value.app_settings_value if value else None
90 90 group_template = val or self.PERSONAL_GROUP_PATTERN
91 91
92 92 group_template = group_template.lstrip('/')
93 93 return group_template
94 94
95 95 def get_personal_group_name(self, user):
96 96 template = self.get_personal_group_name_pattern()
97 97 return string.Template(template).safe_substitute(
98 98 username=user.username,
99 99 user_id=user.user_id,
100 100 )
101 101
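A minimal sketch of the pattern expansion performed above; the custom template here is hypothetical, the default being plain '${username}':

import string

template = 'users/${username}'
name = string.Template(template).safe_substitute(
    username='jdoe', user_id=42)
print(name)  # users/jdoe
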
102 102 def create_personal_repo_group(self, user, commit_early=True):
103 103 desc = self.PERSONAL_GROUP_DESC % {'username': user.username}
104 104 personal_repo_group_name = self.get_personal_group_name(user)
105 105
106 106 # create a new one
107 107 RepoGroupModel().create(
108 108 group_name=personal_repo_group_name,
109 109 group_description=desc,
110 110 owner=user.username,
111 111 personal=True,
112 112 commit_early=commit_early)
113 113
114 114 def _create_default_perms(self, new_group):
115 115 # create default permission
116 116 default_perm = 'group.read'
117 117 def_user = User.get_default_user()
118 118 for p in def_user.user_perms:
119 119 if p.permission.permission_name.startswith('group.'):
120 120 default_perm = p.permission.permission_name
121 121 break
122 122
123 123 repo_group_to_perm = UserRepoGroupToPerm()
124 124 repo_group_to_perm.permission = Permission.get_by_key(default_perm)
125 125
126 126 repo_group_to_perm.group = new_group
127 127 repo_group_to_perm.user_id = def_user.user_id
128 128 return repo_group_to_perm
129 129
130 130 def _get_group_name_and_parent(self, group_name_full, repo_in_path=False,
131 131 get_object=False):
132 132 """
133 133 Gets the group name and the parent group name from the given group name.
134 134 If repo_in_path is set to True, we assume the full path also includes
135 135 the repo name; in that case we drop the last element.
136 136
137 137 :param group_name_full:
138 138 """
139 139 split_paths = 1
140 140 if repo_in_path:
141 141 split_paths = 2
142 142 _parts = group_name_full.rsplit(RepoGroup.url_sep(), split_paths)
143 143
144 144 if repo_in_path and len(_parts) > 1:
145 145 # such case last element is the repo_name
146 146 _parts.pop(-1)
147 147 group_name_cleaned = _parts[-1] # just the group name
148 148 parent_repo_group_name = None
149 149
150 150 if len(_parts) > 1:
151 151 parent_repo_group_name = _parts[0]
152 152
153 153 parent_group = None
154 154 if parent_repo_group_name:
155 155 parent_group = RepoGroup.get_by_group_name(parent_repo_group_name)
156 156
157 157 if get_object:
158 158 return group_name_cleaned, parent_repo_group_name, parent_group
159 159
160 160 return group_name_cleaned, parent_repo_group_name
161 161
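A worked example of the rsplit-based parsing above, using a hypothetical full path with repo_in_path=True:

full = 'parent/child/my-repo'
_parts = full.rsplit('/', 2)  # ['parent', 'child', 'my-repo']
_parts.pop(-1)                # drop the trailing repo name
group_name = _parts[-1]       # 'child'
parent = _parts[0] if len(_parts) > 1 else None  # 'parent'
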
162 162 def check_exist_filesystem(self, group_name, exc_on_failure=True):
163 163 create_path = os.path.join(self.repos_path, group_name)
164 164 log.debug('creating new group in %s', create_path)
165 165
166 166 if os.path.isdir(create_path):
167 167 if exc_on_failure:
168 168 abs_create_path = os.path.abspath(create_path)
169 169 raise Exception('Directory `{}` already exists!'.format(abs_create_path))
170 170 return False
171 171 return True
172 172
173 173 def _create_group(self, group_name):
174 174 """
175 175 makes repository group on filesystem
176 176
177 177 :param repo_name:
178 178 :param parent_id:
179 179 """
180 180
181 181 self.check_exist_filesystem(group_name)
182 182 create_path = os.path.join(self.repos_path, group_name)
183 183 log.debug('creating new group in %s', create_path)
184 os.makedirs(create_path, mode=0755)
184 os.makedirs(create_path, mode=0o755)
185 185 log.debug('created group in %s', create_path)
186 186
187 187 def _rename_group(self, old, new):
188 188 """
189 189 Renames a group on filesystem
190 190
191 191 :param group_name:
192 192 """
193 193
194 194 if old == new:
195 195 log.debug('skipping group rename')
196 196 return
197 197
198 198 log.debug('renaming repository group from %s to %s', old, new)
199 199
200 200 old_path = os.path.join(self.repos_path, old)
201 201 new_path = os.path.join(self.repos_path, new)
202 202
203 203 log.debug('renaming repos paths from %s to %s', old_path, new_path)
204 204
205 205 if os.path.isdir(new_path):
206 206 raise Exception('Cannot rename into already '
207 207 'existing dir %s' % new_path)
208 208 shutil.move(old_path, new_path)
209 209
210 210 def _delete_filesystem_group(self, group, force_delete=False):
211 211 """
212 212 Deletes a group from a filesystem
213 213
214 214 :param group: instance of group from database
215 215 :param force_delete: use shutil rmtree to remove all objects
216 216 """
217 217 paths = group.full_path.split(RepoGroup.url_sep())
218 218 paths = os.sep.join(paths)
219 219
220 220 rm_path = os.path.join(self.repos_path, paths)
221 221 log.info("Removing group %s", rm_path)
222 222 # delete only if that path really exists
223 223 if os.path.isdir(rm_path):
224 224 if force_delete:
225 225 shutil.rmtree(rm_path)
226 226 else:
227 227 # archive that group
228 228 _now = datetime.datetime.now()
229 229 _ms = str(_now.microsecond).rjust(6, '0')
230 230 _d = 'rm__%s_GROUP_%s' % (
231 231 _now.strftime('%Y%m%d_%H%M%S_' + _ms), group.name)
232 232 shutil.move(rm_path, os.path.join(self.repos_path, _d))
233 233
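The archive directory created above gets a timestamped name; a small worked example with a fixed datetime:

import datetime

_now = datetime.datetime(2018, 11, 20, 14, 3, 5, 123)
_ms = str(_now.microsecond).rjust(6, '0')
print('rm__%s_GROUP_%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms), 'docs'))
# rm__20181120_140305_000123_GROUP_docs
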
234 234 def create(self, group_name, group_description, owner, just_db=False,
235 235 copy_permissions=False, personal=None, commit_early=True):
236 236
237 237 (group_name_cleaned,
238 238 parent_group_name) = RepoGroupModel()._get_group_name_and_parent(group_name)
239 239
240 240 parent_group = None
241 241 if parent_group_name:
242 242 parent_group = self._get_repo_group(parent_group_name)
243 243 if not parent_group:
244 244 # we tried to create a nested group, but the parent
245 245 # does not exist
246 246 raise ValueError(
247 247 'Parent group `%s` given in `%s` group name '
248 248 'does not exist yet.' % (parent_group_name, group_name))
249 249
250 250 # because we are doing a cleanup, we need to check if such a directory
251 251 # already exists. If we don't do that we can accidentally delete an
252 252 # existing directory via cleanup, which can cause data issues, since
253 253 # delete renames the folder to a special syntax that later cleanup
254 254 # functions can delete
255 255 cleanup_group = self.check_exist_filesystem(group_name,
256 256 exc_on_failure=False)
257 257 user = self._get_user(owner)
258 258 if not user:
259 259 raise ValueError('Owner %s not found as rhodecode user' % owner)
260 260
261 261 try:
262 262 new_repo_group = RepoGroup()
263 263 new_repo_group.user = user
264 264 new_repo_group.group_description = group_description or group_name
265 265 new_repo_group.parent_group = parent_group
266 266 new_repo_group.group_name = group_name
267 267 new_repo_group.personal = personal
268 268
269 269 self.sa.add(new_repo_group)
270 270
271 271 # create an ADMIN permission for owner except if we're super admin,
272 272 # later owner should go into the owner field of groups
273 273 if not user.is_admin:
274 274 self.grant_user_permission(repo_group=new_repo_group,
275 275 user=owner, perm='group.admin')
276 276
277 277 if parent_group and copy_permissions:
278 278 # copy permissions from parent
279 279 user_perms = UserRepoGroupToPerm.query() \
280 280 .filter(UserRepoGroupToPerm.group == parent_group).all()
281 281
282 282 group_perms = UserGroupRepoGroupToPerm.query() \
283 283 .filter(UserGroupRepoGroupToPerm.group == parent_group).all()
284 284
285 285 for perm in user_perms:
286 286 # don't copy over the permission for the user who is creating
287 287 # this group; if they are not a super admin they get the admin
288 288 # permission set above
289 289 if perm.user != user or user.is_admin:
290 290 UserRepoGroupToPerm.create(
291 291 perm.user, new_repo_group, perm.permission)
292 292
293 293 for perm in group_perms:
294 294 UserGroupRepoGroupToPerm.create(
295 295 perm.users_group, new_repo_group, perm.permission)
296 296 else:
297 297 perm_obj = self._create_default_perms(new_repo_group)
298 298 self.sa.add(perm_obj)
299 299
300 300 # now commit the changes early, so we are sure everything is in
301 301 # the database
302 302 if commit_early:
303 303 self.sa.commit()
304 304 if not just_db:
305 305 self._create_group(new_repo_group.group_name)
306 306
307 307 # trigger the post hook
308 308 from rhodecode.lib.hooks_base import log_create_repository_group
309 309 repo_group = RepoGroup.get_by_group_name(group_name)
310 310 log_create_repository_group(
311 311 created_by=user.username, **repo_group.get_dict())
312 312
313 313 # Trigger create event.
314 314 events.trigger(events.RepoGroupCreateEvent(repo_group))
315 315
316 316 return new_repo_group
317 317 except Exception:
318 318 self.sa.rollback()
319 319 log.exception('Exception occurred when creating repository group, '
320 320 'doing cleanup...')
321 321 # roll back things manually!
322 322 repo_group = RepoGroup.get_by_group_name(group_name)
323 323 if repo_group:
324 324 RepoGroup.delete(repo_group.group_id)
325 325 self.sa.commit()
326 326 if cleanup_group:
327 327 RepoGroupModel()._delete_filesystem_group(repo_group)
328 328 raise
329 329
330 330 def update_permissions(
331 331 self, repo_group, perm_additions=None, perm_updates=None,
332 332 perm_deletions=None, recursive=None, check_perms=True,
333 333 cur_user=None):
334 334 from rhodecode.model.repo import RepoModel
335 335 from rhodecode.lib.auth import HasUserGroupPermissionAny
336 336
337 337 if not perm_additions:
338 338 perm_additions = []
339 339 if not perm_updates:
340 340 perm_updates = []
341 341 if not perm_deletions:
342 342 perm_deletions = []
343 343
344 344 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
345 345
346 346 changes = {
347 347 'added': [],
348 348 'updated': [],
349 349 'deleted': []
350 350 }
351 351
352 352 def _set_perm_user(obj, user, perm):
353 353 if isinstance(obj, RepoGroup):
354 354 self.grant_user_permission(
355 355 repo_group=obj, user=user, perm=perm)
356 356 elif isinstance(obj, Repository):
357 357 # private repos do not allow changing the default user's
358 358 # permissions via recursive mode
359 359 if obj.private and user == User.DEFAULT_USER:
360 360 return
361 361
362 362 # we were given a group permission, but we have to switch it
363 363 # to the corresponding repo permission
364 364 perm = perm.replace('group.', 'repository.')
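# e.g. 'group.write' becomes 'repository.write'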
365 365 RepoModel().grant_user_permission(
366 366 repo=obj, user=user, perm=perm)
367 367
368 368 def _set_perm_group(obj, users_group, perm):
369 369 if isinstance(obj, RepoGroup):
370 370 self.grant_user_group_permission(
371 371 repo_group=obj, group_name=users_group, perm=perm)
372 372 elif isinstance(obj, Repository):
373 373 # we were given a group permission, but we have to switch it
374 374 # to the corresponding repo permission
375 375 perm = perm.replace('group.', 'repository.')
376 376 RepoModel().grant_user_group_permission(
377 377 repo=obj, group_name=users_group, perm=perm)
378 378
379 379 def _revoke_perm_user(obj, user):
380 380 if isinstance(obj, RepoGroup):
381 381 self.revoke_user_permission(repo_group=obj, user=user)
382 382 elif isinstance(obj, Repository):
383 383 RepoModel().revoke_user_permission(repo=obj, user=user)
384 384
385 385 def _revoke_perm_group(obj, user_group):
386 386 if isinstance(obj, RepoGroup):
387 387 self.revoke_user_group_permission(
388 388 repo_group=obj, group_name=user_group)
389 389 elif isinstance(obj, Repository):
390 390 RepoModel().revoke_user_group_permission(
391 391 repo=obj, group_name=user_group)
392 392
393 393 # start updates
394 394 log.debug('Now updating permissions for %s in recursive mode:%s',
395 395 repo_group, recursive)
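# recursive modes, as handled by the loop below:
#   'all'    -> apply to this group and everything below it (subgroups and repos)
#   'repos'  -> apply to this group and all repositories under it (skip subgroups)
#   'groups' -> apply to this group and all its subgroups (skip repos)
#   other    -> apply to this group only (default; the loop breaks after one pass)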
396 396
397 397 # initialize check function, we'll call that multiple times
398 398 has_group_perm = HasUserGroupPermissionAny(*req_perms)
399 399
400 400 for obj in repo_group.recursive_groups_and_repos():
401 401 # the iterated obj is an instance of a repo group or a repository in
402 402 # that group; the recursive option can be: none, repos, groups, all
403 403 if recursive == 'all':
404 404 obj = obj  # apply to the iterated object itself (group or repo)
405 405 elif recursive == 'repos':
406 406 # skip groups other than this one
407 407 if isinstance(obj, RepoGroup) and obj != repo_group:
408 408 continue
409 409 elif recursive == 'groups':
410 410 # skip repos
411 411 if isinstance(obj, Repository):
412 412 continue
413 413 else: # recursive == 'none':
414 414 # DEFAULT option - don't apply to the iterated objects;
415 415 # we also break at the end of this loop if we are not
416 416 # in recursive mode
417 417 obj = repo_group
418 418
419 419 change_obj = obj.get_api_data()
420 420
421 421 # update permissions
422 422 for member_id, perm, member_type in perm_updates:
423 423 member_id = int(member_id)
424 424 if member_type == 'user':
425 425 member_name = User.get(member_id).username
426 426 # this also updates the current one if found
427 427 _set_perm_user(obj, user=member_id, perm=perm)
428 428 elif member_type == 'user_group':
429 429 member_name = UserGroup.get(member_id).users_group_name
430 430 if not check_perms or has_group_perm(member_name,
431 431 user=cur_user):
432 432 _set_perm_group(obj, users_group=member_id, perm=perm)
433 433 else:
434 434 raise ValueError("member_type must be 'user' or 'user_group', "
435 435 "got {} instead".format(member_type))
436 436
437 437 changes['updated'].append(
438 438 {'change_obj': change_obj, 'type': member_type,
439 439 'id': member_id, 'name': member_name, 'new_perm': perm})
440 440
441 441 # set new permissions
442 442 for member_id, perm, member_type in perm_additions:
443 443 member_id = int(member_id)
444 444 if member_type == 'user':
445 445 member_name = User.get(member_id).username
446 446 _set_perm_user(obj, user=member_id, perm=perm)
447 447 elif member_type == 'user_group':
448 448 # check if we have permissions to alter this usergroup
449 449 member_name = UserGroup.get(member_id).users_group_name
450 450 if not check_perms or has_group_perm(member_name,
451 451 user=cur_user):
452 452 _set_perm_group(obj, users_group=member_id, perm=perm)
453 453 else:
454 454 raise ValueError("member_type must be 'user' or 'user_group', "
455 455 "got {} instead".format(member_type))
456 456
457 457 changes['added'].append(
458 458 {'change_obj': change_obj, 'type': member_type,
459 459 'id': member_id, 'name': member_name, 'new_perm': perm})
460 460
461 461 # delete permissions
462 462 for member_id, perm, member_type in perm_deletions:
463 463 member_id = int(member_id)
464 464 if member_type == 'user':
465 465 member_name = User.get(member_id).username
466 466 _revoke_perm_user(obj, user=member_id)
467 467 elif member_type == 'user_group':
468 468 # check if we have permissions to alter this usergroup
469 469 member_name = UserGroup.get(member_id).users_group_name
470 470 if not check_perms or has_group_perm(member_name,
471 471 user=cur_user):
472 472 _revoke_perm_group(obj, user_group=member_id)
473 473 else:
474 474 raise ValueError("member_type must be 'user' or 'user_group', "
475 475 "got {} instead".format(member_type))
476 476
477 477 changes['deleted'].append(
478 478 {'change_obj': change_obj, 'type': member_type,
479 479 'id': member_id, 'name': member_name, 'new_perm': perm})
480 480
481 481 # if it's not a recursive call for all/repos/groups,
482 482 # break the loop and don't proceed with other changes
483 483 if recursive not in ['all', 'repos', 'groups']:
484 484 break
485 485
486 486 return changes
487 487
488 488 def update(self, repo_group, form_data):
489 489 try:
490 490 repo_group = self._get_repo_group(repo_group)
491 491 old_path = repo_group.full_path
492 492
493 493 # change properties
494 494 if 'group_description' in form_data:
495 495 repo_group.group_description = form_data['group_description']
496 496
497 497 if 'enable_locking' in form_data:
498 498 repo_group.enable_locking = form_data['enable_locking']
499 499
500 500 if 'group_parent_id' in form_data:
501 501 parent_group = (
502 502 self._get_repo_group(form_data['group_parent_id']))
503 503 repo_group.group_parent_id = (
504 504 parent_group.group_id if parent_group else None)
505 505 repo_group.parent_group = parent_group
506 506
507 507 # mikhail: to update the full_path, we have to explicitly
508 508 # update group_name
509 509 group_name = form_data.get('group_name', repo_group.name)
510 510 repo_group.group_name = repo_group.get_new_name(group_name)
511 511
512 512 new_path = repo_group.full_path
513 513
514 514 if 'user' in form_data:
515 515 repo_group.user = User.get_by_username(form_data['user'])
516 516 repo_group.updated_on = datetime.datetime.now()
517 517 self.sa.add(repo_group)
518 518
519 519 # iterate over all members of this group and apply fixes:
520 520 # set locking if given;
521 521 # if obj is a RepoGroup, also fix the name of the group according
522 522 # to the parent;
523 523 # if obj is a Repository, fix its name.
524 524 # this can be a potentially heavy operation
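# e.g. (hypothetical names) renaming group 'docs' to 'manuals' turns a
# child group 'docs/api' into 'manuals/api' and a repository
# 'docs/api/widget' into 'manuals/api/widget'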
525 525 for obj in repo_group.recursive_groups_and_repos():
526 526 # set the value from its parent
527 527 obj.enable_locking = repo_group.enable_locking
528 528 if isinstance(obj, RepoGroup):
529 529 new_name = obj.get_new_name(obj.name)
530 530 log.debug('Fixing group %s to new name %s',
531 531 obj.group_name, new_name)
532 532 obj.group_name = new_name
533 533 obj.updated_on = datetime.datetime.now()
534 534 elif isinstance(obj, Repository):
535 535 # we need to get all repositories from this new group and
536 536 # rename them according to the new group path
537 537 new_name = obj.get_new_name(obj.just_name)
538 538 log.debug('Fixing repo %s to new name %s',
539 539 obj.repo_name, new_name)
540 540 obj.repo_name = new_name
541 541 obj.updated_on = datetime.datetime.now()
542 542 self.sa.add(obj)
543 543
544 544 self._rename_group(old_path, new_path)
545 545
546 546 # Trigger update event.
547 547 events.trigger(events.RepoGroupUpdateEvent(repo_group))
548 548
549 549 return repo_group
550 550 except Exception:
551 551 log.error(traceback.format_exc())
552 552 raise
553 553
554 554 def delete(self, repo_group, force_delete=False, fs_remove=True):
555 555 repo_group = self._get_repo_group(repo_group)
556 556 if not repo_group:
557 557 return False
558 558 try:
559 559 self.sa.delete(repo_group)
560 560 if fs_remove:
561 561 self._delete_filesystem_group(repo_group, force_delete)
562 562 else:
563 563 log.debug('skipping removal from filesystem')
564 564
565 565 # Trigger delete event.
566 566 events.trigger(events.RepoGroupDeleteEvent(repo_group))
567 567 return True
568 568
569 569 except Exception:
570 570 log.error('Error removing repo_group %s', repo_group)
571 571 raise
572 572
573 573 def grant_user_permission(self, repo_group, user, perm):
574 574 """
575 575 Grant permission for user on given repository group, or update
576 576 existing one if found
577 577
578 578 :param repo_group: Instance of RepoGroup, repositories_group_id,
579 579 or repositories_group name
580 580 :param user: Instance of User, user_id or username
581 581 :param perm: Instance of Permission, or permission_name
582 582 """
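# a minimal usage sketch (hypothetical names):
#   RepoGroupModel().grant_user_permission(
#       repo_group='docs', user='john', perm='group.write')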
583 583
584 584 repo_group = self._get_repo_group(repo_group)
585 585 user = self._get_user(user)
586 586 permission = self._get_perm(perm)
587 587
588 588 # check if we have that permission already
589 589 obj = self.sa.query(UserRepoGroupToPerm)\
590 590 .filter(UserRepoGroupToPerm.user == user)\
591 591 .filter(UserRepoGroupToPerm.group == repo_group)\
592 592 .scalar()
593 593 if obj is None:
594 594 # create a new one!
595 595 obj = UserRepoGroupToPerm()
596 596 obj.group = repo_group
597 597 obj.user = user
598 598 obj.permission = permission
599 599 self.sa.add(obj)
600 600 log.debug('Granted perm %s to %s on %s', perm, user, repo_group)
601 601 action_logger_generic(
602 602 'granted permission: {} to user: {} on repogroup: {}'.format(
603 603 perm, user, repo_group), namespace='security.repogroup')
604 604 return obj
605 605
606 606 def revoke_user_permission(self, repo_group, user):
607 607 """
608 608 Revoke permission for user on given repository group
609 609
610 610 :param repo_group: Instance of RepoGroup, repositories_group_id,
611 611 or repositories_group name
612 612 :param user: Instance of User, user_id or username
613 613 """
614 614
615 615 repo_group = self._get_repo_group(repo_group)
616 616 user = self._get_user(user)
617 617
618 618 obj = self.sa.query(UserRepoGroupToPerm)\
619 619 .filter(UserRepoGroupToPerm.user == user)\
620 620 .filter(UserRepoGroupToPerm.group == repo_group)\
621 621 .scalar()
622 622 if obj:
623 623 self.sa.delete(obj)
624 624 log.debug('Revoked perm on %s from user %s', repo_group, user)
625 625 action_logger_generic(
626 626 'revoked permission from user: {} on repogroup: {}'.format(
627 627 user, repo_group), namespace='security.repogroup')
628 628
629 629 def grant_user_group_permission(self, repo_group, group_name, perm):
630 630 """
631 631 Grant permission for user group on given repository group, or update
632 632 existing one if found
633 633
634 634 :param repo_group: Instance of RepoGroup, repositories_group_id,
635 635 or repositories_group name
636 636 :param group_name: Instance of UserGroup, users_group_id,
637 637 or user group name
638 638 :param perm: Instance of Permission, or permission_name
639 639 """
640 640 repo_group = self._get_repo_group(repo_group)
641 641 group_name = self._get_user_group(group_name)
642 642 permission = self._get_perm(perm)
643 643
644 644 # check if we have that permission already
645 645 obj = self.sa.query(UserGroupRepoGroupToPerm)\
646 646 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
647 647 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
648 648 .scalar()
649 649
650 650 if obj is None:
651 651 # create new
652 652 obj = UserGroupRepoGroupToPerm()
653 653
654 654 obj.group = repo_group
655 655 obj.users_group = group_name
656 656 obj.permission = permission
657 657 self.sa.add(obj)
658 658 log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group)
659 659 action_logger_generic(
660 660 'granted permission: {} to usergroup: {} on repogroup: {}'.format(
661 661 perm, group_name, repo_group), namespace='security.repogroup')
662 662 return obj
663 663
664 664 def revoke_user_group_permission(self, repo_group, group_name):
665 665 """
666 666 Revoke permission for user group on given repository group
667 667
668 668 :param repo_group: Instance of RepoGroup, repositories_group_id,
669 669 or repositories_group name
670 670 :param group_name: Instance of UserGroup, users_group_id,
671 671 or user group name
672 672 """
673 673 repo_group = self._get_repo_group(repo_group)
674 674 group_name = self._get_user_group(group_name)
675 675
676 676 obj = self.sa.query(UserGroupRepoGroupToPerm)\
677 677 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
678 678 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
679 679 .scalar()
680 680 if obj:
681 681 self.sa.delete(obj)
682 682 log.debug('Revoked perm on %s from user group %s', repo_group, group_name)
683 683 action_logger_generic(
684 684 'revoked permission from usergroup: {} on repogroup: {}'.format(
685 685 group_name, repo_group), namespace='security.repogroup')
686 686
687 687 def get_repo_groups_as_dict(self, repo_group_list=None, admin=False,
688 688 super_user_actions=False):
689 689
690 690 from pyramid.threadlocal import get_current_request
691 691 _render = get_current_request().get_partial_renderer(
692 692 'rhodecode:templates/data_table/_dt_elements.mako')
693 693 c = _render.get_call_context()
694 694 h = _render.get_helpers()
695 695
696 696 def quick_menu(repo_group_name):
697 697 return _render('quick_repo_group_menu', repo_group_name)
698 698
699 699 def repo_group_lnk(repo_group_name):
700 700 return _render('repo_group_name', repo_group_name)
701 701
702 702 def last_change(last_change):
703 703 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
704 704 offset = (datetime.datetime.now() - datetime.datetime.utcnow()).seconds
705 705 last_change = last_change + datetime.timedelta(seconds=offset)
706 706 return _render("last_change", last_change)
707 707
708 708 def desc(desc, personal):
709 709 return _render(
710 710 'repo_group_desc', desc, personal, c.visual.stylify_metatags)
711 711
712 712 def repo_group_actions(repo_group_id, repo_group_name, gr_count):
713 713 return _render(
714 714 'repo_group_actions', repo_group_id, repo_group_name, gr_count)
715 715
716 716 def repo_group_name(repo_group_name, children_groups):
717 717 return _render("repo_group_name", repo_group_name, children_groups)
718 718
719 719 def user_profile(username):
720 720 return _render('user_profile', username)
721 721
722 722 repo_group_data = []
723 723 for group in repo_group_list:
724 724
725 725 row = {
726 726 "menu": quick_menu(group.group_name),
727 727 "name": repo_group_lnk(group.group_name),
728 728 "name_raw": group.group_name,
729 729 "last_change": last_change(group.last_db_change),
730 730 "last_change_raw": datetime_to_time(group.last_db_change),
731 731 "desc": desc(group.description_safe, group.personal),
732 732 "top_level_repos": 0,
733 733 "owner": user_profile(group.user.username)
734 734 }
735 735 if admin:
736 736 repo_count = group.repositories.count()
737 737 children_groups = map(
738 738 h.safe_unicode,
739 739 itertools.chain((g.name for g in group.parents),
740 740 (x.name for x in [group])))
741 741 row.update({
742 742 "action": repo_group_actions(
743 743 group.group_id, group.group_name, repo_count),
744 744 "top_level_repos": repo_count,
745 745 "name": repo_group_name(group.group_name, children_groups),
746 746
747 747 })
748 748 repo_group_data.append(row)
749 749
750 750 return repo_group_data
751 751
752 752 def _get_defaults(self, repo_group_name):
753 753 repo_group = RepoGroup.get_by_group_name(repo_group_name)
754 754
755 755 if repo_group is None:
756 756 return None
757 757
758 758 defaults = repo_group.get_dict()
759 759 defaults['repo_group_name'] = repo_group.name
760 760 defaults['repo_group_description'] = repo_group.group_description
761 761 defaults['repo_group_enable_locking'] = repo_group.enable_locking
762 762
763 763 # we use -1 because this is how we mark an empty group in HTML
764 764 defaults['repo_group'] = defaults['group_parent_id'] or -1
765 765
766 766 # fill owner
767 767 if repo_group.user:
768 768 defaults.update({'user': repo_group.user.username})
769 769 else:
770 770 replacement_user = User.get_first_super_admin().username
771 771 defaults.update({'user': replacement_user})
772 772
773 773 return defaults
@@ -1,1288 +1,1288 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import datetime
22 22 import mock
23 23 import os
24 24 import sys
25 25 import shutil
26 26
27 27 import pytest
28 28
29 29 from rhodecode.lib.utils import make_db_config
30 30 from rhodecode.lib.vcs.backends.base import Reference
31 31 from rhodecode.lib.vcs.backends.git import (
32 32 GitRepository, GitCommit, discover_git_version)
33 33 from rhodecode.lib.vcs.exceptions import (
34 34 RepositoryError, VCSError, NodeDoesNotExistError)
35 35 from rhodecode.lib.vcs.nodes import (
36 36 NodeKind, FileNode, DirNode, NodeState, SubModuleNode)
37 37 from rhodecode.tests import TEST_GIT_REPO, TEST_GIT_REPO_CLONE, get_new_dir
38 38 from rhodecode.tests.vcs.conftest import BackendTestMixin
39 39
40 40
41 41 pytestmark = pytest.mark.backends("git")
42 42
43 43
44 44 def repo_path_generator():
45 45 """
46 46 Return a different path to be used for cloning repos.
47 47 """
48 48 i = 0
49 49 while True:
50 50 i += 1
51 51 yield '%s-%d' % (TEST_GIT_REPO_CLONE, i)
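# e.g. with TEST_GIT_REPO_CLONE == '/tmp/vcs-git-clone' (a hypothetical
# value) this yields '/tmp/vcs-git-clone-1', '/tmp/vcs-git-clone-2', ...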
52 52
53 53
54 54 REPO_PATH_GENERATOR = repo_path_generator()
55 55
56 56
57 57 class TestGitRepository:
58 58
59 59 # pylint: disable=protected-access
60 60
61 61 def __check_for_existing_repo(self):
62 62 if os.path.exists(TEST_GIT_REPO_CLONE):
63 63 self.fail('Cannot test git clone repo as location %s already '
64 64 'exists. You should manually remove it first.'
65 65 % TEST_GIT_REPO_CLONE)
66 66
67 67 @pytest.fixture(autouse=True)
68 68 def prepare(self, request, baseapp):
69 69 self.repo = GitRepository(TEST_GIT_REPO, bare=True)
70 70
71 71 def get_clone_repo(self):
72 72 """
73 73 Return a non-bare clone of the base repo.
74 74 """
75 75 clone_path = next(REPO_PATH_GENERATOR)
76 76 repo_clone = GitRepository(
77 77 clone_path, create=True, src_url=self.repo.path, bare=False)
78 78
79 79 return repo_clone
80 80
81 81 def get_empty_repo(self, bare=False):
82 82 """
83 83 Return an empty repo (non-bare unless ``bare=True`` is passed).
84 84 """
85 85 return GitRepository(next(REPO_PATH_GENERATOR), create=True, bare=bare)
86 86
87 87 def test_wrong_repo_path(self):
88 88 wrong_repo_path = '/tmp/errorrepo_git'
89 89 with pytest.raises(RepositoryError):
90 90 GitRepository(wrong_repo_path)
91 91
92 92 def test_repo_clone(self):
93 93 self.__check_for_existing_repo()
94 94 repo = GitRepository(TEST_GIT_REPO)
95 95 repo_clone = GitRepository(
96 96 TEST_GIT_REPO_CLONE,
97 97 src_url=TEST_GIT_REPO, create=True, do_workspace_checkout=True)
98 98 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
99 99 # Checking hashes of commits should be enough
100 100 for commit in repo.get_commits():
101 101 raw_id = commit.raw_id
102 102 assert raw_id == repo_clone.get_commit(raw_id).raw_id
103 103
104 104 def test_repo_clone_without_create(self):
105 105 with pytest.raises(RepositoryError):
106 106 GitRepository(
107 107 TEST_GIT_REPO_CLONE + '_wo_create', src_url=TEST_GIT_REPO)
108 108
109 109 def test_repo_clone_with_update(self):
110 110 repo = GitRepository(TEST_GIT_REPO)
111 111 clone_path = TEST_GIT_REPO_CLONE + '_with_update'
112 112 repo_clone = GitRepository(
113 113 clone_path,
114 114 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=True)
115 115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
116 116
117 117 # check if current workdir was updated
118 118 fpath = os.path.join(clone_path, 'MANIFEST.in')
119 119 assert os.path.isfile(fpath)
120 120
121 121 def test_repo_clone_without_update(self):
122 122 repo = GitRepository(TEST_GIT_REPO)
123 123 clone_path = TEST_GIT_REPO_CLONE + '_without_update'
124 124 repo_clone = GitRepository(
125 125 clone_path,
126 126 create=True, src_url=TEST_GIT_REPO, do_workspace_checkout=False)
127 127 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
128 128 # check if current workdir was *NOT* updated
129 129 fpath = os.path.join(clone_path, 'MANIFEST.in')
130 130 # Make sure it's not a bare repo
131 131 assert not repo_clone.bare
132 132 assert not os.path.isfile(fpath)
133 133
134 134 def test_repo_clone_into_bare_repo(self):
135 135 repo = GitRepository(TEST_GIT_REPO)
136 136 clone_path = TEST_GIT_REPO_CLONE + '_bare.git'
137 137 repo_clone = GitRepository(
138 138 clone_path, create=True, src_url=repo.path, bare=True)
139 139 assert repo_clone.bare
140 140
141 141 def test_create_repo_is_not_bare_by_default(self):
142 142 repo = GitRepository(get_new_dir('not-bare-by-default'), create=True)
143 143 assert not repo.bare
144 144
145 145 def test_create_bare_repo(self):
146 146 repo = GitRepository(get_new_dir('bare-repo'), create=True, bare=True)
147 147 assert repo.bare
148 148
149 149 def test_update_server_info(self):
150 150 self.repo._update_server_info()
151 151
152 152 def test_fetch(self, vcsbackend_git):
153 153 # Note: This is a git specific part of the API, it's only implemented
154 154 # by the git backend.
155 155 source_repo = vcsbackend_git.repo
156 156 target_repo = vcsbackend_git.create_repo(bare=True)
157 157 target_repo.fetch(source_repo.path)
158 158 # Note: Get a fresh instance, avoids caching trouble
159 159 target_repo = vcsbackend_git.backend(target_repo.path)
160 160 assert len(source_repo.commit_ids) == len(target_repo.commit_ids)
161 161
162 162 def test_commit_ids(self):
163 163 # there are 112 commits (by now)
164 164 # so we can assume they would be available from now on
165 165 subset = {'c1214f7e79e02fc37156ff215cd71275450cffc3',
166 166 '38b5fe81f109cb111f549bfe9bb6b267e10bc557',
167 167 'fa6600f6848800641328adbf7811fd2372c02ab2',
168 168 '102607b09cdd60e2793929c4f90478be29f85a17',
169 169 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
170 170 '2d1028c054665b962fa3d307adfc923ddd528038',
171 171 'd7e0d30fbcae12c90680eb095a4f5f02505ce501',
172 172 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
173 173 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
174 174 '8430a588b43b5d6da365400117c89400326e7992',
175 175 'd955cd312c17b02143c04fa1099a352b04368118',
176 176 'f67b87e5c629c2ee0ba58f85197e423ff28d735b',
177 177 'add63e382e4aabc9e1afdc4bdc24506c269b7618',
178 178 'f298fe1189f1b69779a4423f40b48edf92a703fc',
179 179 'bd9b619eb41994cac43d67cf4ccc8399c1125808',
180 180 '6e125e7c890379446e98980d8ed60fba87d0f6d1',
181 181 'd4a54db9f745dfeba6933bf5b1e79e15d0af20bd',
182 182 '0b05e4ed56c802098dfc813cbe779b2f49e92500',
183 183 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
184 184 '45223f8f114c64bf4d6f853e3c35a369a6305520',
185 185 'ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
186 186 'f5ea29fc42ef67a2a5a7aecff10e1566699acd68',
187 187 '27d48942240f5b91dfda77accd2caac94708cc7d',
188 188 '622f0eb0bafd619d2560c26f80f09e3b0b0d78af',
189 189 'e686b958768ee96af8029fe19c6050b1a8dd3b2b'}
190 190 assert subset.issubset(set(self.repo.commit_ids))
191 191
192 192 def test_slicing(self):
193 193 # expected slice sizes: 4, 1, 5, 10, 95
194 194 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
195 195 (10, 20, 10), (5, 100, 95)]:
196 196 commit_ids = list(self.repo[sfrom:sto])
197 197 assert len(commit_ids) == size
198 198 assert commit_ids[0] == self.repo.get_commit(commit_idx=sfrom)
199 199 assert commit_ids[-1] == self.repo.get_commit(commit_idx=sto - 1)
200 200
201 201 def test_branches(self):
202 202 # TODO: Need more tests here
203 203 # Removed (those are 'remotes' branches for cloned repo)
204 204 # assert 'master' in self.repo.branches
205 205 # assert 'gittree' in self.repo.branches
206 206 # assert 'web-branch' in self.repo.branches
207 207 for __, commit_id in self.repo.branches.items():
208 208 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
209 209
210 210 def test_tags(self):
211 211 # TODO: Need more tests here
212 212 assert 'v0.1.1' in self.repo.tags
213 213 assert 'v0.1.2' in self.repo.tags
214 214 for __, commit_id in self.repo.tags.items():
215 215 assert isinstance(self.repo.get_commit(commit_id), GitCommit)
216 216
217 217 def _test_single_commit_cache(self, commit_id):
218 218 commit = self.repo.get_commit(commit_id)
219 219 assert commit_id in self.repo.commits
220 220 assert commit is self.repo.commits[commit_id]
221 221
222 222 def test_initial_commit(self):
223 223 commit_id = self.repo.commit_ids[0]
224 224 init_commit = self.repo.get_commit(commit_id)
225 225 init_author = init_commit.author
226 226
227 227 assert init_commit.message == 'initial import\n'
228 228 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
229 229 assert init_author == init_commit.committer
230 230 for path in ('vcs/__init__.py',
231 231 'vcs/backends/BaseRepository.py',
232 232 'vcs/backends/__init__.py'):
233 233 assert isinstance(init_commit.get_node(path), FileNode)
234 234 for path in ('', 'vcs', 'vcs/backends'):
235 235 assert isinstance(init_commit.get_node(path), DirNode)
236 236
237 237 with pytest.raises(NodeDoesNotExistError):
238 238 init_commit.get_node(path='foobar')
239 239
240 240 node = init_commit.get_node('vcs/')
241 241 assert hasattr(node, 'kind')
242 242 assert node.kind == NodeKind.DIR
243 243
244 244 node = init_commit.get_node('vcs')
245 245 assert hasattr(node, 'kind')
246 246 assert node.kind == NodeKind.DIR
247 247
248 248 node = init_commit.get_node('vcs/__init__.py')
249 249 assert hasattr(node, 'kind')
250 250 assert node.kind == NodeKind.FILE
251 251
252 252 def test_not_existing_commit(self):
253 253 with pytest.raises(RepositoryError):
254 254 self.repo.get_commit('f' * 40)
255 255
256 256 def test_commit10(self):
257 257
258 258 commit10 = self.repo.get_commit(self.repo.commit_ids[9])
259 259 README = """===
260 260 VCS
261 261 ===
262 262
263 263 Various Version Control System management abstraction layer for Python.
264 264
265 265 Introduction
266 266 ------------
267 267
268 268 TODO: To be written...
269 269
270 270 """
271 271 node = commit10.get_node('README.rst')
272 272 assert node.kind == NodeKind.FILE
273 273 assert node.content == README
274 274
275 275 def test_head(self):
276 276 assert self.repo.head == self.repo.get_commit().raw_id
277 277
278 278 def test_checkout_with_create(self):
279 279 repo_clone = self.get_clone_repo()
280 280
281 281 new_branch = 'new_branch'
282 282 assert repo_clone._current_branch() == 'master'
283 283 assert set(repo_clone.branches) == {'master'}
284 284 repo_clone._checkout(new_branch, create=True)
285 285
286 286 # Branches is a lazy property, so we need to recreate the Repo object.
287 287 repo_clone = GitRepository(repo_clone.path)
288 288 assert set(repo_clone.branches) == {'master', new_branch}
289 289 assert repo_clone._current_branch() == new_branch
290 290
291 291 def test_checkout(self):
292 292 repo_clone = self.get_clone_repo()
293 293
294 294 repo_clone._checkout('new_branch', create=True)
295 295 repo_clone._checkout('master')
296 296
297 297 assert repo_clone._current_branch() == 'master'
298 298
299 299 def test_checkout_same_branch(self):
300 300 repo_clone = self.get_clone_repo()
301 301
302 302 repo_clone._checkout('master')
303 303 assert repo_clone._current_branch() == 'master'
304 304
305 305 def test_checkout_branch_already_exists(self):
306 306 repo_clone = self.get_clone_repo()
307 307
308 308 with pytest.raises(RepositoryError):
309 309 repo_clone._checkout('master', create=True)
310 310
311 311 def test_checkout_bare_repo(self):
312 312 with pytest.raises(RepositoryError):
313 313 self.repo._checkout('master')
314 314
315 315 def test_current_branch_bare_repo(self):
316 316 with pytest.raises(RepositoryError):
317 317 self.repo._current_branch()
318 318
319 319 def test_current_branch_empty_repo(self):
320 320 repo = self.get_empty_repo()
321 321 assert repo._current_branch() is None
322 322
323 323 def test_local_clone(self):
324 324 clone_path = next(REPO_PATH_GENERATOR)
325 325 self.repo._local_clone(clone_path, 'master')
326 326 repo_clone = GitRepository(clone_path)
327 327
328 328 assert self.repo.commit_ids == repo_clone.commit_ids
329 329
330 330 def test_local_clone_with_specific_branch(self):
331 331 source_repo = self.get_clone_repo()
332 332
333 333 # Create a new branch in source repo
334 334 new_branch_commit = source_repo.commit_ids[-3]
335 335 source_repo._checkout(new_branch_commit)
336 336 source_repo._checkout('new_branch', create=True)
337 337
338 338 clone_path = next(REPO_PATH_GENERATOR)
339 339 source_repo._local_clone(clone_path, 'new_branch')
340 340 repo_clone = GitRepository(clone_path)
341 341
342 342 assert source_repo.commit_ids[:-3 + 1] == repo_clone.commit_ids
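# new_branch was created at commit_ids[-3], so the clone is expected to
# contain everything up to and including that commit (i.e. ids[:-2])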
343 343
344 344 clone_path = next(REPO_PATH_GENERATOR)
345 345 source_repo._local_clone(clone_path, 'master')
346 346 repo_clone = GitRepository(clone_path)
347 347
348 348 assert source_repo.commit_ids == repo_clone.commit_ids
349 349
350 350 def test_local_clone_fails_if_target_exists(self):
351 351 with pytest.raises(RepositoryError):
352 352 self.repo._local_clone(self.repo.path, 'master')
353 353
354 354 def test_local_fetch(self):
355 355 target_repo = self.get_empty_repo()
356 356 source_repo = self.get_clone_repo()
357 357
358 358 # Create a new branch in source repo
359 359 master_commit = source_repo.commit_ids[-1]
360 360 new_branch_commit = source_repo.commit_ids[-3]
361 361 source_repo._checkout(new_branch_commit)
362 362 source_repo._checkout('new_branch', create=True)
363 363
364 364 target_repo._local_fetch(source_repo.path, 'new_branch')
365 365 assert target_repo._last_fetch_heads() == [new_branch_commit]
366 366
367 367 target_repo._local_fetch(source_repo.path, 'master')
368 368 assert target_repo._last_fetch_heads() == [master_commit]
369 369
370 370 def test_local_fetch_from_bare_repo(self):
371 371 target_repo = self.get_empty_repo()
372 372 target_repo._local_fetch(self.repo.path, 'master')
373 373
374 374 master_commit = self.repo.commit_ids[-1]
375 375 assert target_repo._last_fetch_heads() == [master_commit]
376 376
377 377 def test_local_fetch_from_same_repo(self):
378 378 with pytest.raises(ValueError):
379 379 self.repo._local_fetch(self.repo.path, 'master')
380 380
381 381 def test_local_fetch_branch_does_not_exist(self):
382 382 target_repo = self.get_empty_repo()
383 383
384 384 with pytest.raises(RepositoryError):
385 385 target_repo._local_fetch(self.repo.path, 'new_branch')
386 386
387 387 def test_local_pull(self):
388 388 target_repo = self.get_empty_repo()
389 389 source_repo = self.get_clone_repo()
390 390
391 391 # Create a new branch in source repo
392 392 master_commit = source_repo.commit_ids[-1]
393 393 new_branch_commit = source_repo.commit_ids[-3]
394 394 source_repo._checkout(new_branch_commit)
395 395 source_repo._checkout('new_branch', create=True)
396 396
397 397 target_repo._local_pull(source_repo.path, 'new_branch')
398 398 target_repo = GitRepository(target_repo.path)
399 399 assert target_repo.head == new_branch_commit
400 400
401 401 target_repo._local_pull(source_repo.path, 'master')
402 402 target_repo = GitRepository(target_repo.path)
403 403 assert target_repo.head == master_commit
404 404
405 405 def test_local_pull_in_bare_repo(self):
406 406 with pytest.raises(RepositoryError):
407 407 self.repo._local_pull(self.repo.path, 'master')
408 408
409 409 def test_local_merge(self):
410 410 target_repo = self.get_empty_repo()
411 411 source_repo = self.get_clone_repo()
412 412
413 413 # Create a new branch in source repo
414 414 master_commit = source_repo.commit_ids[-1]
415 415 new_branch_commit = source_repo.commit_ids[-3]
416 416 source_repo._checkout(new_branch_commit)
417 417 source_repo._checkout('new_branch', create=True)
418 418
419 419 # This is required as one cannot do a --ff-only merge in an empty repo.
420 420 target_repo._local_pull(source_repo.path, 'new_branch')
421 421
422 422 target_repo._local_fetch(source_repo.path, 'master')
423 423 merge_message = 'Merge message\n\nDescription:...'
424 424 user_name = 'Albert Einstein'
425 425 user_email = 'albert@einstein.com'
426 426 target_repo._local_merge(merge_message, user_name, user_email,
427 427 target_repo._last_fetch_heads())
428 428
429 429 target_repo = GitRepository(target_repo.path)
430 430 assert target_repo.commit_ids[-2] == master_commit
431 431 last_commit = target_repo.get_commit(target_repo.head)
432 432 assert last_commit.message.strip() == merge_message
433 433 assert last_commit.author == '%s <%s>' % (user_name, user_email)
434 434
435 435 assert not os.path.exists(
436 436 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
437 437
438 438 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_git):
439 439 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
440 440 vcsbackend_git.ensure_file('README', 'I will conflict with you!!!')
441 441
442 442 target_repo._local_fetch(self.repo.path, 'master')
443 443 with pytest.raises(RepositoryError):
444 444 target_repo._local_merge(
445 445 'merge_message', 'user name', 'user@name.com',
446 446 target_repo._last_fetch_heads())
447 447
448 448 # Check we are not left in an intermediate merge state
449 449 assert not os.path.exists(
450 450 os.path.join(target_repo.path, '.git', 'MERGE_HEAD'))
451 451
452 452 def test_local_merge_into_empty_repo(self):
453 453 target_repo = self.get_empty_repo()
454 454
455 455 # This is required as one cannot do a --ff-only merge in an empty repo.
456 456 target_repo._local_fetch(self.repo.path, 'master')
457 457 with pytest.raises(RepositoryError):
458 458 target_repo._local_merge(
459 459 'merge_message', 'user name', 'user@name.com',
460 460 target_repo._last_fetch_heads())
461 461
462 462 def test_local_merge_in_bare_repo(self):
463 463 with pytest.raises(RepositoryError):
464 464 self.repo._local_merge(
465 465 'merge_message', 'user name', 'user@name.com', None)
466 466
467 467 def test_local_push_non_bare(self):
468 468 target_repo = self.get_empty_repo()
469 469
470 470 pushed_branch = 'pushed_branch'
471 471 self.repo._local_push('master', target_repo.path, pushed_branch)
472 472 # Fix the HEAD of the target repo, or otherwise GitRepository won't
473 473 # report any branches.
474 474 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
475 475 f.write('ref: refs/heads/%s' % pushed_branch)
476 476
477 477 target_repo = GitRepository(target_repo.path)
478 478
479 479 assert (target_repo.branches[pushed_branch] ==
480 480 self.repo.branches['master'])
481 481
482 482 def test_local_push_bare(self):
483 483 target_repo = self.get_empty_repo(bare=True)
484 484
485 485 pushed_branch = 'pushed_branch'
486 486 self.repo._local_push('master', target_repo.path, pushed_branch)
487 487 # Fix the HEAD of the target repo, or otherwise GitRepository won't
488 488 # report any branches.
489 489 with open(os.path.join(target_repo.path, 'HEAD'), 'w') as f:
490 490 f.write('ref: refs/heads/%s' % pushed_branch)
491 491
492 492 target_repo = GitRepository(target_repo.path)
493 493
494 494 assert (target_repo.branches[pushed_branch] ==
495 495 self.repo.branches['master'])
496 496
497 497 def test_local_push_non_bare_target_branch_is_checked_out(self):
498 498 target_repo = self.get_clone_repo()
499 499
500 500 pushed_branch = 'pushed_branch'
501 501 # Create a new branch in source repo
502 502 new_branch_commit = target_repo.commit_ids[-3]
503 503 target_repo._checkout(new_branch_commit)
504 504 target_repo._checkout(pushed_branch, create=True)
505 505
506 506 self.repo._local_push('master', target_repo.path, pushed_branch)
507 507
508 508 target_repo = GitRepository(target_repo.path)
509 509
510 510 assert (target_repo.branches[pushed_branch] ==
511 511 self.repo.branches['master'])
512 512
513 513 def test_local_push_raises_exception_on_conflict(self, vcsbackend_git):
514 514 target_repo = vcsbackend_git.create_repo(number_of_commits=1)
515 515 with pytest.raises(RepositoryError):
516 516 self.repo._local_push('master', target_repo.path, 'master')
517 517
518 518 def test_hooks_can_be_enabled_via_env_variable_for_local_push(self):
519 519 target_repo = self.get_empty_repo(bare=True)
520 520
521 521 with mock.patch.object(self.repo, 'run_git_command') as run_mock:
522 522 self.repo._local_push(
523 523 'master', target_repo.path, 'master', enable_hooks=True)
524 524 env = run_mock.call_args[1]['extra_env']
525 525 assert 'RC_SKIP_HOOKS' not in env
526 526
527 527 def _add_failing_hook(self, repo_path, hook_name, bare=False):
528 528 path_components = (
529 529 ['hooks', hook_name] if bare else ['.git', 'hooks', hook_name])
530 530 hook_path = os.path.join(repo_path, *path_components)
531 531 with open(hook_path, 'w') as f:
532 532 script_lines = [
533 533 '#!%s' % sys.executable,
534 534 'import os',
535 535 'import sys',
536 536 'if os.environ.get("RC_SKIP_HOOKS"):',
537 537 ' sys.exit(0)',
538 538 'sys.exit(1)',
539 539 ]
540 540 f.write('\n'.join(script_lines))
541 os.chmod(hook_path, 0755)
541 os.chmod(hook_path, 0o755)
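# 0o755 (rwxr-xr-x) keeps the hook executable; the 0o prefix is the
# py3-compatible octal spelling this commit introduces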
542 542
543 543 def test_local_push_does_not_execute_hook(self):
544 544 target_repo = self.get_empty_repo()
545 545
546 546 pushed_branch = 'pushed_branch'
547 547 self._add_failing_hook(target_repo.path, 'pre-receive')
548 548 self.repo._local_push('master', target_repo.path, pushed_branch)
549 549 # Fix the HEAD of the target repo, or otherwise GitRepository won't
550 550 # report any branches.
551 551 with open(os.path.join(target_repo.path, '.git', 'HEAD'), 'w') as f:
552 552 f.write('ref: refs/heads/%s' % pushed_branch)
553 553
554 554 target_repo = GitRepository(target_repo.path)
555 555
556 556 assert (target_repo.branches[pushed_branch] ==
557 557 self.repo.branches['master'])
558 558
559 559 def test_local_push_executes_hook(self):
560 560 target_repo = self.get_empty_repo(bare=True)
561 561 self._add_failing_hook(target_repo.path, 'pre-receive', bare=True)
562 562 with pytest.raises(RepositoryError):
563 563 self.repo._local_push(
564 564 'master', target_repo.path, 'master', enable_hooks=True)
565 565
566 566 def test_maybe_prepare_merge_workspace(self):
567 567 workspace = self.repo._maybe_prepare_merge_workspace(
568 568 2, 'pr2', Reference('branch', 'master', 'unused'),
569 569 Reference('branch', 'master', 'unused'))
570 570
571 571 assert os.path.isdir(workspace)
572 572 workspace_repo = GitRepository(workspace)
573 573 assert workspace_repo.branches == self.repo.branches
574 574
575 575 # Calling it a second time should also succeed
576 576 workspace = self.repo._maybe_prepare_merge_workspace(
577 577 2, 'pr2', Reference('branch', 'master', 'unused'),
578 578 Reference('branch', 'master', 'unused'))
579 579 assert os.path.isdir(workspace)
580 580
581 581 def test_maybe_prepare_merge_workspace_different_refs(self):
582 582 workspace = self.repo._maybe_prepare_merge_workspace(
583 583 2, 'pr2', Reference('branch', 'master', 'unused'),
584 584 Reference('branch', 'develop', 'unused'))
585 585
586 586 assert os.path.isdir(workspace)
587 587 workspace_repo = GitRepository(workspace)
588 588 assert workspace_repo.branches == self.repo.branches
589 589
590 590 # Calling it a second time should also succeed
591 591 workspace = self.repo._maybe_prepare_merge_workspace(
592 592 2, 'pr2', Reference('branch', 'master', 'unused'),
593 593 Reference('branch', 'develop', 'unused'))
594 594 assert os.path.isdir(workspace)
595 595
596 596 def test_cleanup_merge_workspace(self):
597 597 workspace = self.repo._maybe_prepare_merge_workspace(
598 598 2, 'pr3', Reference('branch', 'master', 'unused'),
599 599 Reference('branch', 'master', 'unused'))
600 600 self.repo.cleanup_merge_workspace(2, 'pr3')
601 601
602 602 assert not os.path.exists(workspace)
603 603
604 604 def test_cleanup_merge_workspace_invalid_workspace_id(self):
605 605 # No assert needed: even in case of a nonexistent workspace this
606 606 # function should still succeed.
607 607 self.repo.cleanup_merge_workspace(1, 'pr4')
608 608
609 609 def test_set_refs(self):
610 610 test_ref = 'refs/test-refs/abcde'
611 611 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
612 612
613 613 self.repo.set_refs(test_ref, test_commit_id)
614 614 stdout, _ = self.repo.run_git_command(['show-ref'])
615 615 assert test_ref in stdout
616 616 assert test_commit_id in stdout
617 617
618 618 def test_remove_ref(self):
619 619 test_ref = 'refs/test-refs/abcde'
620 620 test_commit_id = 'ecb86e1f424f2608262b130db174a7dfd25a6623'
621 621 self.repo.set_refs(test_ref, test_commit_id)
622 622 stdout, _ = self.repo.run_git_command(['show-ref'])
623 623 assert test_ref in stdout
624 624 assert test_commit_id in stdout
625 625
626 626 self.repo.remove_ref(test_ref)
627 627 stdout, _ = self.repo.run_git_command(['show-ref'])
628 628 assert test_ref not in stdout
629 629 assert test_commit_id not in stdout
630 630
631 631
632 632 class TestGitCommit(object):
633 633
634 634 @pytest.fixture(autouse=True)
635 635 def prepare(self):
636 636 self.repo = GitRepository(TEST_GIT_REPO)
637 637
638 638 def test_default_commit(self):
639 639 tip = self.repo.get_commit()
640 640 assert tip == self.repo.get_commit(None)
641 641 assert tip == self.repo.get_commit('tip')
642 642
643 643 def test_root_node(self):
644 644 tip = self.repo.get_commit()
645 645 assert tip.root is tip.get_node('')
646 646
647 647 def test_lazy_fetch(self):
648 648 """
649 649 Test if the commit's nodes expand and are cached as we walk through
650 650 the commit. This test is somewhat hard to write, as the order of
651 651 operations is key here. Written by running command after command in a shell.
652 652 """
653 653 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
654 654 assert commit_id in self.repo.commit_ids
655 655 commit = self.repo.get_commit(commit_id)
656 656 assert len(commit.nodes) == 0
657 657 root = commit.root
658 658 assert len(commit.nodes) == 1
659 659 assert len(root.nodes) == 8
660 660 # accessing root.nodes updates commit.nodes
661 661 assert len(commit.nodes) == 9
662 662
663 663 docs = root.get_node('docs')
664 664 # we haven't yet accessed anything new as docs dir was already cached
665 665 assert len(commit.nodes) == 9
666 666 assert len(docs.nodes) == 8
667 667 # accessing docs.nodes updates commit.nodes
668 668 assert len(commit.nodes) == 17
669 669
670 670 assert docs is commit.get_node('docs')
671 671 assert docs is root.nodes[0]
672 672 assert docs is root.dirs[0]
673 673 assert docs is commit.get_node('docs')
674 674
675 675 def test_nodes_with_commit(self):
676 676 commit_id = '2a13f185e4525f9d4b59882791a2d397b90d5ddc'
677 677 commit = self.repo.get_commit(commit_id)
678 678 root = commit.root
679 679 docs = root.get_node('docs')
680 680 assert docs is commit.get_node('docs')
681 681 api = docs.get_node('api')
682 682 assert api is commit.get_node('docs/api')
683 683 index = api.get_node('index.rst')
684 684 assert index is commit.get_node('docs/api/index.rst')
685 685 assert index is commit.get_node('docs')\
686 686 .get_node('api')\
687 687 .get_node('index.rst')
688 688
689 689 def test_branch_and_tags(self):
690 690 """
691 691 rev0 = self.repo.commit_ids[0]
692 692 commit0 = self.repo.get_commit(rev0)
693 693 assert commit0.branch == 'master'
694 694 assert commit0.tags == []
695 695
696 696 rev10 = self.repo.commit_ids[10]
697 697 commit10 = self.repo.get_commit(rev10)
698 698 assert commit10.branch == 'master'
699 699 assert commit10.tags == []
700 700
701 701 rev44 = self.repo.commit_ids[44]
702 702 commit44 = self.repo.get_commit(rev44)
703 703 assert commit44.branch == 'web-branch'
704 704
705 705 tip = self.repo.get_commit('tip')
706 706 assert 'tip' in tip.tags
707 707 """
708 708 # Those tests would fail - branches are now going to be changed
709 709 # in the main API in order to support the git backend
710 710 pass
711 711
712 712 def test_file_size(self):
713 713 to_check = (
714 714 ('c1214f7e79e02fc37156ff215cd71275450cffc3',
715 715 'vcs/backends/BaseRepository.py', 502),
716 716 ('d7e0d30fbcae12c90680eb095a4f5f02505ce501',
717 717 'vcs/backends/hg.py', 854),
718 718 ('6e125e7c890379446e98980d8ed60fba87d0f6d1',
719 719 'setup.py', 1068),
720 720
721 721 ('d955cd312c17b02143c04fa1099a352b04368118',
722 722 'vcs/backends/base.py', 2921),
723 723 ('ca1eb7957a54bce53b12d1a51b13452f95bc7c7e',
724 724 'vcs/backends/base.py', 3936),
725 725 ('f50f42baeed5af6518ef4b0cb2f1423f3851a941',
726 726 'vcs/backends/base.py', 6189),
727 727 )
728 728 for commit_id, path, size in to_check:
729 729 node = self.repo.get_commit(commit_id).get_node(path)
730 730 assert node.is_file()
731 731 assert node.size == size
732 732
733 733 def test_file_history_from_commits(self):
734 734 node = self.repo[10].get_node('setup.py')
735 735 commit_ids = [commit.raw_id for commit in node.history]
736 736 assert ['ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == commit_ids
737 737
738 738 node = self.repo[20].get_node('setup.py')
739 739 node_ids = [commit.raw_id for commit in node.history]
740 740 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
741 741 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
742 742
743 743 # special case: we check history from a commit that has this
744 744 # particular file changed; this means we check if it's included as well
745 745 node = self.repo.get_commit('191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e') \
746 746 .get_node('setup.py')
747 747 node_ids = [commit.raw_id for commit in node.history]
748 748 assert ['191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
749 749 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'] == node_ids
750 750
751 751 def test_file_history(self):
752 752 # we can only check if those commits are present in the history
753 753 # as we cannot update this test every time the file is changed
754 754 files = {
755 755 'setup.py': [
756 756 '54386793436c938cff89326944d4c2702340037d',
757 757 '51d254f0ecf5df2ce50c0b115741f4cf13985dab',
758 758 '998ed409c795fec2012b1c0ca054d99888b22090',
759 759 '5e0eb4c47f56564395f76333f319d26c79e2fb09',
760 760 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
761 761 '7cb3fd1b6d8c20ba89e2264f1c8baebc8a52d36e',
762 762 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
763 763 '191caa5b2c81ed17c0794bf7bb9958f4dcb0b87e',
764 764 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
765 765 ],
766 766 'vcs/nodes.py': [
767 767 '33fa3223355104431402a888fa77a4e9956feb3e',
768 768 'fa014c12c26d10ba682fadb78f2a11c24c8118e1',
769 769 'e686b958768ee96af8029fe19c6050b1a8dd3b2b',
770 770 'ab5721ca0a081f26bf43d9051e615af2cc99952f',
771 771 'c877b68d18e792a66b7f4c529ea02c8f80801542',
772 772 '4313566d2e417cb382948f8d9d7c765330356054',
773 773 '6c2303a793671e807d1cfc70134c9ca0767d98c2',
774 774 '54386793436c938cff89326944d4c2702340037d',
775 775 '54000345d2e78b03a99d561399e8e548de3f3203',
776 776 '1c6b3677b37ea064cb4b51714d8f7498f93f4b2b',
777 777 '2d03ca750a44440fb5ea8b751176d1f36f8e8f46',
778 778 '2a08b128c206db48c2f0b8f70df060e6db0ae4f8',
779 779 '30c26513ff1eb8e5ce0e1c6b477ee5dc50e2f34b',
780 780 'ac71e9503c2ca95542839af0ce7b64011b72ea7c',
781 781 '12669288fd13adba2a9b7dd5b870cc23ffab92d2',
782 782 '5a0c84f3e6fe3473e4c8427199d5a6fc71a9b382',
783 783 '12f2f5e2b38e6ff3fbdb5d722efed9aa72ecb0d5',
784 784 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
785 785 'f50f42baeed5af6518ef4b0cb2f1423f3851a941',
786 786 'd7e390a45f6aa96f04f5e7f583ad4f867431aa25',
787 787 'f15c21f97864b4f071cddfbf2750ec2e23859414',
788 788 'e906ef056cf539a4e4e5fc8003eaf7cf14dd8ade',
789 789 'ea2b108b48aa8f8c9c4a941f66c1a03315ca1c3b',
790 790 '84dec09632a4458f79f50ddbbd155506c460b4f9',
791 791 '0115510b70c7229dbc5dc49036b32e7d91d23acd',
792 792 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
793 793 '3bf1c5868e570e39569d094f922d33ced2fa3b2b',
794 794 'b8d04012574729d2c29886e53b1a43ef16dd00a1',
795 795 '6970b057cffe4aab0a792aa634c89f4bebf01441',
796 796 'dd80b0f6cf5052f17cc738c2951c4f2070200d7f',
797 797 'ff7ca51e58c505fec0dd2491de52c622bb7a806b',
798 798 ],
799 799 'vcs/backends/git.py': [
800 800 '4cf116ad5a457530381135e2f4c453e68a1b0105',
801 801 '9a751d84d8e9408e736329767387f41b36935153',
802 802 'cb681fb539c3faaedbcdf5ca71ca413425c18f01',
803 803 '428f81bb652bcba8d631bce926e8834ff49bdcc6',
804 804 '180ab15aebf26f98f714d8c68715e0f05fa6e1c7',
805 805 '2b8e07312a2e89e92b90426ab97f349f4bce2a3a',
806 806 '50e08c506174d8645a4bb517dd122ac946a0f3bf',
807 807 '54000345d2e78b03a99d561399e8e548de3f3203',
808 808 ],
809 809 }
810 810 for path, commit_ids in files.items():
811 811 node = self.repo.get_commit(commit_ids[0]).get_node(path)
812 812 node_ids = [commit.raw_id for commit in node.history]
813 813 assert set(commit_ids).issubset(set(node_ids)), (
814 814 "We assumed that %s is subset of commit_ids for which file %s "
815 815 "has been changed, and history of that node returned: %s"
816 816 % (commit_ids, path, node_ids))
817 817
818 818 def test_file_annotate(self):
819 819 files = {
820 820 'vcs/backends/__init__.py': {
821 821 'c1214f7e79e02fc37156ff215cd71275450cffc3': {
822 822 'lines_no': 1,
823 823 'commits': [
824 824 'c1214f7e79e02fc37156ff215cd71275450cffc3',
825 825 ],
826 826 },
827 827 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647': {
828 828 'lines_no': 21,
829 829 'commits': [
830 830 '49d3fd156b6f7db46313fac355dca1a0b94a0017',
831 831 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
832 832 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
833 833 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
834 834 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
835 835 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
836 836 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
837 837 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
838 838 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
839 839 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
840 840 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
841 841 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
842 842 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
843 843 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
844 844 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
845 845 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
846 846 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
847 847 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
848 848 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
849 849 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
850 850 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
851 851 ],
852 852 },
853 853 'e29b67bd158580fc90fc5e9111240b90e6e86064': {
854 854 'lines_no': 32,
855 855 'commits': [
856 856 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
857 857 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
858 858 '5eab1222a7cd4bfcbabc218ca6d04276d4e27378',
859 859 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
860 860 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
861 861 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
862 862 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
863 863 '54000345d2e78b03a99d561399e8e548de3f3203',
864 864 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
865 865 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
866 866 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
867 867 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
868 868 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
869 869 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
870 870 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
871 871 '2a13f185e4525f9d4b59882791a2d397b90d5ddc',
872 872 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
873 873 '78c3f0c23b7ee935ec276acb8b8212444c33c396',
874 874 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
875 875 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
876 876 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
877 877 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
878 878 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
879 879 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
880 880 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
881 881 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
882 882 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
883 883 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
884 884 '992f38217b979d0b0987d0bae3cc26dac85d9b19',
885 885 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
886 886 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
887 887 '16fba1ae9334d79b66d7afed2c2dfbfa2ae53647',
888 888 ],
889 889 },
890 890 },
891 891 }
892 892
893 893 for fname, commit_dict in files.items():
894 894 for commit_id, __ in commit_dict.items():
895 895 commit = self.repo.get_commit(commit_id)
896 896
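# each annotate entry appears to be a tuple of
# (line_no, commit_id, commit_loader_callable), so both extractions
# below must agree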
897 897 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
898 898 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
899 899 assert l1_1 == l1_2
900 900 l1 = l1_1
901 901 l2 = files[fname][commit_id]['commits']
902 902 assert l1 == l2, (
903 903 "The lists of commit_ids for %s@commit_id %s"
904 904 "from annotation list should match each other, "
905 905 "got \n%s \nvs \n%s " % (fname, commit_id, l1, l2))
906 906
907 907 def test_files_state(self):
908 908 """
909 909 Tests state of FileNodes.
910 910 """
911 911 node = self.repo\
912 912 .get_commit('e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0')\
913 913 .get_node('vcs/utils/diffs.py')
914 914 assert node.state == NodeState.ADDED
915 915 assert node.added
916 916 assert not node.changed
917 917 assert not node.not_changed
918 918 assert not node.removed
919 919
920 920 node = self.repo\
921 921 .get_commit('33fa3223355104431402a888fa77a4e9956feb3e')\
922 922 .get_node('.hgignore')
923 923 assert node.state == NodeState.CHANGED
924 924 assert not node.added
925 925 assert node.changed
926 926 assert not node.not_changed
927 927 assert not node.removed
928 928
929 929 node = self.repo\
930 930 .get_commit('e29b67bd158580fc90fc5e9111240b90e6e86064')\
931 931 .get_node('setup.py')
932 932 assert node.state == NodeState.NOT_CHANGED
933 933 assert not node.added
934 934 assert not node.changed
935 935 assert node.not_changed
936 936 assert not node.removed
937 937
938 938 # If node has REMOVED state then trying to fetch it raises
939 939 # NodeDoesNotExistError
940 940 commit = self.repo.get_commit(
941 941 'fa6600f6848800641328adbf7811fd2372c02ab2')
942 942 path = 'vcs/backends/BaseRepository.py'
943 943 with pytest.raises(NodeDoesNotExistError):
944 944 commit.get_node(path)
945 945 # but it would be one of ``removed`` (commit's attribute)
946 946 assert path in [rf.path for rf in commit.removed]
947 947
948 948 commit = self.repo.get_commit(
949 949 '54386793436c938cff89326944d4c2702340037d')
950 950 changed = [
951 951 'setup.py', 'tests/test_nodes.py', 'vcs/backends/hg.py',
952 952 'vcs/nodes.py']
953 953 assert set(changed) == set([f.path for f in commit.changed])
954 954
955 955 def test_unicode_branch_refs(self):
956 956 unicode_branches = {
957 957 'refs/heads/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
958 958 u'refs/heads/uniçö∂e': 'ürl',
959 959 }
960 960 with mock.patch(
961 961 ("rhodecode.lib.vcs.backends.git.repository"
962 962 ".GitRepository._refs"),
963 963 unicode_branches):
964 964 branches = self.repo.branches
965 965
966 966 assert 'unicode' in branches
967 967 assert u'uniçö∂e' in branches
968 968
969 969 def test_unicode_tag_refs(self):
970 970 unicode_tags = {
971 971 'refs/tags/unicode': '6c0ce52b229aa978889e91b38777f800e85f330b',
972 972 u'refs/tags/uniçö∂e': '6c0ce52b229aa978889e91b38777f800e85f330b',
973 973 }
974 974 with mock.patch(
975 975 ("rhodecode.lib.vcs.backends.git.repository"
976 976 ".GitRepository._refs"),
977 977 unicode_tags):
978 978 tags = self.repo.tags
979 979
980 980 assert 'unicode' in tags
981 981 assert u'uniçö∂e' in tags
982 982
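# Note on the two ref tests here: they patch GitRepository._refs, assumed to
# be the plain {ref_name: sha} mapping that backs both .branches and .tags,
# so unicode ref handling can be exercised without writing real refs to disk.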
983 983 def test_commit_message_is_unicode(self):
984 984 for commit in self.repo:
985 985 assert type(commit.message) == unicode
986 986
987 987 def test_commit_author_is_unicode(self):
988 988 for commit in self.repo:
989 989 assert type(commit.author) == unicode
990 990
991 991 def test_repo_files_content_is_unicode(self):
992 992 commit = self.repo.get_commit()
993 993 for node in commit.get_node('/'):
994 994 if node.is_file():
995 995 assert type(node.content) == unicode
996 996
997 997 def test_wrong_path(self):
998 998 # 'setup.py' exists in the root dir, but not at this nested path:
999 999 path = 'foo/bar/setup.py'
1000 1000 tip = self.repo.get_commit()
1001 1001 with pytest.raises(VCSError):
1002 1002 tip.get_node(path)
1003 1003
1004 1004 @pytest.mark.parametrize("author_email, commit_id", [
1005 1005 ('marcin@python-blog.com', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1006 1006 ('lukasz.balcerzak@python-center.pl',
1007 1007 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1008 1008 ('none@none', '8430a588b43b5d6da365400117c89400326e7992'),
1009 1009 ])
1010 1010 def test_author_email(self, author_email, commit_id):
1011 1011 commit = self.repo.get_commit(commit_id)
1012 1012 assert author_email == commit.author_email
1013 1013
1014 1014 @pytest.mark.parametrize("author, commit_id", [
1015 1015 ('Marcin Kuzminski', 'c1214f7e79e02fc37156ff215cd71275450cffc3'),
1016 1016 ('Lukasz Balcerzak', 'ff7ca51e58c505fec0dd2491de52c622bb7a806b'),
1017 1017 ('marcink', '8430a588b43b5d6da365400117c89400326e7992'),
1018 1018 ])
1019 1019 def test_author_username(self, author, commit_id):
1020 1020 commit = self.repo.get_commit(commit_id)
1021 1021 assert author == commit.author_name
1022 1022
1023 1023
1024 1024 class TestLargeFileRepo(object):
1025 1025
1026 1026 def test_large_file(self, backend_git):
1027 1027 conf = make_db_config()
1028 1028 repo = backend_git.create_test_repo('largefiles', conf)
1029 1029
1030 1030 tip = repo.scm_instance().get_commit()
1031 1031
1032 1032 # copy the stored LFS object into the origin cache
1033 1033 lfs_store = os.path.join(repo.repo_path, repo.repo_name, 'lfs_store')
1034 1034
1035 1035 oid = '7b331c02e313c7599d5a90212e17e6d3cb729bd2e1c9b873c302a63c95a2f9bf'
1036 1036 oid_path = os.path.join(lfs_store, oid)
1037 1037 oid_destination = os.path.join(
1038 1038 conf.get('vcs_git_lfs', 'store_location'), oid)
1039 1039 shutil.copy(oid_path, oid_destination)
1040 1040
1041 1041 node = tip.get_node('1MB.zip')
1042 1042
1043 1043 lf_node = node.get_largefile_node()
1044 1044
1045 1045 assert lf_node.is_largefile() is True
1046 1046 assert lf_node.size == 1024000
1047 1047 assert lf_node.name == '1MB.zip'
1048 1048
1049 1049
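# A hedged note on the LFS wiring above: Git LFS addresses blobs by the
# sha256 of their content, so the hard-coded oid is presumably
# hashlib.sha256(<1MB.zip bytes>).hexdigest(); copying it into the configured
# store_location is what lets get_largefile_node() resolve the pointer file.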
1050 1050 @pytest.mark.usefixtures("vcs_repository_support")
1051 1051 class TestGitSpecificWithRepo(BackendTestMixin):
1052 1052
1053 1053 @classmethod
1054 1054 def _get_commits(cls):
1055 1055 return [
1056 1056 {
1057 1057 'message': 'Initial',
1058 1058 'author': 'Joe Doe <joe.doe@example.com>',
1059 1059 'date': datetime.datetime(2010, 1, 1, 20),
1060 1060 'added': [
1061 1061 FileNode('foobar/static/js/admin/base.js', content='base'),
1062 1062 FileNode(
1063 1063 'foobar/static/admin', content='admin',
1064 mode=0120000), # this is a link
1064 mode=0o120000), # this is a link
1065 1065 FileNode('foo', content='foo'),
1066 1066 ],
1067 1067 },
1068 1068 {
1069 1069 'message': 'Second',
1070 1070 'author': 'Joe Doe <joe.doe@example.com>',
1071 1071 'date': datetime.datetime(2010, 1, 1, 22),
1072 1072 'added': [
1073 1073 FileNode('foo2', content='foo2'),
1074 1074 ],
1075 1075 },
1076 1076 ]
1077 1077
1078 1078 def test_paths_slow_traversing(self):
1079 1079 commit = self.repo.get_commit()
1080 1080 assert commit.get_node('foobar').get_node('static').get_node('js')\
1081 1081 .get_node('admin').get_node('base.js').content == 'base'
1082 1082
1083 1083 def test_paths_fast_traversing(self):
1084 1084 commit = self.repo.get_commit()
1085 1085 assert (
1086 1086 commit.get_node('foobar/static/js/admin/base.js').content ==
1087 1087 'base')
1088 1088
1089 1089 def test_get_diff_runs_git_command_with_hashes(self):
1090 1090 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1091 1091 self.repo.get_diff(self.repo[0], self.repo[1])
1092 1092 self.repo.run_git_command.assert_called_once_with(
1093 1093 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1094 1094 '--abbrev=40', self.repo._get_commit_id(0),
1095 1095 self.repo._get_commit_id(1)])
1096 1096
1097 1097 def test_get_diff_runs_git_command_with_str_hashes(self):
1098 1098 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1099 1099 self.repo.get_diff(self.repo.EMPTY_COMMIT, self.repo[1])
1100 1100 self.repo.run_git_command.assert_called_once_with(
1101 1101 ['show', '-U3', '--full-index', '--binary', '-p', '-M',
1102 1102 '--abbrev=40', self.repo._get_commit_id(1)])
1103 1103
1104 1104 def test_get_diff_runs_git_command_with_path_if_its_given(self):
1105 1105 self.repo.run_git_command = mock.Mock(return_value=['', ''])
1106 1106 self.repo.get_diff(self.repo[0], self.repo[1], 'foo')
1107 1107 self.repo.run_git_command.assert_called_once_with(
1108 1108 ['diff', '-U3', '--full-index', '--binary', '-p', '-M',
1109 1109 '--abbrev=40', self.repo._get_commit_id(0),
1110 1110 self.repo._get_commit_id(1), '--', 'foo'])
1111 1111
1112 1112
1113 1113 @pytest.mark.usefixtures("vcs_repository_support")
1114 1114 class TestGitRegression(BackendTestMixin):
1115 1115
1116 1116 @classmethod
1117 1117 def _get_commits(cls):
1118 1118 return [
1119 1119 {
1120 1120 'message': 'Initial',
1121 1121 'author': 'Joe Doe <joe.doe@example.com>',
1122 1122 'date': datetime.datetime(2010, 1, 1, 20),
1123 1123 'added': [
1124 1124 FileNode('bot/__init__.py', content='base'),
1125 1125 FileNode('bot/templates/404.html', content='base'),
1126 1126 FileNode('bot/templates/500.html', content='base'),
1127 1127 ],
1128 1128 },
1129 1129 {
1130 1130 'message': 'Second',
1131 1131 'author': 'Joe Doe <joe.doe@example.com>',
1132 1132 'date': datetime.datetime(2010, 1, 1, 22),
1133 1133 'added': [
1134 1134 FileNode('bot/build/migrations/1.py', content='foo2'),
1135 1135 FileNode('bot/build/migrations/2.py', content='foo2'),
1136 1136 FileNode(
1137 1137 'bot/build/static/templates/f.html', content='foo2'),
1138 1138 FileNode(
1139 1139 'bot/build/static/templates/f1.html', content='foo2'),
1140 1140 FileNode('bot/build/templates/err.html', content='foo2'),
1141 1141 FileNode('bot/build/templates/err2.html', content='foo2'),
1142 1142 ],
1143 1143 },
1144 1144 ]
1145 1145
1146 1146 @pytest.mark.parametrize("path, expected_paths", [
1147 1147 ('bot', [
1148 1148 'bot/build',
1149 1149 'bot/templates',
1150 1150 'bot/__init__.py']),
1151 1151 ('bot/build', [
1152 1152 'bot/build/migrations',
1153 1153 'bot/build/static',
1154 1154 'bot/build/templates']),
1155 1155 ('bot/build/static', [
1156 1156 'bot/build/static/templates']),
1157 1157 ('bot/build/static/templates', [
1158 1158 'bot/build/static/templates/f.html',
1159 1159 'bot/build/static/templates/f1.html']),
1160 1160 ('bot/build/templates', [
1161 1161 'bot/build/templates/err.html',
1162 1162 'bot/build/templates/err2.html']),
1163 1163 ('bot/templates/', [
1164 1164 'bot/templates/404.html',
1165 1165 'bot/templates/500.html']),
1166 1166 ])
1167 1167 def test_similar_paths(self, path, expected_paths):
1168 1168 commit = self.repo.get_commit()
1169 1169 paths = [n.path for n in commit.get_nodes(path)]
1170 1170 assert paths == expected_paths
1171 1171
1172 1172
1173 1173 class TestDiscoverGitVersion(object):
1174 1174
1175 1175 def test_returns_git_version(self, baseapp):
1176 1176 version = discover_git_version()
1177 1177 assert version
1178 1178
1179 1179 def test_returns_empty_string_without_vcsserver(self):
1180 1180 mock_connection = mock.Mock()
1181 1181 mock_connection.discover_git_version = mock.Mock(
1182 1182 side_effect=Exception)
1183 1183 with mock.patch('rhodecode.lib.vcs.connection.Git', mock_connection):
1184 1184 version = discover_git_version()
1185 1185 assert version == ''
1186 1186
1187 1187
1188 1188 class TestGetSubmoduleUrl(object):
1189 1189 def test_submodules_file_found(self):
1190 1190 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1191 1191 node = mock.Mock()
1192 1192 with mock.patch.object(
1193 1193 commit, 'get_node', return_value=node) as get_node_mock:
1194 1194 node.content = (
1195 1195 '[submodule "subrepo1"]\n'
1196 1196 '\tpath = subrepo1\n'
1197 1197 '\turl = https://code.rhodecode.com/dulwich\n'
1198 1198 )
1199 1199 result = commit._get_submodule_url('subrepo1')
1200 1200 get_node_mock.assert_called_once_with('.gitmodules')
1201 1201 assert result == 'https://code.rhodecode.com/dulwich'
1202 1202
1203 1203 def test_complex_submodule_path(self):
1204 1204 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1205 1205 node = mock.Mock()
1206 1206 with mock.patch.object(
1207 1207 commit, 'get_node', return_value=node) as get_node_mock:
1208 1208 node.content = (
1209 1209 '[submodule "complex/subrepo/path"]\n'
1210 1210 '\tpath = complex/subrepo/path\n'
1211 1211 '\turl = https://code.rhodecode.com/dulwich\n'
1212 1212 )
1213 1213 result = commit._get_submodule_url('complex/subrepo/path')
1214 1214 get_node_mock.assert_called_once_with('.gitmodules')
1215 1215 assert result == 'https://code.rhodecode.com/dulwich'
1216 1216
1217 1217 def test_submodules_file_not_found(self):
1218 1218 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1219 1219 with mock.patch.object(
1220 1220 commit, 'get_node', side_effect=NodeDoesNotExistError):
1221 1221 result = commit._get_submodule_url('complex/subrepo/path')
1222 1222 assert result is None
1223 1223
1224 1224 def test_path_not_found(self):
1225 1225 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1226 1226 node = mock.Mock()
1227 1227 with mock.patch.object(
1228 1228 commit, 'get_node', return_value=node) as get_node_mock:
1229 1229 node.content = (
1230 1230 '[submodule "subrepo1"]\n'
1231 1231 '\tpath = subrepo1\n'
1232 1232 '\turl = https://code.rhodecode.com/dulwich\n'
1233 1233 )
1234 1234 result = commit._get_submodule_url('subrepo2')
1235 1235 get_node_mock.assert_called_once_with('.gitmodules')
1236 1236 assert result is None
1237 1237
1238 1238 def test_returns_cached_values(self):
1239 1239 commit = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
1240 1240 node = mock.Mock()
1241 1241 with mock.patch.object(
1242 1242 commit, 'get_node', return_value=node) as get_node_mock:
1243 1243 node.content = (
1244 1244 '[submodule "subrepo1"]\n'
1245 1245 '\tpath = subrepo1\n'
1246 1246 '\turl = https://code.rhodecode.com/dulwich\n'
1247 1247 )
1248 1248 for _ in range(3):
1249 1249 commit._get_submodule_url('subrepo1')
1250 1250 get_node_mock.assert_called_once_with('.gitmodules')
1251 1251
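# For illustration only, a minimal sketch (not RhodeCode's implementation) of
# resolving a submodule url from .gitmodules content of the simple shape used
# in these fixtures; real git config parsing handles far more syntax:
def submodule_url_sketch(content, path):
    section, url = None, None
    for line in content.splitlines():
        line = line.strip()
        if line.startswith('[submodule "') and line.endswith('"]'):
            section = line[len('[submodule "'):-len('"]')]
        elif section == path and line.startswith('url'):
            url = line.split('=', 1)[1].strip()
    return url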
1252 1252 def test_get_node_returns_a_link(self):
1253 1253 repository = mock.Mock()
1254 1254 repository.alias = 'git'
1255 1255 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1256 1256 submodule_url = 'https://code.rhodecode.com/dulwich'
1257 1257 get_id_patch = mock.patch.object(
1258 1258 commit, '_get_id_for_path', return_value=(1, 'link'))
1259 1259 get_submodule_patch = mock.patch.object(
1260 1260 commit, '_get_submodule_url', return_value=submodule_url)
1261 1261
1262 1262 with get_id_patch, get_submodule_patch as submodule_mock:
1263 1263 node = commit.get_node('/abcde')
1264 1264
1265 1265 submodule_mock.assert_called_once_with('/abcde')
1266 1266 assert type(node) == SubModuleNode
1267 1267 assert node.url == submodule_url
1268 1268
1269 1269 def test_get_nodes_returns_links(self):
1270 1270 repository = mock.MagicMock()
1271 1271 repository.alias = 'git'
1272 1272 repository._remote.tree_items.return_value = [
1273 1273 ('subrepo', 'stat', 1, 'link')
1274 1274 ]
1275 1275 commit = GitCommit(repository=repository, raw_id='abcdef12', idx=1)
1276 1276 submodule_url = 'https://code.rhodecode.com/dulwich'
1277 1277 get_id_patch = mock.patch.object(
1278 1278 commit, '_get_id_for_path', return_value=(1, 'tree'))
1279 1279 get_submodule_patch = mock.patch.object(
1280 1280 commit, '_get_submodule_url', return_value=submodule_url)
1281 1281
1282 1282 with get_id_patch, get_submodule_patch as submodule_mock:
1283 1283 nodes = commit.get_nodes('/abcde')
1284 1284
1285 1285 submodule_mock.assert_called_once_with('/abcde/subrepo')
1286 1286 assert len(nodes) == 1
1287 1287 assert type(nodes[0]) == SubModuleNode
1288 1288 assert nodes[0].url == submodule_url
@@ -1,275 +1,275 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import stat
22 22
23 23 import pytest
24 24
25 25 from rhodecode.lib.vcs.nodes import DirNode
26 26 from rhodecode.lib.vcs.nodes import FileNode
27 27 from rhodecode.lib.vcs.nodes import Node
28 28 from rhodecode.lib.vcs.nodes import NodeError
29 29 from rhodecode.lib.vcs.nodes import NodeKind
30 30 from rhodecode.tests.vcs.conftest import BackendTestMixin
31 31
32 32
33 33 @pytest.fixture()
34 34 def binary_filenode():
35 35 def node_maker(filename):
36 36 data = (
37 37 "\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00"
38 38 "\x10\x08\x06\x00\x00\x00\x1f??a\x00\x00\x00\x04gAMA\x00\x00\xaf?7"
39 39 "\x05\x8a?\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq?e<\x00"
40 40 "\x00\x025IDAT8?\xa5\x93?K\x94Q\x14\x87\x9f\xf7?Q\x1bs4?\x03\x9a"
41 41 "\xa8?B\x02\x8b$\x10[U;i\x13?6h?&h[?\"\x14j?\xa2M\x7fB\x14F\x9aQ?&"
42 42 "\x842?\x0b\x89\"\x82??!?\x9c!\x9c2l??{N\x8bW\x9dY\xb4\t/\x1c?="
43 43 "\x9b?}????\xa9*;9!?\x83\x91?[?\\v*?D\x04\'`EpNp\xa2X\'U?pVq\"Sw."
44 44 "\x1e?\x08\x01D?jw????\xbc??7{|\x9b?\x89$\x01??W@\x15\x9c\x05q`Lt/"
45 45 "\x97?\x94\xa1d?\x18~?\x18?\x18W[%\xb0?\x83??\x14\x88\x8dB?\xa6H"
46 46 "\tL\tl\x19>/\x01`\xac\xabx?\x9cl\nx\xb0\x98\x07\x95\x88D$\"q["
47 47 "\x19?d\x00(o\n\xa0??\x7f\xb9\xa4?\x1bF\x1f\x8e\xac\xa8?j??eUU}?.?"
48 48 "\x9f\x8cE??x\x94??\r\xbdtoJU5\"0N\x10U?\x00??V\t\x02\x9f\x81?U?"
49 49 "\x00\x9eM\xae2?r\x9b7\x83\x82\x8aP3????.?&\"?\xb7ZP \x0c<?O"
50 50 "\xa5\t}\xb8?\x99\xa6?\x87?\x1di|/\xa0??0\xbe\x1fp?d&\x1a\xad"
51 51 "\x95\x8a\x07?\t*\x10??b:?d?.\x13C\x8a?\x12\xbe\xbf\x8e?{???"
52 52 "\x08?\x80\xa7\x13+d\x13>J?\x80\x15T\x95\x9a\x00??S\x8c\r?\xa1"
53 53 "\x03\x07?\x96\x9b\xa7\xab=E??\xa4\xb3?\x19q??B\x91=\x8d??k?J"
54 54 "\x0bV\"??\xf7x?\xa1\x00?\\.\x87\x87???\x02F@D\x99],??\x10#?X"
55 55 "\xb7=\xb9\x10?Z\x1by???cI??\x1ag?\x92\xbc?T?t[\x92\x81?<_\x17~"
56 56 "\x92\x88?H%?\x10Q\x02\x9f\n\x81qQ\x0bm?\x1bX?\xb1AK\xa6\x9e\xb9?u"
57 57 "\xb2?1\xbe|/\x92M@\xa2!F?\xa9>\"\r<DT?>\x92\x8e?>\x9a9Qv\x127?a"
58 58 "\xac?Y?8?:??]X???9\x80\xb7?u?\x0b#BZ\x8d=\x1d?p\x00\x00\x00\x00"
59 59 "IEND\xaeB`\x82")
60 60 return FileNode(filename, content=data)
61 61 return node_maker
62 62
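# Note: the byte string above only needs to *look* binary (the NUL and
# control bytes near the PNG header are enough); the literal '?' characters
# appear to be part of the fixture data rather than an encoding accident,
# since only binary detection is exercised, never image decoding.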
63 63
64 64 class TestNodeBasics:
65 65
66 66 @pytest.mark.parametrize("path", ['/foo', '/foo/bar'])
67 67 @pytest.mark.parametrize(
68 68 "kind", [NodeKind.FILE, NodeKind.DIR], ids=["FILE", "DIR"])
69 69 def test_init_wrong_paths(self, path, kind):
70 70 """
71 71 Cannot initialize Node objects with a path that starts with a slash.
72 72 """
73 73 with pytest.raises(NodeError):
74 74 Node(path, kind)
75 75
76 76 @pytest.mark.parametrize("path", ['path', 'some/path'])
77 77 @pytest.mark.parametrize(
78 78 "kind", [NodeKind.FILE, NodeKind.DIR], ids=["FILE", "DIR"])
79 79 def test_name(self, path, kind):
80 80 node = Node(path, kind)
81 81 assert node.name == 'path'
82 82
83 83 def test_name_root(self):
84 84 node = Node('', NodeKind.DIR)
85 85 assert node.name == ''
86 86
87 87 def test_root_node_cannot_be_file(self):
88 88 with pytest.raises(NodeError):
89 89 Node('', NodeKind.FILE)
90 90
91 91 def test_kind_setter(self):
92 92 node = Node('', NodeKind.DIR)
93 93 with pytest.raises(NodeError):
94 94 node.kind = NodeKind.FILE
95 95
96 96 def test_compare_equal(self):
97 97 node1 = FileNode('test', content='')
98 98 node2 = FileNode('test', content='')
99 99 assert node1 == node2
100 100 assert not node1 != node2
101 101
102 102 def test_compare_unequal(self):
103 103 node1 = FileNode('test', content='a')
104 104 node2 = FileNode('test', content='b')
105 105 assert node1 != node2
106 106 assert not node1 == node2
107 107
108 108 @pytest.mark.parametrize("node_path, expected_parent_path", [
109 109 ('', ''),
110 110 ('some/path/', 'some/'),
111 111 ('some/longer/path/', 'some/longer/'),
112 112 ])
113 113 def test_parent_path_new(self, node_path, expected_parent_path):
114 114 """
115 115 Tests if a node's parent path is properly computed.
116 116 """
117 117 node = Node(node_path, NodeKind.DIR)
118 118 parent_path = node.get_parent_path()
119 119 assert (parent_path.endswith('/') or
120 120 node.is_root() and parent_path == '')
121 121 assert parent_path == expected_parent_path
122 122
123 123 '''
124 124 def _test_trailing_slash(self, path):
125 125 if not path.endswith('/'):
126 126 pytest.fail("Trailing slash tests needs paths to end with slash")
127 127 for kind in NodeKind.FILE, NodeKind.DIR:
128 128 with pytest.raises(NodeError):
129 129 Node(path=path, kind=kind)
130 130
131 131 def test_trailing_slash(self):
132 132 for path in ('/', 'foo/', 'foo/bar/', 'foo/bar/biz/'):
133 133 self._test_trailing_slash(path)
134 134 '''
135 135
136 136 def test_is_file(self):
137 137 node = Node('any', NodeKind.FILE)
138 138 assert node.is_file()
139 139
140 140 node = FileNode('any')
141 141 assert node.is_file()
142 142 with pytest.raises(AttributeError):
143 143 node.nodes
144 144
145 145 def test_is_dir(self):
146 146 node = Node('any_dir', NodeKind.DIR)
147 147 assert node.is_dir()
148 148
149 149 node = DirNode('any_dir')
150 150
151 151 assert node.is_dir()
152 152 with pytest.raises(NodeError):
153 153 node.content
154 154
155 155 def test_dir_node_iter(self):
156 156 nodes = [
157 157 DirNode('docs'),
158 158 DirNode('tests'),
159 159 FileNode('bar'),
160 160 FileNode('foo'),
161 161 FileNode('readme.txt'),
162 162 FileNode('setup.py'),
163 163 ]
164 164 dirnode = DirNode('', nodes=nodes)
165 165 for node in dirnode:
166 166 assert node == dirnode.get_node(node.path)
167 167
168 168 def test_node_state(self):
169 169 """
170 170 Without link to commit nodes should raise NodeError.
171 171 """
172 172 node = FileNode('anything')
173 173 with pytest.raises(NodeError):
174 174 node.state
175 175 node = DirNode('anything')
176 176 with pytest.raises(NodeError):
177 177 node.state
178 178
179 179 def test_file_node_stat(self):
180 180 node = FileNode('foobar', 'empty... almost')
181 181 mode = node.mode # default should be 0o100644
182 182 assert mode & stat.S_IRUSR
183 183 assert mode & stat.S_IWUSR
184 184 assert mode & stat.S_IRGRP
185 185 assert mode & stat.S_IROTH
186 186 assert not mode & stat.S_IWGRP
187 187 assert not mode & stat.S_IWOTH
188 188 assert not mode & stat.S_IXUSR
189 189 assert not mode & stat.S_IXGRP
190 190 assert not mode & stat.S_IXOTH
191 191
192 192 def test_file_node_is_executable(self):
193 node = FileNode('foobar', 'empty... almost', mode=0100755)
193 node = FileNode('foobar', 'empty... almost', mode=0o100755)
194 194 assert node.is_executable
195 195
196 node = FileNode('foobar', 'empty... almost', mode=0100500)
196 node = FileNode('foobar', 'empty... almost', mode=0o100500)
197 197 assert node.is_executable
198 198
199 node = FileNode('foobar', 'empty... almost', mode=0100644)
199 node = FileNode('foobar', 'empty... almost', mode=0o100644)
200 200 assert not node.is_executable
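# For reference: these mode literals combine a file-type flag with permission
# bits, e.g. 0o100644 == stat.S_IFREG | 0o644 (regular file, rw-r--r--),
# while 0o100755 and 0o100500 both set the owner execute bit, so
# is_executable presumably keys off stat.S_IXUSR.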
201 201
202 202 def test_file_node_is_not_symlink(self):
203 203 node = FileNode('foobar', 'empty...')
204 204 assert not node.is_link()
205 205
206 206 def test_mimetype(self):
207 207 py_node = FileNode('test.py')
208 208 tar_node = FileNode('test.tar.gz')
209 209
210 210 ext = 'CustomExtension'
211 211
212 212 my_node2 = FileNode('myfile2')
213 213 my_node2._mimetype = [ext]
214 214
215 215 my_node3 = FileNode('myfile3')
216 216 my_node3._mimetype = [ext, ext]
217 217
218 218 assert py_node.mimetype == 'text/x-python'
219 219 assert py_node.get_mimetype() == ('text/x-python', None)
220 220
221 221 assert tar_node.mimetype == 'application/x-tar'
222 222 assert tar_node.get_mimetype() == ('application/x-tar', 'gzip')
223 223
224 224 with pytest.raises(NodeError):
225 225 my_node2.get_mimetype()
226 226
227 227 assert my_node3.mimetype == ext
228 228 assert my_node3.get_mimetype() == [ext, ext]
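# What the asserts above pin down: get_mimetype() normally returns a
# (type, encoding) pair in the style of mimetypes.guess_type(); a custom
# _mimetype override is only honoured when it holds exactly two items,
# otherwise NodeError is raised.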
229 229
230 230 def test_lines_counts(self):
231 231 lines = [
232 232 'line1\n',
233 233 'line2\n',
234 234 'line3\n',
235 235 '\n',
236 236 '\n',
237 237 'line4\n',
238 238 ]
239 239 py_node = FileNode('test.py', ''.join(lines))
240 240
241 241 assert (len(lines), len(lines)) == py_node.lines()
242 242 assert (len(lines), len(lines) - 2) == py_node.lines(count_empty=True)
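# Reading of this pair of asserts: lines() seems to return
# (total_lines, non_empty_lines), except that without count_empty=True both
# slots carry the raw total.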
243 243
244 244 def test_lines_no_newline(self):
245 245 py_node = FileNode('test.py', 'oneline')
246 246
247 247 assert (1, 1) == py_node.lines()
248 248 assert (1, 1) == py_node.lines(count_empty=True)
249 249
250 250
251 251 class TestNodeContent(object):
252 252
253 253 def test_if_binary(self, binary_filenode):
254 254 filenode = binary_filenode('calendar.jpg')
255 255 assert filenode.is_binary
256 256
257 257 def test_binary_line_counts(self, binary_filenode):
258 258 tar_node = binary_filenode('archive.tar.gz')
259 259 assert (0, 0) == tar_node.lines(count_empty=True)
260 260
261 261 def test_binary_mimetype(self, binary_filenode):
262 262 tar_node = binary_filenode('archive.tar.gz')
263 263 assert tar_node.mimetype == 'application/x-tar'
264 264
265 265
266 266 @pytest.mark.usefixtures("vcs_repository_support")
267 267 class TestNodesCommits(BackendTestMixin):
268 268
269 269 def test_node_last_commit(self, generate_repo_with_commits):
270 270 repo = generate_repo_with_commits(20)
271 271 last_commit = repo.get_commit()
272 272
273 273 for x in xrange(3):
274 274 node = last_commit.get_node('file_%s.txt' % x)
275 275 assert node.last_commit == repo[x]